answer
stringlengths
15
1.25M
package weixin.popular.bean.scan.crud;

import weixin.popular.bean.scan.base.ProductGet;
import weixin.popular.bean.scan.info.BrandInfo;

/**
 * Request bean for the WeChat scan-product "create" call.
 * Extends {@link ProductGet} with the brand information payload.
 */
public class ProductCreate extends ProductGet {

	/** Brand information attached to the product being created. */
	private BrandInfo brand_info;

	public BrandInfo getBrand_info() {
		return brand_info;
	}

	public void setBrand_info(BrandInfo brandInfo) {
		this.brand_info = brandInfo;
	}
}
<!doctype html>
<html ng-app="frontend">
  <head>
    <meta charset="utf-8">
    <title>frontend</title>
    <meta name="description" content="">
    <meta name="viewport" content="width=device-width">
    <!-- Place favicon.ico and apple-touch-icon.png in the root directory -->

    <!-- build:css({.tmp/serve,src}) styles/vendor.css -->
    <!-- bower:css -->
    <!-- run `gulp inject` to automatically populate bower styles dependencies -->
    <!-- endbower -->
    <!-- endbuild -->

    <!-- build:css({.tmp/serve,src}) styles/app.css -->
    <!-- inject:css -->
    <!-- css files will be automatically insert here -->
    <!-- endinject -->
    <!-- endbuild -->
  </head>
  <body>
    <!-- FIX: the conditional comment was closed with a bare "<![endif]" which
         left it unterminated; old IE would treat everything that follows as
         part of the comment. It must end with "<![endif]-->". -->
    <!--[if lt IE 10]>
      <p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p>
    <![endif]-->

    <div ui-view></div>

    <!-- build:js(src) scripts/vendor.js -->
    <!-- bower:js -->
    <!-- run `gulp inject` to automatically populate bower script dependencies -->
    <!-- endbower -->
    <!-- endbuild -->

    <!-- build:js({.tmp/serve,.tmp/partials}) scripts/app.js -->
    <!-- inject:js -->
    <!-- js files will be automatically insert here -->
    <!-- endinject -->
    <script src="https://api-maps.yandex.ru/2.1/?lang=ru_RU" type="text/javascript"></script>
    <!-- inject:partials -->
    <!-- angular templates will be automatically converted in js and inserted here -->
    <!-- endinject -->
    <!-- endbuild -->
  </body>
</html>
# Generated Paws shape class for the CloudDirectory BatchDetachFromIndex-style
# operation (class name redacted as <API key> in this copy of the source).
package Paws::CloudDirectory::<API key>;
  use Moose;
  has IndexReference => (is => 'ro', isa => 'Paws::CloudDirectory::ObjectReference', required => 1);
  has TargetReference => (is => 'ro', isa => 'Paws::CloudDirectory::ObjectReference', required => 1);
1;

# FIX: this marker line was a bare bareword ("main pod documentation begin"),
# which is a syntax error; restored as the conventional Paws comment.
### main pod documentation begin ###

=head1 NAME

Paws::CloudDirectory::<API key>

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::CloudDirectory::<API key> object:

  $service_obj->Method(Att1 => { IndexReference => $value, ..., TargetReference => $value });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be an Paws::CloudDirectory::<API key> object:

  $result = $service_obj->Method(...);
  $result->Att1->IndexReference

=head1 DESCRIPTION

Detaches the specified object from the specified index inside a
BatchRead operation. For more information, see DetachFromIndex and
BatchReadRequest$Operations.

=head1 ATTRIBUTES

=head2 B<REQUIRED> IndexReference => L<Paws::CloudDirectory::ObjectReference>

A reference to the index object.

=head2 B<REQUIRED> TargetReference => L<Paws::CloudDirectory::ObjectReference>

A reference to the object being detached from the index.

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::CloudDirectory>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
#!/usr/bin/env bash
# Updates a Fuseki deployment: regenerates the endpoint configuration,
# refreshes the qonsole config, reinstalls the Lucene analyzer jars, and
# restarts the fuseki and marple services.

systemctl stop fuseki
systemctl stop marple

echo ">>>> Updating Fuseki"

# general vars
export TC_USER=fuseki
export TC_GROUP=fuseki

# set erb vars
export EP_NAME=core   # endpoint name for fuseki
export SVC=fuseki
export SVC_DESC="Jena-Fuseki Tomcat container"
export MARPLE_SVC=marple
export MARPLE_SVC_DESC="Marple service for fuseki Lucene indexes"
export JAVA_HOME=`type -p javac|xargs readlink -f|xargs dirname|xargs dirname`

export LUCENE_BO_VER=1.5.0
export LUCENE_BO_JAR="lucene-bo-${LUCENE_BO_VER}.jar"
export LUCENE_BO_REL="https://github.com/buda-base/lucene-bo/releases/download/v${LUCENE_BO_VER}/${LUCENE_BO_JAR}"
export LUCENE_ZH_VER=0.4.1
export LUCENE_ZH_JAR="lucene-zh-${LUCENE_ZH_VER}.jar"
export LUCENE_ZH_REL="https://github.com/buda-base/lucene-zh/releases/download/v${LUCENE_ZH_VER}/${LUCENE_ZH_JAR}"
export LUCENE_SA_VER=1.1.0
export LUCENE_SA_JAR="lucene-sa-${LUCENE_SA_VER}.jar"
export LUCENE_SA_REL="https://github.com/buda-base/lucene-sa/releases/download/v${LUCENE_SA_VER}/${LUCENE_SA_JAR}"
export MARPLE_REL="https://github.com/flaxsearch/marple/releases/download/v1.0/marple-1.0.jar"

# prefer the data volume when it is mounted
if [ -d /mnt/data ] ; then
    export DATA_DIR=/mnt/data
else
    export DATA_DIR=/usr/local
fi
echo ">>>> DATA_DIR: " $DATA_DIR

export DOWNLOADS=$DATA_DIR/downloads
export THE_HOME=$DATA_DIR/$SVC
export THE_BASE=$THE_HOME/base
export CAT_HOME=$THE_HOME/tomcat

# FIX: the echo strings used "{$VAR}" which expands the variable but prints
# literal braces around the value; "${VAR}" is the intended form.
echo ">>>>>>>> updating ${EP_NAME}.ttl to ${THE_BASE}/configuration/"
erb /vagrant/conf/fuseki/ttl.erb > $THE_BASE/configuration/$EP_NAME.ttl

echo ">>>>>>>> updating qonsole-config.js to ${CAT_HOME}/webapps/fuseki/js/app/"
cp /vagrant/conf/fuseki/qonsole-config.js $CAT_HOME/webapps/fuseki/js/app/

echo ">>>>>>>> updating analyzers to ${CAT_HOME}/webapps/fuseki/WEB-INF/lib/"
# the analyzer jars have to be added to fuseki/WEB-INF/lib/ otherwise
# tomcat class loading cannot find the rest of the Lucene classes
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-bo-*.jar
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-sa-*.jar
rm -f $CAT_HOME/webapps/fuseki/WEB-INF/lib/lucene-zh-*.jar

pushd $DOWNLOADS
# FIX: downloads were inconsistent (-O for bo, bare -c for zh/sa); use -O for
# all three so each jar always lands under the exact filename we then copy.
wget -q $LUCENE_BO_REL -O $LUCENE_BO_JAR
cp $LUCENE_BO_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
wget -q $LUCENE_ZH_REL -O $LUCENE_ZH_JAR
cp $LUCENE_ZH_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
wget -q $LUCENE_SA_REL -O $LUCENE_SA_JAR
cp $LUCENE_SA_JAR $CAT_HOME/webapps/fuseki/WEB-INF/lib/
popd

echo ">>>> restarting ${SVC}"
systemctl start fuseki
systemctl start marple

# MAIN_PORT is never set in this script; presumably exported by the calling
# environment. Default to Fuseki's standard 3030 so the message is never
# blank — TODO confirm with the provisioning caller.
echo ">>>> ${SVC} service listening on ${MAIN_PORT:-3030}"
echo ">>>> Fuseki updating complete"
package com.mattinsler.guiceymongo.data.query;

import org.bson.BSON;

/**
 * Enumerates the BSON value types, each bound to the wire-format type code
 * declared on {@link BSON}.
 */
public enum BSONType {
	Double(BSON.NUMBER),
	String(BSON.STRING),
	Object(BSON.OBJECT),
	Array(BSON.ARRAY),
	BinaryData(BSON.BINARY),
	ObjectId(BSON.OID),
	Boolean(BSON.BOOLEAN),
	Date(BSON.DATE),
	Null(BSON.NULL),
	RegularExpression(BSON.REGEX),
	Code(BSON.CODE),
	Symbol(BSON.SYMBOL),
	CodeWithScope(BSON.CODE_W_SCOPE),
	Integer(BSON.NUMBER_INT),
	Timestamp(BSON.TIMESTAMP),
	Long(BSON.NUMBER_LONG),
	MinKey(BSON.MINKEY),
	MaxKey(BSON.MAXKEY);

	// raw BSON wire-format code for this type
	private final byte code;

	BSONType(byte code) {
		this.code = code;
	}

	/** @return the BSON wire-format type code for this constant */
	byte getTypeCode() {
		return code;
	}
}
package com.winsun.fruitmix.model; public class Equipment { private String serviceName; private String host; private int port; public Equipment(String serviceName, String host, int port) { this.serviceName = serviceName; this.host = host; this.port = port; } public Equipment() { } public String getServiceName() { return serviceName; } public void setServiceName(String serviceName) { this.serviceName = serviceName; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public int getPort() { return port; } public void setPort(int port) { this.port = port; } }
title: End of August Blooms date: 2009-08-31 00:00:00 -06:00 categories: - whats-blooming layout: post blog-banner: <API key>.jpg post-date: August 31, 2009 post-time: 8:09 AM blog-image: wbn-default.jpg <div class = "text-center"> <p>Look for these beauties as you stroll through the garden.</p> </div> <div class="text-center"> <img src="/images/blogs/old-posts/Buddleja davidii 'Pink Delight'.jpg" width="450" height="450" alt="" title="" /> </div> <br> <div class="text-center"> <img src="/images/blogs/old-posts/Caryopteris x clandonensis 'First Choice'.jpg" width="450" height="450" alt="" title="" /> </div> <br> <div class="text-center"> <img src="/images/blogs/old-posts/Chasmanthium latifolium.jpg" width="450" height="450" alt="" title="" /> </div> <br> <div class="text-center"> <img src="/images/blogs/old-posts/Cirsium undulatum.jpg" width="450" height="450" alt="" title="" /> </div> <br> <div class="text-center"> <img src="/images/blogs/old-posts/Linaria dalmatica.jpg" width="450" height="450" alt="" title="" /> </div> <br> <div class= "text-center"> Don't forget to visit the What's Blooming Blog every day for cool and interesting facts about each of these plants. </div>
package com.wangshan.service.impl;

import com.wangshan.dao.UserDao;
import com.wangshan.models.User;
import com.wangshan.service.ValidateService;
import com.wangshan.utils.gabriel.EncryptUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Credential and uniqueness checks backed by {@link UserDao}.
 */
@Service
public class ValidateServiceImpl implements ValidateService{

    @Autowired
    private UserDao userDao;

    /**
     * Verifies a login attempt: loads the user by email and compares
     * SHA-1(password + "-" + salt) against the stored password hash.
     *
     * @param email    account email used to look the user up
     * @param password plain-text password supplied by the caller
     * @return true only when the user exists and the salted hash matches
     */
    @Override
    public Boolean validatePassword(String email, String password){
        User user = userDao.getUserByEmail(email);
        if(user != null && new EncryptUtil().encrypt(password + "-" + user.getSalt(), "SHA-1").equals(user.getPassword())){
            return true;
        } else {
            return false;
        }
    }

    // Method name redacted in this copy of the source; by analogy with
    // validateEmailRepeat below this presumably checks whether the mobile
    // number is already registered — TODO confirm. Currently a stub that
    // always reports "not taken".
    @Override
    public Boolean <API key>(String mobile){
        return false;
    }

    // Stub: always reports the email as not yet registered.
    @Override
    public Boolean validateEmailRepeat(String email){
        return false;
    }
}
## Versioning & Releasing

Increments in the version should be done by one of the project owners. The version should follow the standard `major.minor.patch` style; see http://semver.org/, which defines the specification. The examples below are a paraphrasing of that specification.

### Examples of changes that would warrant a `patch` version change

* Small changes that are completely backwards compatible, normally bug fixes.
* Changes in dependencies.

### Examples of changes that would warrant a `minor` version change

* Introduction of new functionality, without breaking backwards compatibility.

### Examples of changes that would warrant a `major` version change

* Any break in backwards compatibility must result in an increment of the `major` version.
<?php
// Login handler: authenticates against the sysUser table via SQL Server
// (sqlsrv driver) and stores the user's profile in $_SESSION['login'].
// NOTE(review): assumes session_start() and the page <head> are emitted by an
// including script — confirm against the caller.

if (isset($_SESSION['login'])) {
    // Already authenticated — nothing to do.
} elseif (isset($_POST['username']) && isset($_POST['password'])) {
    $u = $_POST['username'];
    $p = $_POST['password'];

    $serverName = "MMDES"; // serverName\instanceName
    // Since UID and PWD are not specified in the $connectionInfo array,
    // the connection will be attempted using Windows Authentication.
    $connectionInfo = array("Database" => "officeAutomation");
    $conn = sqlsrv_connect($serverName, $connectionInfo);
    if (!$conn) {
        exec("echo connection was not established >> debug.txt");
    }

    // SECURITY FIX: the query was previously built by concatenating the raw
    // POST value into the SQL string (injectable). Use a parameterized query.
    $query = "SELECT * FROM sysUser WHERE Username = ?";
    $result = sqlsrv_query($conn, $query, array($u));
    if (!$result) die(print_r(sqlsrv_errors(), true));
    $row = sqlsrv_fetch_array($result);

    // Guard $row: sqlsrv_fetch_array returns null/false when no user matches.
    // NOTE(review): passwords are compared in plain text against the stored
    // value — migrate to password_hash()/password_verify().
    if ($row && $row['Password'] == $p) {
        $query2 = "SELECT firstName,lastName,Gender FROM Person JOIN Employee on Person.NationalID=Employee.NationalID WHERE PersonalID = ?";
        $result2 = sqlsrv_query($conn, $query2, array($row['PersonalID']));
        if (!$result2) die(print_r(sqlsrv_errors(), true));
        $row2 = sqlsrv_fetch_array($result2);

        $tempAry = array(
            'username'   => $row['Username'],
            'role'       => $row['Role'],
            'personalId' => $row['PersonalID'],
            'firstName'  => $row2['firstName'],
            'lastName'   => $row2['lastName'],
            'gender'     => $row2['Gender'],
        );
        $_SESSION['login'] = $tempAry;
        header('location: ');
    } else {
        header('location: ?invalid');
        die();
    }
} elseif (isset($_GET['invalid'])) {
?>
<body>
  <div class="container sign-in-container">
    <p class="invalid-text">Invalid username or password,<br> Try again!</p>
    <form method="post" class="form-signin login-form">
      <h2 class="form-signin-heading">Please sign in</h2>
      <label for="inputEmail" class="sr-only">Username</label>
      <input name="username" type="text" id="inputEmail" class="username-input form-control" placeholder="Username" required autofocus>
      <label for="inputPassword" class="password-input sr-only">Password</label>
      <input name="password" type="password" id="inputPassword" class="form-control" placeholder="Password" required>
      <button class="submit-button btn btn-lg btn-primary btn-block" type="submit">Sign in</button>
    </form>
  </div> <!-- /container -->
</body>
</html>
<?php
} else {
?>
<body>
  <div class="container sign-in-container">
    <form method="post" class="form-signin login-form">
      <h2 class="form-signin-heading">Please sign in</h2>
      <label for="inputEmail" class="sr-only">Username</label>
      <input name="username" type="text" id="inputEmail" class="username-input form-control" placeholder="Username" required autofocus>
      <label for="inputPassword" class="password-input sr-only">Password</label>
      <input name="password" type="password" id="inputPassword" class="form-control" placeholder="Password" required>
      <button class="submit-button btn btn-lg btn-primary btn-block" type="submit">Sign in</button>
    </form>
  </div> <!-- /container -->
</body>
</html>
<?php
}
?>
using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Text.RegularExpressions; using HtmlAgilityPack; namespace Html2Markdown.Replacement { internal static class HtmlParser { private static readonly Regex NoChildren = new Regex(@"<(ul|ol)\b[^>]*>(?:(?!<ul|<ol)[\s\S])*?<\/\1>"); internal static string ReplaceLists(string html) { var finalHtml = html; while (HasNoChildLists(finalHtml)) { var listToReplace = NoChildren.Match(finalHtml).Value; var formattedList = ReplaceList(listToReplace); finalHtml = finalHtml.Replace(listToReplace, formattedList); } return finalHtml; } private static string ReplaceList(string html) { var list = Regex.Match(html, @"<(ul|ol)\b[^>]*>([\s\S]*?)<\/\1>"); var listType = list.Groups[1].Value; var listItems = Regex.Split(list.Groups[2].Value, "<li[^>]*>"); if(listItems.All(string.IsNullOrEmpty)) { return String.Empty; } listItems = listItems.Skip(1).ToArray(); var counter = 0; var markdownList = new List<string>(); listItems.ToList().ForEach(listItem => { var listPrefix = (listType.Equals("ol")) ? $"{++counter}. 
" : "* "; var finalList = listItem.Replace(@"</li>", string.Empty); if (finalList.Trim().Length == 0) { return; } finalList = Regex.Replace(finalList, @"^\s+", string.Empty); finalList = Regex.Replace(finalList, @"\n{2}", $"{Environment.NewLine}{Environment.NewLine} "); // indent nested lists finalList = Regex.Replace(finalList, @"\n([ ]*)+(\*|\d+\.)", "\n$1 $2"); markdownList.Add($"{listPrefix}{finalList}"); }); return Environment.NewLine + Environment.NewLine + markdownList.Aggregate((current, item) => current + Environment.NewLine + item); } private static bool HasNoChildLists(string html) { return NoChildren.Match(html).Success; } internal static string ReplacePre(string html) { var doc = GetHtmlDocument(html); var nodes = doc.DocumentNode.SelectNodes("//pre"); if (nodes == null) { return html; } nodes.ToList().ForEach(node => { var tagContents = node.InnerHtml; var markdown = ConvertPre(tagContents); ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } private static string ConvertPre(string html) { var tag = TabsToSpaces(html); tag = IndentNewLines(tag); return Environment.NewLine + Environment.NewLine + tag + Environment.NewLine; } private static string IndentNewLines(string tag) { return tag.Replace(Environment.NewLine, Environment.NewLine + " "); } private static string TabsToSpaces(string tag) { return tag.Replace("\t", " "); } internal static string ReplaceImg(string html) { var doc = GetHtmlDocument(html); var nodes = doc.DocumentNode.SelectNodes("//img"); if (nodes == null) { return html; } nodes.ToList().ForEach(node => { var src = node.Attributes.GetAttributeOrEmpty("src"); var alt = node.Attributes.GetAttributeOrEmpty("alt"); var title = node.Attributes.GetAttributeOrEmpty("title"); var markdown = $@"![{alt}]({src}{((title.Length > 0) ? 
$" \"{title}\"" : "")})"; ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } public static string ReplaceAnchor(string html) { var doc = GetHtmlDocument(html); var nodes = doc.DocumentNode.SelectNodes(" if (nodes == null) { return html; } nodes.ToList().ForEach(node => { var linkText = node.InnerHtml; var href = node.Attributes.GetAttributeOrEmpty("href"); var title = node.Attributes.GetAttributeOrEmpty("title"); var markdown = ""; if (!IsEmptyLink(linkText, href)) { markdown = $@"[{linkText}]({href}{((title.Length > 0) ? $" \"{title}\"" : "")})"; } ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } public static string ReplaceCode(string html) { var finalHtml = html; var doc = GetHtmlDocument(finalHtml); var nodes = doc.DocumentNode.SelectNodes("//code"); if (nodes == null) { return finalHtml; } nodes.ToList().ForEach(node => { var code = node.InnerHtml; string markdown; if(<API key>(code)) { markdown = "`" + code + "`"; } else { markdown = <API key>(code); markdown = Regex.Replace(markdown, "^\r\n", ""); markdown = Regex.Replace(markdown, "\r\n$", ""); markdown = "```" + Environment.NewLine + markdown + Environment.NewLine + "```"; } ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } private static string <API key>(string code) { return Regex.Replace(code, "<\\s*?/?\\s*?br\\s*?>", ""); } private static bool <API key>(string code) { // single line code blocks do not have new line characters return code.IndexOf(Environment.NewLine, StringComparison.Ordinal) == -1; } public static string ReplaceBlockquote(string html) { var doc = GetHtmlDocument(html); var nodes = doc.DocumentNode.SelectNodes("//blockquote"); if (nodes == null) { return html; } nodes.ToList().ForEach(node => { var quote = node.InnerHtml; var lines = quote.TrimStart().Split(new[] { Environment.NewLine }, StringSplitOptions.None); var markdown = ""; lines.ToList().ForEach(line => { markdown += $"> {line.TrimEnd()}{Environment.NewLine}"; }); 
markdown = Regex.Replace(markdown, @"(>\s\r\n)+$", ""); markdown = Environment.NewLine + Environment.NewLine + markdown + Environment.NewLine + Environment.NewLine; ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } public static string ReplaceEntites(string html) { return WebUtility.HtmlDecode(html); } public static string ReplaceParagraph(string html) { var doc = GetHtmlDocument(html); var nodes = doc.DocumentNode.SelectNodes(" if (nodes == null) { return html; } nodes.ToList().ForEach(node => { var text = node.InnerHtml; var markdown = Regex.Replace(text, @"\s+", " "); markdown = markdown.Replace(Environment.NewLine, " "); markdown = Environment.NewLine + Environment.NewLine + markdown + Environment.NewLine; ReplaceNode(node, markdown); }); return doc.DocumentNode.OuterHtml; } private static bool IsEmptyLink(string linkText, string href) { var length = linkText.Length + href.Length; return length == 0; } private static HtmlDocument GetHtmlDocument(string html) { var doc = new HtmlDocument(); doc.LoadHtml(html); return doc; } private static void ReplaceNode(HtmlNode node, string markdown) { if (string.IsNullOrEmpty(markdown)) { node.ParentNode.RemoveChild(node); } else { node.<API key>(markdown); } } } }
from django.conf.urls import url, include
from django.contrib import admin

# API version segment baked into the route prefix below.
version = 'v1.0'

urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # FIX: anchored with '^' — unanchored, r'api/...' would also match paths
    # such as '/xapi/v1.0/'. (Note the '.' in the version still matches any
    # character in the regex; harmless here but worth knowing.)
    url(r'^api/%s/' % version, include('apis.urls')),
]
package com.asura.monitor.platform.dao; import com.asura.framework.base.paging.PagingResult; import com.asura.framework.base.paging.SearchMap; import com.asura.framework.dao.mybatis.base.MybatisDaoContext; import com.asura.framework.dao.mybatis.paginator.domain.PageBounds; import com.asura.common.dao.BaseDao; import com.asura.monitor.platform.entity.<API key>; import org.springframework.stereotype.Repository; import javax.annotation.Resource; @Repository("com.asura.monitor.configure.dao.<API key>") public class <API key> extends BaseDao<<API key>>{ @Resource(name="monitor.MybatisDaoContext") private MybatisDaoContext mybatisDaoContext; /** * * @param searchMap * @param pageBounds * @return */ public PagingResult<<API key>> findAll(SearchMap searchMap, PageBounds pageBounds, String sqlId){ return mybatisDaoContext.findForPage(this.getClass().getName()+"."+sqlId,<API key>.class,searchMap,pageBounds); } }
package org.hibernate.beanvalidation.tck.tests.constraints.<API key>; import static org.hibernate.beanvalidation.tck.util.<API key>.assertNoViolations; import static org.hibernate.beanvalidation.tck.util.<API key>.assertThat; import static org.hibernate.beanvalidation.tck.util.<API key>.violationOf; import static org.testng.Assert.assertEquals; import java.util.Set; import jakarta.validation.ConstraintViolation; import jakarta.validation.Validator; import jakarta.validation.constraints.Size; import jakarta.validation.groups.Default; import jakarta.validation.metadata.<API key>; import org.hibernate.beanvalidation.tck.beanvalidation.Sections; import org.hibernate.beanvalidation.tck.tests.AbstractTCKTest; import org.hibernate.beanvalidation.tck.util.TestUtil; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.jboss.test.audit.annotations.SpecAssertion; import org.jboss.test.audit.annotations.SpecVersion; import org.testng.annotations.Test; /** * @author Hardy Ferentschik * @author Guillaume Smet */ @SpecVersion(spec = "beanvalidation", version = "3.0.0") public class <API key> extends AbstractTCKTest { @Deployment public static WebArchive createTestArchive() { return webArchiveBuilder() .<API key>( <API key>.class ) .build(); } @Test @SpecAssertion(section = Sections.<API key>, id = "a") @SpecAssertion(section = Sections.<API key>, id = "a") public void <API key>() { Validator validator = TestUtil.<API key>(); Set<<API key><?>> descriptors = validator.<API key>( Person.class ) .<API key>( "lastName" ) .<API key>(); assertEquals( descriptors.size(), 2, "There should be two constraints on the lastName property." ); for ( <API key><?> descriptor : descriptors ) { assertEquals( descriptor.getAnnotation().annotationType().getName(), AlwaysValid.class.getName(), "Wrong annotation type." 
); } Set<ConstraintViolation<Person>> <API key> = validator.validate( new Person( "John", "Doe" ) ); assertThat( <API key> ).<API key>( violationOf( AlwaysValid.class ) ); } @Test @SpecAssertion(section = Sections.<API key>, id = "a") @SpecAssertion(section = Sections.<API key>, id = "b") public void <API key>() { Validator validator = TestUtil.<API key>(); Set<<API key><?>> descriptors = validator.<API key>( Movie.class ) .<API key>( "title" ) .<API key>(); assertEquals( descriptors.size(), 2, "There should be two constraints on the title property." ); for ( <API key><?> descriptor : descriptors ) { assertEquals( descriptor.getAnnotation().annotationType().getName(), Size.class.getName(), "Wrong annotation type." ); } Set<ConstraintViolation<Movie>> <API key> = validator.validate( new Movie( "Title" ) ); assertNoViolations( <API key> ); <API key> = validator.validate( new Movie( "A" ) ); assertThat( <API key> ).<API key>( violationOf( Size.class ) ); <API key> = validator.validate( new Movie( "A movie title far too long that does not respect the constraint" ) ); assertThat( <API key> ).<API key>( violationOf( Size.class ) ); } @Test @SpecAssertion(section = Sections.<API key>, id = "d") public void <API key>() { Validator validator = TestUtil.<API key>(); <API key><?> descriptor = validator.<API key>( Person.class ) .<API key>( "firstName" ) .<API key>() .iterator() .next(); Set<Class<?>> groups = descriptor.getGroups(); assertEquals( groups.size(), 1, "The group set should only contain one entry." ); assertEquals( groups.iterator().next(), Default.class, "The Default group should be returned." ); } }
<div class="btn-group mw-version-selector"> <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown"> <span class="descriptor">Version </span> <span class="descriptor-sm">V. </span> {{currentVersionModel.attributes[versionNumberKey]}} <span ng-if="currentVersionModel.attributes.published" mw-icon="rln-icon published"></span> </button> <ul class="version-dropdown dropdown-menu pull-right" style="min-width:100%" role="menu"> <li ng-repeat="version in versionCollection.models" ng-class="{active:(version.attributes.uuid === currentVersionModel.attributes.uuid)}"> <a ng-href="{{getUrl(version.attributes.uuid)}}"> {{version.attributes[versionNumberKey]}} <span ng-if="version.attributes.published" mw-icon="rln-icon published"></span> </a> </li> </ul> </div>
## Slurm :o:

| | |
| --- | --- |
| title | Slurm |
| status | 10 |
| section | Cluster Resource Management |
| keywords | Cluster Resource Management |

Simple Linux Utility for Resource Management (SLURM) workload manager is an open source, scalable cluster resource management tool used for job scheduling in small to large Linux clusters using multi-core architectures. SLURM has three key functions. First, it allocates resources to users for some duration with exclusive and/or non-exclusive access. Second, it enables users to start, execute and monitor jobs on the resources allocated to them. Finally, it intermediates to resolve conflicts on resources for pending work by maintaining them in a queue [@<API key>]. The SLURM architecture has the following components: a centralized manager to monitor resources and work, which may have a backup manager; a daemon on each server to provide fault-tolerant communications; an optional daemon for clusters with multiple managers; and tools to initiate, terminate and report about jobs in a graphical view with network topology. It also provides around twenty additional plugins that could be used for functionalities like accounting, advanced reservation, gang scheduling, backfill scheduling and multifactor job prioritization. Though originally developed for Linux, SLURM also provides full support on platforms like AIX, FreeBSD, NetBSD and Solaris [@<API key>] [@www-slurm].
<?php
defined('ABSPATH') or die('No script kiddies please!');

/**
 * Prints a Bootstrap-style numbered pagination bar for archive/index views.
 *
 * Emits nothing on singular pages or when there is only one page of results.
 * (Function name redacted as <API key> in this copy of the source.)
 *
 * @return void
 */
function <API key>() {
    if (is_singular())
        return;

    global $wp_query;

    /** Stop execution if there's only 1 page */
    if ($wp_query->max_num_pages <= 1)
        return;

    $paged = get_query_var('paged') ? absint(get_query_var('paged')) : 1;
    $max   = intval($wp_query->max_num_pages);

    // FIX: initialize explicitly — $links was previously appended to without
    // being declared, relying on PHP's implicit array creation.
    $links = array();

    /** Add current page to the array */
    if ($paged >= 1)
        $links[] = $paged;

    /** Add the pages around the current page to the array */
    if ($paged >= 3) {
        $links[] = $paged - 1;
        $links[] = $paged - 2;
    }
    if (($paged + 2) <= $max) {
        $links[] = $paged + 2;
        $links[] = $paged + 1;
    }

    echo '<div class="navigation"><ul class="pagination">' . "\n";

    /** Previous Post Link */
    if (<API key>())
        printf('<li>%s</li>' . "\n", <API key>());

    /** Link to first page, plus ellipses if necessary */
    if (!in_array(1, $links)) {
        $class = 1 == $paged ? ' class="active"' : '';
        // FIX: the opening <span> tag was missing its closing '>' before %s,
        // producing broken markup for the first-page link.
        printf('<li%s><a href="%s" aria-label="Previous"><span aria-hidden="true">%s</span></a></li>' . "\n", $class, esc_url(get_pagenum_link(1)), '1');
        if (!in_array(2, $links))
            echo '<li>&#8230;</li>';
    }

    /** Link to current page, plus 2 pages in either direction if necessary */
    sort($links);
    foreach ((array)$links as $link) {
        $class = $paged == $link ? ' class="active"' : '';
        printf('<li%s><a href="%s">%s</a></li>' . "\n", $class, esc_url(get_pagenum_link($link)), $link);
    }

    /** Link to last page, plus ellipses if necessary */
    if (!in_array($max, $links)) {
        if (!in_array($max - 1, $links))
            echo '<li>&#8230;</li>' . "\n";
        $class = $paged == $max ? ' class="active"' : '';
        printf('<li%s><a href="%s" aria-label="Next"><span aria-hidden="true">%s</span></a></li>' . "\n", $class, esc_url(get_pagenum_link($max)), $max);
    }

    /** Next Post Link */
    if (get_next_posts_link())
        printf('<li>%s</li>' . "\n", get_next_posts_link());

    echo '</ul></div>' . "\n";
}
package io.strimzi.systemtest.kafka; import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.<API key>; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.Quantity; import io.fabric8.kubernetes.api.model.<API key>; import io.fabric8.kubernetes.api.model.Secret; import io.fabric8.kubernetes.api.model.<API key>; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.apps.StatefulSet; import io.fabric8.kubernetes.client.dsl.base.<API key>; import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.KafkaTopicList; import io.strimzi.api.kafka.model.EntityOperatorSpec; import io.strimzi.api.kafka.model.<API key>; import io.strimzi.api.kafka.model.<API key>; import io.strimzi.api.kafka.model.Kafka; import io.strimzi.api.kafka.model.KafkaClusterSpec; import io.strimzi.api.kafka.model.KafkaResources; import io.strimzi.api.kafka.model.KafkaTopic; import io.strimzi.api.kafka.model.SystemProperty; import io.strimzi.api.kafka.model.<API key>; import io.strimzi.api.kafka.model.<API key>; import io.strimzi.api.kafka.model.listener.arraylistener.<API key>; import io.strimzi.api.kafka.model.listener.arraylistener.<API key>; import io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType; import io.strimzi.api.kafka.model.storage.JbodStorage; import io.strimzi.api.kafka.model.storage.JbodStorageBuilder; import io.strimzi.api.kafka.model.storage.<API key>; import io.strimzi.operator.common.model.Labels; import io.strimzi.systemtest.AbstractST; import io.strimzi.systemtest.Constants; import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.resources.operator.<API key>; import io.strimzi.systemtest.annotations.OpenShiftOnly; import io.strimzi.systemtest.annotations.<API key>; import io.strimzi.systemtest.cli.KafkaCmdClient; import io.strimzi.systemtest.kafkaclients.internalClients.InternalKafkaClient; import 
io.strimzi.systemtest.resources.ResourceOperation; import io.strimzi.systemtest.resources.crd.KafkaResource; import io.strimzi.systemtest.resources.crd.KafkaTopicResource; import io.strimzi.systemtest.templates.crd.<API key>; import io.strimzi.systemtest.templates.crd.KafkaTemplates; import io.strimzi.systemtest.templates.crd.KafkaTopicTemplates; import io.strimzi.systemtest.templates.crd.KafkaUserTemplates; import io.strimzi.systemtest.utils.StUtils; import io.strimzi.systemtest.utils.kafkaUtils.KafkaTopicUtils; import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.ConfigMapUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.StatefulSetUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.<API key>; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.ServiceUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.executor.ExecResult; import io.strimzi.test.timemeasuring.Operation; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.extension.ExtensionContext; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.stream.Collectors; import static io.strimzi.api.kafka.model.KafkaResources.<API key>; import static io.strimzi.api.kafka.model.KafkaResources.<API key>; import static io.strimzi.systemtest.Constants.CRUISE_CONTROL; import static io.strimzi.systemtest.Constants.<API key>; import static io.strimzi.systemtest.Constants.<API key>; import static io.strimzi.systemtest.Constants.REGRESSION; import static 
io.strimzi.systemtest.Constants.STATEFUL_SET; import static io.strimzi.systemtest.utils.StUtils.<API key>; import static io.strimzi.systemtest.utils.StUtils.stringToProperties; import static io.strimzi.test.TestUtils.fromYamlString; import static io.strimzi.test.TestUtils.map; import static io.strimzi.test.k8s.KubeClusterResource.cmdKubeClient; import static io.strimzi.test.k8s.KubeClusterResource.kubeClient; import static java.util.Arrays.asList; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.junit.jupiter.api.Assumptions.assumeFalse; @Tag(REGRESSION) @SuppressWarnings("checkstyle:<API key>") class KafkaST extends AbstractST { private static final Logger LOGGER = LogManager.getLogger(KafkaST.class); private static final String TEMPLATE_PATH = TestUtils.USER_PATH + "/../packaging/examples/templates/cluster-operator"; public static final String NAMESPACE = "kafka-cluster-test"; private static final String <API key> = "<API key>"; @<API key> @OpenShiftOnly void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); cluster.<API key>(extensionContext, TEMPLATE_PATH); String templateName = "strimzi-ephemeral"; cmdKubeClient(namespaceName).<API key>(templateName, map("CLUSTER_NAME", <API key>)); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>), 3, ResourceOperation.<API key>(STATEFUL_SET)); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>), 3, ResourceOperation.<API key>(STATEFUL_SET)); 
DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>), 1); //Testing docker images <API key>(<API key>, NAMESPACE, namespaceName, 3, 3, false); //Testing labels <API key>(NAMESPACE, namespaceName, <API key>, templateName); LOGGER.info("Deleting Kafka cluster {} after test", <API key>); cmdKubeClient(namespaceName).deleteByName("Kafka", <API key>); //Wait for kafka deletion cmdKubeClient(namespaceName).<API key>(Kafka.RESOURCE_KIND, <API key>); kubeClient(namespaceName).listPods(namespaceName).stream() .filter(p -> p.getMetadata().getName().startsWith(<API key>)) .forEach(p -> PodUtils.deletePodWithWait(p.getMetadata().getName())); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>)); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>)); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(<API key>)); } @<API key> void testEODeletion(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); // Get pod name to check termination process Pod pod = kubeClient(namespaceName).listPods(namespaceName).stream() .filter(p -> p.getMetadata().getName().startsWith(KafkaResources.<API key>(clusterName))) .findAny() .orElseThrow(); assertThat("Entity operator pod does not exist", pod, notNullValue()); LOGGER.info("Setting entity operator to null"); KafkaResource.<API key>(clusterName, kafka -> kafka.getSpec().setEntityOperator(null), namespaceName); // Wait when EO(UO + TO) will be removed DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName)); PodUtils.deletePodWithWait(namespaceName, pod.getMetadata().getName()); LOGGER.info("Entity operator was deleted"); } @<API key> @SuppressWarnings({"checkstyle:MethodLength", 
"checkstyle:JavaNCSS"}) void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LinkedHashMap<String, String> envVarGeneral = new LinkedHashMap<>(); envVarGeneral.put("TEST_ENV_1", "test.env.one"); envVarGeneral.put("TEST_ENV_2", "test.env.two"); LinkedHashMap<String, String> envVarUpdated = new LinkedHashMap<>(); envVarUpdated.put("TEST_ENV_2", "updated.test.env.two"); envVarUpdated.put("TEST_ENV_3", "test.env.three"); // Kafka Broker config Map<String, Object> kafkaConfig = new HashMap<>(); kafkaConfig.put("offsets.topic.replication.factor", "1"); kafkaConfig.put("transaction.state.log.replication.factor", "1"); kafkaConfig.put("default.replication.factor", "1"); Map<String, Object> updatedKafkaConfig = new HashMap<>(); updatedKafkaConfig.put("offsets.topic.replication.factor", "2"); updatedKafkaConfig.put("transaction.state.log.replication.factor", "2"); updatedKafkaConfig.put("default.replication.factor", "2"); // Zookeeper Config Map<String, Object> zookeeperConfig = new HashMap<>(); zookeeperConfig.put("tickTime", "2000"); zookeeperConfig.put("initLimit", "5"); zookeeperConfig.put("syncLimit", "2"); zookeeperConfig.put("autopurge.purgeInterval", "1"); Map<String, Object> <API key> = new HashMap<>(); <API key>.put("tickTime", "2500"); <API key>.put("initLimit", "3"); <API key>.put("syncLimit", "5"); final int initialDelaySeconds = 30; final int timeoutSeconds = 10; final int <API key> = 31; final int <API key> = 11; final int periodSeconds = 10; final int successThreshold = 1; final int failureThreshold = 3; final int <API key> = 5; final int <API key> = 1; resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 2) .editSpec() .editKafka() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API 
key>(successThreshold) .<API key>(failureThreshold) .endReadinessProbe() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endLivenessProbe() .withConfig(kafkaConfig) .withNewTemplate() .<API key>() .withEnv(StUtils.<API key>(envVarGeneral)) .endKafkaContainer() .endTemplate() .endKafka() .editZookeeper() .withReplicas(2) .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .endReadinessProbe() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .endLivenessProbe() .withConfig(zookeeperConfig) .withNewTemplate() .<API key>() .withEnv(StUtils.<API key>(envVarGeneral)) .<API key>() .endTemplate() .endZookeeper() .editEntityOperator() .withNewTemplate() .<API key>() .withEnv(StUtils.<API key>(envVarGeneral)) .<API key>() .<API key>() .withEnv(StUtils.<API key>(envVarGeneral)) .<API key>() .<API key>() .withEnv(StUtils.<API key>(envVarGeneral)) .<API key>() .endTemplate() .editUserOperator() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endReadinessProbe() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endLivenessProbe() .endUserOperator() .editTopicOperator() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endReadinessProbe() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endLivenessProbe() .endTopicOperator() .withNewTlsSidecar() .<API key>() .<API key>(initialDelaySeconds) 
.withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endReadinessProbe() .<API key>() .<API key>(initialDelaySeconds) .withTimeoutSeconds(timeoutSeconds) .withPeriodSeconds(periodSeconds) .<API key>(successThreshold) .<API key>(failureThreshold) .endLivenessProbe() .endTlsSidecar() .endEntityOperator() .endSpec() .build()); final Map<String, String> kafkaSnapshot = StatefulSetUtils.ssSnapshot(namespaceName, KafkaResources.<API key>(clusterName)); final Map<String, String> zkSnapshot = StatefulSetUtils.ssSnapshot(namespaceName, KafkaResources.<API key>(clusterName)); final Map<String, String> eoPod = DeploymentUtils.depSnapshot(namespaceName, KafkaResources.<API key>(clusterName)); LOGGER.info("Verify values before update"); <API key>(namespaceName, <API key>(clusterName), "kafka", initialDelaySeconds, timeoutSeconds, periodSeconds, successThreshold, failureThreshold); <API key>(namespaceName, <API key>(clusterName), kafkaConfig, clusterName); <API key>(namespaceName, <API key>(clusterName), "kafka", envVarGeneral); String kafkaConfiguration = kubeClient().getConfigMap(namespaceName, KafkaResources.<API key>(clusterName)).getData().get("server.config"); assertThat(kafkaConfiguration, containsString("offsets.topic.replication.factor=1")); assertThat(kafkaConfiguration, containsString("transaction.state.log.replication.factor=1")); assertThat(kafkaConfiguration, containsString("default.replication.factor=1")); String <API key> = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "cat", "/tmp/strimzi.properties").out(); assertThat(<API key>, containsString("offsets.topic.replication.factor=1")); assertThat(<API key>, containsString("transaction.state.log.replication.factor=1")); assertThat(<API key>, containsString("default.replication.factor=1")); LOGGER.info("Testing Zookeepers"); <API key>(namespaceName, <API key>(clusterName), "zookeeper", 
initialDelaySeconds, timeoutSeconds, periodSeconds, successThreshold, failureThreshold); <API key>(namespaceName, <API key>(clusterName), "zookeeper", "<API key>", zookeeperConfig); <API key>(namespaceName, <API key>(clusterName), "zookeeper", envVarGeneral); LOGGER.info("Checking configuration of TO and UO"); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "topic-operator", initialDelaySeconds, timeoutSeconds, periodSeconds, successThreshold, failureThreshold); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "topic-operator", envVarGeneral); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "user-operator", initialDelaySeconds, timeoutSeconds, periodSeconds, successThreshold, failureThreshold); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "user-operator", envVarGeneral); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "tls-sidecar", initialDelaySeconds, timeoutSeconds, periodSeconds, successThreshold, failureThreshold); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "tls-sidecar", envVarGeneral); LOGGER.info("Updating configuration of Kafka cluster"); KafkaResource.<API key>(clusterName, k -> { KafkaClusterSpec kafkaClusterSpec = k.getSpec().getKafka(); kafkaClusterSpec.getLivenessProbe().<API key>(<API key>); kafkaClusterSpec.getReadinessProbe().<API key>(<API key>); kafkaClusterSpec.getLivenessProbe().setTimeoutSeconds(<API key>); kafkaClusterSpec.getReadinessProbe().setTimeoutSeconds(<API key>); kafkaClusterSpec.getLivenessProbe().setPeriodSeconds(<API key>); kafkaClusterSpec.getReadinessProbe().setPeriodSeconds(<API key>); kafkaClusterSpec.getLivenessProbe().setFailureThreshold(<API key>); kafkaClusterSpec.getReadinessProbe().setFailureThreshold(<API key>); kafkaClusterSpec.setConfig(updatedKafkaConfig); kafkaClusterSpec.getTemplate().getKafkaContainer().setEnv(StUtils.<API key>(envVarUpdated)); <API key> <API key> = k.getSpec().getZookeeper(); <API 
key>.getLivenessProbe().<API key>(<API key>); <API key>.getReadinessProbe().<API key>(<API key>); <API key>.getLivenessProbe().setTimeoutSeconds(<API key>); <API key>.getReadinessProbe().setTimeoutSeconds(<API key>); <API key>.getLivenessProbe().setPeriodSeconds(<API key>); <API key>.getReadinessProbe().setPeriodSeconds(<API key>); <API key>.getLivenessProbe().setFailureThreshold(<API key>); <API key>.getReadinessProbe().setFailureThreshold(<API key>); <API key>.setConfig(<API key>); <API key>.getTemplate().<API key>().setEnv(StUtils.<API key>(envVarUpdated)); // Configuring TO and UO to use new values for InitialDelaySeconds and TimeoutSeconds EntityOperatorSpec entityOperatorSpec = k.getSpec().getEntityOperator(); entityOperatorSpec.getTopicOperator().getLivenessProbe().<API key>(<API key>); entityOperatorSpec.getTopicOperator().getReadinessProbe().<API key>(<API key>); entityOperatorSpec.getTopicOperator().getLivenessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getTopicOperator().getReadinessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getTopicOperator().getLivenessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getTopicOperator().getReadinessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getTopicOperator().getLivenessProbe().setFailureThreshold(<API key>); entityOperatorSpec.getTopicOperator().getReadinessProbe().setFailureThreshold(<API key>); entityOperatorSpec.getUserOperator().getLivenessProbe().<API key>(<API key>); entityOperatorSpec.getUserOperator().getReadinessProbe().<API key>(<API key>); entityOperatorSpec.getUserOperator().getLivenessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getUserOperator().getReadinessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getUserOperator().getLivenessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getUserOperator().getReadinessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getUserOperator().getLivenessProbe().setFailureThreshold(<API 
key>); entityOperatorSpec.getUserOperator().getReadinessProbe().setFailureThreshold(<API key>); entityOperatorSpec.getTlsSidecar().getLivenessProbe().<API key>(<API key>); entityOperatorSpec.getTlsSidecar().getReadinessProbe().<API key>(<API key>); entityOperatorSpec.getTlsSidecar().getLivenessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getTlsSidecar().getReadinessProbe().setTimeoutSeconds(<API key>); entityOperatorSpec.getTlsSidecar().getLivenessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getTlsSidecar().getReadinessProbe().setPeriodSeconds(<API key>); entityOperatorSpec.getTlsSidecar().getLivenessProbe().setFailureThreshold(<API key>); entityOperatorSpec.getTlsSidecar().getReadinessProbe().setFailureThreshold(<API key>); entityOperatorSpec.getTemplate().<API key>().setEnv(StUtils.<API key>(envVarUpdated)); entityOperatorSpec.getTemplate().<API key>().setEnv(StUtils.<API key>(envVarUpdated)); entityOperatorSpec.getTemplate().<API key>().setEnv(StUtils.<API key>(envVarUpdated)); }, namespaceName); StatefulSetUtils.waitTillSsHasRolled(namespaceName, KafkaResources.<API key>(clusterName), 2, zkSnapshot); StatefulSetUtils.waitTillSsHasRolled(namespaceName, KafkaResources.<API key>(clusterName), 2, kafkaSnapshot); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 1, eoPod); KafkaUtils.waitForKafkaReady(namespaceName, clusterName); LOGGER.info("Verify values after update"); <API key>(namespaceName, <API key>(clusterName), "kafka", <API key>, <API key>, <API key>, successThreshold, <API key>); <API key>(namespaceName, <API key>(clusterName), updatedKafkaConfig, clusterName); <API key>(namespaceName, <API key>(clusterName), "kafka", envVarUpdated); kafkaConfiguration = kubeClient(namespaceName).getConfigMap(namespaceName, KafkaResources.<API key>(clusterName)).getData().get("server.config"); assertThat(kafkaConfiguration, containsString("offsets.topic.replication.factor=2")); assertThat(kafkaConfiguration, 
containsString("transaction.state.log.replication.factor=2")); assertThat(kafkaConfiguration, containsString("default.replication.factor=2")); <API key> = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "cat", "/tmp/strimzi.properties").out(); assertThat(<API key>, containsString("offsets.topic.replication.factor=2")); assertThat(<API key>, containsString("transaction.state.log.replication.factor=2")); assertThat(<API key>, containsString("default.replication.factor=2")); LOGGER.info("Testing Zookeepers"); <API key>(namespaceName, <API key>(clusterName), "zookeeper", <API key>, <API key>, <API key>, successThreshold, <API key>); <API key>(namespaceName, <API key>(clusterName), "zookeeper", "<API key>", <API key>); <API key>(namespaceName, <API key>(clusterName), "zookeeper", envVarUpdated); LOGGER.info("Getting entity operator to check configuration of TO and UO"); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "topic-operator", <API key>, <API key>, <API key>, successThreshold, <API key>); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "topic-operator", envVarUpdated); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "user-operator", <API key>, <API key>, <API key>, successThreshold, <API key>); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "user-operator", envVarUpdated); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "tls-sidecar", <API key>, <API key>, <API key>, successThreshold, <API key>); <API key>(namespaceName, KafkaResources.<API key>(clusterName), "tls-sidecar", envVarUpdated); } @<API key> void testJvmAndResources(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); ArrayList<SystemProperty> javaSystemProps = new ArrayList<>(); javaSystemProps.add(new <API 
key>().withName("javax.net.debug") .withValue("verbose").build()); Map<String, String> jvmOptionsXX = new HashMap<>(); jvmOptionsXX.put("UseG1GC", "true"); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 1, 1) .editSpec() .editKafka() .withResources(new <API key>() .addToLimits("memory", new Quantity("1.5Gi")) .addToLimits("cpu", new Quantity("1")) .addToRequests("memory", new Quantity("1Gi")) .addToRequests("cpu", new Quantity("50m")) .build()) .withNewJvmOptions() .withXmx("1g") .withXms("512m") .withXx(jvmOptionsXX) .endJvmOptions() .endKafka() .editZookeeper() .withResources( new <API key>() .addToLimits("memory", new Quantity("1G")) .addToLimits("cpu", new Quantity("0.5")) .addToRequests("memory", new Quantity("0.5G")) .addToRequests("cpu", new Quantity("25m")) .build()) .withNewJvmOptions() .withXmx("1G") .withXms("512M") .withXx(jvmOptionsXX) .endJvmOptions() .endZookeeper() .<API key>() .<API key>() .withResources( new <API key>() .addToLimits("memory", new Quantity("1024Mi")) .addToLimits("cpu", new Quantity("500m")) .addToRequests("memory", new Quantity("384Mi")) .addToRequests("cpu", new Quantity("0.025")) .build()) .withNewJvmOptions() .withXmx("2G") .withXms("1024M") .<API key>(javaSystemProps) .endJvmOptions() .endTopicOperator() .withNewUserOperator() .withResources( new <API key>() .addToLimits("memory", new Quantity("512M")) .addToLimits("cpu", new Quantity("300m")) .addToRequests("memory", new Quantity("256M")) .addToRequests("cpu", new Quantity("30m")) .build()) .withNewJvmOptions() .withXmx("1G") .withXms("512M") .<API key>(javaSystemProps) .endJvmOptions() .endUserOperator() .endEntityOperator() .endSpec() .build()); // Make snapshots for Kafka cluster to meke sure that there is no rolling update after CO reconciliation final String zkStsName = KafkaResources.<API key>(clusterName); final String kafkaStsName = <API key>(clusterName); final String eoDepName = KafkaResources.<API key>(clusterName); 
final Map<String, String> zkPods = StatefulSetUtils.ssSnapshot(namespaceName, zkStsName); final Map<String, String> kafkaPods = StatefulSetUtils.ssSnapshot(namespaceName, kafkaStsName); final Map<String, String> eoPods = DeploymentUtils.depSnapshot(namespaceName, eoDepName); assertResources(namespaceName, KafkaResources.kafkaPodName(clusterName, 0), "kafka", "1536Mi", "1", "1Gi", "50m"); <API key>(namespaceName, KafkaResources.kafkaPodName(clusterName, 0), "kafka", "-Xmx1g", "-Xms512m", "-XX:+UseG1GC"); assertResources(namespaceName, KafkaResources.zookeeperPodName(clusterName, 0), "zookeeper", "1G", "500m", "500M", "25m"); <API key>(namespaceName, KafkaResources.zookeeperPodName(clusterName, 0), "zookeeper", "-Xmx1G", "-Xms512M", "-XX:+UseG1GC"); Optional<Pod> pod = kubeClient(namespaceName).listPods(namespaceName) .stream().filter(p -> p.getMetadata().getName().startsWith(KafkaResources.<API key>(clusterName))) .findFirst(); assertThat("EO pod does not exist", pod.isPresent(), is(true)); assertResources(namespaceName, pod.get().getMetadata().getName(), "topic-operator", "1Gi", "500m", "384Mi", "25m"); assertResources(namespaceName, pod.get().getMetadata().getName(), "user-operator", "512M", "300m", "256M", "30m"); <API key>(namespaceName, pod.get().getMetadata().getName(), "topic-operator", "-Xmx2G", "-Xms1024M", null); <API key>(namespaceName, pod.get().getMetadata().getName(), "user-operator", "-Xmx1G", "-Xms512M", null); String eoPod = eoPods.keySet().toArray()[0].toString(); kubeClient(namespaceName).getPod(namespaceName, eoPod).getSpec().getContainers().forEach(container -> { if (!container.getName().equals("tls-sidecar")) { LOGGER.info("Check if -D java options are present in {}", container.getName()); String javaSystemProp = container.getEnv().stream().filter(envVar -> envVar.getName().equals("<API key>")).findFirst().orElseThrow().getValue(); String javaOpts = container.getEnv().stream().filter(envVar -> 
envVar.getName().equals("STRIMZI_JAVA_OPTS")).findFirst().orElseThrow().getValue(); assertThat(javaSystemProp, is("-Djavax.net.debug=verbose")); if (container.getName().equals("topic-operator")) { assertThat(javaOpts, is("-Xms1024M -Xmx2G")); } if (container.getName().equals("user-operator")) { assertThat(javaOpts, is("-Xms512M -Xmx1G")); } } }); LOGGER.info("Checking no rolling update for Kafka cluster"); StatefulSetUtils.<API key>(namespaceName, zkStsName, zkPods); StatefulSetUtils.<API key>(namespaceName, kafkaStsName, kafkaPods); DeploymentUtils.<API key>(namespaceName, eoDepName, eoPods); } @<API key> void <API key>(ExtensionContext extensionContext) throws <API key> { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); final String topicName = KafkaTopicUtils.<API key>(); final String cliTopicName = "topic-from-cli"; //Creating topics for testing resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build()); KafkaTopicUtils.<API key>(namespaceName, topicName); assertThat(KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicName).get().getMetadata().getName(), is(topicName)); assertThat(KafkaCmdClient.<API key>(namespaceName, clusterName, 0), hasItem(topicName)); KafkaCmdClient.<API key>(namespaceName, clusterName, 0, cliTopicName, 1, 1); assertThat(KafkaCmdClient.<API key>(namespaceName, clusterName, 0), hasItems(topicName, cliTopicName)); assertThat(cmdKubeClient(namespaceName).list(KafkaTopic.RESOURCE_KIND), hasItems(cliTopicName, topicName)); //Updating first topic using pod CLI KafkaCmdClient.<API key>(namespaceName, clusterName, 0, topicName, 2); KafkaUtils.waitForKafkaReady(namespaceName, clusterName); assertThat(KafkaCmdClient.<API key>(namespaceName, clusterName, 0, 
topicName), hasItems("PartitionCount:2")); KafkaTopic testTopic = fromYamlString(cmdKubeClient().get(KafkaTopic.RESOURCE_KIND, topicName), KafkaTopic.class); assertThat(testTopic, is(CoreMatchers.notNullValue())); assertThat(testTopic.getSpec(), is(CoreMatchers.notNullValue())); assertThat(testTopic.getSpec().getPartitions(), is(Integer.valueOf(2))); //Updating second topic via KafkaTopic update KafkaTopicResource.<API key>(cliTopicName, topic -> topic.getSpec().setPartitions(2), namespaceName); KafkaUtils.waitForKafkaReady(namespaceName, clusterName); assertThat(KafkaCmdClient.<API key>(namespaceName, clusterName, 0, cliTopicName), hasItems("PartitionCount:2")); testTopic = fromYamlString(cmdKubeClient(namespaceName).get(KafkaTopic.RESOURCE_KIND, cliTopicName), KafkaTopic.class); assertThat(testTopic, is(CoreMatchers.notNullValue())); assertThat(testTopic.getSpec(), is(CoreMatchers.notNullValue())); assertThat(testTopic.getSpec().getPartitions(), is(Integer.valueOf(2))); //Deleting first topic by deletion of CM cmdKubeClient(namespaceName).deleteByName(KafkaTopic.RESOURCE_KIND, cliTopicName); //Deleting another topic using pod CLI KafkaCmdClient.<API key>(namespaceName, clusterName, 0, topicName); KafkaTopicUtils.<API key>(namespaceName, topicName); //Checking all topics were deleted Thread.sleep(Constants.TIMEOUT_TEARDOWN); List<String> topics = KafkaCmdClient.<API key>(namespaceName, clusterName, 0); assertThat(topics, not(hasItems(topicName))); assertThat(topics, not(hasItems(cliTopicName))); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Deploying Kafka cluster {}", clusterName); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); String eoPodName = kubeClient(namespaceName).<API key>(namespaceName, 
KafkaResources.<API key>(clusterName)) .get(0).getMetadata().getName(); KafkaResource.<API key>(clusterName, k -> k.getSpec().getEntityOperator().setTopicOperator(null), namespaceName); //Waiting when EO pod will be recreated without TO PodUtils.deletePodWithWait(namespaceName, eoPodName); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 1); PodUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 2); //Checking that TO was removed kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), not(containsString("topic-operator"))); }); }); eoPodName = kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)) .get(0).getMetadata().getName(); KafkaResource.<API key>(clusterName, k -> k.getSpec().getEntityOperator().setTopicOperator(new <API key>()), namespaceName); //Waiting when EO pod will be recreated with TO PodUtils.deletePodWithWait(namespaceName, eoPodName); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 1); //Checking that TO was created kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), anyOf( containsString("topic-operator"), containsString("user-operator"), containsString("tls-sidecar")) ); }); }); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Deploying Kafka cluster {}", clusterName); String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.CLUSTER_DEPLOYMENT, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); 
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); String eoPodName = kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)) .get(0).getMetadata().getName(); KafkaResource.<API key>(clusterName, k -> k.getSpec().getEntityOperator().setUserOperator(null), namespaceName); //Waiting when EO pod will be recreated without UO PodUtils.deletePodWithWait(namespaceName, eoPodName); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 1); PodUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 2); //Checking that UO was removed kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), not(containsString("user-operator"))); }); }); eoPodName = kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)) .get(0).getMetadata().getName(); KafkaResource.<API key>(clusterName, k -> k.getSpec().getEntityOperator().setUserOperator(new <API key>()), namespaceName); //Waiting when EO pod will be recreated with UO PodUtils.deletePodWithWait(namespaceName, eoPodName); DeploymentUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), 1); //Checking that UO was created kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), anyOf( containsString("topic-operator"), containsString("user-operator"), containsString("tls-sidecar")) ); }); }); timeMeasuringSystem.stopOperation(operationId, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); <API key>(NAMESPACE, timeMeasuringSystem.<API key>(extensionContext.<API key>().getName(), extensionContext.getDisplayName(), operationId)); } @<API key> void <API key>(ExtensionContext 
extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); // TODO issue #4152 - temporarily disabled for Namespace RBAC scoped assumeFalse(Environment.<API key>()); LOGGER.info("Deploying Kafka cluster {}", clusterName); String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.CLUSTER_DEPLOYMENT, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); String eoDeploymentName = KafkaResources.<API key>(clusterName); KafkaResource.<API key>(clusterName, k -> { k.getSpec().getEntityOperator().setTopicOperator(null); k.getSpec().getEntityOperator().setUserOperator(null); }, namespaceName); PodUtils.<API key>(namespaceName, eoDeploymentName, 0); KafkaResource.<API key>(clusterName, k -> { k.getSpec().getEntityOperator().setTopicOperator(new <API key>()); k.getSpec().getEntityOperator().setUserOperator(new <API key>()); }, namespaceName); DeploymentUtils.<API key>(namespaceName, eoDeploymentName); //Checking that EO was created kubeClient().<API key>(namespaceName, eoDeploymentName).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), anyOf( containsString("topic-operator"), containsString("user-operator"), containsString("tls-sidecar")) ); }); }); timeMeasuringSystem.stopOperation(operationId, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); <API key>(NAMESPACE, timeMeasuringSystem.<API key>(extensionContext.<API key>().getName(), extensionContext.getDisplayName(), operationId)); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Deploying Kafka 
cluster without TO in EO"); String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.CLUSTER_DEPLOYMENT, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3) .editSpec() .<API key>() .withNewUserOperator() .endUserOperator() .endEntityOperator() .endSpec() .build()); timeMeasuringSystem.stopOperation(operationId, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); <API key>(NAMESPACE, timeMeasuringSystem.<API key>(extensionContext.<API key>().getName(), extensionContext.getDisplayName(), operationId)); //Checking that TO was not deployed kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), not(containsString("topic-operator"))); }); }); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Deploying Kafka cluster without UO in EO"); String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.CLUSTER_DEPLOYMENT, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3) .editSpec() .<API key>() .<API key>() .endTopicOperator() .endEntityOperator() .endSpec() .build()); timeMeasuringSystem.stopOperation(operationId, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); <API key>(NAMESPACE, timeMeasuringSystem.<API key>(extensionContext.<API key>().getName(), extensionContext.getDisplayName(), operationId)); //Checking that UO was not deployed kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(pod -> { 
pod.getSpec().getContainers().forEach(container -> { assertThat(container.getName(), not(containsString("user-operator"))); }); }); } @<API key> void <API key>(ExtensionContext extensionContext) { String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Deploying Kafka cluster without UO and TO in EO"); String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.CLUSTER_DEPLOYMENT, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3) .editSpec() .<API key>() .endEntityOperator() .endSpec() .build()); timeMeasuringSystem.stopOperation(operationId, extensionContext.<API key>().getName(), extensionContext.getDisplayName()); <API key>(NAMESPACE, timeMeasuringSystem.<API key>(extensionContext.<API key>().getName(), extensionContext.getDisplayName(), operationId)); //Checking that EO was not deployed assertThat("EO should not be deployed", kubeClient().<API key>(KafkaResources.<API key>(clusterName)).size(), is(0)); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); // Negative scenario: creating topic without any labels and make sure that TO can't handle this topic resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build()); // Creating topic without any label resourceManager.createResource(extensionContext, false, KafkaTopicTemplates.topic(clusterName, "<API key>", 1, 1, 1) .editMetadata() .withLabels(null) .endMetadata() .build()); // Checking that resource was created assertThat(cmdKubeClient(namespaceName).list("kafkatopic"), hasItems("<API key>")); // Checking that TO didn't handle new topic and zk pods don't contain new topic assertThat(KafkaCmdClient.<API key>(namespaceName, 
clusterName, 0), not(hasItems("<API key>"))); // Checking TO logs String tOPodName = cmdKubeClient(namespaceName).<API key>("pod", Labels.STRIMZI_NAME_LABEL + "=" + clusterName + "-entity-operator").get(0); String tOlogs = kubeClient(namespaceName).<API key>(namespaceName, tOPodName, "topic-operator"); assertThat(tOlogs, not(containsString("Created topic '<API key>'"))); //Deleting topic cmdKubeClient(namespaceName).deleteByName("kafkatopic", "<API key>"); KafkaTopicUtils.<API key>(namespaceName, "<API key>"); //Checking all topics were deleted List<String> topics = KafkaCmdClient.<API key>(namespaceName, clusterName, 0); assertThat(topics, not(hasItems("<API key>"))); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final int kafkaReplicas = 2; final String diskSizeGi = "10"; JbodStorage jbodStorage = new JbodStorageBuilder().withVolumes( new <API key>().withDeleteClaim(false).withId(0).withSize(diskSizeGi + "Gi").build(), new <API key>().withDeleteClaim(true).withId(1).withSize(diskSizeGi + "Gi").build()).build(); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaJBOD(clusterName, kafkaReplicas, jbodStorage).build()); // kafka cluster already deployed <API key>(namespaceName, clusterName, kafkaReplicas, 2, diskSizeGi); final int volumesCount = kubeClient(namespaceName).<API key>(namespaceName, clusterName).size(); LOGGER.info("Deleting cluster"); cmdKubeClient(namespaceName).deleteByName("kafka", clusterName); LOGGER.info("Waiting for PVC deletion"); <API key>.waitForPVCDeletion(namespaceName, volumesCount, jbodStorage, clusterName); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final int 
kafkaReplicas = 2; final String diskSizeGi = "10"; JbodStorage jbodStorage = new JbodStorageBuilder().withVolumes( new <API key>().withDeleteClaim(true).withId(0).withSize(diskSizeGi + "Gi").build(), new <API key>().withDeleteClaim(true).withId(1).withSize(diskSizeGi + "Gi").build()).build(); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaJBOD(clusterName, kafkaReplicas, jbodStorage).build()); // kafka cluster already deployed <API key>(namespaceName, clusterName, kafkaReplicas, 2, diskSizeGi); final int volumesCount = kubeClient(namespaceName).<API key>(namespaceName, clusterName).size(); LOGGER.info("Deleting cluster"); cmdKubeClient(namespaceName).deleteByName("kafka", clusterName); LOGGER.info("Waiting for PVC deletion"); <API key>.waitForPVCDeletion(namespaceName, volumesCount, jbodStorage, clusterName); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final int kafkaReplicas = 2; final String diskSizeGi = "10"; JbodStorage jbodStorage = new JbodStorageBuilder().withVolumes( new <API key>().withDeleteClaim(false).withId(0).withSize(diskSizeGi + "Gi").build(), new <API key>().withDeleteClaim(false).withId(1).withSize(diskSizeGi + "Gi").build()).build(); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaJBOD(clusterName, kafkaReplicas, jbodStorage).build()); // kafka cluster already deployed <API key>(namespaceName, clusterName, kafkaReplicas, 2, diskSizeGi); int volumesCount = kubeClient(namespaceName).<API key>(namespaceName, clusterName).size(); LOGGER.info("Deleting cluster"); cmdKubeClient(namespaceName).deleteByName("kafka", clusterName); LOGGER.info("Waiting for PVC deletion"); <API key>.waitForPVCDeletion(namespaceName, volumesCount, jbodStorage, clusterName); } @<API key> @Tag(<API key>) void <API key>(ExtensionContext extensionContext) 
{ final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName()); final String[] diskSizes = {"70Gi", "20Gi"}; final int kafkaRepl = 2; final int diskCount = 2; JbodStorage jbodStorage = new JbodStorageBuilder() .withVolumes( new <API key>().withDeleteClaim(false).withId(0).withSize(diskSizes[0]).build(), new <API key>().withDeleteClaim(false).withId(1).withSize(diskSizes[1]).build() ).build(); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, kafkaRepl) .editSpec() .editKafka() .withStorage(jbodStorage) .endKafka() .editZookeeper(). withReplicas(1) .endZookeeper() .endSpec() .build()); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); List<<API key>> volumes = kubeClient(namespaceName).<API key>(namespaceName, clusterName).stream().filter( <API key> -> <API key>.getMetadata().getName().contains(clusterName)).collect(Collectors.toList()); <API key>(volumes, diskSizes, kafkaRepl, diskCount); String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) .withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); LOGGER.info("Checking produced and consumed messages to pod:{}", kafkaClientsPodName); internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); } @<API key> @Tag(<API key>) void <API 
key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); LOGGER.info("Creating kafka without external listener"); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3, 1).build()); final String brokerSecret = clusterName + "-kafka-brokers"; <API key> = kubeClient(namespaceName).getSecret(namespaceName, brokerSecret); LOGGER.info("Editing kafka with external listener"); KafkaResource.<API key>(clusterName, kafka -> { List<<API key>> lst = asList( new <API key>() .withName(Constants.<API key>) .withPort(9092) .withType(KafkaListenerType.INTERNAL) .withTls(false) .build(), new <API key>() .withName(Constants.<API key>) .withPort(9094) .withType(KafkaListenerType.LOADBALANCER) .withTls(true) .<API key>() .withFinalizers(LB_FINALIZERS) .endConfiguration() .build() ); kafka.getSpec().getKafka().setListeners(lst); }, namespaceName); StatefulSetUtils.waitTillSsHasRolled(namespaceName, <API key>(clusterName), 3, StatefulSetUtils.ssSnapshot(namespaceName, <API key>(clusterName))); Secret secretsWithExt = kubeClient(namespaceName).getSecret(namespaceName, brokerSecret); LOGGER.info("Checking secrets"); kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)).forEach(kafkaPod -> { String kafkaPodName = kafkaPod.getMetadata().getName(); assertThat(secretsWithExt.getData().get(kafkaPodName + ".crt"), is(not(secretsWithoutExt.getData().get(kafkaPodName + ".crt")))); assertThat(secretsWithExt.getData().get(kafkaPodName + ".key"), is(not(secretsWithoutExt.getData().get(kafkaPodName + ".key")))); }); } @<API key> @Tag(<API key>) void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName 
= mapWithTestTopics.get(extensionContext.getDisplayName()); Map<String, String> labels = new HashMap<>(); final String[] labelKeys = {"label-name-1", "label-name-2", ""}; final String[] labelValues = {"name-of-the-label-1", "name-of-the-label-2", ""}; labels.put(labelKeys[0], labelValues[0]); labels.put(labelKeys[1], labelValues[1]); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3, 1) .editMetadata() .withLabels(labels) .endMetadata() .build()); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); final String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) .withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); Map<String, String> kafkaPods = StatefulSetUtils.ssSnapshot(namespaceName, <API key>(clusterName)); LOGGER.info("Waiting for kafka stateful set labels changed {}", labels); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), labels); LOGGER.info("Getting labels from stateful set resource"); StatefulSet statefulSet = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)); LOGGER.info("Verifying default labels in the Kafka CR"); assertThat("Label exists in stateful set with concrete value", labelValues[0].equals(statefulSet.getSpec().getTemplate().getMetadata().getLabels().get(labelKeys[0]))); assertThat("Label exists in stateful set with concrete value", 
labelValues[1].equals(statefulSet.getSpec().getTemplate().getMetadata().getLabels().get(labelKeys[1]))); labelValues[0] = "<API key>"; labelValues[1] = "<API key>"; labelKeys[2] = "label-name-3"; labelValues[2] = "name-of-the-label-3"; LOGGER.info("Setting new values of labels from {} to {} | from {} to {} and adding one {} with value {}", "name-of-the-label-1", labelValues[0], "name-of-the-label-2", labelValues[1], labelKeys[2], labelValues[2]); LOGGER.info("Edit kafka labels in Kafka CR"); KafkaResource.<API key>(clusterName, resource -> { resource.getMetadata().getLabels().put(labelKeys[0], labelValues[0]); resource.getMetadata().getLabels().put(labelKeys[1], labelValues[1]); resource.getMetadata().getLabels().put(labelKeys[2], labelValues[2]); }, namespaceName); labels.put(labelKeys[0], labelValues[0]); labels.put(labelKeys[1], labelValues[1]); labels.put(labelKeys[2], labelValues[2]); LOGGER.info("Waiting for kafka service labels changed {}", labels); ServiceUtils.<API key>(namespaceName, KafkaResources.brokersServiceName(clusterName), labels); LOGGER.info("Verifying kafka labels via services"); Service service = kubeClient(namespaceName).getService(namespaceName, KafkaResources.brokersServiceName(clusterName)); verifyPresentLabels(labels, service); LOGGER.info("Waiting for kafka config map labels changed {}", labels); ConfigMapUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), labels); LOGGER.info("Verifying kafka labels via config maps"); ConfigMap configMap = kubeClient(namespaceName).getConfigMap(namespaceName, KafkaResources.<API key>(clusterName)); verifyPresentLabels(labels, configMap); LOGGER.info("Waiting for kafka stateful set labels changed {}", labels); StatefulSetUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), labels); LOGGER.info("Verifying kafka labels via stateful set"); statefulSet = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)); 
verifyPresentLabels(labels, statefulSet); StatefulSetUtils.waitTillSsHasRolled(namespaceName, <API key>(clusterName), 3, kafkaPods); LOGGER.info("Verifying via kafka pods"); labels = kubeClient(namespaceName).getPod(namespaceName, KafkaResources.kafkaPodName(clusterName, 0)).getMetadata().getLabels(); assertThat("Label exists in kafka pods", labelValues[0].equals(labels.get(labelKeys[0]))); assertThat("Label exists in kafka pods", labelValues[1].equals(labels.get(labelKeys[1]))); assertThat("Label exists in kafka pods", labelValues[2].equals(labels.get(labelKeys[2]))); LOGGER.info("Removing labels: {} -> {}, {} -> {}, {} -> {}", labelKeys[0], labels.get(labelKeys[0]), labelKeys[1], labels.get(labelKeys[1]), labelKeys[2], labels.get(labelKeys[2])); KafkaResource.<API key>(clusterName, resource -> { resource.getMetadata().getLabels().remove(labelKeys[0]); resource.getMetadata().getLabels().remove(labelKeys[1]); resource.getMetadata().getLabels().remove(labelKeys[2]); }, namespaceName); labels.remove(labelKeys[0]); labels.remove(labelKeys[1]); labels.remove(labelKeys[2]); LOGGER.info("Waiting for kafka service labels deletion {}", labels.toString()); ServiceUtils.<API key>(namespaceName, KafkaResources.brokersServiceName(clusterName), labelKeys[0], labelKeys[1], labelKeys[2]); LOGGER.info("Verifying kafka labels via services"); service = kubeClient(namespaceName).getService(namespaceName, KafkaResources.brokersServiceName(clusterName)); verifyNullLabels(labelKeys, service); LOGGER.info("Verifying kafka labels via config maps"); ConfigMapUtils.<API key>(namespaceName, KafkaResources.<API key>(clusterName), labelKeys[0], labelKeys[1], labelKeys[2]); configMap = kubeClient(namespaceName).getConfigMap(namespaceName, KafkaResources.<API key>(clusterName)); verifyNullLabels(labelKeys, configMap); LOGGER.info("Waiting for kafka stateful set labels changed {}", labels); String statefulSetName = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API 
key>(clusterName)).getMetadata().getName(); StatefulSetUtils.<API key>(namespaceName, statefulSetName, labelKeys[0], labelKeys[1], labelKeys[2]); statefulSet = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)); LOGGER.info("Verifying kafka labels via stateful set"); verifyNullLabels(labelKeys, statefulSet); StatefulSetUtils.waitTillSsHasRolled(namespaceName, <API key>(clusterName), 3, kafkaPods); LOGGER.info("Waiting for kafka pod labels deletion {}", labels.toString()); PodUtils.<API key>(namespaceName, KafkaResources.kafkaPodName(clusterName, 0), labelKeys[0], labelKeys[1], labelKeys[2]); labels = kubeClient(namespaceName).getPod(namespaceName, KafkaResources.kafkaPodName(clusterName, 0)).getMetadata().getLabels(); LOGGER.info("Verifying via kafka pods"); verifyNullLabels(labelKeys, labels); internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); } @<API key> @Tag(<API key>) void testAppDomainLabels(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1).build()); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); final String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) 
.withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); Map<String, String> labels; LOGGER.info(" List<Pod> pods = kubeClient(namespaceName).listPods(namespaceName, clusterName).stream() .filter(pod -> pod.getMetadata().getName().startsWith(clusterName)) .filter(pod -> !pod.getMetadata().getName().startsWith(clusterName + "-" + Constants.KAFKA_CLIENTS)) .collect(Collectors.toList()); for (Pod pod : pods) { LOGGER.info("Getting labels from {} pod", pod.getMetadata().getName()); verifyAppLabels(pod.getMetadata().getLabels()); } LOGGER.info(" LOGGER.info("Getting labels from stateful set of kafka resource"); labels = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)).getMetadata().getLabels(); verifyAppLabels(labels); LOGGER.info("Getting labels from stateful set of zookeeper resource"); labels = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)).getMetadata().getLabels(); verifyAppLabels(labels); LOGGER.info(" List<Service> services = kubeClient(namespaceName).listServices(namespaceName).stream() .filter(service -> service.getMetadata().getName().startsWith(clusterName)) .collect(Collectors.toList()); for (Service service : services) { LOGGER.info("Getting labels from {} service", service.getMetadata().getName()); verifyAppLabels(service.getMetadata().getLabels()); } LOGGER.info(" List<Secret> secrets = kubeClient(namespaceName).listSecrets(namespaceName).stream() .filter(secret -> secret.getMetadata().getName().startsWith(clusterName) && secret.getType().equals("Opaque")) .collect(Collectors.toList()); for (Secret secret : secrets) { LOGGER.info("Getting labels from {} secret", secret.getMetadata().getName()); <API key>(secret.getMetadata().getLabels()); } LOGGER.info(" List<ConfigMap> configMaps = kubeClient(namespaceName).<API key>(namespaceName, clusterName); for (ConfigMap configMap : configMaps) { LOGGER.info("Getting 
labels from {} config map", configMap.getMetadata().getName()); <API key>(configMap.getMetadata().getLabels()); } internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String userName = mapWithTestUsers.get(extensionContext.getDisplayName()); final String firstClusterName = "my-cluster-1"; final String secondClusterName = "my-cluster-2"; resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(firstClusterName, 3, 1).build()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(secondClusterName, 3, 1).build()); resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(firstClusterName, userName).build()); LOGGER.info("Verifying that user {} in cluster {} is created", userName, firstClusterName); String <API key> = kubeClient(namespaceName).<API key>(namespaceName, Labels.STRIMZI_NAME_LABEL, KafkaResources.<API key>(firstClusterName)).get(0); String uOLogs = kubeClient(namespaceName).<API key>(namespaceName, <API key>, "user-operator"); assertThat(uOLogs, containsString("User " + userName + " in namespace " + namespaceName + " was ADDED")); LOGGER.info("Verifying that user {} in cluster {} is not created", userName, secondClusterName); <API key> = kubeClient(namespaceName).<API key>(namespaceName, Labels.STRIMZI_NAME_LABEL, KafkaResources.<API key>(secondClusterName)).get(0); uOLogs = kubeClient(namespaceName).<API key>(namespaceName, <API key>, "user-operator"); assertThat(uOLogs, not(containsString("User " + userName + " in namespace " + namespaceName + " was ADDED"))); LOGGER.info("Verifying that user belongs to {} cluster", firstClusterName); String kafkaUserResource = 
cmdKubeClient(namespaceName).getResourceAsYaml("kafkauser", userName); assertThat(kafkaUserResource, containsString(Labels.<API key> + ": " + firstClusterName)); } @<API key> @Tag(<API key>) void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName = KafkaTopicUtils.<API key>(); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 1, 1).build()); Map<String, String> kafkaPodsSnapshot = StatefulSetUtils.ssSnapshot(namespaceName, <API key>(clusterName)); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName, 1, 1).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); final String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) .withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); TestUtils.waitFor("KafkaTopic creation inside kafka pod", Constants.<API key>, Constants.GLOBAL_TIMEOUT, () -> cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", "cd /var/lib/kafka/data/kafka-log0; ls -1").out().contains(topicName)); String topicDirNameInPod = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", "cd /var/lib/kafka/data/kafka-log0; ls -1 | sed -n '/" + topicName + "/p'").out(); String <API key> = "cd /var/lib/kafka/data/kafka-log0/" + topicDirNameInPod + "/;cat <API key>.log"; LOGGER.info("Executing 
command {} in {}", <API key>, KafkaResources.kafkaPodName(clusterName, 0)); String topicData = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", <API key>).out(); LOGGER.info("Topic {} is present in kafka broker {} with no data", topicName, KafkaResources.kafkaPodName(clusterName, 0)); assertThat("Topic contains data", topicData, emptyOrNullString()); internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); LOGGER.info("Executing command {} in {}", <API key>, KafkaResources.kafkaPodName(clusterName, 0)); topicData = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", <API key>).out(); assertThat("Topic has no data", topicData, notNullValue()); List<Pod> kafkaPods = kubeClient(namespaceName).<API key>(namespaceName, KafkaResources.<API key>(clusterName)); for (Pod kafkaPod : kafkaPods) { LOGGER.info("Deleting kafka pod {}", kafkaPod.getMetadata().getName()); kubeClient(namespaceName).deletePod(namespaceName, kafkaPod); } LOGGER.info("Wait for kafka to rolling restart ..."); StatefulSetUtils.waitTillSsHasRolled(namespaceName, <API key>(clusterName), 1, kafkaPodsSnapshot); LOGGER.info("Executing command {} in {}", <API key>, KafkaResources.kafkaPodName(clusterName, 0)); topicData = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", <API key>).out(); assertThat("Topic has no data", topicData, notNullValue()); } @<API key> @Tag(<API key>) void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName()); final Map<String, Object> kafkaConfig = new HashMap<>(); kafkaConfig.put("offsets.topic.replication.factor", "3"); 
kafkaConfig.put("offsets.topic.num.partitions", "100"); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1) .editSpec() .editKafka() .withConfig(kafkaConfig) .endKafka() .endSpec() .build()); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName, 3, 1).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); final String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) .withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); String commandToGetFiles = "cd /var/lib/kafka/data/kafka-log0/;" + "ls -1 | sed -n \"s#__consumer_offsets-\\([0-9]*\\)#\\1#p\" | sort -V"; LOGGER.info("Executing command {} in {}", commandToGetFiles, KafkaResources.kafkaPodName(clusterName, 0)); String result = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", commandToGetFiles).out(); // TODO / FIXME //assertThat("Folder kafka-log0 has data in files:\n" + result, result.equals("")); LOGGER.info("Result: \n" + result); internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); LOGGER.info("Executing command {} in {}", commandToGetFiles, KafkaResources.kafkaPodName(clusterName, 0)); result = cmdKubeClient(namespaceName).execInPod(KafkaResources.kafkaPodName(clusterName, 0), "/bin/bash", "-c", commandToGetFiles).out(); StringBuilder stringToMatch = new StringBuilder(); for (int i = 0; i < 100; i++) { stringToMatch.append(i).append("\n"); } assertThat("Folder kafka-log0 doesn't contain 100 files", 
result, containsString(stringToMatch.toString())); } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String labelAnnotationKey = "testKey"; final String firstValue = "testValue"; final String changedValue = "editedTestValue"; Map<String, String> pvcLabel = new HashMap<>(); pvcLabel.put(labelAnnotationKey, firstValue); Map<String, String> pvcAnnotation = pvcLabel; Map<String, String> statefulSetLabels = new HashMap<>(); statefulSetLabels.put("app.kubernetes.io/part-of", "some-app"); statefulSetLabels.put("app.kubernetes.io/managed-by", "some-app"); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3, 1) .editSpec() .editKafka() .withNewTemplate() .withNewStatefulset() .withNewMetadata() .withLabels(statefulSetLabels) .endMetadata() .endStatefulset() .<API key>() .withNewMetadata() .addToLabels(pvcLabel) .addToAnnotations(pvcAnnotation) .endMetadata() .<API key>() .endTemplate() .withStorage(new JbodStorageBuilder().withVolumes( new <API key>() .withDeleteClaim(false) .withId(0) .withSize("20Gi") .build(), new <API key>() .withDeleteClaim(true) .withId(1) .withSize("10Gi") .build()) .build()) .endKafka() .editZookeeper() .withNewTemplate() .<API key>() .withNewMetadata() .addToLabels(pvcLabel) .addToAnnotations(pvcAnnotation) .endMetadata() .<API key>() .endTemplate() .<API key>() .withDeleteClaim(false) .withId(0) .withSize("3Gi") .<API key>() .endZookeeper() .endSpec() .build()); LOGGER.info("Check if Kubernetes labels are applied"); Map<String, String> <API key> = kubeClient(namespaceName).getStatefulSet(namespaceName, KafkaResources.<API key>(clusterName)).getMetadata().getLabels(); assertThat(<API key>.get("app.kubernetes.io/part-of"), is("some-app")); assertThat(<API key>.get("app.kubernetes.io/managed-by"), is("some-app")); 
LOGGER.info("Kubernetes labels are correctly set and present"); List<<API key>> pvcs = kubeClient(namespaceName).<API key>(namespaceName, clusterName).stream().filter( <API key> -> <API key>.getMetadata().getName().contains(clusterName)).collect(Collectors.toList()); assertThat(pvcs.size(), is(7)); for (<API key> pvc : pvcs) { LOGGER.info("Verifying that PVC label {} - {} = {}", pvc.getMetadata().getName(), firstValue, pvc.getMetadata().getLabels().get(labelAnnotationKey)); assertThat(firstValue, is(pvc.getMetadata().getLabels().get(labelAnnotationKey))); assertThat(firstValue, is(pvc.getMetadata().getAnnotations().get(labelAnnotationKey))); } pvcLabel.put(labelAnnotationKey, changedValue); pvcAnnotation.put(labelAnnotationKey, changedValue); KafkaResource.<API key>(clusterName, kafka -> { LOGGER.info("Replacing kafka && zookeeper labels and annotations from {} to {}", labelAnnotationKey, changedValue); kafka.getSpec().getKafka().getTemplate().<API key>().getMetadata().setLabels(pvcLabel); kafka.getSpec().getKafka().getTemplate().<API key>().getMetadata().setAnnotations(pvcAnnotation); kafka.getSpec().getZookeeper().getTemplate().<API key>().getMetadata().setLabels(pvcLabel); kafka.getSpec().getZookeeper().getTemplate().<API key>().getMetadata().setAnnotations(pvcAnnotation); }, namespaceName); <API key>.<API key>(namespaceName, clusterName, pvcLabel, labelAnnotationKey); <API key>.<API key>(namespaceName, clusterName, pvcAnnotation, labelAnnotationKey); KafkaUtils.waitForKafkaReady(namespaceName, clusterName); pvcs = kubeClient(namespaceName).<API key>(namespaceName, clusterName).stream().filter( <API key> -> <API key>.getMetadata().getName().contains(clusterName)).collect(Collectors.toList()); LOGGER.info(pvcs.toString()); assertThat(pvcs.size(), is(7)); for (<API key> pvc : pvcs) { LOGGER.info("Verifying replaced PVC label {} - {} = {}", pvc.getMetadata().getName(), firstValue, pvc.getMetadata().getLabels().get(labelAnnotationKey)); 
assertThat(pvc.getMetadata().getLabels().get(labelAnnotationKey), is(changedValue)); assertThat(pvc.getMetadata().getAnnotations().get(labelAnnotationKey), is(changedValue)); } } @<API key> void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, false, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1) .editSpec() .editKafka() .addToConfig("offsets.topic.replication.factor", 4) .addToConfig("transaction.state.log.min.isr", 4) .addToConfig("transaction.state.log.replication.factor", 4) .endKafka() .endSpec().build()); KafkaUtils.<API key>(clusterName, namespaceName, "Kafka configuration option .* should be set to " + 3 + " or less because 'spec.kafka.replicas' is " + 3); } @<API key> @Tag(<API key>) @Tag(CRUISE_CONTROL) void <API key>(ExtensionContext extensionContext) { final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName()); final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName()); resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3, 3) .editSpec() .editKafka() .withNewTemplate() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .endKafkaContainer() .endTemplate() .endKafka() .editZookeeper() .withNewTemplate() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .endTemplate() .endZookeeper() .editEntityOperator() .withNewTemplate() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .endTemplate() .endEntityOperator() 
.<API key>() .withNewTemplate() .withNewContainer() .withSecurityContext(new <API key>().<API key>(true).build()) .endContainer() .endTemplate() .endKafkaExporter() .<API key>() .withNewTemplate() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .<API key>() .withSecurityContext(new <API key>().<API key>(true).build()) .<API key>() .endTemplate() .endCruiseControl() .endSpec() .build()); KafkaUtils.waitForKafkaReady(namespaceName, clusterName); resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build()); resourceManager.createResource(extensionContext, <API key>.kafkaClients(false, clusterName + "-" + Constants.KAFKA_CLIENTS).build()); final String kafkaClientsPodName = kubeClient(namespaceName).<API key>(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName(); InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder() .withUsingPodName(kafkaClientsPodName) .withTopicName(topicName) .withNamespaceName(namespaceName) .withClusterName(clusterName) .withMessageCount(MESSAGE_COUNT) .withListenerName(Constants.<API key>) .build(); LOGGER.info("Checking produced and consumed messages to pod:{}", kafkaClientsPodName); internalKafkaClient.<API key>( internalKafkaClient.sendMessagesPlain(), internalKafkaClient.<API key>() ); } protected void <API key>(String namespaceName, String podNamePrefix, Map<String, Object> config, String clusterName) { LOGGER.info("Checking kafka configuration"); List<Pod> pods = kubeClient(namespaceName).<API key>(namespaceName, podNamePrefix); Properties properties = <API key>(kubeClient(namespaceName).getConfigMap(namespaceName, clusterName + "-kafka-config")); for (Map.Entry<String, Object> property : config.entrySet()) { String key = property.getKey(); Object val = property.getValue(); assertThat(properties.keySet().contains(key), is(true)); assertThat(properties.getProperty(key), is(val)); } for (Pod pod: 
pods) { ExecResult result = cmdKubeClient(namespaceName).execInPod(pod.getMetadata().getName(), "/bin/bash", "-c", "cat /tmp/strimzi.properties"); Properties execProperties = stringToProperties(result.out()); for (Map.Entry<String, Object> property : config.entrySet()) { String key = property.getKey(); Object val = property.getValue(); assertThat(execProperties.keySet().contains(key), is(true)); assertThat(execProperties.getProperty(key), is(val)); } } } void <API key>(List<<API key>> volumes, String[] diskSizes, int kafkaRepl, int diskCount) { int k = 0; for (int i = 0; i < kafkaRepl; i++) { for (int j = 0; j < diskCount; j++) { LOGGER.info("Checking volume {} and size of storage {}", volumes.get(k).getMetadata().getName(), volumes.get(k).getSpec().getResources().getRequests().get("storage")); assertThat(volumes.get(k).getSpec().getResources().getRequests().get("storage"), is(new Quantity(diskSizes[i]))); k++; } } } void <API key>(String namespaceName, String clusterName, int kafkaReplicas, int diskCountPerReplica, String diskSizeGi) { ArrayList<String> pvcs = new ArrayList<>(); kubeClient(namespaceName).<API key>(namespaceName, clusterName).stream() .filter(pvc -> pvc.getMetadata().getName().contains(clusterName + "-kafka")) .forEach(volume -> { String volumeName = volume.getMetadata().getName(); pvcs.add(volumeName); LOGGER.info("Checking labels for volume:" + volumeName); assertThat(volume.getMetadata().getLabels().get(Labels.<API key>), is(clusterName)); assertThat(volume.getMetadata().getLabels().get(Labels.STRIMZI_KIND_LABEL), is(Kafka.RESOURCE_KIND)); assertThat(volume.getMetadata().getLabels().get(Labels.STRIMZI_NAME_LABEL), is(clusterName.concat("-kafka"))); assertThat(volume.getSpec().getResources().getRequests().get("storage"), is(new Quantity(diskSizeGi, "Gi"))); }); LOGGER.info("Checking PVC names included in JBOD array"); for (int i = 0; i < kafkaReplicas; i++) { for (int j = 0; j < diskCountPerReplica; j++) { assertThat(pvcs.contains("data-" + j + 
"-" + clusterName + "-kafka-" + i), is(true)); } } LOGGER.info("Checking PVC on Kafka pods"); for (int i = 0; i < kafkaReplicas; i++) { ArrayList<String> dataSourcesOnPod = new ArrayList<>(); ArrayList<String> pvcsOnPod = new ArrayList<>(); LOGGER.info("Getting list of mounted data sources and PVCs on Kafka pod " + i); for (int j = 0; j < diskCountPerReplica; j++) { dataSourcesOnPod.add(kubeClient(namespaceName).getPod(namespaceName, clusterName.concat("-kafka-" + i)) .getSpec().getVolumes().get(j).getName()); pvcsOnPod.add(kubeClient(namespaceName).getPod(namespaceName, clusterName.concat("-kafka-" + i)) .getSpec().getVolumes().get(j).<API key>().getClaimName()); } LOGGER.info("Verifying mounted data sources and PVCs on Kafka pod " + i); for (int j = 0; j < diskCountPerReplica; j++) { assertThat(dataSourcesOnPod.contains("data-" + j), is(true)); assertThat(pvcsOnPod.contains("data-" + j + "-" + clusterName + "-kafka-" + i), is(true)); } } } void verifyPresentLabels(Map<String, String> labels, HasMetadata resources) { for (Map.Entry<String, String> label : labels.entrySet()) { assertThat("Label exists with concrete value in HasMetadata(Services, CM, STS) resources", label.getValue().equals(resources.getMetadata().getLabels().get(label.getKey()))); } } void verifyNullLabels(String[] labelKeys, Map<String, String> labels) { for (String labelKey : labelKeys) { assertThat(labels.get(labelKey), nullValue()); } } void verifyNullLabels(String[] labelKeys, HasMetadata resources) { for (String labelKey : labelKeys) { assertThat(resources.getMetadata().getLabels().get(labelKey), nullValue()); } } void verifyAppLabels(Map<String, String> labels) { LOGGER.info("Verifying labels {}", labels); assertThat("Label " + Labels.<API key> + " is not present", labels.containsKey(Labels.<API key>)); assertThat("Label " + Labels.STRIMZI_KIND_LABEL + " is not present", labels.containsKey(Labels.STRIMZI_KIND_LABEL)); assertThat("Label " + Labels.STRIMZI_NAME_LABEL + " is not present", 
labels.containsKey(Labels.STRIMZI_NAME_LABEL)); } void <API key>(Map<String, String> labels) { LOGGER.info("Verifying labels {}", labels); assertThat("Label " + Labels.<API key> + " is not present", labels.containsKey(Labels.<API key>)); assertThat("Label " + Labels.STRIMZI_KIND_LABEL + " is not present", labels.containsKey(Labels.STRIMZI_KIND_LABEL)); } @BeforeAll void setup(ExtensionContext extensionContext) { install = new <API key>.<API key>() .<API key>(extensionContext) .withNamespace(NAMESPACE) .<API key>(Constants.<API key>) .createInstallation() .runInstallation(); } protected void <API key>(ExtensionContext extensionContext) throws Exception { resourceManager.deleteResources(extensionContext); final String namespaceName = StUtils.<API key>(NAMESPACE, extensionContext); if (cluster.<API key>().contains(TEMPLATE_PATH)) { cluster.<API key>(extensionContext, TEMPLATE_PATH); } if (KafkaResource.kafkaClient().inNamespace(namespaceName).withName(<API key>).get() != null) { cmdKubeClient(namespaceName).deleteByName(Kafka.RESOURCE_KIND, <API key>); } kubeClient(namespaceName).listPods(namespaceName).stream() .filter(p -> p.getMetadata().getName().startsWith(<API key>)) .forEach(p -> PodUtils.deletePodWithWait(p.getMetadata().getName())); kubeClient(namespaceName).getClient().customResources(<API key>.fromCrd(Crds.kafkaTopic()), KafkaTopic.class, KafkaTopicList.class).inNamespace(namespaceName).delete(); kubeClient(namespaceName).getClient().<API key>().inNamespace(namespaceName).delete(); } }
import os
import json
# cgi.escape was deprecated and the cgi module was removed in Python 3.13;
# html.escape(..., quote=False) escapes exactly the same three characters (&, <, >).
from html import escape


def unescape(s):
    """Reverse the minimal HTML-escaping applied by escape(..., quote=False).

    Only the entities &lt;, &gt; and &amp; are handled.  &amp; must be
    replaced last so that e.g. "&amp;lt;" decodes to "&lt;" and not "<".
    """
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # this has to be last:
    s = s.replace("&amp;", "&")
    return s


class FilesystemMixin:
    """Small filesystem RPC surface sent over a websocket-like transport.

    The mixin expects ``self.ws`` (here spelled ``_.ws``) to expose a
    ``send(str)`` method; every handler replies with a JSON envelope of the
    form {"method": <name>, "result": [...]}.
    """

    def h_fs_get(_, path, eltName=''):
        """Send the escaped contents of a file, or a directory listing.

        For a directory the result data is a list of (name, is_dir) pairs;
        for a regular file it is the HTML-escaped text content.
        """
        from stat import S_ISDIR
        if S_ISDIR(os.stat(path).st_mode):
            data = [(p, S_ISDIR(os.stat(path + '/' + p).st_mode))
                    for p in os.listdir(path)]
        else:
            # Context manager so the handle is closed promptly (the original
            # open(path).read() leaked the file object until GC).
            with open(path) as f:
                data = escape(f.read(), quote=False)
        _.ws.send(json.dumps({"method": "fs_get", "result": [path, data, eltName]}))

    def h_fs_put(_, path, data):
        """Write the given chunks to `path`, un-escaping each chunk first."""
        with open(path, 'w') as f:
            for x in data:
                f.write(unescape(x))

    def h_fs_system(_, path, eltName='', cwd=None):
        """Run `path` as a shell-split command and send (stdout, stderr)."""
        import subprocess as sp
        import shlex
        proc = sp.Popen(shlex.split(path), cwd=cwd, stdout=sp.PIPE, stderr=sp.PIPE)
        # communicate() yields a (stdout, stderr) bytes pair; decode so the
        # payload is JSON-serializable (json.dumps raises on bytes in py3).
        data = [out.decode(errors='replace') for out in proc.communicate()]
        _.ws.send(json.dumps({"method": "fs_system", "result": [path, data, eltName]}))

    def h_fs_mkdir(_, path):
        os.mkdir(path)

    def h_fs_rmdir(_, path):
        os.rmdir(path)

    def h_fs_touch(_, path):
        open(path, 'w').close()

    def h_fs_unlink(_, path):
        os.unlink(path)


class FsApp(FilesystemMixin):
    """Concrete app: binds the websocket-like transport to the mixin."""

    def __init__(_, ws):
        _.ws = ws
package com.basicalgorithms.coding_games;

import java.util.HashSet;
import java.util.Objects;
import java.util.Scanner;
import java.util.Set;

/**
 * Bot for the CodinGame "Coders Strike Back" pod race.
 *
 * Each turn it reads the pod and opponent state from stdin and prints a
 * target point plus a power value ("x y thrust") to stdout.  Strategy:
 * ram the opponent when close, otherwise cruise toward the next checkpoint,
 * saving the single BOOST for the longest known leg after the first lap.
 */
public class CodersStrikeBack {

    // Longest checkpoint-to-checkpoint leg seen so far; BOOST is spent on it.
    static double longestDist = Integer.MIN_VALUE;
    // Where the pod started; used to detect completion of the first lap.
    static Point initialPoint = null;
    static boolean hasFinishedOneLap;
    // The checkpoint the pod most recently left (previous target).
    static Point from = null;
    static Point lastCheckpoint = null;
    static final Set<Point> visitedCheckPoints = new HashSet<>();
    // The BOOST command may only be used once per race.
    static boolean hasBoosted = false;

    public static void main(String args[]) {
        Scanner in = new Scanner(System.in);

        // game loop
        while (true) {
            int x = in.nextInt();
            int y = in.nextInt();
            int nextCheckpointX = in.nextInt(); // x position of the next check point
            int nextCheckpointY = in.nextInt(); // y position of the next check point
            int nextCheckpointDist = in.nextInt(); // distance to the next checkpoint
            int nextCheckpointAngle = in.nextInt(); // angle between your pod orientation and the direction of the next checkpoint
            int opponentX = in.nextInt();
            int opponentY = in.nextInt();

            // Write an action using System.out.println()
            // To debug: System.err.println("Debug messages...");

            // You have to output the target position
            // followed by the power (0 <= thrust <= 100)
            // i.e.: "x y thrust"
            final Point nextCheckpoint = new Point(nextCheckpointX, nextCheckpointY);
            final Point currentPosition = new Point(x, y);
            final Point enemyPosition = new Point(opponentX, opponentY);

            // Ram only after more than one checkpoint has been seen, so the
            // pod does not chase the opponent straight off the start line.
            if (visitedCheckPoints.size() > 1 && enemyInRange(currentPosition, enemyPosition)) {
                ramEnemyShip(currentPosition, enemyPosition);
            } else {
                cruise(currentPosition, nextCheckpoint, nextCheckpointAngle);
            }

            // Remember the previous checkpoint so leg distances can be measured.
            if (!nextCheckpoint.equals(lastCheckpoint)) {
                from = lastCheckpoint;
            }
            lastCheckpoint = nextCheckpoint;
        }
    }

    // Drive at full thrust toward the opponent's current position.
    // NOTE(review): currentPosition is accepted but unused here.
    private static void ramEnemyShip(final Point currentPosition, final Point enemyPosition) {
        sailToDestination((enemyPosition.x), enemyPosition.y, "100");
    }

    // True when the opponent is within 1000 distance units.
    private static boolean enemyInRange(final Point currentPosition, final Point enemyPosition) {
        return getDistant(currentPosition, enemyPosition) <= 1000;
    }

    /**
     * Normal driving: full thrust when roughly facing the checkpoint,
     * slow down when close, and BOOST on the longest leg once per race.
     */
    private static void cruise(
        final Point currentPosition,
        final Point nextCheckpoint,
        final int nextCheckpointAngle) {
        if (initialPoint == null) {
            initialPoint = currentPosition;
        }

        // Thrust 0 while the pod is pointed far away from the checkpoint.
        int thrust = isWithinAngle(nextCheckpointAngle) ? 100 : 0;
        String power = String.valueOf(thrust);

        visitedCheckPoints.add(nextCheckpoint);
        System.err.println(
            "Checkpoint added:" +
                " nextCheckpointX=" + nextCheckpoint.x +
                ", nextCheckpointY=" + nextCheckpoint.y);
        for (final Point visitedCheckPoint : visitedCheckPoints) {
            System.err.println("Visited checkpoint: (" + visitedCheckPoint.x + ", " + visitedCheckPoint.y + ")");
        }

        // Brake when close to the checkpoint to avoid overshooting the turn.
        if (shouldSlowDown(currentPosition, nextCheckpoint)) {
            power = String.valueOf(35);
        }

        // BOOST only after one full lap, on the longest leg, when pointed
        // almost straight at the target, and only once.
        if (hasFinishedOneLap(nextCheckpoint)
            && isLongestDistant(from, nextCheckpoint)
            && isWithinSharpAngle(nextCheckpointAngle)
            && !hasBoosted) {
            power = "BOOST";
            hasBoosted = true;
            System.err.println("Boosted!!!");
        }

        sailToDestination(nextCheckpoint.x, nextCheckpoint.y, power);
    }

    // Close means within 1000 units of the checkpoint.
    private static boolean shouldSlowDown(
        final Point currentPosition,
        final Point nextCheckpoint) {
        return getDistant(currentPosition, nextCheckpoint) < 1000;
    }

    // Emit the mandatory "x y power" command for this turn.
    private static void sailToDestination(final int nextCheckpointX, final int nextCheckpointY, final String power) {
        System.out.println(nextCheckpointX + " " + nextCheckpointY + " " + power);
        System.err.println("Thrust:" + power);
    }

    // Pod orientation within +/-90 degrees of the checkpoint direction.
    private static boolean isWithinAngle(final int nextCheckpointAngle) {
        return -90 < nextCheckpointAngle && nextCheckpointAngle < 90;
    }

    // Pod orientation within +/-15 degrees — safe to BOOST.
    private static boolean isWithinSharpAngle(final int nextCheckpointAngle) {
        return -15 < nextCheckpointAngle && nextCheckpointAngle < 15;
    }

    // A lap counts as finished once the pod comes back within 600 units
    // of its starting point; the result is latched in hasFinishedOneLap.
    private static boolean hasFinishedOneLap(final Point point) {
        if (hasFinishedOneLap) {
            return true;
        }
        if (initialPoint == null) {
            return false;
        }
        hasFinishedOneLap = getDistant(initialPoint, point) <= 600;
        return hasFinishedOneLap;
    }

    // True when the from->endPoint leg is the longest seen so far
    // (and records it as the new longest).
    private static boolean isLongestDistant(final Point from, final Point endPoint) {
        if (from == null) {
            return false;
        }

        System.err.println("Start Point: (" + from.x + ", " + from.y + "); End Point: (" + endPoint.x + ", " + endPoint.y + ") ");

        double dist = getDistant(from, endPoint);
        System.err.println("dist=" + dist + ", longestDist=" + longestDist);
        if (dist >= longestDist) {
            longestDist = dist;
            return true;
        }
        return false;
    }

    // Euclidean distance between two points.
    private static double getDistant(final Point from, final Point endPoint) {
        return Math.sqrt(Math.pow(from.x - endPoint.x, 2) + Math.pow(from.y - endPoint.y, 2));
    }

    // Immutable 2-D integer coordinate with value equality (used in the
    // visitedCheckPoints HashSet, hence equals/hashCode).
    private static class Point {
        final int x;
        final int y;

        private Point(final int t1, final int t2) {
            this.x = t1;
            this.y = t2;
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) { return true; }
            if (!(o instanceof Point)) { return false; }
            final Point point = (Point) o;
            return x == point.x && y == point.y;
        }

        @Override
        public int hashCode() {
            return Objects.hash(x, y);
        }
    }
}
'use strict'; var path = require('path'); var util = require('util'); module.exports = function(grunt) { grunt.registerMultiTask('vjslanguages', 'A Grunt plugin for compiling VideoJS language assets.', function() { var createLanguageFile = function(languageName, languageData, jsFilePath) { var jsTemplate = 'videojs.addLanguage("' + languageName + '",' + JSON.stringify(languageData,null,' ') + ');'; grunt.file.write(jsFilePath, jsTemplate); grunt.log.writeln('- [' + languageName +'] Language Built. File "' + jsFilePath + '" created.'); }; this.files.forEach(function(f) { var languageName, languageData, jsFilePath; // Multiple Files Case if(util.isArray(f.src)){ for(var i =0; i < f.src.length; i++) { languageName = path.basename(f.src[i], '.json'); languageData = grunt.file.readJSON(f.src[i]); jsFilePath = path.join(f.dest, languageName + '.js'); createLanguageFile(languageName, languageData, jsFilePath); } } // Singular File Case else { languageName = path.basename(f.src, '.json'); languageData = grunt.file.readJSON(f.src); jsFilePath = path.join(f.dest, languageName + '.js'); createLanguageFile(languageName, languageData, jsFilePath); } }); }); };
// WJAuthorView.h

#import <UIKit/UIKit.h>

@class WJFrameAuthor,WJAuthor;

// View that displays an author; configured via a WJFrameAuthor model object.
@interface WJAuthorView : UIView

//@property(nonatomic,strong)WJAuthor *author;

// Model wrapping the WJAuthor data — presumably also carries precomputed
// layout frames (naming convention); TODO confirm against WJFrameAuthor.
@property(nonatomic,strong)WJFrameAuthor *authorFrame;

@end
// <API key>.h
// AirTouch

#import "BaseViewController.h"
#import "<API key>.h"

// View controller derived from BaseViewController; notifies interested
// parties through its delegate (protocol declared in the imported header).
@interface <API key> : BaseViewController

// Weak to avoid a retain cycle between controller and delegate owner.
@property (nonatomic, weak) id<<API key>> delegate;

@end
<?php

/**
 * @see <API key>
 */
require_once 'Zend/Filter/Interface.php';

class <API key> implements <API key>
{
    /**
     * Defined by <API key>
     *
     * Returns the trailing name component (basename) of $value.
     *
     * @param  string $value
     * @return string
     */
    public function filter($value)
    {
        $path = (string) $value;

        return basename($path);
    }
}
// Page setup for the user editor: activate tab 4, wire username validation
// (non-empty + server-side uniqueness check) and the cancel button.
$(document).ready(function(){
    $("#inc_tab #tb1").removeClass();
    $("#inc_tab #tb4").addClass("active");

    // Validate the username whenever the field loses focus.
    $("#user_name").blur(function(){
        // Trim and write the cleaned value back into the field.
        var user_name = $.trim($(this).val());
        $(this).val(user_name);
        if (user_name.length==0){
            // Empty name: show the "required" warning, hide the duplicate one.
            $(this).parent().find("#user_name_null_warn").show();
            $(this).parent().find("#<API key>").hide();
            return;
        }
        $(this).parent().find("#user_name_null_warn").hide();
        var user_id = $(this).parent().find("#user_id").val();
        var obj = $(this).parent().find("#<API key>");
        // Server returns a non-empty body when the name is already taken.
        $.post(app.global.variable.base_path +"user/name/verify",
            {user_id:user_id, user_name:user_name},
            function(data) {
                if(data.toString().length > 0){
                    obj.show();
                }else{
                    obj.hide();
                }
            })
    })

    // Cancel: navigate back to the user list without saving.
    $('#user_save_cancel').click(function(){
        window.location.href=app.global.variable.base_path +'user/list';
    })

    selectRoleChange();
})

// Show the authority panel for the currently selected role and record the
// selection into the hidden #role_id input.
function selectRoleChange(){
    var obj = $("#select_role_id");
    var role_id_obj = obj.parent().find("#role_id");
    $("#role_authority_"+role_id_obj.val()).hide();
    $("#role_authority_"+obj.val()).show();
    role_id_obj.val(obj.val());
}

// Pre-submit validation: the form is valid only when no warning is visible
// and the username is not blank (isSpace is defined elsewhere).
// NOTE(review): function name "user_sava_check" looks like a typo for
// "user_save_check" but is kept — callers reference it by this name.
function user_sava_check(){
    var obj = $("#user_editor_form");
    var valid = true;
    obj.find(".functionWarn").each(function(){
        if($(this).is(":visible")){
            valid = false;
        }
    })
    var user_name = obj.find("#user_name").val();
    if(isSpace(user_name)){
        obj.find("#user_name_null_warn").show();
        valid = false;
    }else{
        obj.find("#user_name_null_warn").hide();
    }
    return valid;
}
package com.yahoo.jdisc.client;

import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Verifies that ClientDriver drives an application through its full
 * lifecycle exactly once: inject (state 1), start (2), run (3), stop (4),
 * destroy (5).  Each lifecycle method asserts it is called in order by
 * checking the shared counter.
 *
 * @author Simon Thoresen Hult
 */
public class <API key> {

    // Lifecycle test using a pre-built application instance.
    @Test
    public void <API key>() throws Exception {
        MyModule module = new MyModule();
        ClientDriver.runApplication(new MyApplication(module));
        assertEquals(5, module.state);
    }

    // Lifecycle test letting the driver construct the application via Guice.
    @Test
    public void <API key>() throws Exception {
        MyModule module = new MyModule();
        ClientDriver.runApplication(MyApplication.class, module);
        assertEquals(5, module.state);
    }

    // Application whose lifecycle methods enforce strict call ordering by
    // incrementing the module's counter and throwing on any deviation.
    private static class MyApplication implements ClientApplication {

        final MyModule module;

        @Inject
        MyApplication(MyModule module) {
            this.module = module;
            module.state = 1;
        }

        @Override
        public void start() {
            if (++module.state != 2) {
                throw new <API key>();
            }
        }

        @Override
        public void run() {
            if (++module.state != 3) {
                throw new <API key>();
            }
        }

        @Override
        public void stop() {
            if (++module.state != 4) {
                throw new <API key>();
            }
        }

        @Override
        public void destroy() {
            if (++module.state != 5) {
                throw new <API key>();
            }
        }
    }

    // Guice module that binds itself so the test can observe the counter.
    private static class MyModule extends AbstractModule {

        int state = 0;

        @Override
        protected void configure() {
            bind(MyModule.class).toInstance(this);
        }
    }
}
package sample.multiversion;

// Minimal service interface for the multi-version sample: implementations
// report a version string.  The second method's name is redacted here;
// presumably it reports a dependency's version — TODO confirm.
public interface Core {

    String getVersion();

    String <API key>();
}
package org.example;

import org.camunda.bpm.spring.boot.starter.annotation.<API key>;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.<API key>;

// Spring Boot entry point for the Camunda BPM starter application.
@<API key>
@<API key>("<API key>")
public class CamundaApplication {

    // Boots the Spring application context; the Camunda starter on the
    // classpath presumably brings up the process engine — TODO confirm.
    public static void main(String... args) {
        SpringApplication.run(CamundaApplication.class, args);
    }
}
package org.galaxy.myhttp;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test that runs on the development machine's JVM.
 */
public class ExampleUnitTest {

    @Test
    public void addition_isCorrect() throws Exception {
        final int expected = 4;
        assertEquals(expected, 2 + 2);
    }
}
'Designer-generated half of the scanning form (Saraff.Twain-based scanner UI).
'Auto-generated by the Windows Form Designer — do not hand-edit the logic.
<Global.Microsoft.VisualBasic.CompilerServices.DesignerGenerated()> _
Partial Class <API key>
    Inherits System.Windows.Forms.Form

    'Form overrides dispose to clean up the component list.
    <System.Diagnostics.DebuggerNonUserCode()> _
    Protected Overrides Sub Dispose(ByVal disposing As Boolean)
        Try
            If disposing AndAlso components IsNot Nothing Then
                components.Dispose()
            End If
        Finally
            MyBase.Dispose(disposing)
        End Try
    End Sub

    'Required by the Windows Form Designer
    Private components As System.ComponentModel.IContainer

    'NOTE: The following procedure is required by the Windows Form Designer
    'It can be modified using the Windows Form Designer.
    'Do not modify it using the code editor.
    <System.Diagnostics.DebuggerStepThrough()> _
    Private Sub InitializeComponent()
        Me.components = New System.ComponentModel.Container
        Dim resources As System.ComponentModel.<API key> = New System.ComponentModel.<API key>(GetType(<API key>))
        'Instantiate all controls before configuring them.
        Me.LBLNama = New System.Windows.Forms.Label
        Me.ListView1 = New System.Windows.Forms.ListView
        Me.FileName = New System.Windows.Forms.ColumnHeader
        Me.Lokasi = New System.Windows.Forms.ColumnHeader
        Me.BTNScan = New System.Windows.Forms.Button
        Me.GroupBox1 = New System.Windows.Forms.GroupBox
        Me.BTNHapus = New System.Windows.Forms.Button
        Me.BTNSImpan = New System.Windows.Forms.Button
        Me.Label1 = New System.Windows.Forms.Label
        Me._twain32 = New Saraff.Twain.Twain32(Me.components)
        Me.Label12 = New System.Windows.Forms.Label
        Me.Label23 = New System.Windows.Forms.Label
        Me.GroupBox3 = New System.Windows.Forms.GroupBox
        Me.BTNTutup = New System.Windows.Forms.Button
        Me.picboxDeleteAll = New System.Windows.Forms.PictureBox
        Me.picboxDelete = New System.Windows.Forms.PictureBox
        Me.PictureBox1 = New System.Windows.Forms.PictureBox
        Me.ToolTip1 = New System.Windows.Forms.ToolTip(Me.components)
        Me.GroupBox1.SuspendLayout()
        Me.GroupBox3.SuspendLayout()
        CType(Me.picboxDeleteAll, System.ComponentModel.ISupportInitialize).BeginInit()
        CType(Me.picboxDelete, System.ComponentModel.ISupportInitialize).BeginInit()
        CType(Me.PictureBox1, System.ComponentModel.ISupportInitialize).BeginInit()
        Me.SuspendLayout()
        '
        'LBLNama
        '
        Me.LBLNama.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle
        Me.LBLNama.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.LBLNama.Location = New System.Drawing.Point(94, 34)
        Me.LBLNama.Name = "LBLNama"
        Me.LBLNama.Size = New System.Drawing.Size(133, 23)
        Me.LBLNama.TabIndex = 5
        Me.ToolTip1.SetToolTip(Me.LBLNama, "Nama File")
        '
        'ListView1
        '
        Me.ListView1.Columns.AddRange(New System.Windows.Forms.ColumnHeader() {Me.FileName, Me.Lokasi})
        Me.ListView1.Dock = System.Windows.Forms.DockStyle.Bottom
        Me.ListView1.Location = New System.Drawing.Point(3, 19)
        Me.ListView1.Name = "ListView1"
        Me.ListView1.Size = New System.Drawing.Size(230, 151)
        Me.ListView1.TabIndex = 0
        Me.ListView1.<API key> = False
        Me.ListView1.View = System.Windows.Forms.View.Details
        '
        'FileName
        '
        Me.FileName.Text = "Nama File"
        '
        'Lokasi
        '
        Me.Lokasi.Text = "Lokasi"
        '
        'BTNScan
        '
        Me.BTNScan.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.BTNScan.Location = New System.Drawing.Point(65, 74)
        Me.BTNScan.Name = "BTNScan"
        Me.BTNScan.Size = New System.Drawing.Size(122, 43)
        Me.BTNScan.TabIndex = 6
        Me.BTNScan.Text = "Scan"
        Me.ToolTip1.SetToolTip(Me.BTNScan, "Scan")
        Me.BTNScan.<API key> = True
        '
        'GroupBox1
        '
        Me.GroupBox1.Controls.Add(Me.BTNScan)
        Me.GroupBox1.Controls.Add(Me.BTNHapus)
        Me.GroupBox1.Controls.Add(Me.LBLNama)
        Me.GroupBox1.Controls.Add(Me.BTNSImpan)
        Me.GroupBox1.Controls.Add(Me.Label1)
        Me.GroupBox1.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.0!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.GroupBox1.Location = New System.Drawing.Point(347, 11)
        Me.GroupBox1.Name = "GroupBox1"
        Me.GroupBox1.Size = New System.Drawing.Size(236, 184)
        Me.GroupBox1.TabIndex = 169
        Me.GroupBox1.TabStop = False
        Me.GroupBox1.Text = "Scanning"
        '
        'BTNHapus
        '
        Me.BTNHapus.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.BTNHapus.Location = New System.Drawing.Point(128, 136)
        Me.BTNHapus.Name = "BTNHapus"
        Me.BTNHapus.Size = New System.Drawing.Size(99, 31)
        Me.BTNHapus.TabIndex = 7
        Me.BTNHapus.Text = "Hapus"
        Me.ToolTip1.SetToolTip(Me.BTNHapus, "Hapus Hasil Scan")
        Me.BTNHapus.<API key> = True
        '
        'BTNSImpan
        '
        Me.BTNSImpan.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.BTNSImpan.Location = New System.Drawing.Point(21, 136)
        Me.BTNSImpan.Name = "BTNSImpan"
        Me.BTNSImpan.Size = New System.Drawing.Size(93, 31)
        Me.BTNSImpan.TabIndex = 6
        Me.BTNSImpan.Text = "Simpan"
        Me.ToolTip1.SetToolTip(Me.BTNSImpan, "Simpan Hasil Scan")
        Me.BTNSImpan.<API key> = True
        '
        'Label1
        '
        Me.Label1.AutoSize = True
        Me.Label1.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.Label1.Location = New System.Drawing.Point(18, 38)
        Me.Label1.Name = "Label1"
        Me.Label1.Size = New System.Drawing.Size(70, 16)
        Me.Label1.TabIndex = 4
        Me.Label1.Text = "Nama File"
        '
        '_twain32
        '
        Me._twain32.AppProductName = "Saraff.Twain"
        Me._twain32.Parent = Nothing
        '
        'Label12
        '
        Me.Label12.AutoSize = True
        Me.Label12.BackColor = System.Drawing.Color.WhiteSmoke
        Me.Label12.Font = New System.Drawing.Font("Segoe UI", 9.0!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.Label12.Location = New System.Drawing.Point(363, 387)
        Me.Label12.Margin = New System.Windows.Forms.Padding(4, 0, 4, 0)
        Me.Label12.Name = "Label12"
        Me.Label12.Size = New System.Drawing.Size(106, 15)
        Me.Label12.TabIndex = 171
        Me.Label12.Text = "Delete && Delete All"
        '
        'Label23
        '
        Me.Label23.AutoSize = True
        Me.Label23.Location = New System.Drawing.Point(472, 198)
        Me.Label23.Name = "Label23"
        Me.Label23.Size = New System.Drawing.Size(45, 13)
        Me.Label23.TabIndex = 174
        Me.Label23.Text = "Label23"
        Me.Label23.Visible = False
        '
        'GroupBox3
        '
        Me.GroupBox3.Controls.Add(Me.ListView1)
        Me.GroupBox3.Location = New System.Drawing.Point(347, 211)
        Me.GroupBox3.Name = "GroupBox3"
        Me.GroupBox3.Size = New System.Drawing.Size(236, 173)
        Me.GroupBox3.TabIndex = 170
        Me.GroupBox3.TabStop = False
        Me.GroupBox3.Text = "Daftar Gambar"
        '
        'BTNTutup
        '
        Me.BTNTutup.DialogResult = System.Windows.Forms.DialogResult.Cancel
        Me.BTNTutup.Font = New System.Drawing.Font("Microsoft Sans Serif", 9.75!, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, CType(0, Byte))
        Me.BTNTutup.Location = New System.Drawing.Point(489, 405)
        Me.BTNTutup.Name = "BTNTutup"
        Me.BTNTutup.Size = New System.Drawing.Size(91, 34)
        Me.BTNTutup.TabIndex = 168
        Me.BTNTutup.Text = "Selesai"
        Me.ToolTip1.SetToolTip(Me.BTNTutup, "Selesai")
        Me.BTNTutup.<API key> = True
        '
        'picboxDeleteAll
        '
        Me.picboxDeleteAll.Image = Global.SIMARSIP.My.Resources.Resources.<API key>
        Me.picboxDeleteAll.Location = New System.Drawing.Point(412, 405)
        Me.picboxDeleteAll.Name = "picboxDeleteAll"
        Me.picboxDeleteAll.Size = New System.Drawing.Size(61, 36)
        Me.picboxDeleteAll.TabIndex = 173
        Me.picboxDeleteAll.TabStop = False
        Me.picboxDeleteAll.Tag = "Delete All"
        Me.ToolTip1.SetToolTip(Me.picboxDeleteAll, "Hapus Semua")
        '
        'picboxDelete
        '
        Me.picboxDelete.Image = Global.SIMARSIP.My.Resources.Resources.picboxDelete_Leave
        Me.picboxDelete.Location = New System.Drawing.Point(353, 405)
        Me.picboxDelete.Name = "picboxDelete"
        Me.picboxDelete.Size = New System.Drawing.Size(60, 36)
        Me.picboxDelete.TabIndex = 172
        Me.picboxDelete.TabStop = False
        Me.picboxDelete.Tag = "Delete Current Image"
        Me.ToolTip1.SetToolTip(Me.picboxDelete, "Hapus")
        '
        'PictureBox1
        '
        Me.PictureBox1.BackColor = System.Drawing.SystemColors.ControlDark
        Me.PictureBox1.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle
        Me.PictureBox1.ImageLocation = ""
        Me.PictureBox1.Location = New System.Drawing.Point(13, 13)
        Me.PictureBox1.Name = "PictureBox1"
        Me.PictureBox1.Size = New System.Drawing.Size(316, 429)
        Me.PictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom
        Me.PictureBox1.TabIndex = 167
        Me.PictureBox1.TabStop = False
        '
        '<API key>
        '
        Me.AutoScaleDimensions = New System.Drawing.SizeF(6.0!, 13.0!)
        Me.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font
        Me.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowAndShrink
        Me.CancelButton = Me.BTNTutup
        Me.ClientSize = New System.Drawing.Size(596, 452)
        Me.ControlBox = False
        Me.Controls.Add(Me.picboxDeleteAll)
        Me.Controls.Add(Me.picboxDelete)
        Me.Controls.Add(Me.PictureBox1)
        Me.Controls.Add(Me.GroupBox1)
        Me.Controls.Add(Me.Label12)
        Me.Controls.Add(Me.Label23)
        Me.Controls.Add(Me.GroupBox3)
        Me.Controls.Add(Me.BTNTutup)
        Me.Icon = CType(resources.GetObject("$this.Icon"), System.Drawing.Icon)
        Me.Name = "<API key>"
        Me.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen
        Me.Text = "Scanning"
        Me.GroupBox1.ResumeLayout(False)
        Me.GroupBox1.PerformLayout()
        Me.GroupBox3.ResumeLayout(False)
        CType(Me.picboxDeleteAll, System.ComponentModel.ISupportInitialize).EndInit()
        CType(Me.picboxDelete, System.ComponentModel.ISupportInitialize).EndInit()
        CType(Me.PictureBox1, System.ComponentModel.ISupportInitialize).EndInit()
        Me.ResumeLayout(False)
        Me.PerformLayout()

    End Sub
    Friend WithEvents LBLNama As System.Windows.Forms.Label
    Private WithEvents picboxDeleteAll As System.Windows.Forms.PictureBox
    Private WithEvents picboxDelete As System.Windows.Forms.PictureBox
    Friend WithEvents PictureBox1 As System.Windows.Forms.PictureBox
    Friend WithEvents ListView1 As System.Windows.Forms.ListView
    Friend WithEvents FileName As System.Windows.Forms.ColumnHeader
    Friend WithEvents Lokasi As System.Windows.Forms.ColumnHeader
    Friend WithEvents BTNScan As System.Windows.Forms.Button
    Friend WithEvents GroupBox1 As System.Windows.Forms.GroupBox
    Friend WithEvents BTNHapus As System.Windows.Forms.Button
    Friend WithEvents BTNSImpan As System.Windows.Forms.Button
    Friend WithEvents Label1 As System.Windows.Forms.Label
    Friend WithEvents _twain32 As Saraff.Twain.Twain32
    Private WithEvents Label12 As System.Windows.Forms.Label
    Friend WithEvents Label23 As System.Windows.Forms.Label
    Friend WithEvents GroupBox3 As System.Windows.Forms.GroupBox
    Friend WithEvents BTNTutup As System.Windows.Forms.Button
    Friend WithEvents ToolTip1 As System.Windows.Forms.ToolTip

End Class
require_relative '../netapp_cmode' Puppet::Type.type(:netapp_lun).provide(:cmode, :parent => Puppet::Provider::NetappCmode) do @doc = "Manage Netapp Lun creation, modification and deletion. [Family: vserver]" confine :feature => :posix defaultfor :feature => :posix netapp_commands :lunlist => {:api => 'lun-get-iter', :iter => true, :result_element => 'attributes-list'} netapp_commands :luncreate => 'lun-create-by-size' netapp_commands :lundestroy => 'lun-destroy' netapp_commands :lunresize => 'lun-resize' netapp_commands :lunonline => 'lun-online' netapp_commands :lunoffline => 'lun-offline' mk_resource_methods def self.instances Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Got to self.instances.") luns = [] #Get a list of all Lun's results = lunlist() || [] # Itterate through the results results.each do |lun| lun_path = lun.child_get_string('path') Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Processing lun #{lun_path}.") # Construct initial hash for lun lun_hash = { :name => lun_path, :ensure => :present } # Grab additional elements # Lun state - Need to map true/false to online/offline lun_state = lun.child_get_string('online') if lun_state == 'true' lun_hash[:state] = 'online' else lun_hash[:state] = 'offline' end # Get size lun_hash[:size] = lun.child_get_string('size') # Create the instance and add to luns array Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Creating instance for #{lun_path}\n Contents = #{lun_hash.inspect}.") luns << new(lun_hash) end # Return the final luns array Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Returning luns array.") luns end def self.prefetch(resources) Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Got to self.prefetch.") # Itterate instances and match provider where relevant. instances.each do |prov| Puppet.debug("Prov.path = #{resources[prov.name]}. 
") if resource = resources[prov.name] resource.provider = prov end end end def flush Puppet.debug("Puppet::Provider::Netapp_lun.cmode: flushing Netapp Lun #{@resource[:path]}.") # Are we updating or destroying? Puppet.debug("Puppet::Provider::Netapp_lun.cmode: required resource state = #{@property_hash[:ensure]}") if @property_hash[:ensure] == :absent Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Ensure is absent. Destroying...") # Deleting the lun lundestroy('path', @resource[:path]) Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Lun #{@resource[:path]} has been destroyed successfully. ") return true end end # Set lun size def size=(value) Puppet.debug("Puppet::Provider::Netapp_lun.cmode size=: Setting lun size for #{@resource[:path]} to #{@resource[:size]}.") force if @resource[:force] == nil force = false else force = @resource[:force] end # Resize the volume result = lunresize('force', force, 'path', @resource[:path], 'size', @resource[:size]) if result.results_status() != "failed" Puppet.debug("Puppet::Provider::Netapp_lun.cmode size=: Lun has been resized.") return true end end # Set lun state def state=(value) Puppet.debug("Puppet::Provider::Netapp_lun.cmode state=: Setting lun state for #{@resource[:path]} to #{@resource[:state]}.") case @resource[:state] when :online Puppet.debug("Puppet::Provider::Netapp_lun.cmode state=: Onlineing lun.") result = lunonline('path', @resource[:path]) Puppet.debug("Puppet::Provider::Netapp_lun.cmode state=: Lun has been onlined.") return true when :offline Puppet.debug("Puppet::Provider::Netapp_lun.cmode state=: Offlining lun.") result = lunoffline('path', @resource[:path]) Puppet.debug("Puppet::Provider::Netapp_lun.cmode state=: Lun has been offlined.") return true end end def create Puppet.debug("Puppet::Provider::Netapp_lun.cmode: creating Netapp Lun #{@resource[:path]}.") # Lun create args luncreate_args = [] luncreate_args << 'path' << @resource[:path] luncreate_args << 'size' << @resource[:size] luncreate_args 
<< 'class' << @resource[:lunclass] luncreate_args << 'ostype' << @resource[:ostype] luncreate_args << '<API key>' << @resource[:spaceresenabled] # Optional fields luncreate_args << 'prefix-size' << @resource[:prefixsize] unless @resource[:prefixsize].nil? luncreate_args << 'qos-policy-group' << @resource[:qospolicygroup] unless @resource[:qospolicygroup].nil? # Create the lun result = luncreate(*luncreate_args) # Lun created successfully Puppet.debug("Puppet::Provider::Netapp_lun.cmode: Lun #{@resource[:path]} created successfully.") return true end def destroy Puppet.debug("Puppet::Provider::Netapp_lun.cmode: destroying Netapp Lun #{@resource[:path]}.") @property_hash[:ensure] = :absent end def exists? Puppet.debug("Puppet::Provider::Netapp_lun.cmode: checking existance of Netapp Lun #{@resource[:path]}.") @property_hash[:ensure] == :present end end
package org.commcare; import org.commcare.models.database.<API key>; import org.commcare.models.database.<API key>; import org.javarosa.core.services.storage.Persistable; /** * Delegator around CommCareApp allowing the test suite to override logic. * * @author Phillip Mates (pmates@dimagi.com). */ public class CommCareTestApp extends CommCareApp { private final CommCareApp app; public CommCareTestApp(CommCareApp app) { super(app.getAppRecord()); fileRoot = app.fileRoot; setAppResourceState(app.getAppResourceState()); this.app = app; } @Override public <T extends Persistable> <API key><T> <API key>(String name, Class<T> c) { return new <API key><>(name, c, app.<API key>(), app); } }
// Code generated by go-swagger; DO NOT EDIT. package models // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( strfmt "github.com/go-openapi/strfmt" "github.com/go-openapi/errors" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // SendPhotoLinkBody send photo link body // swagger:model SendPhotoLinkBody type SendPhotoLinkBody struct { // caption Caption string `json:"caption,omitempty"` // chat id // Required: true ChatID interface{} `json:"chat_id"` // disable notification DisableNotification bool `json:"<API key>,omitempty"` // photo // Required: true Photo *string `json:"photo"` // reply markup ReplyMarkup interface{} `json:"reply_markup,omitempty"` // reply to message id ReplyToMessageID int64 `json:"reply_to_message_id,omitempty"` } // Validate validates this send photo link body func (m *SendPhotoLinkBody) Validate(formats strfmt.Registry) error { var res []error if err := m.validateChatID(formats); err != nil { // prop res = append(res, err) } if err := m.validatePhoto(formats); err != nil { // prop res = append(res, err) } if len(res) > 0 { return errors.<API key>(res...) } return nil } func (m *SendPhotoLinkBody) validateChatID(formats strfmt.Registry) error { return nil } func (m *SendPhotoLinkBody) validatePhoto(formats strfmt.Registry) error { if err := validate.Required("photo", "body", m.Photo); err != nil { return err } return nil } // MarshalBinary interface implementation func (m *SendPhotoLinkBody) MarshalBinary() ([]byte, error) { if m == nil { return nil, nil } return swag.WriteJSON(m) } // UnmarshalBinary interface implementation func (m *SendPhotoLinkBody) UnmarshalBinary(b []byte) error { var res SendPhotoLinkBody if err := swag.ReadJSON(b, &res); err != nil { return err } *m = res return nil }
<!DOCTYPE HTML PUBLIC "- <!-- NewPage --> <html lang="de"> <head> <!-- Generated by javadoc (version 1.7.0_17) on Tue May 14 03:45:04 CEST 2013 --> <title>Uses of Class com.badlogic.gdx.net.HttpStatus (libgdx API)</title> <meta name="date" content="2013-05-14"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><! if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class com.badlogic.gdx.net.HttpStatus (libgdx API)"; } </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <div class="topNav"><a name="navbar_top"> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage"><em> libgdx API <style> body, td, th { font-family:Helvetica, Tahoma, Arial, sans-serif; font-size:10pt } pre, code, tt { font-size:9pt; font-family:Lucida Console, Courier New, sans-serif } h1, h2, h3, .FrameTitleFont, .FrameHeadingFont, .TableHeadingColor font { font-size:105%; font-weight:bold } .TableHeadingColor { background:#EEEEFF; } a { text-decoration:none } a:hover { text-decoration:underline } a:link, a:visited { color:blue } table { border:0px } .TableRowColor td:first-child { border-left:1px solid black } .TableRowColor td { border:0px; border-bottom:1px solid black; border-right:1px solid black } hr { 
border:0px; border-bottom:1px solid #333366; } </style> </em></div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/badlogic/gdx/net/class-use/HttpStatus.html" target="_top">Frames</a></li> <li><a href="HttpStatus.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><! allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip-navbar_top"> </a></div> <div class="header"> <h2 title="Uses of Class com.badlogic.gdx.net.HttpStatus" class="title">Uses of Class<br>com.badlogic.gdx.net.HttpStatus</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">HttpStatus</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#com.badlogic.gdx">com.badlogic.gdx</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="com.badlogic.gdx"> </a> <h3>Uses of <a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">HttpStatus</a> in <a href="../../../../../com/badlogic/gdx/package-summary.html">com.badlogic.gdx</a></h3> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in 
<a href="../../../../../com/badlogic/gdx/package-summary.html">com.badlogic.gdx</a> that return <a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">HttpStatus</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">HttpStatus</a></code></td> <td class="colLast"><span class="strong">Net.HttpResponse.</span><code><strong><a href="../../../../../com/badlogic/gdx/Net.HttpResponse.html#getStatus()">getStatus</a></strong>()</code> <div class="block">Returns the <a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net"><code>HttpStatus</code></a> containing the statusCode of the HTTP response.</div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <div class="bottomNav"><a name="navbar_bottom"> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/badlogic/gdx/net/HttpStatus.html" title="class in com.badlogic.gdx.net">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage"><em>libgdx API</em></div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/badlogic/gdx/net/class-use/HttpStatus.html" 
target="_top">Frames</a></li> <li><a href="HttpStatus.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><! allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip-navbar_bottom"> </a></div> <p class="legalCopy"><small> <div style="font-size:9pt"><i> Copyright &copy; 2010-2013 Mario Zechner (contact@badlogicgames.com), Nathan Sweet (admin@esotericsoftware.com) </i></div> </small></p> </body> </html>
<!DOCTYPE HTML PUBLIC "- <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_112) on Fri Jun 16 09:55:12 MST 2017 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>org.wildfly.swarm.batch.jberet (Public javadocs 2017.6.1 API)</title> <meta name="date" content="2017-06-16"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <h1 class="bar"><a href="../../../../../org/wildfly/swarm/batch/jberet/package-summary.html" target="classFrame">org.wildfly.swarm.batch.jberet</a></h1> <div class="indexContainer"> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="BatchFraction.html" title="class in org.wildfly.swarm.batch.jberet" target="classFrame">BatchFraction</a></li> </ul> </div> </body> </html>
<table class="table table-bordered" st-pipe="load" st-table="displayed"> <thead> <tr ng-if="globalFilter"> <th colspan="{{ columns.length + 1 }}"> <input st-search="" class="form-control" placeholder="global search ..." type="text"/> </th> </tr> <tr ng-if="columnFilter"> <th ng-repeat="col in columns"> <input st-search="{{col}}" class="form-control" placeholder="search by {{ col | translate }} ..." type="text"/> </th> <th></th> </tr> <tr> <th ng-repeat="col in columns" st-sort="{{col}}"> {{ col | translate }}</th> <th>Edit</th> </tr> </thead> <tbody ng-show="isLoading"> <tr> <td colspan="{{ columns.length + 1 }}" class="text-center">Loading ...</td> </tr> </tbody> <tbody ng-show="!isLoading"> <tr ng-repeat="row in displayed"> <td ng-repeat="col in columns">{{ row[col] }}</td> <td> <a ui-sref="{{edit}}"> {{ 'action.edit' | translate }} </a> </td> </tr> </tbody> <tfoot> <tr> <td colspan="{{ columns.length + 1 }}" class="text-center"> <div st-pagination="" st-items-by-page="itemsPerPage" st-displayed-pages="7"></div> </td> </tr> </tfoot> </table>
# AUTOGENERATED FILE FROM balenalib/<API key>:bullseye-build ENV NODE_VERSION 12.21.0 ENV YARN_VERSION 1.22.4 RUN for key in \ <API key> \ ; do \ gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$<API key>.tar.gz" \ && echo "<SHA256-like> node-v$<API key>.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$<API key>.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$<API key>.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \
package org.n52.sos.binding.rest.resources; import org.n52.sos.binding.rest.requests.RestRequest; /** * @author <a href="mailto:e.h.juerrens@52north.org">Eike Hinderk J&uuml;rrens</a> * */ public class OptionsRestRequest implements RestRequest { private String resourceType; private boolean isGlobalResource; private boolean <API key>; public OptionsRestRequest(String resourceType, boolean isGlobalResource, boolean <API key>) { this.resourceType = resourceType; this.isGlobalResource = isGlobalResource; this.<API key> = <API key>; } public String getResourceType() { return resourceType; } public boolean isGlobalResource() { return isGlobalResource; } public boolean <API key>() { return <API key>; } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Watcher.Model
{
    /// <summary>
    /// Predicate satisfied when the first operand is strictly greater
    /// than the second.
    /// </summary>
    public class GreatThan : IPredicate
    {
        // Display label reused by the Quantifier property; text kept
        // byte-for-byte ("great than") because callers may match on it.
        private const string Label = "great than";

        /// <summary>Human-readable name of this comparison.</summary>
        public string Quantifier
        {
            get { return Label; }
        }

        /// <summary>
        /// Returns true when <paramref name="a"/> is strictly greater
        /// than <paramref name="b"/>.
        /// </summary>
        public bool IsSatisfiedBy(int a, int b)
        {
            // b < a is equivalent to a > b.
            return b < a;
        }
    }
}
package org.adligo.tests4j.system.shared.trials; import org.adligo.tests4j.shared.common.ClassMethods; import org.adligo.tests4j.shared.xml.I_XML_Builder; public class TrialParamValue implements I_TrialParamValue { public static final String TAG_NAME = "value"; public static final String CLASS_NAME = "class"; public static final String <API key> = "Parameter value must be a non Void primitive or String."; private Object value_; public TrialParamValue(Object value) { if (value == null) { throw new <API key>(); } Class<?> c = value.getClass(); if ( (ClassMethods.isPrimitiveClass(c) && !ClassMethods.isClass(Void.class, c)) || ClassMethods.isClass(String.class, c)) { value_ = value; } else { throw new <API key>( <API key>); } } @Override public String getClassName() { return value_.getClass().getName(); } @Override public Object getValue() { return value_; } @Override public void toXml(I_XML_Builder builder) { builder.addIndent(); builder.addStartTag(TAG_NAME); String name = ClassMethods.getSimpleName(value_.getClass()); builder.addAttribute(CLASS_NAME, name); builder.endHeader(); builder.addText(value_.toString()); builder.addEndTag(TAG_NAME); builder.endLine(); } }
""" Installs and configures MySQL """ import uuid import logging from packstack.installer import validators from packstack.installer import utils from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile # Controller object will be initialized from main flow controller = None # Plugin name PLUGIN_NAME = "OS-MySQL" PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue') logging.debug("plugin %s loaded", __name__) def initConfig(controllerObject): global controller controller = controllerObject logging.debug("Adding MySQL OpenStack configuration") paramsList = [ {"CMD_OPTION" : "mysql-host", "USAGE" : "The IP address of the server on which to install MySQL", "PROMPT" : "Enter the IP address of the MySQL server", "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_ssh], "DEFAULT_VALUE" : utils.get_localhost_ip(), "MASK_INPUT" : False, "LOOSE_VALIDATION": True, "CONF_NAME" : "CONFIG_MYSQL_HOST", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, {"CMD_OPTION" : "mysql-user", "USAGE" : "Username for the MySQL admin user", "PROMPT" : "Enter the username for the MySQL admin user", "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_not_empty], "DEFAULT_VALUE" : "root", "MASK_INPUT" : False, "LOOSE_VALIDATION": False, "CONF_NAME" : "CONFIG_MYSQL_USER", "USE_DEFAULT" : True, "NEED_CONFIRM" : False, "CONDITION" : False }, {"CMD_OPTION" : "mysql-pw", "USAGE" : "Password for the MySQL admin user", "PROMPT" : "Enter the password for the MySQL admin user", "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_not_empty], "DEFAULT_VALUE" : uuid.uuid4().hex[:16], "MASK_INPUT" : True, "LOOSE_VALIDATION": True, "CONF_NAME" : "CONFIG_MYSQL_PW", "USE_DEFAULT" : False, "NEED_CONFIRM" : True, "CONDITION" : False }, ] groupDict = { "GROUP_NAME" : "MYSQL", "DESCRIPTION" : "MySQL Config parameters", "PRE_CONDITION" : lambda x: 'yes', "PRE_CONDITION_MATCH" : "yes", "POST_CONDITION" : False, "<API key>" : True} 
controller.addGroup(groupDict, paramsList) def initSequences(controller): mysqlsteps = [ {'title': 'Adding MySQL manifest entries', 'functions':[createmanifest]} ] controller.addSequence("Installing MySQL", [], [], mysqlsteps) def createmanifest(config): if config['<API key>'] == 'y': install = True suffix = 'install' else: install = False suffix = 'noinstall' # In case we are not installing MySQL server, mysql* manifests have # to be run from Keystone host host = install and config['CONFIG_MYSQL_HOST'] \ or config['<API key>'] manifestfile = "%s_mysql.pp" % host manifestdata = [getManifestTemplate('mysql_%s.pp' % suffix)] def append_for(module, suffix): # Modules have to be appended to the existing mysql.pp # otherwise pp will fail for some of them saying that # Mysql::Config definition is missing. template = "mysql_%s_%s.pp" % (module, suffix) manifestdata.append(getManifestTemplate(template)) append_for("keystone", suffix) hosts = set() for mod in ['nova', 'cinder', 'glance', 'neutron', 'heat']: if config['CONFIG_%s_INSTALL' % mod.upper()] == 'y': append_for(mod, suffix) # Check wich modules are enabled so we can allow their # hosts on the firewall if mod != 'nova' and mod != 'neutron': hosts.add(config.get('CONFIG_%s_HOST' % mod.upper()).strip()) elif mod == 'neutron': hosts.add(config.get('<API key>').strip()) elif config['CONFIG_NOVA_INSTALL'] != 'n': #In that remote case that we have lot's of nova hosts hosts.add(config.get('<API key>').strip()) hosts.add(config.get('<API key>').strip()) hosts.add(config.get('<API key>').strip()) hosts.add(config.get('<API key>').strip()) hosts.add(config.get('<API key>').strip()) if config['<API key>'] != 'y': dbhosts = split_hosts(config['<API key>']) hosts |= dbhosts for host in config.get('<API key>').split(','): hosts.add(host.strip()) config['FIREWALL_ALLOWED'] = ",".join(["'%s'" % i for i in hosts]) config['<API key>'] = "mysql" config['FIREWALL_PORTS'] = "'3306'" 
manifestdata.append(getManifestTemplate("firewall.pp")) appendManifestFile(manifestfile, "\n".join(manifestdata), 'pre')
nginx server { listen 80; charset utf-8; <API key> 128M; server_name meotrics.com; return 301 https://$host$request_uri; } server { listen 80; server_name www.meotrics.com; return 301 http://meotrics.com$request_uri; } server { listen 443; server_name www.meotrics.com meotrics.com; ssl on; ssl_certificate /etc/ssl/certs/chained.pem; ssl_certificate_key /etc/ssl/private/domain.key; ssl_session_timeout 5m; ssl_protocols TLSv1 TLSv1.1 TLSv1.2; ssl_ciphers <API key>:<API key>:DHE-RSA-AES256-G$ ssl_session_cache shared:SSL:50m; ssl_dhparam /etc/ssl/certs/dhparam.pem; <API key> on; root /home/thanhpk/meotrics/landing/; location ~* \.(js|css|png|jpg|jpeg|gif|ico)$ { expires 1y; log_not_found off; } server_tokens off; #more_set_headers "Server: Meotrics"; index index.html; location ~ /\.(ht|svn|git) { deny all; } # Common bandwidth hoggers and hacking tools. if ($http_user_agent ~ "libwww-perl") { set $block_user_agents 1; } if ($http_user_agent ~ "GetRight") { set $block_user_agents 1; } if ($http_user_agent ~ "GetWeb!") { set $block_user_agents 1; } if ($http_user_agent ~ "Go!Zilla") { set $block_user_agents 1; } if ($http_user_agent ~ "Download Demon") { set $block_user_agents 1; } if ($http_user_agent ~ "Go-Ahead-Got-It") { set $block_user_agents 1; } if ($http_user_agent ~ "TurnitinBot") { set $block_user_agents 1; } if ($http_user_agent ~ "GrabNet") { set $block_user_agents 1; } if ($block_user_agents = 1) { return 403; } } server { listen 80; server_name api.meotrics.com; location / { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $<API key>; proxy_set_header Host $http_host; proxy_pass http://127.0.0.1:1711/api; } } server { listen 443; server_name api.meotrics.com; ssl on; ssl_certificate /etc/ssl/certs/chained.pem; ssl_certificate_key /etc/ssl/private/domain.key; ssl_session_timeout 5m; ssl_protocols TLSv1 TLSv1.1 TLSv1.2; ssl_ciphers <API key>:<API key>:<API key>:<API key>:<API key>:<API key>:<API 
key>:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA; ssl_session_cache shared:SSL:50m; ssl_dhparam /etc/ssl/certs/dhparam.pem; <API key> on; <API key> 8 32k; location / { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $<API key>; proxy_set_header Host $http_host; proxy_pass http://127.0.0.1:1711/api; } } server { listen 80; server_name app.meotrics.com; return 301 https://$server_name$request_uri; } map $http_upgrade $connection_upgrade{ default upgrade; '' close; } upstream websocket { server 127.0.0.1:2910; } server { charset utf-8; listen 443; server_name app.meotrics.com; root /home/thanhpk/meotrics/dashboard/public/; index index.php; ssl on; ssl_certificate /etc/ssl/certs/chained.pem; ssl_certificate_key /etc/ssl/private/domain.key; ssl_session_timeout 5m; ssl_protocols TLSv1 TLSv1.1 TLSv1.2; ssl_ciphers <API key>:<API key>:<API key>:<API key>:<API key>:<API key>:<API key>:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA; ssl_session_cache shared:SSL:50m; ssl_dhparam /etc/ssl/certs/dhparam.pem; <API key> on; access_log /home/thanhpk/tmp/meotrics-access443.log; error_log /home/thanhpk/tmp/meotrics-error443.log; location /ws { proxy_pass http://websocket; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $connection_upgrade; } location / { try_files $uri $uri/ /index.php?$args; } location ~ \.php$ { include fastcgi_params; fastcgi_param REMOTE_ADDR $http_x_real_ip; fastcgi_param SCRIPT_FILENAME $document_root/$fastcgi_script_name; fastcgi_pass unix:/var/run/php5-fpm.sock; try_files $uri =404; } location ~ /\.(ht|svn|git) { deny all; } }
define(
  ['app/models/proto_model'],
  function(ProtoModel) {
    // Order model.  urlRoot matches the first part of the server-side
    // method name declared with @remote.method (cru_api.order_*).
    return ProtoModel.extend({
      urlRoot: '/cru_api.order_',
      // Numeric fields that must be serialized as floats.
      must_be_floats: ['sub_total', 'actual_total'],
    });
  }
);
#include <cassert> #include <iostream> #include <sstream> #include <boost/program_options/variables_map.hpp> #include "optimize.h" #include "online_optimizer.h" #include "sparse_vector.h" #include "fdict.h" using namespace std; double TestOptimizer(BatchOptimizer* opt) { cerr << "TESTING NON-PERSISTENT OPTIMIZER\n"; // f(x,y) = 4x1^2 + x1*x2 + x2^2 + x3^2 + 6x3 + 5 // df/dx1 = 8*x1 + x2 // df/dx2 = 2*x2 + x1 // df/dx3 = 2*x3 + 6 vector<double> x(3); vector<double> g(3); x[0] = 8; x[1] = 8; x[2] = 8; double obj = 0; do { g[0] = 8 * x[0] + x[1]; g[1] = 2 * x[1] + x[0]; g[2] = 2 * x[2] + 6; obj = 4 * x[0]*x[0] + x[0] * x[1] + x[1]*x[1] + x[2]*x[2] + 6 * x[2] + 5; opt->Optimize(obj, g, &x); cerr << x[0] << " " << x[1] << " " << x[2] << endl; cerr << " obj=" << obj << "\td/dx1=" << g[0] << " d/dx2=" << g[1] << " d/dx3=" << g[2] << endl; } while (!opt->HasConverged()); return obj; } double <API key>(BatchOptimizer* opt) { cerr << "\nTESTING PERSISTENT OPTIMIZER\n"; // f(x,y) = 4x1^2 + x1*x2 + x2^2 + x3^2 + 6x3 + 5 // df/dx1 = 8*x1 + x2 // df/dx2 = 2*x2 + x1 // df/dx3 = 2*x3 + 6 vector<double> x(3); vector<double> g(3); x[0] = 8; x[1] = 8; x[2] = 8; double obj = 0; string state; bool converged = false; while (!converged) { g[0] = 8 * x[0] + x[1]; g[1] = 2 * x[1] + x[0]; g[2] = 2 * x[2] + 6; obj = 4 * x[0]*x[0] + x[0] * x[1] + x[1]*x[1] + x[2]*x[2] + 6 * x[2] + 5; { if (state.size() > 0) { istringstream is(state, ios::binary); opt->Load(&is); } opt->Optimize(obj, g, &x); ostringstream os(ios::binary); opt->Save(&os); state = os.str(); } cerr << x[0] << " " << x[1] << " " << x[2] << endl; cerr << " obj=" << obj << "\td/dx1=" << g[0] << " d/dx2=" << g[1] << " d/dx3=" << g[2] << endl; converged = opt->HasConverged(); if (!converged) { // now screw up the state (should be undone by Load) obj += 2.0; g[1] = -g[2]; vector<double> x2 = x; try { opt->Optimize(obj, g, &x2); } catch (...) 
{ } } } return obj; } template <class O> void <API key>(int num_vars) { O oa(num_vars); cerr << " cerr << "TESTING: " << oa.Name() << endl; double o1 = TestOptimizer(&oa); O ob(num_vars); double o2 = <API key>(&ob); if (o1 != o2) { cerr << oa.Name() << " VARIANTS PERFORMED DIFFERENTLY!\n" << o1 << " vs. " << o2 << endl; exit(1); } cerr << oa.Name() << " SUCCESS\n"; } using namespace std::tr1; void TestOnline() { size_t N = 20; double C = 1.0; double eta0 = 0.2; std::tr1::shared_ptr<<API key>> r(new <API key>(N, eta0, 0.85)); //shared_ptr<<API key>> r(new <API key>(N, eta0)); <API key> opt(r, N, C, std::vector<int>()); assert(r->eta(10) < r->eta(1)); } int main() { int n = 3; <API key><LBFGSOptimizer>(n); <API key><RPropOptimizer>(n); TestOnline(); return 0; }
package com.sequenceiq.freeipa.entity.util; import com.sequenceiq.cloudbreak.converter.<API key>; import com.sequenceiq.freeipa.api.v1.kerberos.model.KerberosType; public class <API key> extends <API key><KerberosType> { @Override public KerberosType getDefault() { return KerberosType.FREEIPA; } }
#!/usr/bin/env bash echo "Puppet6 Platform Detection and Installation" /usr/bin/wget -O - https://raw.githubusercontent.com/petems/<API key>/master/<API key>.sh | /bin/sh echo "Install R10k and Hiera-Eyaml" /opt/puppetlabs/puppet/bin/gem install r10k hiera-eyaml echo "Retrieve Puppetfile from puppet-maas repo" /usr/bin/wget -O /etc/puppetlabs/code/environments/production/Puppetfile https://raw.githubusercontent.com/ppouliot/puppet-maas/master/Puppetfile echo "Run R10k on downloaded Puppetfile" cd /etc/puppetlabs/code/environments/production && /opt/puppetlabs/puppet/bin/r10k puppetfile install --verbose DEBUG2 /opt/puppetlabs/bin/puppet apply --debug --trace --verbose --modulepath=/etc/puppetlabs/code/environments/production/modules:/etc/puppetlabs/code/modules /etc/puppetlabs/code/environments/production/modules/maas/examples/init.pp
namespace TrelloToExcel.Trello
{
    /// <summary>
    /// DTO carrying the emoji payload of a Trello text element.
    /// NOTE(review): Emoji3 is declared elsewhere in the project and its
    /// shape is not visible here — confirm before relying on it.
    /// </summary>
    public class TextData
    {
        // Lower-case property name kept deliberately: it presumably mirrors
        // the JSON field name expected by the (de)serializer — TODO confirm
        // serializer naming settings before renaming.
        public Emoji3 emoji { get; set; }
    }
}
def power_digit_sum(exponent, base=2):
    """Return the sum of the decimal digits of ``base ** exponent``.

    Generalized from the original hard-coded base of 2 in a
    backward-compatible way: ``power_digit_sum(1000)`` still returns the
    digit sum of ``2 ** 1000``.

    Args:
        exponent: Non-negative integer power to raise ``base`` to.
        base: Integer base of the power; defaults to 2.

    Returns:
        The sum of the decimal digits of ``base ** exponent``.

    Raises:
        ValueError: If ``exponent`` is negative (the result would not be
            an integer, so its digit sum is undefined).
    """
    if exponent < 0:
        raise ValueError("exponent must be non-negative")
    # Generator avoids materializing an intermediate list of digits.
    return sum(int(digit) for digit in str(base ** exponent))
package com.lyubenblagoev.postfixrest.security; import com.lyubenblagoev.postfixrest.entity.User; import com.lyubenblagoev.postfixrest.repository.UserRepository; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.core.userdetails.<API key>; import org.springframework.stereotype.Service; import java.util.Optional; @Service public class <API key> implements UserDetailsService { private final UserRepository userRepository; public <API key>(UserRepository userRepository) { this.userRepository = userRepository; } @Override public UserDetails loadUserByUsername(String username) throws <API key> { return userRepository.findByEmail(username) .map(u -> new UserPrincipal(u)) .orElseThrow(() -> new <API key>("No user found for " + username)); } }
"""Auto-generated file, do not edit by hand. BM metadata""" from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_BM = PhoneMetadata(id='BM', country_code=1, <API key>='011', general_desc=PhoneNumberDesc(<API key>='(?:441|[58]\\d\\d|900)\\d{7}', possible_length=(10,), <API key>=(7,)), fixed_line=PhoneNumberDesc(<API key>='441(?:[46]\\d\\d|5(?:4\\d|60|89))\\d{4}', example_number='4414123456', possible_length=(10,), <API key>=(7,)), mobile=PhoneNumberDesc(<API key>='441(?:[2378]\\d|5[0-39])\\d{5}', example_number='4413701234', possible_length=(10,), <API key>=(7,)), toll_free=PhoneNumberDesc(<API key>='8(?:00|33|44|55|66|77|88)[2-9]\\d{6}', example_number='8002123456', possible_length=(10,)), premium_rate=PhoneNumberDesc(<API key>='900[2-9]\\d{6}', example_number='9002123456', possible_length=(10,)), personal_number=PhoneNumberDesc(<API key>='52(?:3(?:[2-46-9][02-9]\\d|5(?:[02-46-9]\\d|5[0-46-9]))|4(?:[2-478][02-9]\\d|5(?:[034]\\d|2[024-9]|5[0-46-9])|6(?:0[1-9]|[2-9]\\d)|9(?:[05-9]\\d|2[0-5]|49)))\\d{4}|52[34][2-9]1[02-9]\\d{4}|5(?:00|2[12]|33|44|66|77|88)[2-9]\\d{6}', example_number='5002345678', possible_length=(10,)), national_prefix='1', <API key>='1|([2-8]\\d{6})$', <API key>='441\\1', leading_digits='441', <API key>=True)
<head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title>{% if page.title %}{{ page.title }}{% else %}{{ site.title }}{% endif %}</title> <meta name="viewport" content="width=device-width"> <meta name="description" content="{{ site.description }}"> <link rel="canonical" href="{{ page.url | replace:'index.html','' | prepend: site.baseurl | prepend: site.url }}"> <link rel="stylesheet" href="{{ "/style.css" | prepend: site.baseurl }}"> <!-- Custom Fonts --> <link rel="stylesheet" href="{{ "/css/font-awesome/css/font-awesome.min.css" | prepend: site.baseurl }}"> <link href="//fonts.googleapis.com/css?family=Lora:400,700,400italic,700italic" rel="stylesheet" type="text/css"> <link href="//fonts.googleapis.com/css?family=Montserrat:400,700" rel="stylesheet" type="text/css"> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script> <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script> <![endif]--> </head>
# AUTOGENERATED FILE FROM balenalib/artik530-debian:buster-run RUN apt-get update \ && apt-get install -y --<API key> \ ca-certificates \ curl \ \ # .NET Core dependencies libc6 \ libgcc1 \ libgssapi-krb5-2 \ libicu63 \ libssl1.1 \ libstdc++6 \ zlib1g \
# AUTOGENERATED FILE FROM balenalib/<API key>:33-build ENV NODE_VERSION 14.18.3 ENV YARN_VERSION 1.22.4 RUN for key in \ <API key> \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \ done \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$<API key>.tar.gz" \ && echo "<SHA256-like> node-v$<API key>.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$<API key>.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$<API key>.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \
<?php
// Spanish (es) language strings for the "report post" dialog.
// Keys are consumed by the report templates; values are shown to the user.
$l['report_post'] = "Reportar mensaje";
$l['report_to_mod'] = "Reporta este mensaje a un moderador";
$l['only_report'] = "Solo debes reportar mensajes que sean spam, de publicidad, o abusivos.";
$l['report_reason'] = "Tu razón para reportar este mensaje:";
$l['thank_you'] = "Gracias.";
$l['post_reported'] = "El mensaje se ha reportado correctamente. Ya puedes cerrar la ventana.";
$l['report_error'] = "Error";
$l['no_reason'] = "No puedes reportar un mensaje sin especificar la razón del reporte.";
$l['go_back'] = "Volver";
$l['close_window'] = "Cerrar ventana";
// FIX: the closing "?>" tag was removed on purpose: in a pure-PHP include it
// risks emitting trailing whitespace, which breaks later header() calls.
<html dir="LTR"> <head> <meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" /> <meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" /> <title>BasicConfigurator.Configure Method ()</title> <xml> </xml> <link rel="stylesheet" type="text/css" href="MSDN.css" /> </head> <body id="bodyID" class="dtBODY"> <div id="nsbanner"> <div id="bannerrow1"> <table class="bannerparthead" cellspacing="0"> <tr id="hdr"> <td class="runninghead">Apache log4net™ SDK Documentation - Microsoft .NET Framework 4.0</td> <td class="product"> </td> </tr> </table> </div> <div id="TitleRow"> <h1 class="dtH1">BasicConfigurator.Configure Method ()</h1> </div> </div> <div id="nstext"> <p> Initializes the log4net system with a default configuration. </p> <div class="syntax"> <span class="lang">[Visual Basic]</span> <br />Overloads Public Shared Function Configure() As <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/<API key>.htm">ICollection</a></div> <div class="syntax"> <span class="lang">[C#]</span> <br />public static <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/<API key>.htm">ICollection</a> Configure();</div> <h4 class="dtH4">Remarks</h4> <p> Initializes the log4net logging system using a <a href="log4net.Appender.ConsoleAppender.html">ConsoleAppender</a> that will write to <code>Console.Out</code>. The log messages are formatted using the <a href="log4net.Layout.PatternLayout.html">PatternLayout</a> layout object with the <a href="log4net.Layout.PatternLayout.<API key>.html"><API key></a> layout style. 
</p> <h4 class="dtH4">See Also</h4><p><a href="log4net.Config.BasicConfigurator.html">BasicConfigurator Class</a> | <a href="log4net.Config.html">log4net.Config Namespace</a> | <a href="log4net.Config.BasicConfigurator.Configure_overloads.html">BasicConfigurator.Configure Overload List</a></p><object type="application/x-oleobject" classid="clsid:<API key>" viewastext="true" style="display: none;"><param name="Keyword" value="Configure method"></param><param name="Keyword" value="Configure method, BasicConfigurator class"></param><param name="Keyword" value="BasicConfigurator.Configure method"></param></object><hr /><div id="footer"><a href="http://logging.apache.org/log4net/">Apache log4net</a></div> </body> </html>
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Diagnostics;

namespace <API key>.Net.Settings
{
    /// <summary>
    /// Settings for a quote line: the traded volume plus the colors used to
    /// render the bid and ask sides. Colors are always stored fully opaque.
    /// </summary>
    [DisplayName("Line Settings")]
    public class LineSettings
    {
        #region construction

        public LineSettings()
        {
            this.Volume = 0;
            this.m_bidColor = Color.Black;
            this.m_askColor = Color.Black;
        }

        /// <summary>Copy constructor.</summary>
        internal LineSettings(LineSettings settings)
        {
            this.Volume = settings.Volume;
            this.m_bidColor = settings.m_bidColor;
            this.m_askColor = settings.m_askColor;
        }

        internal LineSettings(double volume, Color bidColor, Color askColor)
        {
            this.Volume = volume;
            this.m_bidColor = bidColor;
            this.m_askColor = askColor;
        }

        #endregion

        #region properties

        /// <summary>Traded volume; must lie in [0, 10000].</summary>
        // FIX: the attribute used to claim a default of 1 while the
        // parameterless constructor actually sets Volume to 0; the attribute
        // now matches the real default (affects designer serialization only).
        [DefaultValue(0d)]
        public double Volume
        {
            get { return m_volume; }
            set
            {
                if ((value < m_minVolume) || (value > m_maxVolume))
                {
                    // FIX: corrected "Volum" typo in the exception message.
                    string message = string.Format("Volume can be from {0} to {1}", m_minVolume, m_maxVolume);
                    throw new <API key>("value", value, message);
                }
                m_volume = value;
            }
        }

        [DisplayName("Bid Color")]
        [DefaultValue(typeof(Color), "Black")]
        public Color BidColor
        {
            get { return m_bidColor; }
            set { m_bidColor = NormalizeColor(value); }
        }

        [DisplayName("Ask Color")]
        [DefaultValue(typeof(Color), "Black")]
        public Color AskColor
        {
            get { return m_askColor; }
            set { m_askColor = NormalizeColor(value); }
        }

        #endregion

        #region private members

        // Forces the alpha channel to fully opaque so translucent colors can
        // never be assigned to a line.
        private static Color NormalizeColor(Color value)
        {
            if (255 == value.A)
            {
                return value;
            }
            Color result = Color.FromArgb(255, value.R, value.G, value.B);
            return result;
        }

        #endregion

        #region overrode methods

        public override string ToString()
        {
            string result = string.Format("Volume = {0}", this.Volume);
            return result;
        }

        #endregion

        #region members

        private double m_volume;
        private const double m_minVolume = 0;
        private const double m_maxVolume = 10000;
        private Color m_bidColor;
        private Color m_askColor;

        #endregion
    }
}
#!/bin/bash
# Clone pinned Kuberay commit to temporary directory, copy the CRD definitions
# into the autoscaler folder.

# Absolute directory of this script, so the copied config lands next to it
# regardless of the caller's working directory.
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

# Throwaway working directory; each pushd/popd aborts the script on failure.
DIR=$(mktemp -d -t "kuberay-XXXXXX")
pushd "$DIR" || exit
git clone https://github.com/ray-project/kuberay/
pushd "kuberay" || exit
# If you changed the Kuberay CRD, you need to update this commit to point
# to the new CRD. The following always need to be compatible: The used CRDs,
# the docker image of the Kuberay operator and the KuberayNodeProvider.
# This is normally not a problem since the KuberayNodeProvider uses a
# stable part of the CRD definition and the Kuberay operator and the
# get updated together. It is important to keep this in mind when making
# changes. The CRD is designed to be stable so one operator can run many
# different versions of Ray.
git checkout <SHA1-like>
# Here is where we specify the docker image that is used for the operator.
# If you want to use your own version of Kuberay, you should change the content
# of kuberay-autoscaler.patch to point to your operator.
# This would normally better be done with kustomization, but we don't want to make
# kustomization a dependency for running this.
git apply "$SCRIPT_DIR/kuberay-autoscaler.patch"
# Copy the patched CRD config tree next to this script.
cp -r ray-operator/config "$SCRIPT_DIR/"
popd || exit
popd || exit
<?php
header("Content-Type: text/html;charset=utf-8");

// Strip CR/LF from the user-supplied values that end up inside mail headers;
// otherwise a crafted name or email can inject extra headers (spam relay).
$name = isset($_POST['first_name']) ? str_replace(array("\r", "\n"), '', $_POST['first_name']) : '';
$email = isset($_POST['email']) ? str_replace(array("\r", "\n"), '', $_POST['email']) : '';
$message = isset($_POST['comments']) ? $_POST['comments'] : '';

// Never place an invalid address into the From header.
if (!filter_var($email, FILTER_VALIDATE_EMAIL)) {
    $email = '';
}

$to = "jgo@<API key>.org";
$subject = "Estimado Representante";

// HTML-escape the user content embedded in the mail body (stored-XSS guard
// for HTML-capable mail clients).
$body = '
<html>
<head>
<title>Estimado Representante</title>
</head>
<body>
<p><b>Name: </b> '.htmlspecialchars($name, ENT_QUOTES, 'UTF-8').'</p>
<p><b>Email: </b> '.htmlspecialchars($email, ENT_QUOTES, 'UTF-8').'</p>
<p><b>Message: </b> '.htmlspecialchars($message, ENT_QUOTES, 'UTF-8').'</p>
</body>
</html>
';

$headers = "MIME-Version: 1.0\r\n";
$headers .= "Content-type: text/html; charset=utf-8\r\n";
$headers .= "Bcc: estimadosenador@gmail.com" . "\r\n";
$headers .= "From: ".$name." <".$email.">\r\n";

$sended = mail($to, $subject, $body, $headers);
?>
<html>
<head>
<title>Estimado Representante</title>
<link rel="stylesheet" type="text/css" href="style.css">
</head>
<body>
<div class="bigone">
<div class="menu clearfix">
<a href="index.html"><h2>Home</h2></a>
<a href="about.html"><h2>About</h2></a>
</div>
<div class="line"></div>
<div class="cta">
<h1>Estimado Representante,</h1>
<h4> Esta p&aacute;gina es dedicada al pueblo puertorrique&ntilde;o para ejercer nuestro derecho de libertad de expresi&oacute;n <br>y exigir el m&aacute;s alto respeto y cumplimiento a nuestros representantes legislativos. Este canal ser&aacute; una fuente de ideas, uno que fomente la unidad y el progreso, uno que demande fiscalizaci&oacute;n, responsabilidad, &eacute;tica, e igualdad.<br> <br><b>Puertorrique&ntilde;o</b>, felicita, comenta, y protesta pero siempre con propiedad y respeto. Di lo que ves. Di lo que piensas. La libertad de expresi&oacute;n te lo permite. La democracia te lo pide. Porque todo representante tiene que escuchar para poder cumplir. </h4>
</div>
<div class="about">
<h5><b>Tu mensaje fue enviado.</b><br> <br>Si te gust&oacute; la p&aacute;gina, por favor comp&aacute;rtela, y as&iacute; lograremos que m&aacute;s puertorrique&ntilde;os se expresen.<br> <br>Gracias por usar Estimado Representante.</h5>
</div>
<!-- FIX: close the .bigone container that was left open -->
</div>
</body>
</html>
sap.ui.define([
	"delegates/odata/v4/TableDelegate",
	"sap/ui/core/Core"
], function(
	TableDelegate,
	Core
) {
	"use strict";

	/**
	 * Test delegate for OData V4.
	 */
	var ODataTableDelegate = Object.assign({}, TableDelegate);

	/**
	 * Updates the binding info with the relevant path and model from the metadata.
	 *
	 * In addition to the base implementation, this forwards the associated
	 * FilterBar's basic-search text to the binding's $search parameter:
	 * a single-word term is wrapped in double quotes (so characters such as
	 * "(" survive $search parsing), while multi-word input is passed through
	 * unchanged so server-side operators keep working.
	 *
	 * @param {Object} oTable The MDC table instance
	 * @param {Object} oBindingInfo The bindingInfo of the table
	 */
	ODataTableDelegate.updateBindingInfo = function(oTable, oBindingInfo) {
		TableDelegate.updateBindingInfo.apply(this, arguments);

		var oFilterBar = Core.byId(oTable.getFilter());
		if (oFilterBar) {
			// get the basic search
			var sSearchText = oFilterBar.getSearch instanceof Function ? oFilterBar.getSearch() : "";
			if (sSearchText && sSearchText.indexOf(" ") === -1) { // to allow search for "(".....
				sSearchText = '"' + sSearchText + '"'; // TODO: escape " in string
			} // if it contains spaces allow operators like OR...
			// undefined removes a previously set $search instead of sending an empty one
			oBindingInfo.parameters.$search = sSearchText || undefined;
		}
	};

	return ODataTableDelegate;
});
package ip_test

import (
	. "github.com/cloudfoundry/bosh-init/internal/github.com/onsi/ginkgo"
	. "github.com/cloudfoundry/bosh-init/internal/github.com/onsi/gomega"

	"testing"
)

// TestPlatform is the single go-test entry point that hands control to the
// Ginkgo runner; the individual specs live in sibling _test files of this
// package.
func TestPlatform(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Ip Suite")
}
package org.aksw.servicecat.web.api;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;

import org.aksw.servicecat.core.<API key>;

import org.springframework.beans.factory.annotation.Autowired;

// JAX-RS endpoint for registering services in the catalog.
@org.springframework.stereotype.Service
@Path("/services")
public class ServletServiceApi {

    @Autowired
    private <API key> processor;

    // NOTE(review): this is a state-changing registration mapped to HTTP GET
    // on a path named "/put"; by REST conventions it should be POST/PUT.
    // Left as-is because existing clients may depend on the current mapping.
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/put")
    public String registerService(@QueryParam("url") String serviceUrl) {
        processor.process(serviceUrl);
        // Always answers with an empty JSON object.
        String result = "{}";
        return result;
    }
}
# aws included via metadata.rb # if node[:ebs_volumes] # node[:ebs_volumes].each do |name, conf| # aws_ebs_volume "attach hdfs volume #{conf.inspect}" do # provider "aws_ebs_volume" # aws_access_key node[:aws][:aws_access_key] # <API key> node[:aws][:<API key>] # aws_region node[:aws][:aws_region] # availability_zone node[:aws][:availability_zone] # volume_id conf[:volume_id] # device conf[:device] # action :attach # end # end # end
package de.jpaw.fixedpoint.tests;

import java.math.BigDecimal;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import de.jpaw.fixedpoint.types.MicroUnits;

/**
 * Round-trip checks for MicroUnits: building the value 2 from every supported
 * source type and reading it back out in every supported target type.
 */
public class TestConversions {

    @Test
    public void testFromConversions() throws Exception {
        // Every factory method must agree with the raw mantissa representation.
        final MicroUnits expected = MicroUnits.of(2_000_000L);
        Assertions.assertEquals(expected, MicroUnits.valueOf(BigDecimal.valueOf(2)), "from BigDecimal");
        Assertions.assertEquals(expected, MicroUnits.valueOf("2.0"), "from String");
        Assertions.assertEquals(expected, MicroUnits.valueOf(2.0), "from double");
        Assertions.assertEquals(expected, MicroUnits.valueOf(2), "from long");
    }

    @Test
    public void testToConversions() throws Exception {
        final MicroUnits two = MicroUnits.valueOf(2);
        Assertions.assertEquals("2", two.toString(), "to String");
        Assertions.assertEquals(BigDecimal.valueOf(2).setScale(6), two.toBigDecimal(), "to BigDecimal");
        Assertions.assertEquals(2, two.intValue(), "to int");
        Assertions.assertEquals(2.0, two.doubleValue(), "to double");
        Assertions.assertEquals(2_000_000L, two.getMantissa(), "to Mantissa");
    }
}
package org.dominokit.domino.api.client;

// Single-method, no-argument, void callback used by the client framework.
// (Both the interface and method names are redacted in this dump.)
@FunctionalInterface
public interface <API key> {
    void <API key>();
}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_20) on Fri Mar 25 19:54:51 PDT 2011 --> <TITLE> org.apache.hadoop.io.nativeio (Hadoop 0.20.2-cdh3u0 API) </TITLE> <META NAME="date" CONTENT="2011-03-25"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style"> </HEAD> <BODY BGCOLOR="white"> <FONT size="+1" CLASS="FrameTitleFont"> <A HREF="../../../../../org/apache/hadoop/io/nativeio/package-summary.html" target="classFrame">org.apache.hadoop.io.nativeio</A></FONT> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Classes</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="NativeIO.html" title="class in org.apache.hadoop.io.nativeio" target="classFrame">NativeIO</A></FONT></TD> </TR> </TABLE> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Enums</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="Errno.html" title="enum in org.apache.hadoop.io.nativeio" target="classFrame">Errno</A></FONT></TD> </TR> </TABLE> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Exceptions</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="NativeIOException.html" title="class in org.apache.hadoop.io.nativeio" target="classFrame">NativeIOException</A></FONT></TD> </TR> </TABLE> </BODY> </HTML>
# JSON API routes for the Arkivo engine: full CRUD for items except the
# collection index (items can only be fetched individually, not listed).
Arkivo::Engine.routes.draw do
  namespace :api, defaults: { format: :json } do
    resources :items, except: :index
  end
end
/// <summary>In-memory sample data: the list of known customer names.</summary>
public class Customers
{
    // NOTE(review): mutable public static array — any caller can overwrite
    // entries or reassign the field; acceptable for demo data only.
    public static string[] allCustomers = {"Peter Parker", "Klark Kent", "Bruce Vayne"};
}
package dk.lessismore.nojpa.reflection.db.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a method whose value is indexed as a search field; retained at
 * runtime so the indexer can read the settings reflectively.
 *
 * Created : with IntelliJ IDEA.
 * User: seb
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SearchField {

    /** Sentinel meaning "no dynamic Solr post-name configured". */
    // FIX(idiom): dropped the redundant "public static final" / "public"
    // modifiers — annotation members are implicitly public (static final for
    // constants); binary-compatible, no behavioral change.
    String NULL = "";

    // presumably controls translation of the field content before indexing —
    // TODO confirm against the indexer implementation.
    boolean translate() default false;

    boolean searchReverse() default false;

    float boostFactor() default 3f;

    float reverseBoostFactor() default 0.3f;

    String dynamicSolrPostName() default NULL;
}
package com.hoang.fu;

/**
 * Teacher implementation of the employee hierarchy.
 *
 * @author hoangpt
 */
public class Teacher extends Employee implements ITeacher {

    // Package-private constructor; "name" is inherited from Employee.
    Teacher(String name) {
        this.name = name;
    }

    // Fixed salary for every teacher.
    @Override
    float calculateSalary(){
        return 500f;
    }

    // Not implemented yet — throws at runtime if called.
    @Override
    public int calculateBonus() {
        throw new <API key>("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    // Not implemented yet — throws at runtime if called.
    @Override
    public float calculateAllowance() {
        throw new <API key>("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}
package org.xbill.DNS;

import java.io.*;
import java.util.*;

import org.xbill.DNS.utils.*;

/**
 * Transaction Signature - this record is automatically generated by the
 * resolver. TSIG records provide transaction security between the
 * sender and receiver of a message, using a shared key.
 * @see org.xbill.DNS.Resolver
 * @see org.xbill.DNS.TSIG
 *
 * @author Brian Wellington
 */

public class TSIGRecord extends Record {

private static final long serialVersionUID = -88820909016649306L;

private Name alg;
private Date timeSigned;
private int fudge;
private byte [] signature;
private int originalID;
private int error;
private byte [] other;

TSIGRecord() {}

Record
getObject() {
	return new TSIGRecord();
}

/**
 * Creates a TSIG Record from the given data. This is normally called by
 * the TSIG class
 * @param alg The shared key's algorithm
 * @param timeSigned The time that this record was generated
 * @param fudge The fudge factor for time - if the time that the message is
 * received is not in the range [now - fudge, now + fudge], the signature
 * fails
 * @param signature The signature
 * @param originalID The message ID at the time of its generation
 * @param error The extended error field. Should be 0 in queries.
 * @param other The other data field. Currently used only in BADTIME
 * responses.
 * @see org.xbill.DNS.TSIG
 */
public
TSIGRecord(Name name, int dclass, long ttl, Name alg, Date timeSigned,
	   int fudge, byte [] signature, int originalID, int error,
	   byte other[])
{
	super(name, Type.TSIG, dclass, ttl);
	this.alg = checkName("alg", alg);
	this.timeSigned = timeSigned;
	this.fudge = checkU16("fudge", fudge);
	this.signature = signature;
	this.originalID = checkU16("originalID", originalID);
	this.error = checkU16("error", error);
	this.other = other;
}

// Decodes the rdata from wire format; the 48-bit signed-time field is split
// into a 16-bit high part and a 32-bit low part.
void
rrFromWire(DNSInput in) throws IOException {
	alg = new Name(in);

	long timeHigh = in.readU16();
	long timeLow = in.readU32();
	long time = (timeHigh << 32) + timeLow;
	timeSigned = new Date(time * 1000);
	fudge = in.readU16();

	int sigLen = in.readU16();
	signature = in.readByteArray(sigLen);

	originalID = in.readU16();
	error = in.readU16();

	int otherLen = in.readU16();
	if (otherLen > 0)
		other = in.readByteArray(otherLen);
	else
		other = null;
}

// TSIG records exist only inside messages and have no master-file syntax.
void
rdataFromString(Tokenizer st, Name origin) throws IOException {
	throw st.exception("no text format defined for TSIG");
}

/** Converts rdata to a String */
String
rrToString() {
	StringBuffer sb = new StringBuffer();
	sb.append(alg);
	sb.append(" ");
	if (Options.check("multiline"))
		sb.append("(\n\t");
	sb.append (timeSigned.getTime() / 1000);
	sb.append (" ");
	sb.append (fudge);
	sb.append (" ");
	sb.append (signature.length);
	if (Options.check("multiline")) {
		sb.append ("\n");
		sb.append (base64.formatString(signature, 64, "\t", false));
	} else {
		sb.append (" ");
		sb.append (base64.toString(signature));
	}
	sb.append (" ");
	sb.append (Rcode.TSIGstring(error));
	sb.append (" ");
	if (other == null)
		sb.append (0);
	else {
		sb.append (other.length);
		if (Options.check("multiline"))
			sb.append("\n\n\n\t");
		else
			sb.append(" ");
		if (error == Rcode.BADTIME) {
			if (other.length != 6) {
				sb.append("<invalid BADTIME other data>");
			} else {
				// FIX: the third term was computed with int
				// arithmetic ((other[2] & 0xFF) << 24), which
				// overflows to a negative int for byte values
				// >= 0x80 and corrupted the displayed server
				// time; cast to long before shifting.
				long time = ((long)(other[0] & 0xFF) << 40) +
					    ((long)(other[1] & 0xFF) << 32) +
					    ((long)(other[2] & 0xFF) << 24) +
					    ((other[3] & 0xFF) << 16) +
					    ((other[4] & 0xFF) << 8) +
					    ((other[5] & 0xFF) );
				sb.append("<server time: ");
				sb.append(new Date(time * 1000));
				sb.append(">");
			}
		} else {
			sb.append("<");
			sb.append(base64.toString(other));
			sb.append(">");
		}
	}
	if (Options.check("multiline"))
		sb.append(" )");
	return sb.toString();
}

/** Returns the shared key's algorithm */
public Name
getAlgorithm() {
	return alg;
}

/** Returns the time that this record was generated */
public Date
getTimeSigned() {
	return timeSigned;
}

/** Returns the time fudge factor */
public int
getFudge() {
	return fudge;
}

/** Returns the signature */
public byte []
getSignature() {
	return signature;
}

/** Returns the original message ID */
public int
getOriginalID() {
	return originalID;
}

/** Returns the extended error */
public int
getError() {
	return error;
}

/** Returns the other data */
public byte []
getOther() {
	return other;
}

// Encodes the rdata to wire format (mirror of rrFromWire).
void
rrToWire(DNSOutput out, Compression c, boolean canonical) {
	alg.toWire(out, null, canonical);

	long time = timeSigned.getTime() / 1000;
	int timeHigh = (int) (time >> 32);
	long timeLow = (time & 0xFFFFFFFFL);
	out.writeU16(timeHigh);
	out.writeU32(timeLow);
	out.writeU16(fudge);

	out.writeU16(signature.length);
	out.writeByteArray(signature);

	out.writeU16(originalID);
	out.writeU16(error);

	if (other != null) {
		out.writeU16(other.length);
		out.writeByteArray(other);
	}
	else
		out.writeU16(0);
}

}
-- Seed data for development/testing. Each section deletes the rows it is
-- about to insert so the script can be re-run safely.

-- Users
delete from user where id <= 6;
insert into user (id,password,user_name, email, role) values (6,'$2a$08$<API key>','boisterous', 'neuban35@gmail.com', 'USER');
insert into user (id,password,user_name, email, role) values (5,'$2a$08$2/<API key>','traumatic', 'neuban34@gmail.com', 'USER');
insert into user (id,password,user_name, email, role) values (4,'$2a$08$<API key>/a','chopchop', 'neuban33@gmail.com', 'USER');
insert into user (id,password,user_name, email, role) values (3,'$2a$08$<API key>','krisztmas', 'neuban32@gmail.com', 'USER');
insert into user (id,password,user_name, email, role) values (2,'$2a$08$<API key>.<API key>','pivanyi', 'neuban31@gmail.com', 'USER');
insert into user (id,password,user_name, email, role) values (1,'$2a$08$0E.<API key>//ptScWjRmres1U0txi','falatka', 'neuban3@gmail.com', 'USER');

-- Boards
delete from board where id <= 2;
insert into board (id,title) values (1,'Test board 1');
insert into board (id,title) values (2,'Test board 2');

-- User-Board
delete from <API key> where user_id = 1;
insert into <API key> (user_id,board_id) values (1,1);
insert into <API key> (user_id,board_id) values (1,2);

-- Columns
delete from board_column where id <= 4;
insert into board_column (id,title,board_id) values (1,'TODO',1);
insert into board_column (id,title,board_id) values (2,'IN DEV',1);
insert into board_column (id,title,board_id) values (3,'QA',1);
insert into board_column (id,title,board_id) values (4,'DONE',1);

-- Cards
delete from card where id <= 8;
insert into card (id,description,title,column_id) values (1,'Sample description for task 1','Task 1',1);
insert into card (id,description,title,column_id) values (2,'Sample description for task 2','Task 2',1);
insert into card (id,description,title,column_id) values (3,'Sample description for task 3','Task 3',2);
insert into card (id,description,title,column_id) values (4,'Sample description for task 4','Task 4',3);
insert into card (id,description,title,column_id) values (5,'Sample description for task 5','Task 5',3);
insert into card (id,description,title,column_id) values (6,'Sample description for task 6','Task 6',4);
insert into card (id,description,title,column_id) values (7,'Sample description for task 7','Task 7',4);
insert into card (id,description,title,column_id) values (8,'Sample description for task 8','Task 8',4);

-- Comments
-- FIX: string literals now use standard single quotes instead of the
-- MySQL-only double-quoted form, and a delete precedes the inserts so this
-- section is re-runnable like all the others.
delete from comment where id <= 3;
insert into comment (id,content,created_time,user_id,card_id) values (1,'Some birthday content','1993-12-03 12:30',1,1);
insert into comment (id,content,created_time,user_id,card_id) values (2,'Some birthday content','1994-08-20 12:30',2,1);
insert into comment (id,content,created_time,user_id,card_id) values (3,'Some birthday content','1994-08-19 06:30',3,1);

-- User-Card
delete from <API key> where user_id = 1;
insert into <API key> (card_id,user_id) values (3,1);
insert into <API key> (card_id,user_id) values (4,1);
insert into <API key> (card_id,user_id) values (7,1);
package app.monitor.job;

import core.framework.internal.log.LogManager;
import core.framework.json.JSON;
import core.framework.kafka.MessagePublisher;
import core.framework.log.message.StatMessage;
import core.framework.scheduler.Job;
import core.framework.scheduler.JobContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;

/**
 * Scheduled job that polls the kube API for pods in the configured namespaces
 * and publishes a POD_FAILURE stat message for every pod that looks unhealthy.
 *
 * @author neo
 */
public class KubeMonitorJob implements Job {
    public final MessagePublisher<StatMessage> publisher;
    public final KubeClient kubeClient;
    public final List<String> namespaces;
    private final Logger logger = LoggerFactory.getLogger(KubeMonitorJob.class);

    public KubeMonitorJob(List<String> namespaces, KubeClient kubeClient, MessagePublisher<StatMessage> publisher) {
        this.publisher = publisher;
        this.kubeClient = kubeClient;
        this.namespaces = namespaces;
    }

    @Override
    public void execute(JobContext context) {
        try {
            var now = ZonedDateTime.now();
            for (String namespace : namespaces) {
                KubePodList pods = kubeClient.listPods(namespace);
                for (KubePodList.Pod pod : pods.items) {
                    String errorMessage = check(pod, now);
                    if (errorMessage != null) {
                        publishPodFailure(pod, errorMessage);
                    }
                }
            }
        } catch (Throwable e) {
            // monitoring must not crash the scheduler; report the collection failure instead
            logger.error(e.getMessage(), e);
            publisher.publish(StatMessageFactory.failedToCollect(LogManager.APP_NAME, null, e));
        }
    }

    // Returns a human-readable error message if the pod looks unhealthy, or
    // null if it is fine. The repeated 300s (5 min) thresholds act as a grace
    // period so slow-but-normal transitions do not trigger alerts.
    String check(KubePodList.Pod pod, ZonedDateTime now) {
        if (pod.metadata.deletionTimestamp != null) {
            // pod is being deleted; only alert if deletion appears stuck
            Duration elapsed = Duration.between(pod.metadata.deletionTimestamp, now);
            if (elapsed.toSeconds() >= 300) {
                return "pod is still in deletion, elapsed=" + elapsed;
            }
            return null;
        }
        String phase = pod.status.phase;
        if ("Succeeded".equals(phase)) return null;    // terminated
        if ("Failed".equals(phase) || "Unknown".equals(phase)) return "unexpected pod phase, phase=" + phase;
        if ("Pending".equals(phase)) {
            // newly created pod may not have container status yet, containerStatuses is initialized as empty
            for (KubePodList.ContainerStatus status : pod.status.containerStatuses) {
                if (status.state.waiting != null && "ImagePullBackOff".equals(status.state.waiting.reason)) {
                    return "ImagePullBackOff: " + status.state.waiting.message;
                }
            }
            // for unschedulable pod
            for (KubePodList.PodCondition condition : pod.status.conditions) {
                if ("PodScheduled".equals(condition.type) && "False".equals(condition.status)
                    && Duration.between(condition.lastTransitionTime, now).toSeconds() >= 300) {
                    return condition.reason + ": " + condition.message;
                }
            }
        }
        if ("Running".equals(phase)) {
            boolean ready = true;
            for (KubePodList.ContainerStatus status : pod.status.containerStatuses) {
                if (status.state.waiting != null && "CrashLoopBackOff".equals(status.state.waiting.reason)) {
                    return "CrashLoopBackOff: " + status.state.waiting.message;
                }
                boolean containerReady = Boolean.TRUE.equals(status.ready);
                if (!containerReady && status.lastState != null && status.lastState.terminated != null) {
                    // container restarted at least once; surface the previous termination reason
                    var terminated = status.lastState.terminated;
                    return "pod was terminated, reason=" + terminated.reason + ", exitCode=" + terminated.exitCode;
                }
                if (!containerReady) {
                    ready = false;
                }
            }
            if (ready) return null;     // all running, all ready
        }
        ZonedDateTime startTime = pod.status.startTime != null ? pod.status.startTime : pod.metadata.creationTimestamp;   // startTime may not be populated yet if pod is just created
        Duration elapsed = Duration.between(startTime, now);
        if (elapsed.toSeconds() >= 300) {
            // can be: 1) took long to be ready after start, or 2) readiness check failed in the middle run
            return "pod is not in ready state, uptime=" + elapsed;
        }
        return null;
    }

    // Publishes one ERROR stat message for the failing pod; the full pod JSON
    // is attached under the "pod" info key for diagnosis.
    private void publishPodFailure(KubePodList.Pod pod, String errorMessage) {
        var now = Instant.now();
        var message = new StatMessage();
        message.id = LogManager.ID_GENERATOR.next(now);
        message.date = now;
        message.result = "ERROR";
        // fall back to the pod name when the "app" label is absent
        message.app = pod.metadata.labels.getOrDefault("app", pod.metadata.name);
        message.host = pod.metadata.name;
        message.errorCode = "POD_FAILURE";
        message.errorMessage = errorMessage;
        message.info = Map.of("pod", JSON.toJSON(pod));
        publisher.publish(message);
    }
}
package mahjong

// Hand is a collection of tiles (Pai) held by a player.
type Hand []Pai

// remove returns a copy of list with a single occurrence of p removed.
func remove(list []Pai, p Pai) []Pai {
	var result []Pai
	removed := false
	for _, e := range list {
		if e == p && !removed {
			// Skip only the first match; later duplicates are kept.
			removed = true
		} else {
			result = append(result, e)
		}
	}
	return result
}

// contain reports whether p occurs at least once in list.
func contain(list []Pai, p Pai) bool {
	for _, a := range list {
		if a == p {
			return true
		}
	}
	return false
}

// contain2 reports whether p occurs at least twice in list.
func contain2(list []Pai, p Pai) bool {
	count := 0
	for _, a := range list {
		if a == p {
			count += 1
		}
	}
	return count >= 2
}

// createCandidates recursively partitions list into tile groups (complete
// runs, partial runs, triplets, pairs and singles), accumulating every
// candidate partition in cand. The last element of cand is the partition
// currently being built; each branch snapshots it (tmp), extends it,
// recurses on the remaining tiles, then appends the snapshot so the
// alternative grouping is explored as a separate candidate.
func createCandidates(list []Pai, cand [][][]Pai) [][][]Pai {
	if len(list) <= 0 {
		return cand
	}
	current := list[0]
	remain := list[1:]
	nextOne := current + 1
	nextTwo := current + 2
	if current.IsNumber() {
		// Complete run: current, current+1, current+2 in the same suit.
		if current.Suit() == nextOne.Suit() && current.Suit() == nextTwo.Suit() && contain(remain, nextOne) && contain(remain, nextTwo) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx])
			cand[idx] = append(cand[idx], []Pai{current, nextOne, nextTwo})
			_remain := remove(remove(remain, nextOne), nextTwo)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp)
		}
		// Partial run: current and current+1.
		if current.Suit() == nextOne.Suit() && contain(remain, nextOne) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx])
			cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextOne})
			_remain := remove(remain, nextOne)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp)
		}
		// Partial run with a gap: current and current+2.
		if current.Suit() == nextTwo.Suit() && contain(remain, nextTwo) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx])
			cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextTwo})
			_remain := remove(remain, nextTwo)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp)
		}
	}
	// Triplet: three copies of current.
	if contain2(remain, current) {
		idx := len(cand) - 1
		tmp := make([][]Pai, len(cand[idx]))
		copy(tmp, cand[idx])
		cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current, current})
		_remain := remove(remove(remain, current), current)
		cand = createCandidates(_remain, cand)
		cand = append(cand, tmp)
	}
	// Pair: two copies of current.
	if contain(remain, current) {
		idx := len(cand) - 1
		tmp := make([][]Pai, len(cand[idx]))
		copy(tmp, cand[idx])
		cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current})
		_remain := remove(remain, current)
		cand = createCandidates(_remain, cand)
		cand = append(cand, tmp)
	}
	// Single: current stands alone in the partition currently being built.
	cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current})
	return createCandidates(remain, cand)
}

// isUnique reports whether list contains no duplicate tiles.
func isUnique(list []Pai) bool {
	result := []Pai{}
	for _, p := range list {
		if contain(result, p) {
			// nothing to do
		} else {
			result = append(result, p)
		}
	}
	return len(list) == len(result)
}

// isSevenPairs reports whether the partition matches the seven-pairs
// shape this code accepts: exactly seven groups, no group is a mismatched
// two-tile pair, and the first tile of every group is distinct.
func isSevenPairs(list [][]Pai) bool {
	if len(list) != 7 {
		return false
	}
	stack := []Pai{}
	for _, pair := range list {
		if len(pair) == 2 && pair[0] != pair[1] {
			return false
		}
		stack = append(stack, pair[0])
	}
	return isUnique(stack)
}

// isThirteenOrphans reports whether the partition matches the
// thirteen-orphans shape this code accepts: 12 or 13 groups consisting
// entirely of orphan (terminal/honor) tiles per Pai.IsOrphan.
func isThirteenOrphans(list [][]Pai) bool {
	if len(list) == 12 || len(list) == 13 {
		for _, pair := range list {
			for _, pai := range pair {
				if !pai.IsOrphan() {
					return false
				}
			}
		}
		return true
	}
	return false
}

// IsTenpai reports whether the hand is tenpai (one tile from a winning
// shape) by generating all candidate partitions and accepting a regular
// 5-group partition, a seven-pairs partition or a thirteen-orphans one.
func (hand *Hand) IsTenpai() bool {
	_hand := *hand
	cand := [][][]Pai{[][]Pai{}}
	cand = createCandidates(_hand, cand)
	for _, a := range cand {
		// regular type
		if len(a) == 5 {
			return true
		}
		// seven pairs
		if isSevenPairs(a) {
			return true
		}
		if isThirteenOrphans(a) {
			return true
		}
	}
	return false
}
/* ** Stack.cpp for cpp_abstractvm in /var/projects/cpp_abstractvm/Stack.cpp ** ** Made by kevin labbe ** Login <labbe_k@epitech.net> ** ** Started on Mar 1, 2014 2:15:13 AM 2014 kevin labbe ** Last update Mar 1, 2014 2:15:13 AM 2014 kevin labbe */ #include "EmptyStackException.hpp" #include "<API key>.hpp" #include "<API key>.hpp" #include "Stack.hpp" namespace Arithmetic { Stack::Stack() { _funcs["add"] = &Stack::add; _funcs["sub"] = &Stack::sub; _funcs["mul"] = &Stack::mul; _funcs["div"] = &Stack::div; _funcs["mod"] = &Stack::mod; _funcs["pop"] = &Stack::pop; _funcs["dump"] = &Stack::dump; _funcs["print"] = &Stack::print; } Stack::~Stack() { } void Stack::execFunc(const std::string& name) { if (_funcs[name.c_str()]) (this->*_funcs[name.c_str()])(); } void Stack::push(IOperand* op) { _stack.push_front(op); } void Stack::pop() { if (_stack.empty()) throw Exception::EmptyStackException("pop"); delete _stack.front(); _stack.pop_front(); } void Stack::assert(IOperand* op) { if (_stack.empty()) throw Exception::<API key>("Stack empty"); if (_stack.front()->getPrecision() != op->getPrecision() || _stack.front()->getType() != op->getType() || _stack.front()->toString() != op->toString()) throw Exception::<API key>("Operand different at the top of the stack"); } void Stack::dump() { for (std::deque<IOperand*>::iterator it = _stack.begin(); it != _stack.end(); it++) std::cout << (*it)->toString() << std::endl; } void Stack::print() { std::stringstream stream; char chr; int nbr; if (_stack.empty()) throw Exception::EmptyStackException("print"); if (_stack.front()->getType() != INT8) throw Exception::<API key>("print expects an int8 at the top of the stack"); stream << _stack.front()->toString(); stream >> nbr; chr = nbr; std::cout << chr << std::endl; } void Stack::add() { _loadOperands(); _pushResult(*_op1 + *_op2); } void Stack::sub() { _loadOperands(); _pushResult(*_op1 - *_op2); } void Stack::mul() { _loadOperands(); _pushResult(*_op1 * *_op2); } void Stack::div() { 
_loadOperands(); _pushResult(*_op1 / *_op2); } void Stack::mod() { _loadOperands(); _pushResult(*_op1 % *_op2); } void Stack::_loadOperands() { if (_stack.size() < 2) throw Exception::EmptyStackException("Calc"); _op1 = _stack.front(); _stack.pop_front(); _op2 = _stack.front(); _stack.pop_front(); } void Stack::_pushResult(IOperand* result) { _stack.push_front(result); delete _op1; delete _op2; } } /* namespace Arithmetic */
# Di Roccia | Di Luce

Website of the exhibition [Di Roccia | Di Luce](http:

## Credits

* Theme by [Grayscale](http://startbootstrap.com/template-overviews/grayscale/)
* Gallery by [Bootstrap Lightbox](http://ashleydw.github.io/lightbox/)
import { Upload } from './../models/upload/upload.model';
import { SUPController } from './sup.server.controller';

// ANSI escape sequence used to render the log label in yellow.
const yellow = '\x1b[33m%s\x1b[0m: ';

/**
 * Socket upload protocol facade: wires socket events to SUPController and
 * exposes static pass-throughs for the connectionless operations.
 */
export class SUP {

    constructor(private io: SocketIOClient.Manager) { }

    /** Registers per-socket upload event handlers on every new connection. */
    registerIO() {
        this.io.on('connection', (socket: SocketIOClient.Socket) => {
            console.log(yellow, 'Socket connected!');

            const onNextChunk = (data) => {
                console.log(yellow, 'Receiving data.');
                SUPController.nextChunk(data, socket);
            };
            const onNextFile = (data) => {
                console.log(yellow, 'Receiving next File.');
                SUPController.nextFile(data, socket);
            };

            socket.on('NextChunk', onNextChunk);
            socket.on('NextFile', onNextFile);
        });
    }

    /** Delegates the initial handshake to the controller. */
    static handshake(data, cb): void {
        SUPController.handshake(data, cb);
    }

    /** Delegates pausing an upload to the controller. */
    static pause(data, cb): void {
        SUPController.pause(data, cb);
    }

    /** Delegates resuming an upload to the controller. */
    static continue(data, cb): void {
        SUPController.continue(data, cb);
    }

    /** Delegates aborting an upload to the controller. */
    static abort(data, cb): void {
        SUPController.abort(data, cb);
    }
}
package com.yueny.demo.job.controller; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import com.yueny.demo.common.example.bo.ModifyDemoBo; import com.yueny.demo.common.example.service.<API key>; import lombok.extern.slf4j.Slf4j; /** * @author yueny09 <deep_blue_yang@163.com> * * @DATE 2016216 8:23:11 * */ @Controller @Slf4j public class DemoController { @Autowired private <API key> <API key>; @RequestMapping(value = { "/", "welcome" }, method = RequestMethod.GET) @ResponseBody public List<ModifyDemoBo> bar() { try { return <API key>.queryAll(); } catch (final Exception e) { log.error("exception:", e); } return null; } @RequestMapping(value = "/healthy", method = RequestMethod.GET) @ResponseBody public String healthy() { return "OK"; } }
#pragma once #include "plugin_common.hpp" #include "serialize.hpp" #include "onnx2trt_common.hpp" #include <NvInferPlugin.h> #include <memory> #include <vector> namespace onnx2trt { // A convenient base class for plugins. Provides default implementations of // some methods. // Adapts a plugin so that its type is automatically serialized, enabling it // to be identified when deserializing. class Plugin : public nvinfer1::IPluginExt, public IOwnable { public: virtual const char* getPluginType() const = 0; nvinfer1::Dims const& getInputDims(int index) const { return _input_dims.at(index); } size_t getMaxBatchSize() const { return _max_batch_size; } nvinfer1::DataType getDataType() const { return _data_type; } nvinfer1::PluginFormat getDataFormat() const { return _data_format; } size_t getWorkspaceSize(int) const override { return 0; } int initialize() override { return 0;} void terminate() override {} bool supportsFormat(nvinfer1::DataType type, nvinfer1::PluginFormat format) const override; void configureWithFormat(const nvinfer1::Dims* inputDims, int nbInputs, const nvinfer1::Dims* outputDims, int nbOutputs, nvinfer1::DataType type, nvinfer1::PluginFormat format, int maxBatchSize) override; void destroy() override { delete this; } protected: void deserializeBase(void const*& serialData, size_t& serialLength); size_t <API key>(); void serializeBase(void*& buffer); std::vector<nvinfer1::Dims> _input_dims; size_t _max_batch_size; nvinfer1::DataType _data_type; nvinfer1::PluginFormat _data_format; virtual ~Plugin() {} }; class PluginAdapter : public Plugin { protected: nvinfer1::IPlugin* _plugin; nvinfer1::IPluginExt* _ext; public: PluginAdapter(nvinfer1::IPlugin* plugin) : _plugin(plugin), _ext(dynamic_cast<IPluginExt*>(plugin)) {} virtual int getNbOutputs() const override; virtual nvinfer1::Dims getOutputDimensions(int index, const nvinfer1::Dims *inputDims, int nbInputs) override ; virtual void serialize(void* buffer) override; virtual size_t <API key>() override; 
virtual int initialize() override; virtual void terminate() override; virtual bool supportsFormat(nvinfer1::DataType type, nvinfer1::PluginFormat format) const override; virtual void configureWithFormat(const nvinfer1::Dims *inputDims, int nbInputs, const nvinfer1::Dims *outputDims, int nbOutputs, nvinfer1::DataType type, nvinfer1::PluginFormat format, int maxBatchSize); virtual size_t getWorkspaceSize(int maxBatchSize) const override; virtual int enqueue(int batchSize, const void *const *inputs, void **outputs, void *workspace, cudaStream_t stream) override; }; // This makes a plugin compatible with onnx2trt::PluginFactory by serializing // its plugin type. class <API key> : public PluginAdapter { UniqueOwnable _owned_plugin; Plugin* _plugin; public: <API key>(Plugin* plugin) : PluginAdapter(plugin), _owned_plugin(plugin), _plugin(plugin) {} void serialize(void* buffer) override { const char* plugin_type = _plugin->getPluginType(); serialize_value(&buffer, (const char*)<API key>); serialize_value(&buffer, plugin_type); return _plugin->serialize(buffer); } size_t <API key>() override { const char* plugin_type = _plugin->getPluginType(); // Note: +1 for NULL-terminated string return (sizeof(<API key>) + 1 + strlen(plugin_type) + _plugin-><API key>()); } const char* getPluginType() const override { return _plugin->getPluginType(); } void destroy() override { delete this; } }; // Adapts nvinfer1::plugin::INvPlugin into onnx2trt::Plugin // (This enables existing NV plugins to be used in this plugin infrastructure) class NvPlugin : public PluginAdapter { nvinfer1::plugin::INvPlugin* _plugin; public: NvPlugin(nvinfer1::plugin::INvPlugin* plugin) : PluginAdapter(plugin), _plugin(plugin) {} virtual const char* getPluginType() const override; virtual void destroy() override; }; } // namespace onnx2trt
<html> <head></head> <body> <!-- output should be the same in Client/Server rendering --> <div class="dotvvm-upload" data-bind="with: Files"> <span class="<API key>" data-bind="visible: !IsBusy()"> <a href="javascript:;" onclick="dotvvm.fileUpload.showUploadDialog(this); return false;">Upload</a> </span> <input data-bind="dotvvm-FileUpload: {&quot;url&quot;:&quot;/dotvvmFileUpload?multiple=true&quot;}" multiple="multiple" style="display:none" type="file"> <span class="dotvvm-upload-files" data-bind="html: dotvvm.globalize.format(&quot;{0} files&quot;, Files().length)"></span> <span class="<API key>" data-bind="visible: IsBusy"> <span class="<API key>" data-bind="style: { 'width': (Progress() == -1 ? '50' : Progress()) + '%' }"></span> </span> <span class="<API key>" data-bind="html: Error() ? &quot;Error occurred.&quot; : &quot;The files were uploaded successfully.&quot;, attr: { title: Error }, css: { '<API key>': !Error(), '<API key>': Error }, visible: !IsBusy() &amp;&amp; Files().length > 0"></span> </div> </body> </html>
package docs.extension; //#imports import akka.actor.Extension; import akka.actor.AbstractExtensionId; import akka.actor.ExtensionIdProvider; import akka.actor.ActorSystem; import akka.actor.ExtendedActorSystem; import scala.concurrent.duration.Duration; import com.typesafe.config.Config; import java.util.concurrent.TimeUnit; //#imports import akka.actor.UntypedActor; import org.junit.Test; public class <API key> { static //#extension public class SettingsImpl implements Extension { public final String DB_URI; public final Duration <API key>; public SettingsImpl(Config config) { DB_URI = config.getString("myapp.db.uri"); <API key> = Duration.create(config.getDuration("myapp.circuit-breaker.timeout", TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); } } //#extension static //#extensionid public class Settings extends AbstractExtensionId<SettingsImpl> implements ExtensionIdProvider { public final static Settings SettingsProvider = new Settings(); private Settings() {} public Settings lookup() { return Settings.SettingsProvider; } public SettingsImpl createExtension(ExtendedActorSystem system) { return new SettingsImpl(system.settings().config()); } } //#extensionid static //#<API key> public class MyActor extends UntypedActor { // typically you would use static import of the Settings.SettingsProvider field final SettingsImpl settings = Settings.SettingsProvider.get(getContext().system()); Connection connection = connect(settings.DB_URI, settings.<API key>); //#<API key> public Connection connect(String dbUri, Duration <API key>) { return new Connection(); } public void onReceive(Object msg) { } //#<API key> } //#<API key> public static class Connection { } @Test public void <API key>() { final ActorSystem system = null; try { //#extension-usage // typically you would use static import of the Settings.SettingsProvider field String dbUri = Settings.SettingsProvider.get(system).DB_URI; //#extension-usage } catch (Exception e) { //do nothing } } }
// Generated from /POI/java/org/apache/poi/hssf/record/BoundSheetRecord.java #include <org/apache/poi/hssf/record/BoundSheetRecord.hpp> #include <java/lang/ArrayStoreException.hpp> #include <java/lang/<API key>.hpp> #include <java/lang/String.hpp> #include <java/lang/StringBuffer.hpp> #include <java/util/Arrays.hpp> #include <java/util/Comparator.hpp> #include <java/util/List.hpp> #include <org/apache/poi/hssf/record/BoundSheetRecord_1.hpp> #include <org/apache/poi/hssf/record/Record.hpp> #include <org/apache/poi/hssf/record/RecordBase.hpp> #include <org/apache/poi/hssf/record/RecordInputStream.hpp> #include <org/apache/poi/hssf/record/StandardRecord.hpp> #include <org/apache/poi/ss/util/WorkbookUtil.hpp> #include <org/apache/poi/util/BitField.hpp> #include <org/apache/poi/util/BitFieldFactory.hpp> #include <org/apache/poi/util/HexDump.hpp> #include <org/apache/poi/util/LittleEndian.hpp> #include <org/apache/poi/util/LittleEndianConsts.hpp> #include <org/apache/poi/util/LittleEndianOutput.hpp> #include <org/apache/poi/util/StringUtil.hpp> #include <Array.hpp> #include <ObjectArray.hpp> #include <SubArray.hpp> template<typename ComponentType, typename... 
Bases> struct SubArray; namespace poi { namespace hssf { namespace record { typedef ::SubArray< ::poi::hssf::record::RecordBase, ::java::lang::ObjectArray > RecordBaseArray; typedef ::SubArray< ::poi::hssf::record::Record, RecordBaseArray > RecordArray; typedef ::SubArray< ::poi::hssf::record::StandardRecord, RecordArray > StandardRecordArray; typedef ::SubArray< ::poi::hssf::record::BoundSheetRecord, StandardRecordArray > <API key>; } // record } // hssf } // poi template<typename T> static T* npc(T* t) { if(!t) throw new ::java::lang::<API key>(); return t; } poi::hssf::record::BoundSheetRecord::BoundSheetRecord(const ::default_init_tag&) : super(*static_cast< ::default_init_tag* >(0)) { clinit(); } poi::hssf::record::BoundSheetRecord::BoundSheetRecord(::java::lang::String* sheetname) : BoundSheetRecord(*static_cast< ::default_init_tag* >(0)) { ctor(sheetname); } poi::hssf::record::BoundSheetRecord::BoundSheetRecord(RecordInputStream* in) : BoundSheetRecord(*static_cast< ::default_init_tag* >(0)) { ctor(in); } constexpr int16_t poi::hssf::record::BoundSheetRecord::sid; poi::util::BitField*& poi::hssf::record::BoundSheetRecord::hiddenFlag() { clinit(); return hiddenFlag_; } poi::util::BitField* poi::hssf::record::BoundSheetRecord::hiddenFlag_; poi::util::BitField*& poi::hssf::record::BoundSheetRecord::veryHiddenFlag() { clinit(); return veryHiddenFlag_; } poi::util::BitField* poi::hssf::record::BoundSheetRecord::veryHiddenFlag_; void poi::hssf::record::BoundSheetRecord::ctor(::java::lang::String* sheetname) { super::ctor(); <API key> = 0; setSheetname(sheetname); } void poi::hssf::record::BoundSheetRecord::ctor(RecordInputStream* in) { super::ctor(); auto buf = new ::int8_tArray(::poi::util::LittleEndianConsts::INT_SIZE); npc(in)->readPlain(buf, int32_t(0), npc(buf)->length); <API key> = ::poi::util::LittleEndian::getInt(buf); <API key> = npc(in)->readUShort(); auto <API key> = npc(in)->readUByte(); <API key> = npc(in)->readByte(); if(isMultibyte()) { 
field_5_sheetname = npc(in)->readUnicodeLEString(<API key>); } else { field_5_sheetname = npc(in)-><API key>(<API key>); } } void poi::hssf::record::BoundSheetRecord::setPositionOfBof(int32_t pos) { <API key> = pos; } void poi::hssf::record::BoundSheetRecord::setSheetname(::java::lang::String* sheetName) { ::poi::ss::util::WorkbookUtil::validateSheetName(sheetName); field_5_sheetname = sheetName; <API key> = ::poi::util::StringUtil::hasMultibyte(sheetName) ? int32_t(1) : int32_t(0); } int32_t poi::hssf::record::BoundSheetRecord::getPositionOfBof() { return <API key>; } bool poi::hssf::record::BoundSheetRecord::isMultibyte() { return (<API key> & int32_t(1)) != 0; } java::lang::String* poi::hssf::record::BoundSheetRecord::getSheetname() { return field_5_sheetname; } java::lang::String* poi::hssf::record::BoundSheetRecord::toString() { auto buffer = new ::java::lang::StringBuffer(); npc(buffer)->append(u"[BOUNDSHEET]\n"_j); npc(npc(npc(buffer)->append(u" .bof = "_j))->append(::poi::util::HexDump::intToHex(getPositionOfBof())))->append(u"\n"_j); npc(npc(npc(buffer)->append(u" .options = "_j))->append(::poi::util::HexDump::shortToHex(<API key>)))->append(u"\n"_j); npc(npc(npc(buffer)->append(u" .unicodeflag= "_j))->append(::poi::util::HexDump::byteToHex(<API key>)))->append(u"\n"_j); npc(npc(npc(buffer)->append(u" .sheetname = "_j))->append(field_5_sheetname))->append(u"\n"_j); npc(buffer)->append(u"[/BOUNDSHEET]\n"_j); return npc(buffer)->toString(); } int32_t poi::hssf::record::BoundSheetRecord::getDataSize() { return int32_t(8) + npc(field_5_sheetname)->length() * (isMultibyte() ? 
int32_t(2) : int32_t(1)); } void poi::hssf::record::BoundSheetRecord::serialize(::poi::util::LittleEndianOutput* out) { npc(out)->writeInt(getPositionOfBof()); npc(out)->writeShort(<API key>); auto name = field_5_sheetname; npc(out)->writeByte(npc(name)->length()); npc(out)->writeByte(<API key>); if(isMultibyte()) { ::poi::util::StringUtil::putUnicodeLE(name, out); } else { ::poi::util::StringUtil::<API key>(name, out); } } int16_t poi::hssf::record::BoundSheetRecord::getSid() { return sid; } bool poi::hssf::record::BoundSheetRecord::isHidden() { return npc(hiddenFlag_)->isSet(<API key>); } void poi::hssf::record::BoundSheetRecord::setHidden(bool hidden) { <API key> = npc(hiddenFlag_)->setBoolean(<API key>, hidden); } bool poi::hssf::record::BoundSheetRecord::isVeryHidden() { return npc(veryHiddenFlag_)->isSet(<API key>); } void poi::hssf::record::BoundSheetRecord::setVeryHidden(bool veryHidden) { <API key> = npc(veryHiddenFlag_)->setBoolean(<API key>, veryHidden); } poi::hssf::record::<API key>* poi::hssf::record::BoundSheetRecord::orderByBofPosition(::java::util::List* boundSheetRecords) { clinit(); auto bsrs = new <API key>(npc(boundSheetRecords)->size()); npc(boundSheetRecords)->toArray_(static_cast< ::java::lang::ObjectArray* >(bsrs)); ::java::util::Arrays::sort(bsrs, BOFComparator_); return bsrs; } java::util::Comparator*& poi::hssf::record::BoundSheetRecord::BOFComparator() { clinit(); return BOFComparator_; } java::util::Comparator* poi::hssf::record::BoundSheetRecord::BOFComparator_; extern java::lang::Class *class_(const char16_t *c, int n); java::lang::Class* poi::hssf::record::BoundSheetRecord::class_() { static ::java::lang::Class* c = ::class_(u"org.apache.poi.hssf.record.BoundSheetRecord", 43); return c; } void poi::hssf::record::BoundSheetRecord::clinit() { super::clinit(); static bool in_cl_init = false; struct clinit_ { clinit_() { in_cl_init = true; hiddenFlag_ = ::poi::util::BitFieldFactory::getInstance(1); veryHiddenFlag_ = 
::poi::util::BitFieldFactory::getInstance(2); BOFComparator_ = new BoundSheetRecord_1(); } }; if(!in_cl_init) { static clinit_ clinit_instance; } } int32_t poi::hssf::record::BoundSheetRecord::serialize(int32_t offset, ::int8_tArray* data) { return super::serialize(offset, data); } int8_tArray* poi::hssf::record::BoundSheetRecord::serialize() { return super::serialize(); } java::lang::Class* poi::hssf::record::BoundSheetRecord::getClass0() { return class_(); }
var logger = require('../logging').getLogger(__LOGGER__); var {PAGE_CSS_NODE_ID} = require('../constants'); var Q = require('q'); var PageUtil = require('./PageUtil') var loadedCss = {}; module.exports = { registerPageLoad: function registerPageLoad() { if (SERVER_SIDE) { throw new Error("ClientCssHelper.registerPageLoad can't be called server-side"); } // for each css node in the head that the react-server server wrote to the response, note it down in the cache, so that // we can remove it on a page to page transition. var <API key> = document.head.querySelectorAll(`link[${PAGE_CSS_NODE_ID}],style[${PAGE_CSS_NODE_ID}]`); for (var i = 0; i < <API key>.length; i++) { var key, styleNode = <API key>[i]; if (styleNode.href) { key = normalizeLocalUrl(styleNode.href); } else { key = styleNode.innerHTML; } loadedCss[key] = styleNode; } }, ensureCss: function ensureCss(routeName, pageObject) { if (SERVER_SIDE) { throw new Error("ClientCssHelper.registerPageLoad can't be called server-side"); } return Q.all(PageUtil.standardizeStyles(pageObject.getHeadStylesheets())).then(newCss => { var newCssByKey = {}; newCss .filter(style => !!style) .forEach(style => {newCssByKey[this._keyFromStyleSheet(style)] = style}); // first, remove the unneeded CSS link elements. Object.keys(loadedCss).forEach(loadedCssKey => { if (!newCssByKey[loadedCssKey]) { // remove the corresponding node from the DOM. logger.debug("Removing stylesheet: " + loadedCssKey); var node = loadedCss[loadedCssKey]; node.parentNode.removeChild(node); delete loadedCss[loadedCssKey]; } }); // next add the style URLs that weren't already loaded. return Q.all(Object.keys(newCssByKey).map(newCssKey => { var retval; if (!loadedCss[newCssKey]) { // this means that the CSS is not currently present in the // document, so we need to add it. 
logger.debug("Adding stylesheet: " + newCssKey); var style = newCssByKey[newCssKey]; var styleTag; if (style.href) { styleTag = document.createElement('link'); styleTag.rel = 'stylesheet'; styleTag.href = style.href; // If we _can_ wait for the CSS to be loaded before // proceeding, let's do so. if ('onload' in styleTag) { var dfd = Q.defer(); styleTag.onload = dfd.resolve; retval = dfd.promise; } } else { styleTag = document.createElement('style'); styleTag.innerHTML = style.text; } styleTag.type = style.type; styleTag.media = style.media; loadedCss[newCssKey] = styleTag; document.head.appendChild(styleTag); } else { logger.debug(`Stylesheet already loaded (no-op): ${newCssKey}`); } return retval; })); }); }, _keyFromStyleSheet: function(style) { return normalizeLocalUrl(style.href) || style.text; }, } function normalizeLocalUrl(url) { // and //0.0.0.0:3001/common.css as the same file. // Step 2: The browser will give us a full URL even if we only put a // path in on the server. So, if we're comparing against just // a path here we need to strip the base off to avoid a flash // of unstyled content. if (typeof url === 'string') { url = url .replace(/^http[s]?:/, '') .replace(new RegExp("^//" + location.host), ''); } return url; }
package lesson.types; public class Classes { public static void main(String[] args) { JustClass one = new JustClass(); JustClass two = new JustClass(123, "sdf"); System.out.println(one); System.out.println(two); } } class JustClass { private int number; private String name; public JustClass() { } public JustClass(int number, String name) { this.number = number; this.name = name; } @Override public String toString() { return String .format("JustClass {%s, %d}", name,number); } }
using GeneticCreatures.Classes.UtilityClasses;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Tester.classes;

namespace GeneticCreatures.Classes.Objects.InanimateObjects
{
    // A static obstacle drawn as a circle. Every Wall registers itself in the
    // global allWalls list when constructed and removes itself when destroyed.
    class Wall : Selectable
    {
        // Creates a wall at pos using the default grey colour.
        public Wall(Position pos)
            : base(pos, defaultColour)
        {
            allWalls.Add(this);
        }

        // Creates a wall at pos with an explicitly chosen colour.
        public Wall(Position pos, Colour col)
            : base(pos, col)
        {
            allWalls.Add(this);
        }

        // Global registry of all live walls.
        public static List<Wall> allWalls = new List<Wall>();

        // Colour applied when no colour is supplied (mid grey).
        private static Colour defaultColour = new Colour(100, 100, 100);

        // Colour used while the wall is selected.
        private static Colour selectedColour = Colour.White;

        // Draws the wall as a circle, highlighted when selected; drawing is
        // suppressed while the game is in net-creation mode.
        // NOTE(review): colour, isSelected, position and DrawRadius appear to
        // come from Selectable — confirm against that class.
        public override void Draw()
        {
            if (GameState.GetState() != GameStates.CreatingNet)
            {
                Colour chosenCol = colour;
                if (this.isSelected) chosenCol = selectedColour;
                ShapeDrawer.DrawCircle(position.Location, DrawRadius, chosenCol);
            }
        }

        // Unregisters this wall before running the base destruction logic.
        protected override void DestroyWorldObject()
        {
            allWalls.Remove(this);
            base.DestroyWorldObject();
        }
    }
}
/** * Utility classes for converting between granularities of SI (power-of-ten) and IEC (power-of-two) * byte units and bit units. * <p> * <h3>Example Usage</h3> * What's the difference in hard drive space between perception and actual? * <pre><code> * long perception = BinaryByteUnit.TEBIBYTES.toBytes(2); * long usable = DecimalByteUnit.TERABYTES.toBytes(2); * long lost = BinaryByteUnit.BYTES.toGibibytes(perception - usable); * System.out.println(lost + " GiB lost on a 2TB drive."); * </code></pre> * <p> * Method parameter for specifying a resource size. * <pre><code> * public void installDiskCache(long count, ByteUnit unit) { * long size = unit.toBytes(count); * // TODO Install disk cache of 'size' bytes. * } * </code></pre> */ package com.jakewharton.byteunits;
// <auto-generated> // This code was generated by a tool. // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> namespace Experian.Qas.Prowebintegration { public partial class RapidAddress { <summary> ButtonNew control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.HtmlControls.HtmlButton ButtonNew; <summary> ButtonBack control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.HtmlControls.HtmlButton ButtonBack; <summary> RadioTypedown control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.RadioButton RadioTypedown; <summary> RadioSingleline control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.RadioButton RadioSingleline; <summary> RadioKeyfinder control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.RadioButton RadioKeyfinder; <summary> country control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.DropDownList country; <summary> LabelPrompt control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.Label LabelPrompt; <summary> searchText control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. 
</remarks> protected global::System.Web.UI.WebControls.TextBox searchText; <summary> TableAddress control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.Table TableAddress; <summary> TableMultiDPCtrl control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.Table TableMultiDPCtrl; <summary> PlaceholderInfo control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.PlaceHolder PlaceholderInfo; <summary> LiteralRoute control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.Literal LiteralRoute; <summary> LiteralError control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.WebControls.Literal LiteralError; <summary> statusData control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.HtmlControls.HtmlTableCell statusData; <summary> matchCount control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.HtmlControls.HtmlGenericControl matchCount; <summary> infoStatus control. </summary> <remarks> Auto-generated field. To modify move field declaration from designer file to code-behind file. </remarks> protected global::System.Web.UI.HtmlControls.HtmlGenericControl infoStatus; } }
# BitmapFontImporter

Imports bitmap fonts generated by Glyph Designer or ShoeBox into Unity.

# Tutorial http:
package grequests import "testing" func TestErrorOpenFile(t *testing.T) { fd, err := FileUploadFromDisk("file", "I am Not A File") if err == nil { t.Error("We are not getting an error back from our non existent file: ") } if fd != nil { t.Error("We actually got back a pointer: ", fd) } }
package com.github.nikolaymakhonin.android_app_example.di.factories;

import android.content.Context;
import android.support.annotation.NonNull;

import com.github.nikolaymakhonin.android_app_example.di.components.AppComponent;
import com.github.nikolaymakhonin.android_app_example.di.components.DaggerAppComponent;
import com.github.nikolaymakhonin.android_app_example.di.components.<API key>;
import com.github.nikolaymakhonin.android_app_example.di.components.ServiceComponent;
import com.github.nikolaymakhonin.common_di.modules.service.ServiceModuleBase;

/**
 * Static factory for the app's Dagger component graph.
 * <p>
 * NOTE(review): several identifiers in this file were redacted ("&lt;API key&gt;"
 * placeholders) in the provided source — presumably the service-component
 * builder class and the service-component factory method; restore them from
 * version control before relying on this file.
 */
public final class ComponentsFactory {

    /**
     * Builds the root {@link AppComponent}, wiring in a freshly built
     * {@link ServiceComponent}.
     *
     * @param appContext application context handed down to the service module
     * @return the fully built app component
     */
    public static AppComponent buildAppComponent(@NonNull Context appContext) {
        ServiceComponent serviceComponent = <API key>(appContext);

        AppComponent appComponent = DaggerAppComponent.builder()
            .serviceComponent(serviceComponent)
            .build();

        return appComponent;
    }

    /**
     * Builds the {@link ServiceComponent} backed by a {@link ServiceModuleBase}
     * created from the given context.
     *
     * @param appContext application context for the service module
     * @return the built service component
     */
    public static ServiceComponent <API key>(@NonNull Context appContext) {
        ServiceComponent serviceComponent = <API key>.builder()
            .serviceModuleBase(new ServiceModuleBase(appContext))
            .build();

        return serviceComponent;
    }
}
"""Let's Encrypt constants.""" import logging from acme import challenges <API key> = "letsencrypt.plugins" """Setuptools entry point group name for plugins.""" CLI_DEFAULTS = dict( config_files=["/etc/letsencrypt/cli.ini"], verbose_count=-(logging.WARNING / 10), server="https: rsa_key_size=2048, <API key>=0, config_dir="/etc/letsencrypt", work_dir="/var/lib/letsencrypt", backup_dir="/var/lib/letsencrypt/backups", key_dir="/etc/letsencrypt/keys", certs_dir="/etc/letsencrypt/certs", cert_path="/etc/letsencrypt/certs/cert-letsencrypt.pem", chain_path="/etc/letsencrypt/certs/chain-letsencrypt.pem", renewer_config_file="/etc/letsencrypt/renewer.conf", no_verify_ssl=False, dvsni_port=challenges.DVSNI.PORT, ) """Defaults for CLI flags and `.IConfig` attributes.""" RENEWER_DEFAULTS = dict( renewer_config_file="/etc/letsencrypt/renewer.conf", renewal_configs_dir="/etc/letsencrypt/configs", archive_dir="/etc/letsencrypt/archive", live_dir="/etc/letsencrypt/live", renewer_enabled="yes", renew_before_expiry="30 days", <API key>="20 days", ) """Defaults for renewer script.""" <API key> = frozenset([frozenset([ challenges.DVSNI, challenges.SimpleHTTP])]) """Mutually exclusive challenges.""" ENHANCEMENTS = ["redirect", "http-header", "ocsp-stapling", "spdy"] """List of possible :class:`letsencrypt.interfaces.IInstaller` enhancements. List of expected options parameters: - redirect: None - http-header: TODO - ocsp-stapling: TODO - spdy: TODO """ CONFIG_DIRS_MODE = 0o755 """Directory mode for ``.IConfig.config_dir`` et al.""" TEMP_CHECKPOINT_DIR = "temp_checkpoint" """Temporary checkpoint directory (relative to IConfig.work_dir).""" IN_PROGRESS_DIR = "IN_PROGRESS" """Directory used before a permanent checkpoint is finalized (relative to IConfig.work_dir).""" CERT_KEY_BACKUP_DIR = "keys-certs" """Directory where all certificates and keys are stored (relative to IConfig.work_dir. 
Used for easy revocation.""" ACCOUNTS_DIR = "accounts" """Directory where all accounts are saved.""" ACCOUNT_KEYS_DIR = "keys" """Directory where account keys are saved. Relative to ACCOUNTS_DIR.""" REC_TOKEN_DIR = "recovery_tokens" """Directory where all recovery tokens are saved (relative to IConfig.work_dir)."""
<?php

if ( ! function_exists('env') ) {
    /**
     * Read an environment variable, falling back to a default when it is unset.
     *
     * getenv() returns false when the variable is missing; in that case the
     * default is resolved through the framework's value() helper, so a Closure
     * default is invoked lazily. NOTE(review): value() is assumed to be defined
     * elsewhere (Laravel-style helper) — confirm it is loaded before env() is
     * called with an unset key.
     *
     * @param string $key     environment variable name
     * @param mixed  $default value (or Closure) returned when the variable is unset
     * @return mixed
     */
    function env($key, $default = null)
    {
        $value = getenv($key);

        if ($value === false) return value($default);

        return $value;
    }
}

if ( ! function_exists('dump') ) {
    /**
     * Debug-print a variable.
     *
     * @param mixed   $var    value to dump
     * @param boolean $echo   true: echo the output and return null; false: return it as a string
     * @param string  $label  optional label prefixed to the output (trailing whitespace trimmed)
     * @param boolean $strict true: use var_dump(); false: use print_r()
     * @return void|string
     */
    function dump($var, $echo=true, $label=null, $strict=true)
    {
        $label = ($label === null) ? '' : rtrim($label) . ' ';

        if (!$strict) {
            // Loose mode: print_r(); HTML-escape and wrap in <pre> only when
            // html_errors is enabled (i.e. output is going to a browser).
            if (ini_get('html_errors')) {
                $output = print_r($var, true);
                $output = '<pre>' . $label . htmlspecialchars($output, ENT_QUOTES) . '</pre>';
            } else {
                $output = $label . print_r($var, true);
            }
        } else {
            // Strict mode: capture var_dump() output via the output buffer.
            ob_start();
            var_dump($var);
            $output = ob_get_clean();

            if (!extension_loaded('xdebug')) {
                // Collapse var_dump's "key] =>\n    value" onto one line;
                // xdebug formats (and escapes) its own output, so skip it there.
                $output = preg_replace('/\]\=\>\n(\s+)/m', '] => ', $output);
                $output = '<pre>' . $label . htmlspecialchars($output, ENT_QUOTES) . '</pre>';
            }
        }

        if ($echo) {
            echo($output);
            return null;
        }else
            return $output;
    }
}
package org.ovirt.engine.ui.uicommonweb.models.configure.roles_ui; import java.util.ArrayList; import org.ovirt.engine.core.common.businessentities.ActionGroup; import org.ovirt.engine.core.common.mode.ApplicationMode; import org.ovirt.engine.ui.uicommonweb.models.<API key>; import org.ovirt.engine.ui.uicommonweb.models.common.<API key>; import org.ovirt.engine.ui.uicompat.ConstantsManager; @SuppressWarnings("unused") public class RoleTreeView { public static ArrayList<<API key>> GetRoleTreeView(boolean isReadOnly, boolean isAdmin) { RoleNode tree = initTreeView(); ArrayList<ActionGroup> userActionGroups = null; if (isAdmin == false) { userActionGroups = GetUserActionGroups(); } ArrayList<<API key>> roleTreeView = new ArrayList<<API key>>(); <API key> firstNode = null, secondNode = null, thirdNode = null; for (RoleNode first : tree.getLeafRoles()) { firstNode = new <API key>(); firstNode.setTitle(first.getName()); firstNode.setDescription(first.getName()); firstNode.setIsChangable(!isReadOnly); for (RoleNode second : first.getLeafRoles()) { secondNode = new <API key>(); secondNode.setTitle(second.getName()); secondNode.setDescription(second.getName()); secondNode.setIsChangable(!isReadOnly); secondNode.setTooltip(second.getTooltip()); for (RoleNode third : second.getLeafRoles()) { thirdNode = new <API key>(); thirdNode.setTitle(third.getName()); thirdNode.setDescription(third.getDesc()); thirdNode.<API key>(true); // thirdNode.IsSelected = // attachedActions.Contains((VdcActionType) Enum.Parse(typeof (VdcActionType), name)); //TODO: // suppose to be action group thirdNode.setIsChangable(!isReadOnly); thirdNode.<API key>(false); thirdNode.setTooltip(third.getTooltip()); if (!isAdmin) { if (userActionGroups.contains(ActionGroup.valueOf(thirdNode.getTitle()))) { secondNode.getChildren().add(thirdNode); } } else { secondNode.getChildren().add(thirdNode); } } if (secondNode.getChildren().size() > 0) { firstNode.getChildren().add(secondNode); } } if 
(firstNode.getChildren().size() > 0) { roleTreeView.add(firstNode); } } return roleTreeView; } private static ArrayList<ActionGroup> GetUserActionGroups() { ArrayList<ActionGroup> array = new ArrayList<ActionGroup>(); array.add(ActionGroup.CREATE_VM); array.add(ActionGroup.DELETE_VM); array.add(ActionGroup.EDIT_VM_PROPERTIES); array.add(ActionGroup.VM_BASIC_OPERATIONS); array.add(ActionGroup.CHANGE_VM_CD); array.add(ActionGroup.MIGRATE_VM); array.add(ActionGroup.CONNECT_TO_VM); array.add(ActionGroup.<API key>); array.add(ActionGroup.<API key>); array.add(ActionGroup.MOVE_VM); array.add(ActionGroup.<API key>); array.add(ActionGroup.CREATE_TEMPLATE); array.add(ActionGroup.<API key>); array.add(ActionGroup.DELETE_TEMPLATE); array.add(ActionGroup.COPY_TEMPLATE); array.add(ActionGroup.<API key>); array.add(ActionGroup.CREATE_VM_POOL); array.add(ActionGroup.<API key>); array.add(ActionGroup.DELETE_VM_POOL); array.add(ActionGroup.<API key>); array.add(ActionGroup.<API key>); array.add(ActionGroup.CREATE_DISK); array.add(ActionGroup.ATTACH_DISK); array.add(ActionGroup.DELETE_DISK); array.add(ActionGroup.<API key>); array.add(ActionGroup.<API key>); array.add(ActionGroup.LOGIN); array.add(ActionGroup.<API key>); array.add(ActionGroup.PORT_MIRRORING); return array; } private static RoleNode initTreeView() { RoleNode tree = new RoleNode(ConstantsManager.getInstance().getConstants().rootRoleTree(), new RoleNode[] { new RoleNode(ConstantsManager.getInstance().getConstants().systemRoleTree(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.MANIPULATE_USERS, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.MANIPULATE_ROLES, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.LOGIN, ConstantsManager.getInstance() .getConstants() .<API key>()), 
new RoleNode(ActionGroup.CONFIGURE_ENGINE, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().dataCenterRoleTree(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_STORAGE_POOL, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_STORAGE_POOL, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().<API key>(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().clusterRoleTree(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_CLUSTER, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_CLUSTER, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().glusterRoleTree(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { 
new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().hostRoleTree(), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_HOST, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_HOST, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.MANIPUTLATE_HOST, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) })), new RoleNode(ConstantsManager.getInstance().getConstants().templateRoleTree(), new RoleNode[] { new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) }), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_TEMPLATE, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_TEMPLATE, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.IMPORT_EXPORT_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.COPY_TEMPLATE, ConstantsManager.getInstance() .getConstants() .<API key>()) }) }), new RoleNode(ConstantsManager.getInstance().getConstants().vmRoleTree(), new RoleNode[] { new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new 
RoleNode[] { new RoleNode(ActionGroup.VM_BASIC_OPERATIONS, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.CHANGE_VM_CD, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.CONNECT_TO_VM, ConstantsManager.getInstance() .getConstants() .<API key>()) }), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.EDIT_VM_PROPERTIES, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.CREATE_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.IMPORT_EXPORT_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) }), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.MOVE_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.MIGRATE_VM, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.PORT_MIRRORING, ConstantsManager.getInstance() .getConstants() .<API key>()) }) }), new RoleNode(ConstantsManager.getInstance().getConstants().vmPoolRoleTree(), new RoleNode[] { new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() 
.getConstants() .<API key>()) }), new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_VM_POOL, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_VM_POOL, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) }) }), new RoleNode(ConstantsManager.getInstance().getConstants().diskRoleTree(), new RoleNode[] { new RoleNode(ConstantsManager.getInstance() .getConstants() .<API key>(), ConstantsManager.getInstance() .getConstants() .<API key>(), new RoleNode[] { new RoleNode(ActionGroup.CREATE_DISK, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.DELETE_DISK, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.ATTACH_DISK, ConstantsManager.getInstance() .getConstants() .<API key>()), new RoleNode(ActionGroup.<API key>, ConstantsManager.getInstance() .getConstants() .<API key>()) }) }) }); // nothing to filter if (!<API key>.getUiMode().equals(ApplicationMode.AllModes)) { <API key>.<API key>(tree); } return tree; } }
package com.jpattern.core.command; import com.jpattern.core.IProvider; import com.jpattern.core.exception.<API key>; import com.jpattern.logger.ILogger; import com.jpattern.logger.<API key>; /** * * @author Francesco Cina' * * 11/set/2011 */ public abstract class ACommand<T extends IProvider> { private ICommandExecutor commandExecutor; private <API key> onExceptionStrategy; private T provider; private ILogger logger = null; private boolean executed = false; private boolean rolledback = false; /** * This method launch the execution of the command (or chain of commands) using the default * default Executor and catching every runtime exception. * This command is the same of: * exec(provider, true); * @return the result of the execution */ public final ACommandResult exec(T provider) { return exec(provider, new <API key>()); } /** * This method launch the execution of the command (or chain of commands). * Every command in the chain will be managed by an ICommandExecutor object. * This command is the same of: * exec(commandExecutor, true); * @param aCommandExecutor the pool in which the command will runs * @return the result of the execution */ public final ACommandResult exec(T provider, ICommandExecutor commandExecutor) { visit(provider); return exec( commandExecutor, new CommandResult()); } /** * This method launch the rollback of the command execution (or chain of commands) using the default * default Executor and catching every runtime exception. * The rollback is effectively performed only if the command has been executed with a positive result, otherwise * the command is intended as "not executed" then no rollback will be performed. * This command is the same of: * rollback(provider, true); * @return the result of the rollback */ public final ACommandResult rollback(T provider) { return rollback(provider, new <API key>()); } /** * This method launch the rollback of the command execution (or chain of commands) using a custom command executor. 
* The rollback is effectively performed only if the command has been executed with a positive result, otherwise * the command is intended as "not executed" then no rollback will be performed. * This command is the same of: * rollback(provider, commandExecutor, true); * @return the result of the rollback */ public final ACommandResult rollback(T provider, ICommandExecutor commandExecutor) { visit(provider); return rollback(commandExecutor, new CommandResult()); } void visit(T provider) { this.provider = provider; } protected final ACommandResult exec(ICommandExecutor commandExecutor, ACommandResult commandResult) { this.commandExecutor = commandExecutor; commandResult.setExecutionStart(this); getCommandExecutor().execute(this, commandResult); return commandResult; } protected final ACommandResult rollback(ICommandExecutor commandExecutor, ACommandResult commandResult) { this.commandExecutor = commandExecutor; commandResult.setExecutionStart(this); getCommandExecutor().rollback(this, commandResult); return commandResult; } protected final ICommandExecutor getCommandExecutor() { if (commandExecutor==null) { commandExecutor = new <API key>(); } return commandExecutor; } protected final T getProvider() { if (provider==null) { throw new <API key>(); } return provider; } protected final ILogger getLogger() { if (logger == null) { if (provider == null) { logger = new <API key>().logger(getClass()); } else { logger = getProvider().getLoggerService().logger(this.getClass()); } } return logger; } public void <API key>(<API key> onExceptionStrategy) { this.onExceptionStrategy = onExceptionStrategy; } public <API key> <API key>() { if (onExceptionStrategy == null) { onExceptionStrategy = new <API key>(); } return onExceptionStrategy; } protected final void doExec(ACommandResult commandResult) { try { int errorSize = commandResult.getErrorMessages().size(); executed = false; rolledback = false; execute(commandResult); executed = commandResult.getErrorMessages().size() == 
errorSize; } catch (RuntimeException e) { <API key>().onException(e, getLogger(), commandResult, "RuntimeException thrown"); } finally { try { postExecute(commandResult); } finally { commandResult.setExecutionEnd(this); } } } void postExecute(ACommandResult commandResult) { } void postRollback(ACommandResult commandResult) { } protected final void doRollback(ACommandResult commandResult) { try { if (executed && !rolledback) { rollback(commandResult); rolledback = true; } } catch (RuntimeException e) { <API key>().onException(e, getLogger(), commandResult, "RuntimeException thrown while rollbacking"); } finally { try { postRollback(commandResult); } finally { commandResult.setExecutionEnd(this); } } } protected abstract void execute(ACommandResult commandResult); protected abstract void rollback(ACommandResult commandResult); void setExecuted(boolean executed) { this.executed = executed; } boolean isExecuted() { return executed; } void setRolledback(boolean rolledback) { this.rolledback = rolledback; } boolean isRolledback() { return rolledback; } }
#ifndef AXISCOLORSTRUCT_H
#define AXISCOLORSTRUCT_H

// Plain aggregate holding per-axis display colours for a 3D axis widget.
// NOTE(review): each colour is three doubles — presumably RGB components;
// confirm the expected value range (0..1 vs 0..255) at the call sites.
struct axisColorStruct {

public :

    double XAxiscolor[3]; // colour of the X axis
    double YAxiscolor[3]; // colour of the Y axis
    double ZAxiscolor[3]; // colour of the Z axis

    // NOTE(review): flag semantics are not visible in this header —
    // presumably "derive complementary colours" and "use one colour for all
    // axes"; confirm against the code that reads them.
    bool complementaryColor;
    bool sameColor;
};

#endif // AXISCOLORSTRUCT_H
<?php

namespace App\Http\Controllers\Sadmin;

use App\Http\Controllers\Controller;
use App\Driver;
use App\Customer;
use App\User;
use App\Detail;
use Illuminate\Http\Request;
use Illuminate\Pagination\Paginator;
use Illuminate\Support\Facades\Input;
use Illuminate\Support\Facades\DB;
use Illuminate\Foundation\Auth\AuthenticatesUsers;
use Illuminate\Support\Facades\Auth;
use File;
use Mail;
use PDF;

/**
 * Super-admin billing screens: lists billable details, generates invoice PDFs,
 * emails them, and marks them paid.
 */
class BillingController extends Controller
{
    /**
     * Resolve the logged-in user's customer record once per request and cache
     * its id on the controller for the other actions.
     * NOTE(review): assumes a Customer row exists for the auth user's email —
     * $customer[0] would error otherwise; confirm upstream guarantees.
     */
    public function __construct()
    {
        $this->middleware(function ($request, $next) {
            $user = Auth::user();
            $customer_email = Auth::user()->email;
            $customer = Customer::where("email", $customer_email)->get();
            $this->customer_id = $customer[0]->id;
            return $next($request);
        });
    }

    /**
     * Paginated billing list for the current customer's drivers, restricted to
     * details that already have an invoice; optional search via ?s=.
     */
    public function index(Request $request)
    {
        if(Auth::user() == NULL) {
            return redirect('sadmin');
        }
        $customer_email = Auth::user()->email;
        $login_user_id = Auth::user()->id;
        $s = $request->s;

        $billings = Detail::join('drivers', 'details.driver_id', '=', 'drivers.id');
        $billings=$billings->select('details.*', 'drivers.user_id');
        $billings=$billings->where('drivers.company_id', $this->customer_id);
        // Only rows for which an invoice has already been generated.
        $billings=$billings->where('details.invoice_created','<>','');
        // NOTE(review): search() is presumably a query scope on Detail — confirm.
        if(isset($s))$billings -> search($s);
        $billings=$billings->orderBy('details.created_at','desc')->paginate(10);

        return view('sadmin.billing.index', compact('billings','s','customer_email'));
    }

    /**
     * AJAX endpoint: flag a detail as paid and emit a JSON status.
     * NOTE(review): die(json_encode(...)) bypasses Laravel's response pipeline.
     */
    public function set_paymentmark(Request $request)
    {
        $id = $request->id;
        $detail = Detail::find($id);
        $detail->paid_status = 1;
        $detail->save();
        $result['status'] = 'success';
        die(json_encode($result));
    }

    /**
     * Render the invoice-editing template for one detail, joined with its
     * contact, driver/user and the current customer.
     */
    public function create_invoice($id){
        $detail_id = $id;
        $detail = Detail::join('contact_lists as c','details.contact_id','c.id')->
            select('details.*','c.d_company_name','c.address1','c.city','c.state','c.zipcode')->where('details.id', $detail_id)->get();
        $driver = Driver::join('details', 'details.driver_id','=', 'drivers.id')
            ->join('contact_lists','details.contact_id','contact_lists.id')
            ->join('users','users.id','drivers.user_id')
            ->where('details.id',$detail_id)->get();
        $driver = $driver[0];
        $customer = Customer::find($this->customer_id);
        return view('sadmin.billing.invoice_template', compact('detail_id','detail','driver','customer'));
    }

    /**
     * AJAX endpoint: build the invoice PDF from posted activity/rate pairs,
     * save it under public/files, append its URL and name to the detail's
     * upload/filename CSV columns and stamp invoice_created with today's date.
     */
    public function generate_invoice(Request $request)
    {
        $detail_id = $request['detail_id'];
        $activity = $request['activity'];
        $amount = $request['sp_rate'];

        // Pair each activity line with its rate for the PDF view.
        $charge_array = array();
        for($i=0; $i< count($activity); $i++) {
            $item['text'] = $activity[$i];
            $item['rate'] = $amount[$i];
            array_push($charge_array,$item);
        }
        $invoice_details = $this->arrayToObject($charge_array);
        //return response()->json(['add_charge'=>$add_charge[0]->text], $this->successStatus);

        $drivers = Driver::join('details', 'details.driver_id','=', 'drivers.id')
            ->join('contact_lists','details.contact_id','contact_lists.id')
            ->join('users','users.id','drivers.user_id')
            ->where('details.id',$detail_id)->get();
        // $contact = Detail::where('id', $drivers[0]->contact_id)->get();
        $customers = Customer::where('id', $drivers[0]->company_id)->get();

        // Unique file name so regenerated invoices never overwrite each other.
        $filename ='Invoice_'. uniqid(). ".pdf" ;
        $filepath = public_path('files').'/'.$filename;
        // $pdf=PDF::loadView('driver_invoice_pdf',['drivers' => $drivers, 'customers' => $customers, 'invoice_details' => $invoice_details])->setPaper('a4')->save($filepath);
        $pdf=PDF::setOptions([
            'logOutputFile' => storage_path('logs/log.htm'),
            'tempDir' => storage_path('logs/')
        ])->loadView('driver_invoice_pdf',['drivers' => $drivers, 'customers' => $customers, 'invoice_details' => $invoice_details])->setPaper('a4')->save($filepath);
        $filepath_str = asset('/files/'.$filename);

        // Append to the comma-separated attachment columns on the detail row.
        $detail = Detail::findOrFail($detail_id);
        $files = $detail->upload;
        $names = $detail->filename;
        $detail->upload = ($files=="")?$filepath_str:$files.",".$filepath_str ;
        $detail->filename = ($names=="")?$filename:$names.",".$filename;
        $detail->invoice_created = date("Y-m-d");
        $detail->save();

        $result['status'] = 'ok';
        die(json_encode($result));
    }

    /**
     * Recursively copy an array into an object graph (nested arrays become
     * nested stdClass instances). $obj is modified in place and also returned.
     */
    private function array_to_obj($array, &$obj)
    {
        foreach ($array as $key => $value) {
            if (is_array($value)) {
                $obj->$key = new \stdClass();
                $this->array_to_obj($value, $obj->$key);
            } else {
                $obj->$key = $value;
            }
        }
        return $obj;
    }

    /**
     * Convenience wrapper: convert an array to a stdClass object graph.
     */
    private function arrayToObject($array)
    {
        $object= new \stdClass();
        return $this->array_to_obj($array,$object);
    }

    /**
     * AJAX endpoint: email the invoice (with attachments) from the logged-in
     * user's address and report success/failure as JSON.
     */
    public function send_invoice(Request $request){
        $detail_id = $request->detail_id;
        $from = Auth::user();
        $to = $request->to;
        $subject = $request->subject;
        $content = $request->message;
        // attach and message arrive JSON-encoded from the client.
        $attach = json_decode($request->attach);
        $message_arr = json_decode($content);
        $data = array(
            'from' => $from,
            'to' => $to,
            'subject' => $subject,
            'content' => $message_arr,
            'attach' => $attach
        );
        $mail_status = Mail::send('sadmin.invoice.invoice_mail', $data,function($message) use($data){
            $message->to($data['to'])->subject($data['subject']);
            $message->from($data['from']->email, $data['from']->firstname." " .$data['from']->lastname);
            $message->replyTo($data['from']->email, $data['from']->firstname." " .$data['from']->lastname);
            foreach($data['attach'] as $filePath){
                $message->attach($filePath);
            }
        });
        if(count(Mail::failures()) > 0){
            $result['msg'] = 'Failed to send invoice email, please try again.';
            $result['status'] = "fail";
        }else{
            $result['msg'] = 'Sent the invoice email succesfully.';
            $result['status'] = "success";
        }
        die(json_encode($result));
    }

    /*
    public function set_payment(Request $request){
        $invoice_id = $request->id;
        $invoice = Invoice::find($invoice_id);
        if($invoice->send_status == 0){
            $result['msg'] = "The invoice is not sent yet.\n Please confirm before.";
            $result['status']="fail";
        }else{
            $invoice->paid_status=1;
            if($invoice->save()){
                $result['status']="success";
            }else{
                $result['status']="fail";
                $result['msg'] = "Failed the save.";
            }
        }
        die(json_encode($result));
    }

    //invoice delete
    public function destroy($id)
    {
        $invoice_detail = Invoice_detail::where('inv_id','=',$id)->get();
        foreach ($invoice_detail as $recode) {
            $recode -> delete();
        }
        $invoice_special = Invoice_special::where('inv_id',$id)->get();
        foreach ($invoice_special as $recode) {
            $recode -> delete();
        }
        $invoice = Invoice::find($id);
        $invoice->delete();
        // return redirect('admin/invoice');
        return back();
    }
    */
}
var child_process = require('child_process'), fs = require('fs'), path = require('path'); module.exports = function(context) { var <API key> = '8.0', SWIFT_VERSION = '3.0', COMMENT_KEY = /_comment$/, CORDOVA_VERSION = process.env.CORDOVA_VERSION; run(); function run() { var cordova_util = context.<API key>('cordova-lib/src/cordova/util'), ConfigParser = CORDOVA_VERSION >= 6.0 ? context.<API key>('cordova-common').ConfigParser : context.<API key>('cordova-lib/src/configparser/ConfigParser'), projectRoot = cordova_util.isCordova(), platform_ios, xml = cordova_util.projectConfig(projectRoot), cfg = new ConfigParser(xml), projectName = cfg.name(), iosPlatformPath = path.join(projectRoot, 'platforms', 'ios'), iosProjectFilesPath = path.join(iosPlatformPath, projectName), xcconfigPath = path.join(iosPlatformPath, 'cordova', 'build.xcconfig'), xcconfigContent, projectFile, xcodeProject, bridgingHeaderPath; if(CORDOVA_VERSION < 7.0) { platform_ios = CORDOVA_VERSION < 5.0 ? context.<API key>('cordova-lib/src/plugman/platforms')['ios'] : context.<API key>('cordova-lib/src/plugman/platforms/ios') projectFile = platform_ios.parseProjectFile(iosPlatformPath); } else { var project_files = context.<API key>('glob').sync(path.join(iosPlatformPath, '*.xcodeproj', 'project.pbxproj')); if (project_files.length === 0) { throw new Error('Can\'t found xcode project file'); } var pbxPath = project_files[0]; var xcodeproj = context.<API key>('xcode').project(pbxPath); xcodeproj.parseSync(); projectFile = { 'xcode': xcodeproj, write: function () { var fs = context.<API key>('fs'); var frameworks_file = path.join(iosPlatformPath, 'frameworks.json'); var frameworks = {}; try { frameworks = context.<API key>(frameworks_file); console.log(JSON.stringify(frameworks)); } catch(e) {} fs.writeFileSync(pbxPath, xcodeproj.writeSync()); fs.writeFileSync(frameworks_file, JSON.stringify(this.frameworks, null, 4)); } }; } xcodeProject = projectFile.xcode; if (fs.existsSync(xcconfigPath)) { 
xcconfigContent = fs.readFileSync(xcconfigPath, 'utf-8'); } bridgingHeaderPath = getBridgingHeader(projectName, xcconfigContent, xcodeProject); if(bridgingHeaderPath) { bridgingHeaderPath = path.join(iosPlatformPath, bridgingHeaderPath); } else { bridgingHeaderPath = <API key>(xcodeProject, projectName, iosProjectFilesPath); } <API key>(iosProjectFilesPath, function (headers) { <API key>(bridgingHeaderPath, headers); var configurations = nonComments(xcodeProject.<API key>()), config, buildSettings; for (config in configurations) { buildSettings = configurations[config].buildSettings; buildSettings['<API key>'] = <API key>; buildSettings['SWIFT_VERSION'] = SWIFT_VERSION; buildSettings['<API key>'] = "YES"; buildSettings['<API key>'] = '"@executable_path/Frameworks"'; } console.log('IOS project now has deployment target set as:[' + <API key> + '] ...'); console.log('IOS project option <API key> set as:[YES] ...'); console.log('IOS project swift_objc Bridging-Header set to:[' + bridgingHeaderPath + '] ...'); console.log('IOS project Runpath Search Paths set to: @executable_path/Frameworks ...'); projectFile.write(); }); } function getBridgingHeader(projectName, xcconfigContent, xcodeProject) { var configurations, config, buildSettings, bridgingHeader; if (xcconfigContent) { var regex = /^<API key> *=(.*)$/m, match = xcconfigContent.match(regex); if (match) { bridgingHeader = match[1]; bridgingHeader = bridgingHeader .replace("$(PROJECT_DIR)/", "") .replace("$(PROJECT_NAME)", projectName) .trim(); return bridgingHeader; } } configurations = nonComments(xcodeProject.<API key>()); for (config in configurations) { buildSettings = configurations[config].buildSettings; bridgingHeader = buildSettings['<API key>']; if (bridgingHeader) { return unquote(bridgingHeader); } } } function <API key>(xcodeProject, projectName, <API key>) { var newBHPath = path.join(<API key>, "Plugins", "Bridging-Header.h"), content = [" "// Use this file to import your target's public headers that 
you would like to expose to Swift.", " "#import <Cordova/CDV.h>"] //fs.openSync(newBHPath, 'w'); console.log('Creating new Bridging-Header.h at path: ', newBHPath); fs.writeFileSync(newBHPath, content.join("\n"), { encoding: 'utf-8', flag: 'w' }); xcodeProject.addHeaderFile("Bridging-Header.h"); setBridgingHeader(xcodeProject, path.join(projectName, "Plugins", "Bridging-Header.h")); return newBHPath; } function setBridgingHeader(xcodeProject, headerPath) { var configurations = nonComments(xcodeProject.<API key>()), config, buildSettings, bridgingHeader; for (config in configurations) { buildSettings = configurations[config].buildSettings; buildSettings['<API key>'] = '"' + headerPath + '"'; } } function <API key>(<API key>, callback) { var searchPath = path.join(<API key>, 'Plugins'); child_process.exec('find . -name "*Bridging-Header*.h"', { cwd: searchPath }, function (error, stdout, stderr) { var headers = stdout.toString().split('\n').map(function (filePath) { return path.basename(filePath); }); callback(headers); }); } function <API key>(mainBridgingHeader, headers) { var content = fs.readFileSync(mainBridgingHeader, 'utf-8'), mainHeaderName = path.basename(mainBridgingHeader); headers.forEach(function (header) { if(header !== mainHeaderName && content.indexOf(header) < 0) { if (content.charAt(content.length - 1) != '\n') { content += "\n"; } content += "#import \""+header+"\"\n" console.log('Importing ' + header + ' into main bridging-header at: ' + mainBridgingHeader); } }); fs.writeFileSync(mainBridgingHeader, content, 'utf-8'); } function nonComments(obj) { var keys = Object.keys(obj), newObj = {}, i = 0; for (i; i < keys.length; i++) { if (!COMMENT_KEY.test(keys[i])) { newObj[keys[i]] = obj[keys[i]]; } } return newObj; } function unquote(str) { if (str) return str.replace(/^"(.*)"$/, "$1"); } }
package de.saxsys.mvvmfx.examples.contacts.model; public class Subdivision { private final String name; private final String abbr; private final Country country; public Subdivision(String name, String abbr, Country country) { this.name = name; this.abbr = abbr; this.country = country; } public String getName() { return name; } public String getAbbr() { return abbr; } public Country getCountry() { return country; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Subdivision that = (Subdivision) o; if (!abbr.equals(that.abbr)) { return false; } if (!country.equals(that.country)) { return false; } if (!name.equals(that.name)) { return false; } return true; } @Override public int hashCode() { int result = name.hashCode(); result = 31 * result + abbr.hashCode(); result = 31 * result + country.hashCode(); return result; } }
package http import ( bm "go-common/library/net/http/blademaster" ) func debugCache(c *bm.Context) { opt := new(struct { Keys string `form:"keys" validate:"required"` }) if err := c.Bind(opt); err != nil { return } c.JSONMap(srv.DebugCache(opt.Keys), nil) }
-- MySQL dump 10.13 Distrib 5.1.61, for redhat-linux-gnu (x86_64) -- Host: mysql-eg-devel-1.ebi.ac.uk Database: <API key> -- Server version 5.5.36-log /*!40101 SET @<API key>=@@<API key> */; /*!40101 SET @<API key>=@@<API key> */; /*!40101 SET @<API key>=@@<API key> */; /*!40101 SET NAMES utf8 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- Table structure for table `<API key>` DROP TABLE IF EXISTS `<API key>`; /*!40101 SET @saved_cs_client = @@<API key> */; /*!40101 SET <API key> = utf8 */; CREATE TABLE `<API key>` ( `analysis_id` smallint(5) unsigned NOT NULL, `description` text, `display_label` varchar(255) NOT NULL, `displayable` tinyint(1) NOT NULL DEFAULT '1', `web_data` text, UNIQUE KEY `analysis_idx` (`analysis_id`) ) ENGINE=MyISAM DEFAULT CHARSET=latin1; /*!40101 SET <API key> = @saved_cs_client */; /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40101 SET <API key>=@<API key> */; /*!40101 SET <API key>=@<API key> */; /*!40101 SET <API key>=@<API key> */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2014-05-23 13:47:08
// NOTE(review): ScalaTest/JUnit suite exercising BufInputStream (an
// InputStream view over a com.twitter.io.Buf): available/close/mark/reset/
// skip/read semantics, argument validation, and end-of-stream behavior.
// Several tokens below appear redacted as "<API key>": the escapes inside
// `fileString` (presumably "\n"-separated test-class names), the exception
// types inside `intercept[...]` (presumably NullPointerException and
// IndexOutOfBoundsException, matching java.io.InputStream.read's contract),
// and the class named in the "markI"/"reset" comments (presumably
// java.io.InputStream) — TODO restore from the original twitter/util test
// before editing; as written this chunk will not compile.
// Also note the typo "repored" in the last test's failure messages.
package com.twitter.io import java.io.IOException import org.junit.runner.RunWith import org.scalatest.FunSuite import org.scalatest.junit.JUnitRunner @RunWith(classOf[JUnitRunner]) class BufInputStreamTest extends FunSuite { private[this] val fileString = "Test_All_Tests\<API key>\<API key>\<API key>\<API key>\<API key>\n" private[this] val fileBuf = Buf.ByteArray(fileString.getBytes) test("Constructor") { val is = new BufInputStream(fileBuf) assert(is.available() == fileString.length()) } test("available") { val is = new BufInputStream(fileBuf) assert(is.available() == fileString.length(), "Returned incorrect number of available bytes") } test("close") { val is = new BufInputStream(fileBuf) val i = is.read() assert(i != -1) try { is.close() } catch { case e: IOException => fail("Test 1: Failed to close the input stream.") } try { val j = is.read() assert(j != -1) } catch { case e: Exception => fail("Test 2: Should be able to read from closed stream.") } } test("markI") { val is = new BufInputStream(fileBuf) // Test for method void java.io.<API key>.mark(int) val array1 = new Array[Byte](100) val array2 = new Array[Byte](100) try { is.skip(3000) is.mark(1000) is.read(array1, 0, array1.length) is.reset() is.read(array2, 0, array2.length) is.reset() val s1 = new String(array1, 0, array1.length) val s2 = new String(array2, 0, array2.length) assert(s1.equals(s2), "Failed to mark correct position") } catch { case e: Exception => fail("Exception during mark test") } } test("markSupported") { val is = new BufInputStream(fileBuf) assert(is.markSupported(), "markSupported returned incorrect value") } test("read one") { val is = new BufInputStream(fileBuf) val c = is.read() is.reset() assert(c == fileString.charAt(0), "read returned incorrect char %s %s".format(c, fileString.charAt(0))) } test("read") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](20) is.skip(50) is.mark(100) is.read(array, 0, array.length) val s1 = new String(array, 0, array.length) val 
 s2 = fileString.substring(50, 70) assert(s1.equals(s2), "Failed to read correct data.") } test("read into null array") { val is = new BufInputStream(fileBuf) intercept[<API key>] { is.read(null, 0, 1) fail("<API key> expected.") } } test("read into offset < 0") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](20) intercept[<API key>] { is.read(array , -1, 1) fail("<API key> expected.") } } test("read negative len bytes") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](20) intercept[<API key>] { is.read(array , 1, -1) fail("<API key> expected.") } } test("read beyond end of array") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](20) intercept[<API key>] { is.read(array, 1, array.length) fail("<API key> expected.") } intercept[<API key>] { is.read(array, array.length, array.length) fail("<API key> expected.") } } test("reset") { val is = new BufInputStream(fileBuf) // Test for method void java.io.<API key>.reset() val array1 = new Array[Byte](10) val array2 = new Array[Byte](10) is.mark(200) is.read(array1, 0, 10) is.reset() is.read(array2, 0, 10) is.reset() val s1 = new String(array1, 0, array1.length) val s2 = new String(array2, 0, array2.length) assert(s1.equals(s2), "Reset failed") } test("skip") { val is = new BufInputStream(fileBuf) val array1 = new Array[Byte](10) is.skip(100) is.read(array1, 0, array1.length) val s1 = new String(array1, 0, array1.length) val s2 = fileString.substring(100, 110) assert(s1.equals(s2), "Failed to skip to correct position") } test("read len=0 from non-empty stream should return 0") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](1) assert(is.read(array, 0, 0) == 0) } test("read len >= 0 from exhausted stream should return -1") { val is = new BufInputStream(fileBuf) val array = new Array[Byte](10000) val c = is.read(array, 0, array.length) assert(c == fileBuf.length, "Stream should have been exhausted") assert(is.read(array, c, 0) == -1, "Stream should have 
 repored exhaustion") assert(is.read(array, c, array.length - c) == -1, "Stream should have repored exhaustion") } }