code
stringlengths
4
1.01M
#include <cstdio>
#include <vector>
using namespace std;

// Euler's totient: number of integers in [1, n] coprime to n.
// Trial division over primes p | n applies the factor (1 - 1/p).
long long phi(long long n) {
    long long result = n;
    for (long long p = 2; p * p <= n; ++p) {
        if (n % p != 0) continue;
        result -= result / p;          // multiply result by (1 - 1/p)
        while (n % p == 0) n /= p;     // strip this prime completely
    }
    if (n > 1) result -= result / n;   // one prime factor > sqrt(n) may remain
    return result;
}

// All positive divisors of n, discovered in O(sqrt(n)) as pairs (i, n/i).
vector<long long> factorize(long long n) {
    vector<long long> divisors;
    for (long long i = 1; i * i <= n; ++i) {
        if (n % i != 0) continue;
        divisors.push_back(i);
        if (i * i != n) divisors.push_back(n / i);  // avoid duplicating sqrt(n)
    }
    return divisors;
}

// For each N read from stdin, print sum over divisors d of N of d * phi(N/d).
int main() {
    long long N;
    while (scanf("%lld", &N) != EOF) {
        long long total = 0;
        for (long long d : factorize(N)) {
            total += d * phi(N / d);
        }
        printf("%lld\n", total);
    }
    return 0;
}
""" Consider this game: Write 8 blanks on a sheet of paper. Randomly pick a digit 0-9. After seeing the digit, choose one of the 8 blanks to place that digit in. Randomly choose another digit (with replacement) and then choose one of the 7 remaining blanks to place it in. Repeat until you've filled all 8 blanks. You win if the 8 digits written down are in order from smallest to largest. Write a program that plays this game by itself and determines whether it won or not. Run it 1 million times and post your probability of winning. Assigning digits to blanks randomly lets you win about 0.02% of the time. Here's a python script that wins about 10.3% of the time. Can you do better? import random def trial(): indices = range(8) # remaining unassigned indices s = [None] * 8 # the digits in their assigned places while indices: d = random.randint(0,9) # choose a random digit index = indices[int(d*len(indices)/10)] # assign it an index s[index] = str(d) indices.remove(index) return s == sorted(s) print sum(trial() for _ in range(1000000)) thanks to cosmologicon for the challenge at /r/dailyprogrammer_ideas .. link [http://www.reddit.com/r/dailyprogrammer_ideas/comments/s30be/intermediate_digitassigning_game/] """ import random import itertools def que_sort(data): # print(data) return all(b >= a for a, b in zip(data, itertools.islice(data, 1, None))) TRIALS = 1 win = 0 for a in range(TRIALS): l = [None] * 8 p = list(range(8)) while p: d = random.randint(0,9) # i = random.choice(p) i = int(d * (len(p)) / 10) print(p[i]) l[p[i]] = d p.pop(i) print(l) if que_sort(l): win += 1 print('{}/{} - {}%'.format(win, TRIALS, win/TRIALS*100))
当然,对于这个赌约,我好不容易才勉强让自己“置身事外”,但我觉得它的确印证了我的观点。 在追求我们官方的“符号编程(iconic programming)”之余,这样浑水摸鱼的确有趣,当时我觉得这就是全部了。 然而出乎我意料的是,几天后丹·英戈尔斯(Dan Ingalls)在NOVA上向我展示了这个成果。 一切都由代码写就(他用的是[(BASIC)](http://baike.baidu.com/item/BASIC%E8%AF%AD%E8%A8%80)!),他还添加了许多细节,诸如[记号扫描处理(token scanner)](http://baike.baidu.com/item/%E6%89%AB%E6%8F%8F%E5%A4%84%E7%90%86)、列表工具(list maker)等,并且一切都能顺利运行。 就如同他喜欢说的那样:“即做即现(You just do it and it's done)。”
<?php

namespace Cs278\Mktemp\Exception;

/**
 * Thrown when a temporary file or directory could not be created.
 *
 * The two factory methods shared an identical body apart from the word
 * "file"/"directory"; that duplication is now folded into one private
 * helper so the message format lives in a single place.
 */
final class CreateFailedException extends \RuntimeException
{
    /**
     * @param string|null $dir Target directory, or falsy for the cwd.
     * @return self
     */
    public static function failedFile($dir)
    {
        return self::failed('file', $dir);
    }

    /**
     * @param string|null $dir Target directory, or falsy for the cwd.
     * @return self
     */
    public static function failedDir($dir)
    {
        return self::failed('directory', $dir);
    }

    /**
     * Builds the exception with a normalised directory description.
     *
     * @param string      $what 'file' or 'directory'
     * @param string|null $dir
     * @return self
     */
    private static function failed($what, $dir)
    {
        // Trailing separators are stripped; a falsy $dir means the current
        // working directory, reported as cwd[/actual/path].
        $dir = $dir ? rtrim($dir, DIRECTORY_SEPARATOR) : sprintf('cwd[%s]', getcwd());

        return new self(sprintf(
            'Failed to create temporary %s in `%s`',
            $what,
            $dir
        ));
    }
}
/*
    ChibiOS/RT - Copyright (C) 2006,2007,2008,2009,2010,
                 2011,2012,2013 Giovanni Di Sirio.

    This file is part of ChibiOS/RT.

    ChibiOS/RT is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 3 of the License, or
    (at your option) any later version.

    ChibiOS/RT is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

                                      ---

    A special exception to the GPL can be applied should you wish to distribute
    a combined work that includes ChibiOS/RT, without being obliged to provide
    the source code for any proprietary components. See the file exception.txt
    for full details of how and when the exception can be applied.
*/

/* Board-specific "wait for interrupt" hook used by the kernel idle loop. */
#ifndef _WFI_H_
#define _WFI_H_

#include "../../../../../boards/V8/board.h"

/* The board header may provide its own port_wait_for_interrupt; only define
 * the default when it has not been overridden. */
#ifndef port_wait_for_interrupt
#if ENABLE_WFI_IDLE != 0
/* Writes AT91C_PMC_PCK to the PMC System Clock Disable Register — presumably
 * this gates the processor clock until the next interrupt wakes it
 * (AT91SAM7-style low-power idle). NOTE(review): verify against the AT91
 * datasheet for this board before relying on the power-saving claim. */
#define port_wait_for_interrupt() {                                     \
  AT91C_BASE_SYS->PMC_SCDR = AT91C_PMC_PCK;                             \
}
#else
/* Power saving disabled: the idle loop simply spins. */
#define port_wait_for_interrupt()
#endif
#endif

#endif /* _WFI_H_ */
# Specs for Project Euler problem 21 (amicable numbers).
# dsum(n) is the sum of proper divisors of n; two numbers a != b are an
# amicable pair when dsum(a) == b and dsum(b) == a.
require_relative 'spec_helper'
require 'problem_021'

describe Problem021 do
  before(:each) do
    @prb = Problem021.new
  end

  # Proper-divisor sums, including the classic amicable pair 220/284.
  describe "dsum" do
    it "sums the divisors of 1 as 0" do
      @prb.dsum(1).should == 0
    end
    it "sums the divisors of 2 as 1" do
      @prb.dsum(2).should == 1
    end
    it "sums the divisors of 4 as 3" do
      @prb.dsum(4).should == 3
    end
    it "sums the divisors of 6 as 6" do
      @prb.dsum(6).should == 6
    end
    it "sums the divisors of 9 as 4" do
      @prb.dsum(9).should == 4
    end
    it "sums the divisors of 220 as 284" do
      @prb.dsum(220).should == 284
    end
    it "sums the divisors of 284 as 220" do
      @prb.dsum(284).should == 220
    end
  end

  # amicable_pair(n) returns the partner, or nil for perfect/unpaired numbers
  # (6 is perfect: dsum(6) == 6, so it must not pair with itself).
  describe "amicable pairs" do
    it "returns nil as the pair of 1" do
      @prb.amicable_pair(1).should be_nil
    end
    it "returns nil as the pair of 6" do
      @prb.amicable_pair(6).should be_nil
    end
    it "returns nil as the pair of 9" do
      @prb.amicable_pair(9).should be_nil
    end
    it "returns 284 as the pair of 220" do
      @prb.amicable_pair(284).should == 220
    end
    it "returns 220 as the pair of 284" do
      @prb.amicable_pair(220).should == 284
    end
  end

  # find_pairs scans a range and collects all members of amicable pairs.
  describe "find pairs" do
    it "returns [220, 284] for 200..300" do
      @prb.find_pairs(200..300).should == [220,284]
    end
    it "returns [] for 0..100" do
      @prb.find_pairs(0..100).should == []
    end
  end

  # run = sum of find_pairs over 1..10,000 (the actual Euler answer).
  it "adds all amicable pairs from 1 to 10,000" do
    @prb.should_receive(:find_pairs).with(1..10_000).and_return([1,2,3,4,5])
    @prb.run.should == 15
  end
end
# Mapping spec for a SPARQL-backed index document: the top-level "where"
# selects the subjects, and each entry under "fields" binds ?obj for that field.
definition = {
    "where": "?subj a foaf:Organization .",
    "fields": {
        "name": {
            "where": "?subj rdfs:label ?obj .",
        },
    },
}
//====================================================================================================================
// Copyright (c) 2012 IdeaBlade
//====================================================================================================================
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
// OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//====================================================================================================================
// USE OF THIS SOFTWARE IS GOVERENED BY THE LICENSING TERMS WHICH CAN BE FOUND AT
// http://cocktail.ideablade.com/licensing
//====================================================================================================================

using System;
using System.ComponentModel;
using System.ComponentModel.Composition;
using System.Linq;
using Cocktail;
using DomainModel;
using DomainServices;
using IdeaBlade.EntityModel;

namespace AccoBooking.ViewModels.Booking
{
    /// <summary>
    /// Edit screen for a single <see cref="BookingReminder"/>. The reminder's due
    /// date either "flows" (is derived from booking milestones such as arrival or
    /// payment dates, shifted by an offset in days) or is fixed on one date.
    /// </summary>
    [Export, PartCreationPolicy(CreationPolicy.NonShared)]
    public class BookingReminderSummaryViewModel : BaseScreen<BookingReminder>
    {
        // True while the due date is derived from booking data (a milestone is set).
        private bool _activityFlows = true;

        [ImportingConstructor]
        public BookingReminderSummaryViewModel(IUnitOfWorkManager<IAccoBookingUnitOfWork> unitOfWorkManager,
                                               SystemCodeListViewModel milestoneListViewModel,
                                               IDialogManager dialogManager)
            : base(unitOfWorkManager, dialogManager)
        {
            MilestoneList = milestoneListViewModel;
            MilestoneList.PropertyChanged += MilestoneListOnPropertyChanged;
        }

        // Recompute the due date whenever the user changes the day offset.
        void EntityPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            if (e.PropertyName == "Offset")
            {
                ValueChangedActivityFlows(false);
            }
        }

        // Keep the entity's milestone in sync with the milestone picker.
        private void MilestoneListOnPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            if (e.PropertyName == "ShortName")
            {
                if (Entity != null)
                {
                    ((BookingReminder)Entity).Milestone = MilestoneList.ShortName;
                    _activityFlows = ((BookingReminder)Entity).Milestone != "";
                    ValueChangedActivityFlows(false);
                    NotifyOfPropertyChange(() => ActivityFlows);
                }
            }
        }

        public SystemCodeListViewModel MilestoneList { get; set; }

        public override Entity Entity
        {
            get { return base.Entity; }
            set
            {
                // FIX: detach the handler from the *previous* entity before the base
                // property is reassigned. The original detached from the new entity
                // after assignment, so the old entity kept its subscription (leak).
                if (base.Entity != null)
                    base.Entity.PropertyChanged -= EntityPropertyChanged;

                base.Entity = value;
                if (Entity != null)
                {
                    Entity.PropertyChanged += EntityPropertyChanged;
                    MilestoneList.ShortName = ((BookingReminder)Entity).Milestone;
                    _activityFlows = ((BookingReminder)Entity).Milestone != "";
                    ValueChangedActivityFlows(false);
                    NotifyOfPropertyChange(() => ActivityFlows);
                }
            }
        }

        /// <summary>
        /// Re-derives IsDue/Due from the selected milestone, or pins the reminder to
        /// a fixed date when it no longer flows with booking data.
        /// </summary>
        /// <param name="changedByUser">True when triggered from the ActivityFlows checkbox.</param>
        private void ValueChangedActivityFlows(bool changedByUser)
        {
            // Initialize depending fields
            if (!_activityFlows)
            {
                // The activity does not flow with booking data and is fixed on one date.
                if (changedByUser)
                    MilestoneList.ShortName = "";
                ((BookingReminder)Entity).Offset = 0;
                if (((BookingReminder)Entity).Due == null)
                    ((BookingReminder)Entity).Due = DateTime.Today;
            }
            else
            {
                var offset = ((BookingReminder) Entity).Offset;
                var payments = ((BookingReminder) Entity).Booking.BookingPayments;
                BookingPayment payment;

                // The activity flows with booking data.
                switch (MilestoneList.ShortName)
                {
                    case "BOOKING":
                        ((BookingReminder) Entity).IsDue = true;
                        ((BookingReminder) Entity).Due = ((BookingReminder) Entity).Booking.Booked.AddDays(offset);
                        break;

                    case "FIRST-PAYMENT":
                        // Check if the booking has a non-scheduled payment by the guest.
                        // If there is such a payment, the activity is due; otherwise it isn't.
                        if (payments != null)
                        {
                            payment = payments.FirstOrDefault(p => !p.IsScheduledPayment && p.IsPaymentByGuest);
                            if (payment != null)
                            {
                                ((BookingReminder) Entity).IsDue = true;
                                ((BookingReminder) Entity).Due = payment.Due;
                            }
                            else
                            {
                                ((BookingReminder) Entity).IsDue = false;
                                ((BookingReminder) Entity).Due = null;
                            }
                        }
                        else
                        {
                            ((BookingReminder) Entity).IsDue = false;
                            ((BookingReminder) Entity).Due = null;
                        }
                        break;

                    case "LAST-PAYMENT":
                        // Only due once the booking has status PAID.
                        if (((BookingReminder) Entity).Booking.Status == "PAID")
                        {
                            // Find the last non-scheduled payment by the guest to calculate the due date.
                            payment = payments != null
                                ? payments.Where(p => !p.IsScheduledPayment && p.IsPaymentByGuest)
                                          .OrderByDescending(p => p.Due)
                                          .FirstOrDefault()
                                : null;

                            if (payment != null)
                            {
                                ((BookingReminder) Entity).IsDue = true;
                                ((BookingReminder) Entity).Due = payment.Due;
                            }
                            else
                            {
                                // FIX: the original dereferenced payment.Due without checking
                                // FirstOrDefault() for null, which throws when a PAID booking
                                // has no qualifying guest payment.
                                ((BookingReminder) Entity).IsDue = false;
                                ((BookingReminder) Entity).Due = null;
                            }
                        }
                        break;

                    case "ARRIVAL":
                        ((BookingReminder)Entity).IsDue = true;
                        ((BookingReminder)Entity).Due = ((BookingReminder)Entity).Booking.Arrival.AddDays(offset);
                        break;

                    case "DEPARTURE":
                        ((BookingReminder)Entity).IsDue = true;
                        ((BookingReminder)Entity).Due = ((BookingReminder)Entity).Booking.Departure.AddDays(offset);
                        break;
                }
                NotifyOfPropertyChange(() => Entity);
            }
        }

        /// <summary>Bound to the "flows with booking data" checkbox.</summary>
        public bool ActivityFlows
        {
            get { return _activityFlows; }
            set
            {
                _activityFlows = value;
                // Refresh depending fields.
                ValueChangedActivityFlows(true);
                NotifyOfPropertyChange(() => ActivityFlows);
            }
        }

        // Done can only become true when editing an activity with IsDue = true;
        // in all other cases the checkbox should not be shown.

        protected override IRepository<BookingReminder> Repository()
        {
            return UnitOfWork.BookingReminders;
        }

        protected override void OnActivate()
        {
            base.OnActivate();
            MilestoneList.Start("MILESTONE");
        }

        protected override void OnDeactivate(bool close)
        {
            base.OnDeactivate(close);
            if (close)
            {
                MilestoneList.PropertyChanged -= MilestoneListOnPropertyChanged;
            }
        }
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2019.09.22 at 07:50:03 PM BRT
//

package br.com.swconsultoria.cte.schema_300.evPrestDesacordo;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for TUF_sem_EX (Brazilian state/UF codes, excluding "EX" for abroad).
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * &lt;simpleType name="TUF_sem_EX">
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string">
 *     &lt;whiteSpace value="preserve"/>
 *     &lt;enumeration value="AC"/>
 *     &lt;enumeration value="AL"/>
 *     &lt;enumeration value="AM"/>
 *     &lt;enumeration value="AP"/>
 *     &lt;enumeration value="BA"/>
 *     &lt;enumeration value="CE"/>
 *     &lt;enumeration value="DF"/>
 *     &lt;enumeration value="ES"/>
 *     &lt;enumeration value="GO"/>
 *     &lt;enumeration value="MA"/>
 *     &lt;enumeration value="MG"/>
 *     &lt;enumeration value="MS"/>
 *     &lt;enumeration value="MT"/>
 *     &lt;enumeration value="PA"/>
 *     &lt;enumeration value="PB"/>
 *     &lt;enumeration value="PE"/>
 *     &lt;enumeration value="PI"/>
 *     &lt;enumeration value="PR"/>
 *     &lt;enumeration value="RJ"/>
 *     &lt;enumeration value="RN"/>
 *     &lt;enumeration value="RO"/>
 *     &lt;enumeration value="RR"/>
 *     &lt;enumeration value="RS"/>
 *     &lt;enumeration value="SC"/>
 *     &lt;enumeration value="SE"/>
 *     &lt;enumeration value="SP"/>
 *     &lt;enumeration value="TO"/>
 *   &lt;/restriction>
 * &lt;/simpleType>
 * </pre>
 *
 */
@XmlType(name = "TUF_sem_EX", namespace = "http://www.portalfiscal.inf.br/cte")
@XmlEnum
public enum TUFSemEX {

    AC,
    AL,
    AM,
    AP,
    BA,
    CE,
    DF,
    ES,
    GO,
    MA,
    MG,
    MS,
    MT,
    PA,
    PB,
    PE,
    PI,
    PR,
    RJ,
    RN,
    RO,
    RR,
    RS,
    SC,
    SE,
    SP,
    TO;

    // The XML value is identical to the enum constant name.
    public String value() {
        return name();
    }

    // Inverse of value(); throws IllegalArgumentException for unknown codes.
    public static TUFSemEX fromValue(String v) {
        return valueOf(v);
    }

}
'use strict'; /** * Module dependencies. */ var mongoose = require('mongoose'), errorHandler = require('./errors.server.controller'), Product = mongoose.model('Product'), multiparty = require('multiparty'), uuid = require('node-uuid'), fs = require('fs'), _ = require('lodash'); /** * Create a Product */ exports.create = function(req, res, next) { var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { var file = files.file[0]; var contentType = file.headers['content-type']; var tmpPath = file.path; var extIndex = tmpPath.lastIndexOf('.'); var extension = (extIndex < 0) ? '' : tmpPath.substr(extIndex); // uuid is for generating unique filenames. var fileName = uuid.v4() + extension; var destPath = '/home/blaze/Sites/github/yaltashop/uploads/' + fileName; //fs.rename(tmpPath, destPath, function(err) { if (err) { console.log('there was an error during saving file'); next(err); } var product = new Product(fields); file.name = file.originalFilename; product.photo.file = file; product.user = req.user; product.save(function(err) { if (err) { return res.status(400).send({ message: errorHandler.getErrorMessage(err) }); } else { res.jsonp(product); } }); //}); }); }; /** * Show the current Product */ exports.read = function(req, res) { res.jsonp(req.product); }; /** * Update a Product */ exports.update = function(req, res) { var product = req.product ; product = _.extend(product , req.body); product.save(function(err) { if (err) { return res.status(400).send({ message: errorHandler.getErrorMessage(err) }); } else { res.jsonp(product); } }); }; /** * Delete an Product */ exports.delete = function(req, res) { var product = req.product ; product.remove(function(err) { if (err) { return res.status(400).send({ message: errorHandler.getErrorMessage(err) }); } else { res.jsonp(product); } }); }; /** * List of Products */ exports.list = function(req, res) { Product.find().sort('-created').populate('user', 'displayName').exec(function(err, products) { if 
(err) { return res.status(400).send({ message: errorHandler.getErrorMessage(err) }); } else { res.jsonp(products); } }); }; /** * Product middleware */ exports.productByID = function(req, res, next, id) { Product.findById(id).populate('user', 'displayName').exec(function(err, product) { if (err) return next(err); if (! product) return next(new Error('Failed to load Product ' + id)); req.product = product ; next(); }); }; /** * Product authorization middleware */ exports.hasAuthorization = function(req, res, next) { if (req.product.user.id !== req.user.id) { return res.status(403).send('User is not authorized'); } next(); };
--- author: makumbe comments: true date: 2014-06-16 07:49:07+00:00 layout: post link: http://blog.daanalytics.nl/2014/06/16/trying-to-understand-the-oracle-reference-architecture-for-information-management/ slug: trying-to-understand-the-oracle-reference-architecture-for-information-management title: Trying to understand the Oracle Reference Architecture for Information Management wordpress_id: 1736 categories: - Oracle Business Analytics tags: - Big Data - Information Management - Oracle Business Analytics - Reference Architecture --- Last month I have been attending the RittmanMead BI Forum 2014. In the [wrap-up](http://obibb.wordpress.com/2014/05/13/rittmanmead-bi-forum-2014-wrap-up/) I mentioned a presentation by [Andrew Bond](https://www.linkedin.com/pub/andrew-bond/3/693/4a2) & [Stewart Bryson](http://www.rittmanmead.com/author/stewart-bryson/). They had a very nice presentation about the [Oracle Information Management Reference Architecture.](http://www.rittmanmead.com/files/bryson_bond_ref_arch.pdf) This needed some further investigation from my part. This blogpost is a first summary of the information I found online so far. There is a complete reference library of [**IT Strategies from Oracle**](www.oracle.com/goto/itstrategies). 
Pay extra attention to the following docs: * Oracle Reference Architecture Information Management * Oracle Reference Architecture Business Analytics Foundation * Oracle Reference Architecture Business Analytics Infrastructure * Oracle Reference Architecture Service Orientation * Oracle Reference Architecture Security * Oracle Reference Architecture Engineered Systems [![IT Strategies from Oracle](http://obibb.files.wordpress.com/2014/06/it-strategies-from-oracle.png?w=300)](https://obibb.files.wordpress.com/2014/06/it-strategies-from-oracle.png) Next to that, Oracle has a few White Papers focussing on Information Management (Big Data & Analytics): * [Enabling Pervasive BI through a Practical Data Warehouse Reference Architecture](http://www.oracle.com/technetwork/database/bi-datawarehousing/twp-bidw-enabling-pervasive-bi-thro-132094.pdf) - Feb 2010 * [Information Management and Big Data A Reference Architecture](http://www.oracle.com/technetwork/topics/entarch/articles/info-mgmt-big-data-ref-arch-1902853.pdf) - Feb 2013 * [Big Data & Analytics Reference Architecture](http://www.oracle.com/technetwork/topics/entarch/oracle-wp-big-data-refarch-2019930.pdf) - Sep 2013 ### Oracle Information Management – Logical View Let's take a closer look, see the picture below. This picture has been copied from; the [Oracle Information Management Reference Architecture.](http://www.rittmanmead.com/files/bryson_bond_ref_arch.pdf) presentation I mentioned above. There are a few major components in the Reference Architecture * Data Sources * Information Provisioning * Data Ingestion * Logical Data Warehouse * Information Delivery While putting together this blog post, Mark Rittman posted the following article(s); "Introducing the Updated Oracle / Rittman Mead Information Management Reference Architecture - Pt1. Information Architecture and the "Data Factory" & Pt2. 
– Delivering the Data Factory” on the [RM Blog](http://www.rittmanmead.com/2014/06/introducing-the-updated-oracle-rittman-mead-information-management-reference-architecture-pt1-information-architecture-and-the-data-factory/). [![Oracle Information Management – Logical View](http://obibb.files.wordpress.com/2014/06/oracle-information-management-e28093-logical-view1-e1401893213260.png?w=630)](https://obibb.files.wordpress.com/2014/06/oracle-information-management-e28093-logical-view1.png) #### Data Sources Within this Reference Architecture Oracle should be able to handle all sorts of data: * Traditional Enterprise Data (ERP, CRM, etc.) * Machine-generated /Sensor Data (Smart Meters, Equipment Logs, etc.) * Social Data (Twitter, Facebook, etc.) [![Any Data - Any Source - Any Format - Different Data](http://obibb.files.wordpress.com/2014/06/any-data-any-source-any-format-different-data.png?w=300)](https://obibb.files.wordpress.com/2014/06/any-data-any-source-any-format-different-data.png) The last couple of years / decades, Data has changed. We (as BI/DW-Consultants) have always looked at data in a Traditional (Structured) way. Now Oracle provides an Architecture to combine the old Data with the new (Big) Data. Whether there are 3, 4, 5, or whatever number of V's, the most important thing is that you [get value from your Data](https://www.linkedin.com/today/post/article/20140306073407-64875646-big-data-the-5-vs-everyone-must-know)! #### Data Ingestion (Loading) Oracle provides / supports tools to perform Batch or (Near) Real-Time Data Ingestion. _**Oracle GoldenGate & Oracle Data Integrator**_ Michael Rainey is writing an excellent series on how to load the Raw Data Reservoir (Staging Layer) and the Foundation Layer; [Oracle GoldenGate and Oracle Data Integrator – A Perfect Match in 12c](http://www.rittmanmead.com/2014/05/goldengate-odi-perfect-match-12c-1/). 
_**Cloudera Distribution including Apache Hadoop (CDH)**_ Hadoop is Open Source and [Apache](http://hadoop.apache.org) licensed. Cloudera provides an Open Source Distribution ( Cloudera CDH -[ Cloudera Enterprise Data Hub](http://www.cloudera.com/content/cloudera/en/products-and-services/cdh.html)). _**Oracle Event Processing (OEP)**_ OEP is a complete solution for building applications to filter, correlate and process events in real-time. Check the Data Sheet [here](http://www.oracle.com/us/products/middleware/soa/overview/complex-event-processing-ds-066411.pdf). #### Logical Data Warehouse There are a few different Layers: * Raw Data Reservoir (Staging Layer) * Foundation Layer * Access and Performance Layer Depending on the requirements and the tooling one can decide to skip or combine the different Layers. Oracle has the tooling to load the various Layers directly ([GoldenGate](http://www.oracle.com/us/products/middleware/data-integration/goldengate/overview/index.html)). The data is no longer only stored in the Oracle Database. The 'new' types of data require 'new' types of storage. There are different Data Stores for different purposes. 
* Historical (Historical Integrity) * [Oracle Database](http://www.oracle.com/us/products/database/overview/index.html) * [HBase](http://hbase.apache.org) * [HDFS](http://wiki.apache.org/hadoop/HDFS) * [Oracle NoSql Database](http://www.oracle.com/technetwork/database/database-technologies/nosqldb/overview/index.html) * Analytical (Ease of Acces & Query Performance) * Oracle Database * [OLAP](http://www.oracle.com/technetwork/database/options/olap/index.html) * [Spatial & Graph](http://www.oracle.com/technetwork/database/options/spatialandgraph/overview/index.html) * Oracle Essbase * [Oracle NoSql Database](http://www.oracle.com/technetwork/database/database-technologies/nosqldb/overview/index.html) * [Cloudera Hadoop](http://www.cloudera.com/content/cloudera/en/products-and-services/cloudera-enterprise.html) * [Oracle Endeca Information Discovery](http://www.oracle.com/technetwork/middleware/endeca/overview/index.html?ssSourceSiteId=otnpt) The [Oracle Big Data Connectors](http://www.oracle.com/technetwork/database/database-technologies/bdc/big-data-connectors/overview/index.html) can be used to integrate Apache Hadoop with Oracle Database Software. * Oracle Loader for Hadoop * [Oracle Data Integrator Application Adapter for Hadoop](http://www.oracle.com/us/products/middleware/data-integration/hadoop/overview/index.html?ssSourceSiteId=ocomcaen) * Oracle R Advanced Analytics for Hadoop * Oracle SQL Connector for HDFS * Oracle XQuery for Hadoop Processing the data could be either In-Database ([Oracle Database Options](http://www.oracle.com/technetwork/database/options/index.html?ssSourceSiteId=null) -  Advanced Analytics, OLAP) or In-Memory ([Oracle TimesTen](http://www.oracle.com/technetwork/database/database-technologies/timesten/overview/index.html)). #### Information Delivery Oracle is able to support proven answers to known questions via [Oracle BI](http://www.oracle.com/us/solutions/business-analytics/business-intelligence/overview/index.html). 
Fast answers to new questions are provided by [Oracle Endeca Information Discovery](http://www.oracle.com/technetwork/middleware/endeca/overview/index.html?ssSourceSiteId=otnpt). "The Oracle BI Foundation Suite provides comprehensive capabilities for business intelligence, including enterprise reporting, dashboards, ad-hoc analysis, multi-dimensional OLAP, scorecards, and predictive analytics on an integrated platform" [![Oracle BI Foundation - Front-End](http://obibb.files.wordpress.com/2014/06/oracle-bi-foundation-front-end1.png?w=300)](https://obibb.files.wordpress.com/2014/06/oracle-bi-foundation-front-end1.png) OBIEE makes it possible (Logical Layer) to skip the 'Access and Performance Layer' and source directly from the 'Foundation Layer'. There are some very interesting presentations online around this subject. * [Using OBIEE and Data Vault to Virtualize Your BI Environment: An Agile Approach](https://s3.amazonaws.com/rmc_docs/odtug2013_datavault.pdf) * [Using OBIEE against Transactional Schemas](http://www.rittmanmead.com/2013/03/obiee-transactional5/) * [Advanced MetaData Topics](https://s3.amazonaws.com/rmc_docs/biforum2011/Mcquigg_Metadata.pdf) * [Oracle BI Server - the ultimate choice for BICC's - Decoupling](https://s3.amazonaws.com/rmc_docs/biforum2011/Wilcke_BICC.pdf) "Oracle Endeca Information Discovery is a complete enterprise data discovery platform that combines information of any type, from any source, empowering business user independence in balance with IT governance. Now organizations can access the information they need, when they need it, to make business decisions they can trust." There is still a lot more to investigate, but for me this gives a little bit more guidance.
package trusis.config.personaggi;

import java.util.ArrayList;
import java.util.List;

import trusis.config.personaggi.define.Personaggio;
import trusis.config.personaggi.define.stdPersonaggio;
import trusis.config.poteri.UccisioneNotturna;
import trusis.config.poteri.define.Potere;

/**
 * The "Trusis" character: a killable member of the "Cattivi" faction whose
 * default power is the night kill (UccisioneNotturna).
 */
public class Trusis extends stdPersonaggio {

    private String nome = "Trusis";
    private String fazione = "Cattivi";
    private boolean vivo = true;
    private boolean killable = true;
    private List<Personaggio> conoscenti = new ArrayList<Personaggio>();

    /**
     * Builds Trusis with a caller-supplied power list.
     *
     * FIX: the original called only setPoteri(listPoteri) here, leaving name,
     * faction, alive/killable flags and acquaintances unset — inconsistent with
     * the no-arg constructor. Both constructors now initialise the full state.
     */
    public Trusis(List<Potere> listPoteri) {
        setter(listPoteri);
    }

    /** Builds Trusis with its default power set (the night kill). */
    public Trusis() {
        UccisioneNotturna uccisioneNotturna = new UccisioneNotturna();
        List<Potere> poteriTrusis = new ArrayList<Potere>();
        poteriTrusis.add(uccisioneNotturna);
        setter(poteriTrusis);
    }

    // Pushes the field defaults plus the given powers into the base class.
    private void setter(List<Potere> poteri) {
        setNome(nome);
        setFazione(fazione);
        setVivo(vivo);
        setKillable(killable);
        setPoteri(poteri);
        setConoscenti(conoscenti);
    }
}
# Test bootstrap: starts coverage reporting, loads the gem under test plus
# Rails/RSpec, and wires ActiveRecord to an in-memory SQLite database so
# model specs need no external database setup.
require 'codeclimate-test-reporter'
CodeClimate::TestReporter.start
require 'pry'
require 'pon'
require 'rails/all'
require 'rspec/rails'

# In-memory database: fresh on every run, nothing to clean up.
ActiveRecord::Base.establish_connection adapter: 'sqlite3', database: ':memory:'

RSpec.configure do |config|
  # The settings below are suggested to provide a good initial experience
  # with RSpec, but feel free to customize to your heart's content.

  # These two settings work together to allow you to limit a spec run
  # to individual examples or groups you care about by tagging them with
  # `:focus` metadata. When nothing is tagged with `:focus`, all examples
  # get run.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true

  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end

  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10

  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = :random

  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed

  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # Enable only the newer, non-monkey-patching expect syntax.
    # For more details, see:
    #   - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
    expectations.syntax = :expect
  end

  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Enable only the newer, non-monkey-patching expect syntax.
    # For more details, see:
    #   - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
    mocks.syntax = :expect

    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended.
    mocks.verify_partial_doubles = true
  end
end
<!DOCTYPE html>
<html>
{% include head.html %}
<body>
  <div class="page">
    <div class="page-sidebar">
      {% include sidebar.html %}
    </div>
    <div class="page-body">
      <div class="page-main-bg container">
        {{ content }}
        <div class="row">
          <div class="col-12">
            <hr>
            <p class="text-xs-right">
              subscribe
              <a href="{{ "/feed.xml" | prepend: site.baseurl }}">
                <i class="fa fa-rss"></i> via RSS
              </a>
              |
              connect
              <a href="{{ site.github_url }}">
                <!-- FIX: visible typo "gitbub" corrected to "github" -->
                <i class="fab fa-github"></i> on github
              </a>
            </p>
          </div>
        </div>
      </div>
    </div>
  </div>
  {% include footer.html %}
</body>
</html>
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::NetApp::Mgmt::V2020_11_01 module Models # # Pool change request # class PoolChangeRequest include MsRestAzure # @return [String] Resource id of the pool to move volume to attr_accessor :new_pool_resource_id # # Mapper for PoolChangeRequest class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'poolChangeRequest', type: { name: 'Composite', class_name: 'PoolChangeRequest', model_properties: { new_pool_resource_id: { client_side_validation: true, required: true, serialized_name: 'newPoolResourceId', type: { name: 'String' } } } } } end end end end
#!/usr/bin/python import sys import re re_valid_email = re.compile(r'^[-_0-9a-zA-Z]+@[0-9a-zA-Z]+\.[0-9a-zA-Z]{1,3}$') def valid_email(s): return not (re_valid_email.search(s) == None) N = int(raw_input().strip()) A = [] for i in range(N): A += [ str(raw_input().strip()) ] A.sort() V = filter(valid_email, A) print V #### INPUT ## ## 3 ## lara@hackerrank.com ## brian-23@hackerrank.com ## britts_54@hackerrank.com ## #### OUTPUT ## ## ['brian-23@hackerrank.com', 'britts_54@hackerrank.com', 'lara@hackerrank.com'] #### INPUT ## ## 5 ## dheeraj-234@gmail.com ## itsallcrap ## harsh_1234@rediff.in ## kunal_shin@iop.az ## matt23@@india.in ## #### OUTPUT ## ## ['dheeraj-234@gmail.com', 'harsh_1234@rediff.in', 'kunal_shin@iop.az']
// <auto-generated />
namespace TurbolinksTestApp.Migrations
{
    using System.Data.Entity.Migrations;
    using System.Data.Entity.Migrations.Infrastructure;
    using System.Resources;

    /// <summary>
    /// Entity Framework migration metadata for the "TaskMigration" migration.
    /// Generated code — do not edit by hand; regenerate via EF migrations.
    /// </summary>
    public sealed partial class TaskMigration : IMigrationMetadata
    {
        // Reads the embedded resources generated alongside this migration.
        private readonly ResourceManager Resources = new ResourceManager(typeof(TaskMigration));

        // Unique migration id: creation timestamp + migration name.
        string IMigrationMetadata.Id
        {
            get { return "201308201210295_TaskMigration"; }
        }

        // No serialized source model is stored for this migration.
        string IMigrationMetadata.Source
        {
            get { return null; }
        }

        // Serialized target model, loaded from the embedded "Target" resource.
        string IMigrationMetadata.Target
        {
            get { return Resources.GetString("Target"); }
        }
    }
}
# can_openner

Data Scraper for FIPS, ZIP, and Area Codes

## What does Can Openner do?

It *can open* data. (Yeah, bad pun.) One of the crazy things I've run into when going to websites like [UnitedStatesZipCodes.org](http://www.unitedstateszipcodes.org/) or [AllAreacodes.com](http://www.allareacodes.com/) is that they show you their website data, but will charge $40 for you to download it. Keep in mind this is free public data posted online. Can Openner doesn't give you their data, but a means to get their data. Just as there is a package for Linux to get the Microsoft fonts — a package that doesn't contain the fonts but fetches them for you — Can Openner works by the same legitimate rules. You get the data using a script. Nobody gets in trouble for infringement.

## Why call it Can Openner?

The icon commonly associated with a database looks like a metal can. If I ever get around to drawing some icons or art for this program, it would probably be either a can opener or a cat eating tuna out of a can. (Note: If you have a kitty, don't use the can openers that cut the lid open from the top; use the ones that open the can from the side. It's dangerous to feed kitty straight from the can! Put it in a bowl or a saucer.)

## Why FIPS instead of INCITS?

Despite [Federal Information Processing Standards](https://en.wikipedia.org/wiki/Federal_Information_Processing_Standards) (FIPS) being phased out in favor of [International Committee for Information Technology Standards](https://en.wikipedia.org/wiki/International_Committee_for_Information_Technology_Standards) (INCITS), INCITS have not been formally published for the public. A bit of hackery needs to be done to get them online. Who knows, maybe WikiLeaks has them. Regardless, it is ridiculous that INCITS is not as open as FIPS.
However, there are a lot of places where FIPS codes are still used, such as [Specific Area Message Encoding](https://en.wikipedia.org/wiki/Specific_Area_Message_Encoding) on NOAA Weather Radio, the US Census Bureau's American Fact Finder, and the US Bureau of Labor Statistics.

## Why are some data sets not being fetched?

Sites like Canada Post and the National Weather Service recently revamped their websites, but a lot of old links are broken. I could try using the Wayback Machine to fetch archives, but there is no guarantee that they have the data. I was looking into including Canadian postal codes, since a lot of the area code information included the area codes for Canada.

## What would you do with all this data?

I'd really like to use it for projects like OpenStreetMap, or whatever can create the KML files that Google Earth uses. These data sets probably exist already, but for the most part I focus not on giving you this information but on HOW to get this information. As the saying goes: "Give a man a fish, you feed him for a day. Teach a man how to fish, you feed him for a lifetime."
//
//  TWPhotoImageItem.h
//  Pods
//
//  Created by Madao on 12/8/15.
//
//

#import <UIKit/UIKit.h>

// A view that displays a photo thumbnail with an icon overlay on top
// (e.g. a selection badge).
@interface TWPhotoImageItem : UIView

// The photo thumbnail. NOTE(review): despite the name this is a UIImageView,
// not a UIImage — confirm before renaming.
@property (nonatomic, strong) UIImageView *image;

// Container view hosting the icon overlay.
@property (nonatomic, strong) UIView *iconContent;

// The overlay icon image view, placed inside iconContent.
@property (nonatomic, strong) UIImageView *icon;

@end
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;

namespace DocoptNet
{
    /// <summary>
    /// Command-line argument parser driven by a docopt usage string: the
    /// "usage:" and "options:" sections of <c>doc</c> define the grammar,
    /// and Apply matches the actual argument vector against it.
    /// </summary>
    public class Docopt
    {
        /// <summary>
        /// Raised instead of printing to stdout / exiting the process. When no
        /// handler is attached and exit=true, a default handler that writes the
        /// message and calls Environment.Exit is installed (see
        /// SetDefaultPrintExitHandlerIfNecessary).
        /// </summary>
        public event EventHandler<PrintExitEventArgs> PrintExit;

        /// <summary>Parses with an empty command line.</summary>
        public IDictionary<string, ValueObject> Apply(string doc)
        {
            return Apply(doc, new Tokens("", typeof(DocoptInputErrorException)));
        }

        /// <summary>Parses a single command-line string.</summary>
        public IDictionary<string, ValueObject> Apply(string doc, string cmdLine, bool help = true,
                                                      object version = null, bool optionsFirst = false,
                                                      bool exit = false)
        {
            return Apply(doc, new Tokens(cmdLine, typeof(DocoptInputErrorException)), help, version, optionsFirst,
                         exit);
        }

        /// <summary>Parses a pre-split argument vector.</summary>
        public IDictionary<string, ValueObject> Apply(string doc, ICollection<string> argv, bool help = true,
                                                      object version = null, bool optionsFirst = false,
                                                      bool exit = false)
        {
            return Apply(doc, new Tokens(argv, typeof(DocoptInputErrorException)), help, version, optionsFirst, exit);
        }

        /// <summary>
        /// Core entry point: builds the pattern from the usage section, parses
        /// argv against it, and returns a name-to-value dictionary on a full
        /// match. Throws DocoptInputErrorException (or raises PrintExit when
        /// exit=true) when argv does not match.
        /// </summary>
        protected IDictionary<string, ValueObject> Apply(string doc, Tokens tokens,
                                                         bool help = true, object version = null,
                                                         bool optionsFirst = false, bool exit = false)
        {
            try
            {
                SetDefaultPrintExitHandlerIfNecessary(exit);
                // Exactly one "usage:" section is required.
                var usageSections = ParseSection("usage:", doc);
                if (usageSections.Length == 0)
                    throw new DocoptLanguageErrorException("\"usage:\" (case-insensitive) not found.");
                if (usageSections.Length > 1)
                    throw new DocoptLanguageErrorException("More that one \"usage:\" (case-insensitive).");
                var exitUsage = usageSections[0];
                var options = ParseDefaults(doc);
                var pattern = ParsePattern(FormalUsage(exitUsage), options);
                var arguments = ParseArgv(tokens, options, optionsFirst);
                var patternOptions = pattern.Flat<Option>().Distinct().ToList();
                // [default] syntax for argument is disabled
                // Expand each "options" shortcut into the documented options
                // not already mentioned in the usage pattern.
                foreach (OptionsShortcut optionsShortcut in pattern.Flat(typeof(OptionsShortcut)))
                {
                    var docOptions = ParseDefaults(doc);
                    optionsShortcut.Children = docOptions.Distinct().Except(patternOptions).ToList();
                }
                // Handle --help/--version before matching.
                Extras(help, version, arguments, doc);
                var res = pattern.Fix().Match(arguments);
                if (res.Matched && res.LeftIsEmpty)
                {
                    // Defaults from the pattern first, then actual collected values.
                    var dict = new Dictionary<string, ValueObject>();
                    foreach (var p in pattern.Flat())
                    {
                        dict[p.Name] = p.Value;
                    }
                    foreach (var p in res.Collected)
                    {
                        dict[p.Name] = p.Value;
                    }
                    return dict;
                }
                throw new DocoptInputErrorException(exitUsage);
            }
            catch (DocoptBaseException e)
            {
                if (!exit)
                    throw;
                OnPrintExit(e.Message, e.ErrorCode);
                return null;
            }
        }

        private void SetDefaultPrintExitHandlerIfNecessary(bool exit)
        {
            if (exit && PrintExit == null)
                // Default behaviour is to print usage
                // and exit with error code 1
                PrintExit += (sender, args) =>
                    {
                        Console.WriteLine(args.Message);
                        Environment.Exit(args.ErrorCode);
                    };
        }

        /// <summary>
        /// Generates one code line per uniquely-named pattern node of the doc.
        /// </summary>
        public string GenerateCode(string doc)
        {
            var res = GetFlatPatterns(doc);
            // Keep only the first occurrence of each name.
            res = res
                .GroupBy(pattern => pattern.Name)
                .Select(group => group.First());
            var sb = new StringBuilder();
            foreach (var p in res)
            {
                sb.AppendLine(p.GenerateCode());
            }
            return sb.ToString();
        }

        /// <summary>Returns the doc's pattern elements as Node objects.</summary>
        public IEnumerable<Node> GetNodes(string doc)
        {
            return GetFlatPatterns(doc)
                .Select(p => p.ToNode())
                .Where(p => p != null)
                .ToArray();
        }

        /// <summary>
        /// Builds and flattens the usage pattern (same pipeline as Apply,
        /// minus argv matching).
        /// </summary>
        static IEnumerable<Pattern> GetFlatPatterns(string doc)
        {
            var usageSections = ParseSection("usage:", doc);
            if (usageSections.Length == 0)
                throw new DocoptLanguageErrorException("\"usage:\" (case-insensitive) not found.");
            if (usageSections.Length > 1)
                throw new DocoptLanguageErrorException("More that one \"usage:\" (case-insensitive).");
            var exitUsage = usageSections[0];
            var options = ParseDefaults(doc);
            var pattern = ParsePattern(FormalUsage(exitUsage), options);
            var patternOptions = pattern.Flat<Option>().Distinct().ToList();
            // [default] syntax for argument is disabled
            foreach (OptionsShortcut optionsShortcut in pattern.Flat(typeof(OptionsShortcut)))
            {
                var docOptions = ParseDefaults(doc);
                optionsShortcut.Children = docOptions.Distinct().Except(patternOptions).ToList();
            }
            return pattern.Fix().Flat();
        }

        /// <summary>
        /// Triggers PrintExit for -h/--help and --version when they appear in
        /// the parsed argv (here passed as <paramref name="options"/>).
        /// </summary>
        private void Extras(bool help, object version, ICollection<Pattern> options, string doc)
        {
            if (help && options.Any(o => (o.Name == "-h" || o.Name == "--help") && !o.Value.IsNullOrEmpty))
            {
                OnPrintExit(doc);
            }
            if (version != null && options.Any(o => (o.Name == "--version") && !o.Value.IsNullOrEmpty))
            {
                OnPrintExit(version.ToString());
            }
        }

        /// <summary>
        /// Raises PrintExit, or throws DocoptExitException when no handler is
        /// attached. errorCode defaults to 0 (help/version case).
        /// </summary>
        protected void OnPrintExit(string doc, int errorCode = 0)
        {
            if (PrintExit == null)
            {
                throw new DocoptExitException(doc);
            }
            else
            {
                PrintExit(this, new PrintExitEventArgs(doc, errorCode));
            }
        }

        /// <summary>
        /// Parse command-line argument vector.
        /// </summary>
        /// <param name="tokens">Token stream over the actual argv.</param>
        /// <param name="options">Known options; unknown ones found in argv are added.</param>
        /// <param name="optionsFirst">When true, the first positional argument ends option parsing.</param>
        /// <returns>The parsed argv as a flat list of Option/Argument patterns.</returns>
        internal static IList<Pattern> ParseArgv(Tokens tokens, ICollection<Option> options,
                                                 bool optionsFirst = false)
        {
            // If options_first:
            //      argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
            // else:
            //      argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
            var parsed = new List<Pattern>();
            while (tokens.Current() != null)
            {
                if (tokens.Current() == "--")
                {
                    // Everything after "--" is a positional argument.
                    parsed.AddRange(tokens.Select(v => new Argument(null, new ValueObject(v))));
                    return parsed;
                }
                if (tokens.Current().StartsWith("--"))
                {
                    parsed.AddRange(ParseLong(tokens, options));
                }
                else if (tokens.Current().StartsWith("-") && tokens.Current() != "-")
                {
                    parsed.AddRange(ParseShorts(tokens, options));
                }
                else if (optionsFirst)
                {
                    parsed.AddRange(tokens.Select(v => new Argument(null, new ValueObject(v))));
                    return parsed;
                }
                else
                {
                    parsed.Add(new Argument(null, new ValueObject(tokens.Move())));
                }
            }
            return parsed;
        }

        /// <summary>
        /// Rewrites the usage section into a single pattern string: each usage
        /// line (recognized by repeating the program name pu[0]) becomes an
        /// alternative inside "( ... ) | ( ... )".
        /// </summary>
        internal static string FormalUsage(string exitUsage)
        {
            var section = new StringPartition(exitUsage, ":").RightString; // drop "usage:"
            var pu = section.Split(new char[0], StringSplitOptions.RemoveEmptyEntries);
            var join = new StringBuilder();
            join.Append("( ");
            for (int i = 1; i < pu.Length; i++)
            {
                var s = pu[i];
                if (i > 1)
                    join.Append(" ");
                join.Append((s == pu[0]) ? ") | (" : s);
            }
            join.Append(" )");
            return join.ToString();
        }

        /// <summary>Parses a formal usage string into a Required pattern tree.</summary>
        internal static Required ParsePattern(string source, ICollection<Option> options)
        {
            var tokens = Tokens.FromPattern(source);
            var result = ParseExpr(tokens, options);
            if (tokens.Current() != null)
                throw tokens.CreateException("unexpected ending: " + String.Join(" ", tokens.ToArray()));
            return new Required(result.ToArray());
        }

        private static IEnumerable<Pattern> ParseExpr(Tokens tokens, ICollection<Option> options)
        {
            // expr ::= seq ( '|' seq )* ;
            var seq = ParseSeq(tokens, options);
            if (tokens.Current() != "|")
                return seq;
            var result = new List<Pattern>();
            // A multi-element sequence becomes one Required alternative.
            if (seq.Count() > 1)
            {
                result.Add(new Required(seq.ToArray()));
            }
            else
            {
                result.AddRange(seq);
            }
            while (tokens.Current() == "|")
            {
                tokens.Move();
                seq = ParseSeq(tokens, options);
                if (seq.Count() > 1)
                {
                    result.Add(new Required(seq.ToArray()));
                }
                else
                {
                    result.AddRange(seq);
                }
            }
            result = result.Distinct().ToList();
            if (result.Count > 1)
                return new[] {new Either(result.ToArray())};
            return result;
        }

        private static ICollection<Pattern> ParseSeq(Tokens tokens, ICollection<Option> options)
        {
            // seq ::= ( atom [ '...' ] )* ;
            var result = new List<Pattern>();
            while (!new[] {null, "]", ")", "|"}.Contains(tokens.Current()))
            {
                var atom = ParseAtom(tokens, options);
                if (tokens.Current() == "...")
                {
                    result.Add(new OneOrMore(atom.ToArray()));
                    tokens.Move();
                    return result;
                }
                result.AddRange(atom);
            }
            return result;
        }

        private static IEnumerable<Pattern> ParseAtom(Tokens tokens, ICollection<Option> options)
        {
            // atom ::= '(' expr ')' | '[' expr ']' | 'options'
            //       | long | shorts | argument | command ;
            var token = tokens.Current();
            var result = new List<Pattern>();
            switch (token)
            {
                case "[":
                case "(":
                {
                    tokens.Move();
                    string matching;
                    if (token == "(")
                    {
                        matching = ")";
                        result.Add(new Required(ParseExpr(tokens, options).ToArray()));
                    }
                    else
                    {
                        matching = "]";
                        result.Add(new Optional(ParseExpr(tokens, options).ToArray()));
                    }
                    if (tokens.Move() != matching)
                        throw tokens.CreateException("unmatched '" + token + "'");
                }
                    break;
                case "options":
                    tokens.Move();
                    result.Add(new OptionsShortcut());
                    break;
                default:
                    if (token.StartsWith("--") && token != "--")
                    {
                        return ParseLong(tokens, options);
                    }
                    if (token.StartsWith("-") && token != "-" && token != "--")
                    {
                        return ParseShorts(tokens, options);
                    }
                    // <angle-bracketed> or ALLCAPS tokens are arguments; anything
                    // else is a command word.
                    if ((token.StartsWith("<") && token.EndsWith(">")) || token.All(c => Char.IsUpper(c)))
                    {
                        result.Add(new Argument(tokens.Move()));
                    }
                    else
                    {
                        result.Add(new Command(tokens.Move()));
                    }
                    break;
            }
            return result;
        }

        private static IEnumerable<Pattern> ParseShorts(Tokens tokens, ICollection<Option> options)
        {
            // shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;
            var token = tokens.Move();
            Debug.Assert(token.StartsWith("-") && !token.StartsWith("--"));
            var left = token.TrimStart(new[] {'-'});
            var parsed = new List<Pattern>();
            // A cluster like "-abc" yields one option per character.
            while (left != "")
            {
                var shortName = "-" + left[0];
                left = left.Substring(1);
                var similar = options.Where(o => o.ShortName == shortName).ToList();
                Option option = null;
                if (similar.Count > 1)
                {
                    throw tokens.CreateException(string.Format("{0} is specified ambiguously {1} times", shortName,
                                                               similar.Count));
                }
                if (similar.Count < 1)
                {
                    // Unknown short option: register it. When parsing real argv
                    // (ThrowsInputError), record it as a present flag.
                    option = new Option(shortName, null, 0);
                    options.Add(option);
                    if (tokens.ThrowsInputError)
                    {
                        option = new Option(shortName, null, 0, new ValueObject(true));
                    }
                }
                else
                {
                    // why is copying necessary here?
                    option = new Option(shortName, similar[0].LongName, similar[0].ArgCount, similar[0].Value);
                    ValueObject value = null;
                    if (option.ArgCount != 0)
                    {
                        // The option's argument is either the rest of the
                        // cluster ("-ofile") or the next token ("-o file").
                        if (left == "")
                        {
                            if (tokens.Current() == null || tokens.Current() == "--")
                            {
                                throw tokens.CreateException(shortName + " requires argument");
                            }
                            value = new ValueObject(tokens.Move());
                        }
                        else
                        {
                            value = new ValueObject(left);
                            left = "";
                        }
                    }
                    if (tokens.ThrowsInputError)
                        option.Value = value ?? new ValueObject(true);
                }
                parsed.Add(option);
            }
            return parsed;
        }

        private static IEnumerable<Pattern> ParseLong(Tokens tokens, ICollection<Option> options)
        {
            // long ::= '--' chars [ ( ' ' | '=' ) chars ] ;
            var p = new StringPartition(tokens.Move(), "=");
            var longName = p.LeftString;
            Debug.Assert(longName.StartsWith("--"));
            var value = (p.NoSeparatorFound) ? null : new ValueObject(p.RightString);
            var similar = options.Where(o => o.LongName == longName).ToList();
            if (tokens.ThrowsInputError && similar.Count == 0)
            {
                // If not exact match, allow unambiguous prefix matching on argv.
                similar =
                    options.Where(o => !String.IsNullOrEmpty(o.LongName) && o.LongName.StartsWith(longName)).ToList();
            }
            if (similar.Count > 1)
            {
                // Might be simply specified ambiguously 2+ times?
                throw tokens.CreateException(string.Format("{0} is not a unique prefix: {1}?", longName,
                                                           string.Join(", ", similar.Select(o => o.LongName))));
            }
            Option option = null;
            if (similar.Count < 1)
            {
                // Unknown long option: "--name=value" implies it takes one argument.
                var argCount = p.Separator == "=" ? 1 : 0;
                option = new Option(null, longName, argCount);
                options.Add(option);
                if (tokens.ThrowsInputError)
                    option = new Option(null, longName, argCount, argCount != 0 ? value : new ValueObject(true));
            }
            else
            {
                option = new Option(similar[0].ShortName, similar[0].LongName, similar[0].ArgCount, similar[0].Value);
                if (option.ArgCount == 0)
                {
                    if (value != null)
                        throw tokens.CreateException(option.LongName + " must not have an argument");
                }
                else
                {
                    if (value == null)
                    {
                        if (tokens.Current() == null || tokens.Current() == "--")
                            throw tokens.CreateException(option.LongName + " requires an argument");
                        value = new ValueObject(tokens.Move());
                    }
                }
                if (tokens.ThrowsInputError)
                    option.Value = value ?? new ValueObject(true);
            }
            return new[] {option};
        }

        /// <summary>
        /// Parses every "options:" section of the doc into Option defaults.
        /// </summary>
        internal static ICollection<Option> ParseDefaults(string doc)
        {
            var defaults = new List<Option>();
            foreach (var s in ParseSection("options:", doc))
            {
                // FIXME corner case "bla: options: --foo"
                var p = new StringPartition(s, ":"); // get rid of "options:"
                var optionsText = p.RightString;
                // Split on newlines that start a "-..." token, then re-pair the
                // captured delimiter with the description that follows it.
                var a = Regex.Split("\n" + optionsText, @"\r?\n[ \t]*(-\S+?)");
                var split = new List<string>();
                for (int i = 1; i < a.Length - 1; i += 2)
                {
                    var s1 = a[i];
                    var s2 = a[i + 1];
                    split.Add(s1 + s2);
                }
                var options = split.Where(x => x.StartsWith("-")).Select(x => Option.Parse(x));
                defaults.AddRange(options);
            }
            return defaults;
        }

        /// <summary>
        /// Returns every section whose header line contains <paramref name="name"/>
        /// (case-insensitive), including its indented continuation lines.
        /// </summary>
        internal static string[] ParseSection(string name, string source)
        {
            var pattern = new Regex(@"^([^\r\n]*" + name + @"[^\r\n]*\r?\n?(?:[ \t].*?(?:\r?\n|$))*)",
                                    RegexOptions.IgnoreCase | RegexOptions.Multiline);
            return (from Match match in pattern.Matches(source) select match.Value.Trim()).ToArray();
        }
    }

    /// <summary>Payload for the <see cref="Docopt.PrintExit"/> event.</summary>
    public class PrintExitEventArgs : EventArgs
    {
        public PrintExitEventArgs(string msg, int errorCode)
        {
            Message = msg;
            ErrorCode = errorCode;
        }

        // Text to print (usage, version, or error message).
        public string Message { get; set; }

        // Requested process exit code (0 for help/version).
        public int ErrorCode { get; set; }
    }
}
<?php

/**
 * Minimal fixture class whose export() yields a fixed payload.
 */
class ItemTestClass
{
    /**
     * Returns the fixture's export payload.
     *
     * @return array
     */
    public function export()
    {
        $payload = array('name' => 'export');

        return $payload;
    }
}
package modtweaker.mods.botania.handlers;

import minetweaker.MineTweakerAPI;
import minetweaker.api.item.IIngredient;
import minetweaker.api.item.IItemStack;

import com.blamejared.mtlib.helpers.LogHelper;
import com.blamejared.mtlib.utils.BaseListAddition;
import com.blamejared.mtlib.utils.BaseListRemoval;

import stanhebben.zenscript.annotations.ZenClass;
import stanhebben.zenscript.annotations.ZenMethod;
import vazkii.botania.api.BotaniaAPI;
import vazkii.botania.api.recipe.RecipePetals;
import vazkii.botania.common.item.block.ItemBlockSpecialFlower;

import java.util.LinkedList;
import java.util.List;

import static com.blamejared.mtlib.helpers.InputHelper.toObjects;
import static com.blamejared.mtlib.helpers.InputHelper.toStack;
import static com.blamejared.mtlib.helpers.InputHelper.*;
import static com.blamejared.mtlib.helpers.StackHelper.matches;

/**
 * MineTweaker/CraftTweaker bindings for Botania's Petal Apothecary recipes,
 * exposed to scripts as {@code mods.botania.Apothecary}.
 */
@ZenClass("mods.botania.Apothecary")
public class Apothecary {

    /** Recipe-category name used in log output for both add and remove actions. */
    protected static final String name = "Botania Petal";

    /** Adds an apothecary recipe producing {@code output} from the given petal inputs. */
    @ZenMethod
    public static void addRecipe(IItemStack output, IIngredient[] input) {
        MineTweakerAPI.apply(new Add(new RecipePetals(toStack(output), toObjects(input))));
    }

    /** Convenience overload: {@code output} names a special-flower type. */
    @ZenMethod
    public static void addRecipe(String output, IIngredient[] input) {
        addRecipe(toIItemStack(ItemBlockSpecialFlower.ofType(output)), input);
    }

    private static class Add extends BaseListAddition<RecipePetals> {
        public Add(RecipePetals recipe) {
            // Use the shared `name` constant so log messages stay consistent
            // with the removal path (previously a hard-coded duplicate string).
            super(Apothecary.name, BotaniaAPI.petalRecipes);
            recipes.add(recipe);
        }

        @Override
        public String getRecipeInfo(RecipePetals recipe) {
            return LogHelper.getStackDescription(recipe.getOutput());
        }
    }

    /** Removes every apothecary recipe whose output matches {@code output}. */
    @ZenMethod
    public static void removeRecipe(IIngredient output) {
        // Collect all matching recipes first, then apply a single removal action.
        LinkedList<RecipePetals> result = new LinkedList<>();
        for (RecipePetals entry : BotaniaAPI.petalRecipes) {
            if (entry != null && entry.getOutput() != null && matches(output, toIItemStack(entry.getOutput()))) {
                result.add(entry);
            }
        }
        // Check if we found the recipes and apply the action
        if (!result.isEmpty()) {
            MineTweakerAPI.apply(new Remove(result));
        } else {
            LogHelper.logWarning(String.format("No %s Recipe found for %s. Command ignored!", Apothecary.name, output.toString()));
        }
    }

    /** Convenience overload: {@code output} names a special-flower type. */
    @ZenMethod
    public static void removeRecipe(String output) {
        removeRecipe(toIItemStack(ItemBlockSpecialFlower.ofType(output)));
    }

    private static class Remove extends BaseListRemoval<RecipePetals> {
        public Remove(List<RecipePetals> recipes) {
            super(Apothecary.name, BotaniaAPI.petalRecipes, recipes);
        }

        @Override
        public String getRecipeInfo(RecipePetals recipe) {
            return LogHelper.getStackDescription(recipe.getOutput());
        }
    }
}
module Roffle
  # Indexes a sexp tree by line number so callers can look up which
  # expression begins on a given source line.
  class SourceMap
    def initialize(sexp)
      @sexp = sexp
      @map = build(sexp)
    end

    # Returns the sexps that start on the given line numbers, in order,
    # silently skipping lines with no entry.
    def at_lines(lines)
      lines.flat_map { |line| wrap(@map[line]) }
    end

    private

    # Builds a hash of line number => first sexp beginning on that line.
    def build(sexp)
      # wtb inject
      mapping = {}
      sexp.each_sexp do |node|
        mapping[node.line] ||= node
      end
      mapping
    end

    # Wraps a value in a one-element array; nil becomes the empty array.
    def wrap(value)
      value.nil? ? [] : [value]
    end
  end
end
/*! jQuery UI - v1.10.4 - 2018-06-10
* http://jqueryui.com
* Copyright jQuery Foundation and other contributors; Licensed MIT */
/* Vendored, minified build: registers the en-GB datepicker locale
   (dd/mm/yy, week starts Monday) and sets it as the default locale.
   Do not hand-edit — replace with a fresh upstream build when updating. */
jQuery(function(t){t.datepicker.regional["en-GB"]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],dayNamesMin:["Su","Mo","Tu","We","Th","Fr","Sa"],weekHeader:"Wk",dateFormat:"dd/mm/yy",firstDay:1,isRTL:!1,showMonthAfterYear:!1,yearSuffix:""},t.datepicker.setDefaults(t.datepicker.regional["en-GB"])});
package database_comm;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// NOTE(review): com.sun.scenario.Settings appears unused in this file —
// likely an accidental IDE import; confirm and remove.
import com.sun.scenario.Settings;

import config.ProgramSettings;

/**
 * Thin wrapper around a single JDBC connection to the SQL Server instance
 * configured via {@link ProgramSettings}. Opens the connection eagerly in the
 * constructor; callers must invoke {@link #close()} when done.
 */
public class DatabaseConnection {

    static final String JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
    // NOTE(review): USER/PASS are not referenced by getConnectionUrl() —
    // the actual credentials come from ProgramSettings. Hard-coded credentials
    // in source are a security risk; confirm these constants are unused
    // elsewhere and remove them.
    static final String USER = "sa";
    static final String PASS = "brianbrian";

    //Debug localhost string
    //static final String connectionUrl = "jdbc:sqlserver://150.250.147.167:1433;DatabaseName=REDFOX;user=sa;password=brianbrian;";
    //Debug online string
    //String connectionUrl = "jdbc:sqlserver://rbmsdemo.dyndns.org:1433;DatabaseName=REDFOX;user=brian;password=brianbrian;";

    // Open for the lifetime of this object; released in close().
    private Connection connection;
    // Single shared Statement used for every query; see executeQuery() note.
    private Statement statement;

    /**
     * Registers the SQL Server driver and opens the connection described by
     * ProgramSettings.
     *
     * @throws SQLException if the connection cannot be established
     */
    public DatabaseConnection() throws SQLException {
        try {
            //I don't know what this line really does
            //No touch
            // (Loads the driver class so it registers itself with DriverManager;
            // required on pre-JDBC-4 drivers that are not auto-discovered.)
            Class.forName(JDBC_DRIVER);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        System.out.println("Connecting to database...");
        connection = DriverManager.getConnection(getConnectionUrl());
        statement = connection.createStatement();
    }

    // Builds the JDBC connection string from the ProgramSettings properties.
    private String getConnectionUrl() {
        return "jdbc:sqlserver://" + ProgramSettings.getProperty("database_server_name")
                + ":" + ProgramSettings.getProperty("database_port")
                + ";DatabaseName=" + ProgramSettings.getProperty("database_name")
                + ";user=" + ProgramSettings.getProperty("database_username")
                + ";password=" + ProgramSettings.getProperty("database_password") + ";";
        //String s = "jdbc:sqlserver://rbmsdemo.dyndns.org:1433;DatabaseName=REDFOX;user=brian;password=brianbrian;";
    }

    //Executes and returns results from a given query
    // NOTE(review): per JDBC, a Statement supports only one open ResultSet at
    // a time, so issuing a new query invalidates a previously returned
    // ResultSet — callers must finish reading before querying again.
    public ResultSet executeQuery(String query) throws SQLException {
        return statement.executeQuery(query);
    }

    //Executes a SQL statement, returns true it successful
    public boolean execute(String sql) throws SQLException {
        return statement.execute(sql);
    }

    //Close all connections in this object
    public void close() throws SQLException {
        statement.close();
        connection.close();
    }
}
package com.github.badoualy.telegram.tl.api.request;

import com.github.badoualy.telegram.tl.TLContext;
import com.github.badoualy.telegram.tl.api.TLAbsInputUser;
import com.github.badoualy.telegram.tl.api.TLAbsUpdates;
import com.github.badoualy.telegram.tl.core.TLMethod;
import com.github.badoualy.telegram.tl.core.TLObject;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import static com.github.badoualy.telegram.tl.StreamUtils.readInt;
import static com.github.badoualy.telegram.tl.StreamUtils.readTLObject;
import static com.github.badoualy.telegram.tl.StreamUtils.writeInt;
import static com.github.badoualy.telegram.tl.StreamUtils.writeTLObject;
import static com.github.badoualy.telegram.tl.TLObjectUtils.SIZE_CONSTRUCTOR_ID;
import static com.github.badoualy.telegram.tl.TLObjectUtils.SIZE_INT32;

/**
 * Binding for the TL-RPC method {@code messages.addChatUser#f9a0aa09}:
 * adds a user to a chat, forwarding up to {@code fwdLimit} recent messages.
 * Generated-style code — field order in serializeBody/deserializeBody must
 * match the TL schema exactly.
 *
 * @author Yannick Badoual yann.badoual@gmail.com
 * @see <a href="http://github.com/badoualy/kotlogram">http://github.com/badoualy/kotlogram</a>
 */
public class TLRequestMessagesAddChatUser extends TLMethod<TLAbsUpdates> {
    public static final int CONSTRUCTOR_ID = 0xf9a0aa09;

    // Target chat id.
    protected int chatId;

    // User to add.
    protected TLAbsInputUser userId;

    // Number of recent messages to forward to the new member.
    protected int fwdLimit;

    private final String _constructor = "messages.addChatUser#f9a0aa09";

    public TLRequestMessagesAddChatUser() {
    }

    public TLRequestMessagesAddChatUser(int chatId, TLAbsInputUser userId, int fwdLimit) {
        this.chatId = chatId;
        this.userId = userId;
        this.fwdLimit = fwdLimit;
    }

    /**
     * Reads the server response and checks it is a TLAbsUpdates.
     *
     * @throws IOException on parse failure or unexpected response type
     */
    @Override
    @SuppressWarnings({"unchecked", "SimplifiableConditionalExpression"})
    public TLAbsUpdates deserializeResponse(InputStream stream, TLContext context) throws IOException {
        final TLObject response = readTLObject(stream, context);
        if (response == null) {
            throw new IOException("Unable to parse response");
        }
        if (!(response instanceof TLAbsUpdates)) {
            throw new IOException(
                    "Incorrect response type, expected " + getClass().getCanonicalName() + ", found " + response
                            .getClass().getCanonicalName());
        }
        return (TLAbsUpdates) response;
    }

    // Writes fields in schema order: chatId, userId, fwdLimit.
    @Override
    public void serializeBody(OutputStream stream) throws IOException {
        writeInt(chatId, stream);
        writeTLObject(userId, stream);
        writeInt(fwdLimit, stream);
    }

    // Reads fields in the same order serializeBody writes them.
    @Override
    @SuppressWarnings({"unchecked", "SimplifiableConditionalExpression"})
    public void deserializeBody(InputStream stream, TLContext context) throws IOException {
        chatId = readInt(stream);
        userId = readTLObject(stream, context, TLAbsInputUser.class, -1);
        fwdLimit = readInt(stream);
    }

    @Override
    public int computeSerializedSize() {
        int size = SIZE_CONSTRUCTOR_ID;
        size += SIZE_INT32;                     // chatId
        size += userId.computeSerializedSize(); // userId
        size += SIZE_INT32;                     // fwdLimit
        return size;
    }

    @Override
    public String toString() {
        return _constructor;
    }

    @Override
    public int getConstructorId() {
        return CONSTRUCTOR_ID;
    }

    public int getChatId() {
        return chatId;
    }

    public void setChatId(int chatId) {
        this.chatId = chatId;
    }

    public TLAbsInputUser getUserId() {
        return userId;
    }

    public void setUserId(TLAbsInputUser userId) {
        this.userId = userId;
    }

    public int getFwdLimit() {
        return fwdLimit;
    }

    public void setFwdLimit(int fwdLimit) {
        this.fwdLimit = fwdLimit;
    }
}
<!DOCTYPE html> <html lang="en"><head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="description" content=""> <meta name="author" content=""> <link rel="shortcut icon" href="../../docs-assets/ico/favicon.png"><title>Iversity Webblog</title> <!-- Bootstrap core CSS --> <link href="dist/css/bootstrap.css" rel="stylesheet"> <!-- Custom styles for this template --> <link href="jumbotron-narrow.css" rel="stylesheet"> <!-- Just for debugging purposes. Don't actually copy this line! --><!--[if lt IE 9]><script src="../../docs-assets/js/ie8-responsive-file-warning.js"></script><![endif]--><!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries --><!--[if lt IE 9]> <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script> <script src="https://oss.maxcdn.com/libs/respond.js/1.3.0/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="header"> <ul class="nav nav-pills pull-right"> <li class="active"><a href="boot.html">Home</a></li> <li><a href="#">Impressum</a></li> </ul> <h3 class="text-muted">Webblog zum Onlinekurs</h3> </div> <div class="jumbotron"> <h1><img style="width: 90%; height: 90%;" alt="Iversity Logo" src="https://d1wshrh2fwv7ib.cloudfront.net/assets/logo-iversity-494f4c5e361ad68f629f320da9858677.png"></h1> MOOC &nbsp;| &nbsp;Web-Engineering I<br> Grundlagen der Webentwicklung<br> </div> <div class="row marketing"> <div class="col-lg-6"> <h4>Impressumsangaben</h4> <p> Daniel Ponath<br> &#112;&#111;&#110;&#97;&#116;&#104;&#64;&#103;&#109;&#120;&#46;&#100;&#101;</p> </div> </div> <div class="footer"> <p><a href="impress.html">Impressum</a> | © Daniel Ponath 2013 | Jumbotron Narrow Theme</p> </div> <!-- /container --><!-- jQuery (necessary for Bootstrap's JavaScript plugins) --> <script src="https://code.jquery.com/jquery.js"></script><!-- Include all compiled plugins 
(below), or include individual files as needed --> <script src="../dist/js/bootstrap.min.js"></script></div> </body></html>
/*
 * Copyright (C) 2013 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "public/platform/WebMediaStreamTrack.h"

#include "platform/mediastream/MediaStreamComponent.h"
#include "platform/mediastream/MediaStreamSource.h"
#include "public/platform/WebAudioSourceProvider.h"
#include "public/platform/WebMediaStream.h"
#include "public/platform/WebMediaStreamSource.h"
#include "public/platform/WebString.h"
#include "wtf/Vector.h"

using namespace WebCore;

namespace blink {

namespace {

// Bridges a public-API ExtraData object into the ExtraData slot of the
// WebCore-side MediaStreamComponent, taking ownership of the wrapped object.
class ExtraDataContainer : public MediaStreamComponent::ExtraData {
public:
    explicit ExtraDataContainer(PassOwnPtr<WebMediaStreamTrack::ExtraData> extraData) : m_extraData(extraData) { }

    WebMediaStreamTrack::ExtraData* extraData() { return m_extraData.get(); }

private:
    // Sole owner of the embedder-supplied ExtraData (WTF pre-C++11 smart pointer).
    OwnPtr<WebMediaStreamTrack::ExtraData> m_extraData;
};

} // namespace

// WebMediaStreamTrack is a thin ref-counted handle: every method below
// forwards to the wrapped WebCore::MediaStreamComponent (m_private).

WebMediaStreamTrack::WebMediaStreamTrack(PassRefPtr<WebCore::MediaStreamComponent> mediaStreamComponent)
    : m_private(mediaStreamComponent)
{
}

WebMediaStreamTrack::WebMediaStreamTrack(WebCore::MediaStreamComponent* mediaStreamComponent)
    : m_private(mediaStreamComponent)
{
}

WebMediaStreamTrack& WebMediaStreamTrack::operator=(WebCore::MediaStreamComponent* mediaStreamComponent)
{
    m_private = mediaStreamComponent;
    return *this;
}

// Creates a fresh component for the given source (id variant supplies an
// explicit track id).
void WebMediaStreamTrack::initialize(const WebMediaStreamSource& source)
{
    m_private = MediaStreamComponent::create(source);
}

void WebMediaStreamTrack::initialize(const WebString& id, const WebMediaStreamSource& source)
{
    m_private = MediaStreamComponent::create(id, source);
}

// Drops this handle's reference to the component.
void WebMediaStreamTrack::reset()
{
    m_private.reset();
}

WebMediaStreamTrack::operator PassRefPtr<MediaStreamComponent>() const
{
    return m_private.get();
}

WebMediaStreamTrack::operator MediaStreamComponent*() const
{
    return m_private.get();
}

bool WebMediaStreamTrack::isEnabled() const
{
    ASSERT(!m_private.isNull());
    return m_private->enabled();
}

WebString WebMediaStreamTrack::id() const
{
    ASSERT(!m_private.isNull());
    return m_private->id();
}

WebMediaStream WebMediaStreamTrack::stream() const
{
    ASSERT(!m_private.isNull());
    return WebMediaStream(m_private->stream());
}

WebMediaStreamSource WebMediaStreamTrack::source() const
{
    ASSERT(!m_private.isNull());
    return WebMediaStreamSource(m_private->source());
}

// Unwraps the embedder ExtraData stored via setExtraData(); returns 0 when
// none has been set.
WebMediaStreamTrack::ExtraData* WebMediaStreamTrack::extraData() const
{
    RefPtr<MediaStreamComponent::ExtraData> data = m_private->extraData();
    if (!data)
        return 0;
    return static_cast<ExtraDataContainer*>(data.get())->extraData();
}

// Takes ownership of extraData (wrapped in an adopting container).
void WebMediaStreamTrack::setExtraData(ExtraData* extraData)
{
    m_private->setExtraData(adoptRef(new ExtraDataContainer(adoptPtr(extraData))));
}

void WebMediaStreamTrack::setSourceProvider(WebAudioSourceProvider* provider)
{
    // No-op in builds without Web Audio support.
#if ENABLE(WEB_AUDIO)
    ASSERT(!m_private.isNull());
    m_private->setSourceProvider(provider);
#endif // ENABLE(WEB_AUDIO)
}

void WebMediaStreamTrack::assign(const WebMediaStreamTrack& other)
{
    m_private = other.m_private;
}

} // namespace blink
/*
 * ProGuard -- shrinking, optimization, obfuscation, and preverification
 * of Java bytecode.
 *
 * Copyright (c) 2002-2016 Eric Lafortune @ GuardSquare
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package proguard;

import java.io.*;
import java.util.*;

/**
 * This class checks and prints out information about the GPL.
 *
 * @author Eric Lafortune
 */
public class GPL
{
    /**
     * Prints out a note about the GPL if ProGuard is linked against unknown
     * code.
     */
    public static void check()
    {
        // Capture the current stack trace as text, so the packages of the
        // calling code can be inspected.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        new Exception().printStackTrace(new PrintStream(out));
        LineNumberReader reader = new LineNumberReader(
                                  new InputStreamReader(
                                  new ByteArrayInputStream(out.toByteArray())));

        Set<String> unknownPackageNames = unknownPackageNames(reader);

        if (!unknownPackageNames.isEmpty())
        {
            String uniquePackageNames = uniquePackageNames(unknownPackageNames);

            System.out.println("ProGuard is released under the GNU General Public License. You therefore");
            System.out.println("must ensure that programs that link to it ("+uniquePackageNames+"...)");
            System.out.println("carry the GNU General Public License as well. Alternatively, you can");
            System.out.println("apply for an exception with the author of ProGuard.");
        }
    }


    /**
     * Returns the set of package names found in "at ..." frames of the given
     * stack trace, excluding packages accepted by {@link #isKnown(String)}.
     */
    private static Set<String> unknownPackageNames(LineNumberReader reader)
    {
        Set<String> packageNames = new HashSet<String>();

        try
        {
            String line;
            while ((line = reader.readLine()) != null)
            {
                line = line.trim();
                if (line.startsWith("at "))
                {
                    // Strip the "at" prefix, the "(File.java:123)" location,
                    // the method name, and the class name, leaving only the
                    // package name.
                    line = line.substring(2).trim();
                    line = trimSuffix(line, '(');
                    line = trimSuffix(line, '.');
                    line = trimSuffix(line, '.');

                    if (line.length() > 0 &&
                        !isKnown(line))
                    {
                        packageNames.add(line);
                    }
                }
            }
        }
        catch (IOException ex)
        {
            // We'll just stop looking for more names.
        }

        return packageNames;
    }


    /**
     * Returns a comma-separated list of package names from the set, excluding
     * any subpackages of packages in the set.
     */
    private static String uniquePackageNames(Set<String> packageNames)
    {
        StringBuilder buffer = new StringBuilder();

        for (String packageName : packageNames)
        {
            if (!containsPrefix(packageNames, packageName))
            {
                buffer.append(packageName).append(", ");
            }
        }

        return buffer.toString();
    }


    /**
     * Returns the given string without the suffix, as defined by the given
     * separator. Returns the empty string if the separator does not occur.
     */
    private static String trimSuffix(String string, char separator)
    {
        int index = string.lastIndexOf(separator);
        return index < 0 ? "" : string.substring(0, index);
    }


    /**
     * Returns whether the given set contains a proper dot-delimited prefix
     * of the given name (i.e. the name is a subpackage of a set entry).
     */
    private static boolean containsPrefix(Set<String> set, String name)
    {
        int index = 0;

        // Walk the dots from left to right until a contained prefix is found.
        while (!set.contains(name.substring(0, index)))
        {
            index = name.indexOf('.', index + 1);
            if (index < 0)
            {
                return false;
            }
        }

        return true;
    }


    /**
     * Returns whether the given package name has been granted an exception
     * against the GPL linking clause, by the copyright holder of ProGuard.
     * This method is not legally binding, but of course the actual license is.
     * Please contact the copyright holder if you would like an exception for
     * your code as well.
     */
    private static boolean isKnown(String packageName)
    {
        return packageName.startsWith("java")                 ||
               packageName.startsWith("sun.reflect")          ||
               packageName.startsWith("proguard")             ||
               packageName.startsWith("org.apache.tools.ant") ||
               packageName.startsWith("org.apache.tools.maven") ||
               packageName.startsWith("org.gradle")           ||
               packageName.startsWith("org.codehaus.groovy")  ||
               packageName.startsWith("org.eclipse")          ||
               packageName.startsWith("org.netbeans")         ||
               packageName.startsWith("com.android")          ||
               packageName.startsWith("com.sun.kvem")         ||
               packageName.startsWith("net.certiv.proguarddt") ||
               packageName.startsWith("groovy")               ||
               packageName.startsWith("scala")                ||
               packageName.startsWith("sbt")                  ||
               packageName.startsWith("xsbt")                 ||
               packageName.startsWith("eclipseme");
    }


    /**
     * Test entry point: reads a stack trace from standard input and prints
     * the unknown package names it contains.
     */
    public static void main(String[] args)
    {
        LineNumberReader reader = new LineNumberReader(
                                  new InputStreamReader(System.in));

        Set<String> unknownPackageNames = unknownPackageNames(reader);

        if (!unknownPackageNames.isEmpty())
        {
            String uniquePackageNames = uniquePackageNames(unknownPackageNames);

            System.out.println(uniquePackageNames);
        }
    }
}
<?php

namespace DD\ShopBundle\Form;

use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;

/**
 * Symfony form type for creating and editing Resource entities.
 */
class ResourceType extends AbstractType
{
    /**
     * Builds the form with plain `name` and `description` fields
     * (field types are guessed from the entity metadata).
     *
     * @param FormBuilderInterface $builder
     * @param array $options
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder
            ->add('name')
            ->add('description')
        ;
    }

    /**
     * Binds submitted data to the Resource entity class.
     *
     * @param OptionsResolverInterface $resolver
     */
    public function setDefaultOptions(OptionsResolverInterface $resolver)
    {
        $resolver->setDefaults(array(
            'data_class' => 'DD\ShopBundle\Entity\Resource'
        ));
    }

    /**
     * @return string The unique name identifying this form type.
     */
    public function getName()
    {
        return 'dd_shopbundle_resource';
    }
}
var mongoose = require('mongoose');
var BaseModel = require("./base_model");
var Schema = mongoose.Schema;
var ObjectId = Schema.ObjectId;

// Records that a user follows some other object. `object_id` points at the
// followed object; `kind` presumably names the followed object's type --
// TODO confirm against callers.
var UserFollowSchema = new Schema({
  user_id: { type: ObjectId },                  // the follower
  kind: { type: String },                       // discriminator for object_id
  object_id: { type: ObjectId },                // the followed object
  create_at: { type: Date, default: Date.now }  // when the follow was created
});

UserFollowSchema.plugin(BaseModel);
// A given user may follow a given object at most once.
UserFollowSchema.index({user_id: 1, object_id: 1}, {unique: true});

mongoose.model('UserFollow', UserFollowSchema);
'use strict'

// Production webpack configuration -- module requires and the constants
// derived from the project's paths/env before the exported config object.
const path = require('path')
const webpack = require('webpack')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const ManifestPlugin = require('webpack-manifest-plugin')
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin')
const SWPrecacheWebpackPlugin = require('sw-precache-webpack-plugin')
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin')

const paths = require('./paths')
const getClientEnvironment = require('./env')

// Webpack uses `publicPath` to determine where the app is being served from.
// It requires a trailing slash, or the file assets will get an incorrect path.
const publicPath = paths.servedPath
// Some apps do not use client-side routing with pushState.
// For these, "homepage" can be set to "." to enable relative asset paths.
const shouldUseRelativeAssetPaths = publicPath === './'
// `publicUrl` is just like `publicPath`, but we will provide it to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
const publicUrl = publicPath.slice(0, -1)
// Get environment variables to inject into our app.
const env = getClientEnvironment(publicUrl)

// Assert this just to be safe.
// Development builds of React are slow and not intended for production.
// (env.stringified values are JSON-encoded, hence the quoted comparison.)
if (env.stringified['process.env'].NODE_ENV !== '"production"') {
  throw new Error('Production builds must have NODE_ENV=production.')
}

// Note: defined here because it will be used more than once.
const cssFilename = 'static/css/[name].[contenthash:8].css'

// ExtractTextPlugin expects the build output to be flat.
// (See https://github.com/webpack-contrib/extract-text-webpack-plugin/issues/27)
// However, our output is structured with css, js and media folders.
// To have this structure working with relative paths, we have to use custom options.
const extractTextPluginOptions = shouldUseRelativeAssetPaths // Making sure that the publicPath goes back to to build folder. ? { publicPath: Array(cssFilename.split('/').length).join('../') } : {} // This is the production configuration. // It compiles slowly and is focused on producing a fast and minimal bundle. // The development configuration is different and lives in a separate file. module.exports = { // Don't attempt to continue if there are any errors. bail: true, // We generate sourcemaps in production. This is slow but gives good results. // You can exclude the *.map files from the build during deployment. devtool: 'source-map', // In production, we only want to load the polyfills and the app code. entry: [require.resolve('./polyfills'), paths.appIndexJs], output: { // The build folder. path: paths.appBuild, // Generated JS file names (with nested folders). // There will be one main bundle, and one file per asynchronous chunk. // We don't currently advertise code splitting but Webpack supports it. filename: 'static/js/[name].[chunkhash:8].js', chunkFilename: 'static/js/[name].[chunkhash:8].chunk.js', // We inferred the "public path" (such as / or /my-project) from homepage. publicPath: publicPath, // Point sourcemap entries to original disk location devtoolModuleFilenameTemplate: info => path.relative(paths.appSrc, info.absoluteResourcePath) }, resolve: { // This allows you to set a fallback for where Webpack should look for modules. // We placed these paths second because we want `node_modules` to "win" // if there are any conflicts. This matches Node resolution mechanism. // https://github.com/facebookincubator/create-react-app/issues/253 modules: ['node_modules', paths.appNodeModules].concat( // It is guaranteed to exist because we tweak it in `env.js` process.env.NODE_PATH.split(path.delimiter).filter(Boolean) ), // These are the reasonable defaults supported by the Node ecosystem. 
// We also include JSX as a common component filename extension to support // some tools, although we do not recommend using it, see: // https://github.com/facebookincubator/create-react-app/issues/290 extensions: ['.js', '.json', '.jsx'], alias: { // Support React Native Web // https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/ 'react-native': 'react-native-web', Components: path.resolve(__dirname, '../src/components'), Util: path.resolve(__dirname, '../src/util'), Actions: path.resolve(__dirname, '../src/actions'), Selectors: path.resolve(__dirname, '../src/selectors') }, plugins: [ // Prevents users from importing files from outside of src/ (or node_modules/). // This often causes confusion because we only process files within src/ with babel. // To fix this, we prevent you from importing files out of src/ -- if you'd like to, // please link the files into your node_modules/ and let module-resolution kick in. // Make sure your source files are compiled, as they will not be processed in any way. new ModuleScopePlugin(paths.appSrc) ] }, module: { strictExportPresence: true, rules: [ // TODO: Disable require.ensure as it's not a standard language feature. // We are waiting for https://github.com/facebookincubator/create-react-app/issues/2176. // { parser: { requireEnsure: false } }, // First, run the linter. // It's important to do this before Babel processes the JS. { test: /\.(js|jsx)$/, enforce: 'pre', loader: 'standard-loader', options: { error: true }, include: paths.appSrc }, // ** ADDING/UPDATING LOADERS ** // The "file" loader handles all assets unless explicitly excluded. // The `exclude` list *must* be updated with every change to loader extensions. // When adding a new loader, you must add its `test` // as a new entry in the `exclude` list in the "file" loader. // "file" loader makes sure those assets end up in the `build` folder. // When you `import` an asset, you get its filename. 
{ exclude: [ /\.html$/, /\.(js|jsx)$/, /\.css$/, /\.scss$/, /\.json$/, /\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/ ], loader: require.resolve('file-loader'), options: { name: 'static/media/[name].[hash:8].[ext]' } }, // "url" loader works just like "file" loader but it also embeds // assets smaller than specified size as data URLs to avoid requests. { test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/], loader: require.resolve('url-loader'), options: { limit: 10000, name: 'static/media/[name].[hash:8].[ext]' } }, // Process JS with Babel. { test: /\.(js|jsx)$/, include: paths.appSrc, loader: require.resolve('babel-loader') }, // The notation here is somewhat confusing. // "postcss" loader applies autoprefixer to our CSS. // "css" loader resolves paths in CSS and adds assets as dependencies. // "style" loader normally turns CSS into JS modules injecting <style>, // but unlike in development configuration, we do something different. // `ExtractTextPlugin` first applies the "postcss" and "css" loaders // (second argument), then grabs the result CSS and puts it into a // separate file in our build process. This way we actually ship // a single CSS file in production instead of JS code injecting <style> // tags. If you use code splitting, however, any async bundles will still // use the "style" loader inside the async code so CSS from them won't be // in the main CSS file. 
{ test: /\.css$/, loader: ExtractTextPlugin.extract( Object.assign( { fallback: require.resolve('style-loader'), use: [ { loader: require.resolve('css-loader'), options: { importLoaders: 1, minimize: true, sourceMap: true } } // { // loader: require.resolve('postcss-loader'), // options: { // ident: 'postcss', // https://webpack.js.org/guides/migrating/#complex-options // plugins: () => [ // require('postcss-flexbugs-fixes'), // autoprefixer({ // browsers: [ // '>1%', // 'last 4 versions', // 'Firefox ESR', // 'not ie < 9', // React doesn't support IE8 anyway // ], // flexbox: 'no-2009', // }), // ], // }, // }, ] }, extractTextPluginOptions ) ) // Note: this won't work without `new ExtractTextPlugin()` in `plugins`. } // ** STOP ** Are you adding a new loader? // Remember to add the new extension(s) to the "file" loader exclusion list. ] }, plugins: [ // Makes some environment variables available in index.html. // The public URL is available as %PUBLIC_URL% in index.html, e.g.: // <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico"> // In production, it will be an empty string unless you specify "homepage" // in `package.json`, in which case it will be the pathname of that URL. new InterpolateHtmlPlugin(env.raw), // Generates an `index.html` file with the <script> injected. new HtmlWebpackPlugin({ inject: true, template: paths.appHtml, minify: { removeComments: true, collapseWhitespace: true, removeRedundantAttributes: true, useShortDoctype: true, removeEmptyAttributes: true, removeStyleLinkTypeAttributes: true, keepClosingSlash: true, minifyJS: true, minifyCSS: true, minifyURLs: true } }), // Makes some environment variables available to the JS code, for example: // if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`. // It is absolutely essential that NODE_ENV was set to production here. // Otherwise React will be compiled in the very slow development mode. new webpack.DefinePlugin(env.stringified), // Minify the code. 
new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false, // This feature has been reported as buggy a few times, such as: // https://github.com/mishoo/UglifyJS2/issues/1964 // We'll wait with enabling it by default until it is more solid. reduce_vars: false }, output: { comments: false }, sourceMap: true }), // Note: this won't work without ExtractTextPlugin.extract(..) in `loaders`. new ExtractTextPlugin({ filename: cssFilename }), // Generate a manifest file which contains a mapping of all asset filenames // to their corresponding output file so that tools can pick it up without // having to parse `index.html`. new ManifestPlugin({ fileName: 'asset-manifest.json' }), // Generate a service worker script that will precache, and keep up to date, // the HTML & assets that are part of the Webpack build. new SWPrecacheWebpackPlugin({ // By default, a cache-busting query parameter is appended to requests // used to populate the caches, to ensure the responses are fresh. // If a URL is already hashed by Webpack, then there is no concern // about it being stale, and the cache-busting can be skipped. dontCacheBustUrlsMatching: /\.\w{8}\./, filename: 'service-worker.js', logger (message) { if (message.indexOf('Total precache size is') === 0) { // This message occurs for every build and is a bit too noisy. return } console.log(message) }, minify: true, navigateFallback: publicUrl + '/index.html', staticFileGlobsIgnorePatterns: [/\.map$/, /asset-manifest\.json$/], // Work around Windows path issue in SWPrecacheWebpackPlugin: // https://github.com/facebookincubator/create-react-app/issues/2235 stripPrefix: paths.appBuild.replace(/\\/g, '/') + '/' }), // Moment.js is an extremely popular library that bundles large locale files // by default due to how Webpack interprets its code. This is a practical // solution that requires the user to opt into importing specific locales. 
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack // You can remove this if you don't use Moment.js: new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/) ], // Some libraries import Node modules but don't use them in the browser. // Tell Webpack to provide empty mocks for them so importing them works. node: { fs: 'empty', net: 'empty', tls: 'empty' } }
using System.Threading.Tasks;

namespace Framework.Tile
{
    /// <summary>
    /// Manages pinning of secondary tiles described by <see cref="TileInfo"/>.
    /// </summary>
    public interface ISecondaryPinner
    {
        /// <summary>Pins the tile; returns true on success.</summary>
        Task<bool> Pin(TileInfo tileInfo);

        /// <summary>Unpins the tile; returns true on success.</summary>
        Task<bool> Unpin(TileInfo tileInfo);

        /// <summary>Returns whether the tile is currently pinned.</summary>
        // Fix: parameter was the garbled identifier `tiltileInfoeId`
        // ("tileId" with "tileInfo" pasted inside); renamed for consistency
        // with Pin/Unpin.
        bool IsPinned(TileInfo tileInfo);
    }
}
require 'fileutils'
require "bundler/setup"

# Boots Guard (the file-watching runner) against the local Guardfile.
# Intended to run in a forked child; exits cleanly when sent SIGHUP.
def run_guard
  Signal.trap("HUP") { exit }
  require 'guard'
  Guard.setup
  Guard::Dsl.evaluate_guardfile(:guardfile => 'Guardfile')
  Guard.start
end

# Start Guard in a child process so this script can drive it.
guard_pid = fork { run_guard }
sleep 2
# Touch every spec and source file so Guard sees them all as changed
# and rebuilds everything.
Dir['{specs,source}/**/*'].each{ |f| FileUtils.touch(f) }
sleep 2
# Ask Guard to exit, wait for it, then run the npm minification step.
Process.kill "HUP", guard_pid
Process.wait( guard_pid )
`npm run-script minify`
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <title>W30725_text</title> <link rel="stylesheet" type="text/css" href="style.css" /> </head> <body> <div style="margin-left: auto; margin-right: auto; width: 800px; overflow: hidden;"> <div style="float: left;"> <a href="page36.html">&laquo;</a> </div> <div style="float: right;"> </div> </div> <hr/> <div style="position: absolute; margin-left: 550px; margin-top: 192px;"> <p class="styleSans28.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p> </div> <div style="position: absolute; margin-left: 1009px; margin-top: 246px;"> <p class="styleSans12000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p> </div> <div style="position: absolute; margin-left: 247px; margin-top: 357px;"> <p class="styleSans12000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p> </div> <div style="position: absolute; margin-left: 517px; margin-top: 330px;"> <img src="images/tmpCrYitz.png" alt="tmpCrYitz.png" /> </div> <div style="position: absolute; margin-left: 1030px; margin-top: 330px;"> <p class="styleSans12000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p> </div> <div style="position: absolute; margin-left: 660px; margin-top: 577px;"> <p class="styleSans36.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"> <br/>Proppant Totals <br/>None 20/40 White Sand 90000 lb None 18/40 VersaProp 45000 lb <br/> </p> </div> <div style="position: absolute; margin-left: 247px; margin-top: 825px;"> <p class="styleSans144.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"> <br/>Treatmem EXQCUIIOH <br/>Volume Volume Mass Mass —-m--m--I-—n--m-“ “um..“mnn-m-am“..1. 
.1!- <br/>Pre Flush 1000 70000 -_ 135000 “.3- <br/> <br/> </p> </div> <div style="position: absolute; margin-left: 1650px; margin-top: 2915px;"> <p class="styleSans33.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>">Schlumberger </p> </div> <div style="position: absolute; margin-left: 1375px; margin-top: 219px;"> <img src="images/tmpeLOqd5.png" alt="tmpeLOqd5.png" /> </div> <div style="position: absolute; margin-left: 1861px; margin-top: 192px;"> <p class="styleSans44.0000<enum PANGO_WEIGHT_NORMAL of type PangoWeight><enum PANGO_STYLE_NORMAL of type PangoStyle>"></p> </div> </body> </html>
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. config.secret_key = 'ad9f64291a13fcc9b06c1925af84ca2d72534cd1f7fd28611672148e1896ec224fc9f7b330edb12834fe8ce5f3e6414179953d344a6e5c2d6c62e1b955e32956' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = 'please-change-me-at-config-initializers-devise@example.com' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. config.authentication_keys = [ :login ] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. 
For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [ :email ] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [ :email ] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If http headers should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. #config.paranoid = true # By default Devise will store the user in session. 
You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing :skip => :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 10. If # using other encryptors, it sets how many times you want the password re-encrypted. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. config.stretches = Rails.env.test? ? 1 : 10 # Setup a pepper to generate the encrypted password. # config.pepper = '4808813c7426ff5a3770a8982dfdc37e94867741a316bf01c8ca8c39d70c636f22c45d47ad44c312e49719d04b6ac270a8f1ba8afc1564fda0c77282824641c7' # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming his account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming his account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming his account. #config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. 
For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed new email is stored in # unconfirmed email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account config.confirmation_keys = [ :login ] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # :secure => true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. Default is 4..128. config.password_length = 4..128 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@]+@[^@]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # If true, expires auth token on session timeout. # config.expire_auth_token_on_timeout = false # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. 
# :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [ :email ] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = false # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account config.reset_password_keys = [ :login ] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # ==> Configuration for :encryptable # Allow you to use another encryption algorithm besides bcrypt (default). You can use # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1, # :authlogic_sha512 (then you should set stretches above to 20 for default behavior) # and :restful_authentication_sha1 (then you should set stretches to 10, and copy # REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". 
It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', :scope => 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(:scope => :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. 
# The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using omniauth, Devise cannot automatically set Omniauth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
"use strict" const messages = require("..").messages const ruleName = require("..").ruleName const rules = require("../../../rules") const rule = rules[ruleName] testRule(rule, { ruleName, config: ["always"], accept: [ { code: "a { color :pink }", description: "space only before", }, { code: "a { color : pink }", description: "space before and after", }, { code: "a { color :\npink }", description: "space before and newline after", }, { code: "a { color :\r\npink }", description: "space before and CRLF after", }, { code: "$map:(key:value)", description: "SCSS map with no newlines", }, { code: "$list:('value1', 'value2')", description: "SCSS list with no newlines", }, { code: "a { background : url(data:application/font-woff;...); }", description: "data URI", } ], reject: [ { code: "a { color: pink; }", description: "no space before", message: messages.expectedBefore(), line: 1, column: 11, }, { code: "a { color : pink; }", description: "two spaces before", message: messages.expectedBefore(), line: 1, column: 11, }, { code: "a { color\t: pink; }", description: "tab before", message: messages.expectedBefore(), line: 1, column: 11, }, { code: "a { color\n: pink; }", description: "newline before", message: messages.expectedBefore(), line: 2, column: 1, }, { code: "a { color\r\n: pink; }", description: "CRLF before", message: messages.expectedBefore(), line: 1, column: 11, } ], }) testRule(rule, { ruleName, config: ["never"], accept: [ { code: "a { color:pink }", description: "no space before and after", }, { code: "a { color: pink }", description: "no space before and space after", }, { code: "a { color:\npink }", description: "no space before and newline after", }, { code: "a { color:\r\npink }", description: "no space before and CRLF after", }, { code: "$map :(key :value)", description: "SCSS map with no newlines", } ], reject: [ { code: "a { color : pink; }", description: "space before", message: messages.rejectedBefore(), line: 1, column: 11, }, { code: "a { color : 
pink; }", description: "two spaces before", message: messages.rejectedBefore(), line: 1, column: 11, }, { code: "a { color\t: pink; }", description: "tab before", message: messages.rejectedBefore(), line: 1, column: 11, }, { code: "a { color\n: pink; }", description: "newline before", message: messages.rejectedBefore(), line: 2, column: 1, }, { code: "a { color\r\n: pink; }", description: "CRLF before", message: messages.rejectedBefore(), line: 1, column: 11, } ], })
<?php /* FinanceiroBundle:Endereco:edit.html.twig */ class __TwigTemplate_1dc971c36abf90e00714509b23da42af7e32f6d9a7851853410b161779a15e2d extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); // line 1 try { $this->parent = $this->env->loadTemplate("::base.html.twig"); } catch (Twig_Error_Loader $e) { $e->setTemplateFile($this->getTemplateName()); $e->setTemplateLine(1); throw $e; } $this->blocks = array( 'body' => array($this, 'block_body'), ); } protected function doGetParent(array $context) { return "::base.html.twig"; } protected function doDisplay(array $context, array $blocks = array()) { $this->parent->display($context, array_merge($this->blocks, $blocks)); } // line 3 public function block_body($context, array $blocks = array()) { // line 4 echo "<h1>Endereco edit</h1> "; // line 6 echo $this->env->getExtension('form')->renderer->renderBlock((isset($context["edit_form"]) ? $context["edit_form"] : $this->getContext($context, "edit_form")), 'form'); echo " <ul class=\"record_actions\"> <li> <a href=\""; // line 10 echo $this->env->getExtension('routing')->getPath("endereco"); echo "\"> Back to the list </a> </li> <li>"; // line 14 echo $this->env->getExtension('form')->renderer->renderBlock((isset($context["delete_form"]) ? $context["delete_form"] : $this->getContext($context, "delete_form")), 'form'); echo "</li> </ul> "; } public function getTemplateName() { return "FinanceiroBundle:Endereco:edit.html.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 57 => 14, 50 => 10, 43 => 6, 39 => 4, 36 => 3, 11 => 1,); } }
package binders

import org.scalatest.{Matchers, FlatSpec}

import models.{Namespace, RepositoryName, Repository}

/**
 * Specs for `repoBinder`, the URL path binder that parses a repository
 * reference of the form "name" or "namespace/name".
 */
class BindersSpec extends FlatSpec with Matchers {

  // A bare name binds to a repository with no namespace.
  "repoBinder" should "bind 'foo' to a Repository with no namespace" in {
    repoBinder.bind("", "foo") shouldBe Right(Repository(RepositoryName("foo")))
  }

  // A single slash splits the value into namespace and repository name.
  it should "bind 'foo/bar' to a Repository with namespace 'foo' and name 'bar'" in {
    repoBinder.bind("", "foo/bar") shouldBe Right(Repository(Namespace("foo"), RepositoryName("bar")))
  }

  // More than one slash is rejected with a Left (binding error).
  it should "fail to bind 'foo/bar/baz'" in {
    repoBinder.bind("", "foo/bar/baz") shouldBe a[Left[String, _]]
  }
}
import argparse

from nltk.corpus import brown
import requests
import arrow
import json

parser = argparse.ArgumentParser()
parser.add_argument('host')
args = parser.parse_args()

# Number of Brown-corpus tokens posted to a chapter by
# post_example_text_to_chapter().
MAX_TOKENS = 1000


def create_new_novel():
    """Create a timestamped test novel through the API and return its id."""
    url = 'http://{host}/api/novel'.format(host=args.host)
    response = requests.post(url, json={'title': 'Test Novel {}'.format(arrow.utcnow())})
    return response.json()['id']


def create_new_chapter(novel_id):
    """Create a timestamped chapter inside the given novel and return its id."""
    url = 'http://{host}/api/chapter'.format(host=args.host)
    chapter_title = 'Chapter {}'.format(arrow.utcnow())
    response = requests.post(url, json={'title': chapter_title, 'novel_id': novel_id})
    return response.json()['id']


def post_example_text_to_chapter(chapter_id, host):
    """Post the first MAX_TOKENS lower-cased words of the Brown 'news'
    category to the chapter, one token per request.

    Bug fix: the previous ``ordinal > 1000`` check posted 1001 tokens
    (ordinals 0..1000 inclusive); ``>= MAX_TOKENS`` posts exactly 1000.
    """
    url = 'http://{host}/api/novel_token'.format(host=host)
    words = brown.words(categories=['news'])
    for ordinal, word in enumerate(words):
        if ordinal >= MAX_TOKENS:
            break
        requests.post(url, json={'token': word.lower(),
                                 'ordinal': ordinal,
                                 'chapter_id': chapter_id})


if __name__ == '__main__':
    novel_id = create_new_novel()
    chapter_id = create_new_chapter(novel_id)
    post_example_text_to_chapter(chapter_id, args.host)
#ifndef BIOSENSOR_MODELING_PARAMETERS_UTILS_H
#define BIOSENSOR_MODELING_PARAMETERS_UTILS_H

#include <cmath>
#include <string>
#include <vector>  // bug fix: std::vector is used in the declarations below
                   // but was never included (relied on a transitive include)

/**
 * Aggregates every physical and numerical parameter of the bio-sensor model.
 *
 * The constructor takes layer widths and diffusion coefficients in
 * "engineering" units and stores them converted to SI base units
 * (presumably µm -> m and µm²/s -> m²/s — TODO confirm against the
 * model documentation).
 */
struct parameters {
    // Widths of the bio-sensor layers (stored in metres)
    double d_e;
    double d_m;

    // Space and time mesh parameters
    unsigned N_b;
    unsigned T;
    unsigned M;

    // Model parameters (diffusion coefficients stored in m^2/s)
    double Dse;
    double Dsm;
    double Dpe;
    double Dpm;
    double C1;
    double C2;
    double Vmax;
    double Km;
    double S0;
    unsigned L;
    unsigned ne;

    parameters() {}

    parameters(
            double _d_e, double _d_m,
            int _N_b, int _T, int _M,
            double _Dse, double _Dsm, double _Dpe, double _Dpm,
            double _C1, double _C2,
            double _Vmax, double _Km, double _S0,
            unsigned _L, unsigned _ne
    ) {
        // Exact literals replace the original pow(10, -6)/pow(10, -12):
        // no runtime transcendental call and no rounding surprises.
        d_e = _d_e * 1e-6;
        d_m = _d_m * 1e-6;
        N_b = _N_b;
        T = _T;
        M = _M;
        Dse = _Dse * 1e-12;
        Dsm = _Dsm * 1e-12;
        Dpe = _Dpe * 1e-12;
        Dpm = _Dpm * 1e-12;
        C1 = _C1;
        C2 = _C2;
        Vmax = _Vmax;
        Km = _Km;
        S0 = _S0;
        L = _L;
        ne = _ne;
    }

    /// Human-readable dump of all stored (already converted) values.
    std::string toString() {
        std::string str = "Parameters: ";
        str += "d_e=" + std::to_string(d_e) + ", " +
               "d_m=" + std::to_string(d_m) + ", " +
               "N_b=" + std::to_string(N_b) + ", " +
               "T=" + std::to_string(T) + ", " +
               "M=" + std::to_string(M) + ", " +
               "Dse=" + std::to_string(Dse) + ", " +
               "Dsm=" + std::to_string(Dsm) + ", " +
               "Dpe=" + std::to_string(Dpe) + ", " +
               "Dpm=" + std::to_string(Dpm) + ", " +
               "C1=" + std::to_string(C1) + ", " +
               "C2=" + std::to_string(C2) + ", " +
               "Vmax=" + std::to_string(Vmax) + ", " +
               "Km=" + std::to_string(Km) + ", " +
               "S0=" + std::to_string(S0) + ", " +
               "L=" + std::to_string(L) + ", " +
               "ne=" + std::to_string(ne);
        return str;
    }
};

// Parse a parameters struct from the named configuration file.
parameters readParameters(std::string);

// Mesh helpers (defined in the corresponding .cpp).
std::vector<double> get_alpha(int de_length, int dm_length);
std::vector<double> get_D(std::vector<double> alpha, double D_e, double D_m);

#endif //BIOSENSOR_MODELING_PARAMETERS_UTILS_H
// --- Scene state shared by every function in this file ---
var scene;
var camera;
var renderer;
var stats;
var geometry;
var material;
var line;
var ambientLight;
var loader;
var cow;            // standing cow model (pre-loaded GLTF scene)
var cowMixer;       // animation mixer for the standing cow
var walkCow;        // walking cow model
var walkCowMixer;   // animation mixer for the walking cow
// Desired cow state: none | standing | walking.
// NOTE(review): initial value "walkings" matches neither branch in animate(),
// so no model swap happens until cowWalkIn() sets a real state — presumably
// intentional; confirm before "fixing".
var cowStatus = "walkings";
// Currently displayed cow state. NOTE(review): missing `var` — this is an
// implicit global.
cowCur = "walking"; // standing
var milk;
var loopAnim;       // looping milk-box frame animation
var loopFallMilk;   // looping "milk dripping" frame animation
var grass;
var grassMixer;
var grass2;
var grass3;
// Where grass clones are planted in the scene.
var grassList = [
    { mesh: undefined, x: 300, y: 120, z: -50 },
    { mesh: undefined, x: -160, y: 120, z: -300 },
    { mesh: undefined, x: 200, y: 120, z: -600 },
    { mesh: undefined, x: -400, y: 120, z: -1400 },
]
var clock = new THREE.Clock();
var webglContainer = document.getElementById('webgl-container');
var $cowNaz = $('#cow-naz');        // the clickable udder hotspot
var milkBoxStatus = 0;              // milk-box fill level: 1, 2, 3
var milkBoxLoading = false;         // true while a fill animation is running
var timeHandle;
// Pre-loaded GLTF payloads, filled by loadCowGltf() before init() runs.
var cowFile;
var walkCowFile;
var grassFile;

// Function definitions ---------------------------------

// Build the whole scene: camera, renderer, coordinate helper, the two
// pre-loaded cow models, grass clones, lights, window-resize binding and
// the click handler that performs the milking.
function init() {
    var scalePoint = 1;
    var animations;
    var animation;

    // Scene and camera
    scene = new THREE.Scene();
    camera = new THREE.PerspectiveCamera(
        75,
        window.innerWidth / window.innerHeight,
        0.1,
        1000000
    );
    camera.position.z = 550;
    camera.position.y = 380;
    camera.position.x = 30;

    // Renderer
    renderer = new THREE.WebGLRenderer({antialias: false, alpha: true});
    renderer.setPixelRatio( window.devicePixelRatio );
    renderer.setSize( window.innerWidth, window.innerHeight );
    renderer.shadowMap.enabled = true;
    renderer.shadowMap.type = THREE.PCFSoftShadowMap;
    renderer.domElement.className = 'webgl-container';
    webglContainer.appendChild(renderer.domElement);

    // Planar coordinate-system helper lines
    var CoSystem = new THREEex.CoSystem(500, 50, 0x000000);
    line = CoSystem.create();
    scene.add(line);

    // GLTF loader. NOTE(review): models are actually pre-loaded by
    // loadCowGltf() before init() runs; this loader and most of the URLs
    // below are unused here.
    loader = new THREE.GLTFLoader();
    loader.setCrossOrigin('https://ossgw.alicdn.com');
    var shanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/51ff6704e19375613c3d4d3563348b7f.gltf';
    var grassurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/5e6c2c4bb052ef7562b52654c5635127.gltf'
    var bburl = 'https://ossgw.alicdn.com/tmall-c3/tmx/7554d11d494d79413fc665e9ef140aa6.gltf'
    var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/15e972f4cc71db07fee122da7a125e5b.gltf'; // 3
    var cowurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/2f17ddef947a7b6c702af69ff0e5b95f.gltf';
    var doorurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/203247ec660952407695fdfaf45812af.gltf';
    var demourl = 'https://ossgw.alicdn.com/tmall-c3/tmx/25ed65d4e9684567962230671512f731.gltf'
    var lanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/1e1dfc4da8dfe2d7f14f23f0996c7feb.gltf'
    var daiurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/e68183de37ea4bed1787f6051b1d1f94.gltf'
    var douurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/0ca2926cbf4bc664ff00b03c1a5d1f66.gltf'
    var fishurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/03807648cf70d99a7c1d3d634a2d4ea3.gltf';
    var fishActiveurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/bb90ddfe2542267c142e892ab91f60ad.gltf';
    var fishBowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/c5e934aae17373e927fe98aaf1f71767.gltf'

    // Standing cow: positioned off-screen to the right, facing left;
    // start all of its animation clips.
    cow = cowFile.scene;
    cow.position.set(650, 240, 180);
    cow.rotation.y = -Math.PI / 2;
    cow.scale.set(scalePoint, scalePoint, scalePoint);
    animations = cowFile.animations;
    if (animations && animations.length) {
        cowMixer = new THREE.AnimationMixer(cow);
        for (var i = 0; i < animations.length; i++) {
            var animation = animations[i];
            cowMixer.clipAction(animation).play();
        }
    }

    // Walking cow: same start position; this is the model shown first,
    // and cowWalkIn() animates it into view.
    walkCow = walkCowFile.scene;
    walkCow.position.set(650, 240, 180);
    walkCow.rotation.y = -Math.PI / 2;
    walkCow.scale.set(scalePoint, scalePoint, scalePoint);
    animations = walkCowFile.animations;
    if (animations && animations.length) {
        walkCowMixer = new THREE.AnimationMixer(walkCow);
        for (var i = 0; i < animations.length; i++) {
            var animation = animations[i];
            walkCowMixer.clipAction(animation).play();
        }
    }
    scene.add(walkCow);
    cowWalkIn();

    // Grass: clone the loaded model into every slot of grassList.
    scalePoint = 0.005;
    grass = grassFile.scene;
    grass.scale.set(scalePoint, scalePoint, scalePoint);
    for (var i = grassList.length - 1; i >= 0; i--) {
        grassList[i].mesh = grass.clone();
        grassList[i].mesh.position.set(grassList[i].x, grassList[i].y, grassList[i].z)
        scene.add(grassList[i].mesh);
    }
    // Grow the grass from tiny to full size over 2s.
    new TWEEN.Tween({scalePoint: .01})
        .to({scalePoint: .4}, 2000)
        .onUpdate(function() {
            var scalePoint = this.scalePoint;
            for (var i = grassList.length - 1; i >= 0; i--) {
                grassList[i].mesh.scale.set(scalePoint, scalePoint, scalePoint);
            }
        })
        .start();
    // Dummy tween that keeps re-rendering for 4s while the grass grows.
    new TWEEN.Tween(this)
        .to({}, 4000)
        .onUpdate(function() {
            render();
        })
        .start();

    // Ambient light
    ambientLight = new THREE.AmbientLight(0xffffff);
    scene.add(ambientLight);

    // Keep renderer/camera in sync with the window size.
    var threeexResize = new THREEex.WindowResize(renderer, camera);

    // Milking: each click on the udder advances the milk-box fill level,
    // unless a fill animation is still running.
    $cowNaz.on('click', function() {
        console.log('click naz', milkBoxLoading);
        if (milkBoxLoading === true) return;
        milkBoxLoading = true;
        milkBoxStatus++;
        addMilk2();
        startFallMilk();
    })
}

// Slide the empty milk box into view.
function showEmptyMilk() {
    var $milkBox = $('.milkbox');
    $milkBox.animate({
        bottom: '-140px'
    }, 2000);
}
// Show the milking button (the cow's udder hotspot).
function showCowNaz() {
    $cowNaz.show();
}

// Walk the cow in from the right in two tweened stages, then lower the
// camera and reveal the milk box and the milking button.
function cowWalkIn() {
    cowStatus = 'walking';
    // Head enters first; body follows.
    var headIn = new TWEEN.Tween(walkCow.position)
        .to({ x: 320 }, 6000)
        .delay(1000)
    var legIn = new TWEEN.Tween(walkCow.position)
        .to({ x: -250 }, 3500)
        .onComplete(function() {
            // Switch animate() over to the standing model.
            cowStatus = 'standing'
        })
        .delay(2000);
    var downCamera = new TWEEN.Tween(camera.position)
        .to({ z: 540, y: 250, x: 0 }, 1000)
        .easing(TWEEN.Easing.Exponential.InOut)
        .onStart(function() {
            showEmptyMilk();
        })
        .onComplete(function() {
            showCowNaz()
        })
    legIn.chain(downCamera);
    headIn.chain(legIn);
    headIn.start();
    // Dummy tween that keeps re-rendering while the walk-in plays.
    new TWEEN.Tween(this)
        .to({}, 4000 * 2)
        .onUpdate(function() {
            render();
        })
        .start();
}

// Sprite-sheet milking: play the fill animation for the current level.
function addMilk2() {
    var anim;
    var milkID = '#milkbox' + milkBoxStatus;
    $('.milkbox').addClass('hide');
    $('' + milkID).removeClass('hide');
    if (loopAnim) {
        loopAnim.stop();
    }
    anim = frameAnimation.anims($('' + milkID), 5625, 25, 2, 1, function() {
        if (milkBoxStatus === 3) {
            // Box is full: hide the milking UI and move to the next scene.
            $('.milkbox').hide();
            $('#milkink').hide();
            $cowNaz.hide();
            showJinDian();
        }
        if (milkBoxStatus !== 3) {
            loopMilk();
        }
        stopFallMilk();
        milkBoxLoading = false;
    });
    anim.start();
}

// Loop the last 8 frames of the current milk-box animation.
function loopMilk() {
    var milkID = '#milkbox' + milkBoxStatus;
    if (loopAnim) {
        loopAnim.stop();
    }
    console.log('loopMilk:', milkID);
    loopAnim = frameAnimation.anims($('' + milkID), 5625, 25, 3, 0, function() {}, 18);
    loopAnim.start();
}

// Start the dripping-milk animation (lazily created on first use).
function startFallMilk() {
    $('#milkink').removeClass('hide');
    if (!loopFallMilk) {
        loopFallMilk = frameAnimation.anims($('#milkink'), 1875, 25, 1, 0);
    }
    loopFallMilk.start();
}
window.startFallMilk = startFallMilk;

// Stop and hide the dripping-milk animation.
function stopFallMilk() {
    $('#milkink').addClass('hide');
    loopFallMilk.stop(true);
}

// Fly the camera away while fading out the WebGL canvas, then show the
// finished milk bottle overlay.
function showJinDian() {
    TWEEN.removeAll();
    new TWEEN.Tween(camera.position)
        .to({ z: 4000 }, 4000)
        .onUpdate(function() {
            var op = 1 - this.z / 4000;
            $(webglContainer).css({opacity: op});
            render();
        })
        .onComplete(function() {
            var $milk = $("#milk");
            $(webglContainer).hide();
            $milk.animate({'bottom': '250px'}, 600);
        })
        .start();
}

// Per-frame loop: swap walking/standing cow models when cowStatus changes,
// advance the active animation mixer and TWEEN, then render.
function animate() {
    requestAnimationFrame(animate);
    if (cowStatus === 'walking' && cowCur !== 'walking') {
        walkCow.position = cow.position;
        scene.add(walkCow);
        scene.remove(cow);
        cowCur = 'walking'
    }
    if (cowStatus === 'standing' && cowCur !== 'standing') {
        // Copy the walking cow's last position onto the standing cow so the
        // swap is seamless.
        cow.position = walkCow.position;
        cow.position.x = walkCow.position.x;
        cow.position.y = walkCow.position.y;
        cow.position.z = walkCow.position.z;
        scene.add(cow);
        scene.remove(walkCow);
        cowCur = 'standing';
    }
    if (cowMixer && cowCur === 'standing') {
        cowMixer.update(clock.getDelta());
    }
    if (walkCowMixer && cowCur === 'walking') {
        walkCowMixer.update(clock.getDelta());
    }
    TWEEN.update();
    render();
}

// Render one frame.
function render() {
    renderer.render( scene, camera );
}

// Pre-load one image; resolves with {img, url} (img is null on error).
function preLoadImg(url) {
    var def = $.Deferred();
    var img = new Image();
    img.src = url;
    if (img.complete) {
        // Already cached by the browser: resolve immediately.
        def.resolve({
            img: img,
            url: url
        })
    }
    img.onload = function() {
        def.resolve({
            img: img,
            url: url
        });
    }
    img.onerror = function() {
        def.resolve({
            img: null,
            url: url
        })
    }
    return def.promise();
}

// Load a single image, then invoke callback with the Image as `this`.
function loadImage(url, callback) {
    var img = new Image();
    img.src = url;
    if (img.complete) {
        // Already in the browser cache: call back synchronously.
        callback.call(img);
        return;
    }
    img.onload = function () {
        // Called asynchronously once the download finishes.
        callback.call(img);
    };
}

// Pre-load every image in imgList; resolves with the array of
// {img, url} results once all of them settle.
function loadAllImage(imgList) {
    var defList = [];
    var i = 0;
    var len;
    var def = $.Deferred();
    for (i = 0, len = imgList.length; i < len; i++) {
        defList[i] = preLoadImg(imgList[i])
    }
    $.when.apply(this, defList)
        .then(function() {
            var retData = Array.prototype.slice.apply(arguments);
            def.resolve(retData);
        })
    return def.promise();
}
// Hide the loading indicator.
function hideLoading() {
    $('#loading').hide();
}

// Load one GLTF model; resolves with the parsed GLTF payload.
function loadGltf(url) {
    var def = $.Deferred();
    var loader = new THREE.GLTFLoader();
    loader.setCrossOrigin('https://ossgw.alicdn.com');
    loader.load(url, function(data) {
        def.resolve(data);
    })
    return def.promise();
}

// Load several GLTF models in parallel; resolves with all payloads.
function loadAllGltf(list) {
    var defList = [];
    var i = 0;
    var len;
    var def = $.Deferred();
    for (i = 0, len = list.length; i < len; i++) {
        defList[i] = loadGltf(list[i])
    }
    $.when.apply(this, defList)
        .then(function() {
            var retData = Array.prototype.slice.apply(arguments);
            def.resolve(retData);
        })
    return def.promise();
}

// Pre-load the cow, walking-cow and grass models into the module-level
// cowFile / walkCowFile / grassFile variables that init() consumes.
// NOTE(review): the walkCowUrl used here (variant "2") differs from the
// one declared in init() (variant "3"); only this one is actually loaded.
function loadCowGltf() {
    var def = $.Deferred();
    // Most of these URLs are unused leftovers; only cowurl, walkCowUrl and
    // grassurl are fetched below.
    var shanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/51ff6704e19375613c3d4d3563348b7f.gltf';
    var grassurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/5e6c2c4bb052ef7562b52654c5635127.gltf'
    var bburl = 'https://ossgw.alicdn.com/tmall-c3/tmx/7554d11d494d79413fc665e9ef140aa6.gltf'
    var walkCowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/95628df6d8a8dc3adc3c41b97ba2e49c.gltf'; // 2
    var cowurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/2f17ddef947a7b6c702af69ff0e5b95f.gltf';
    var doorurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/203247ec660952407695fdfaf45812af.gltf';
    var demourl = 'https://ossgw.alicdn.com/tmall-c3/tmx/25ed65d4e9684567962230671512f731.gltf'
    var lanurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/1e1dfc4da8dfe2d7f14f23f0996c7feb.gltf'
    var daiurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/e68183de37ea4bed1787f6051b1d1f94.gltf'
    var douurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/0ca2926cbf4bc664ff00b03c1a5d1f66.gltf'
    var fishurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/03807648cf70d99a7c1d3d634a2d4ea3.gltf';
    var fishActiveurl = 'https://ossgw.alicdn.com/tmall-c3/tmx/bb90ddfe2542267c142e892ab91f60ad.gltf';
    var fishBowUrl = 'https://ossgw.alicdn.com/tmall-c3/tmx/c5e934aae17373e927fe98aaf1f71767.gltf'
    $.when(loadGltf(cowurl), loadGltf(walkCowUrl), loadGltf(grassurl))
        .then(function(cowData, walkCowData, grassData) {
            cowFile = cowData;
            walkCowFile = walkCowData;
            grassFile = grassData
            def.resolve([cowurl, walkCowUrl, grassurl]);
        })
    return def.promise();
}
// Function definitions ---------------------------------

// Bootstrap -----------------------
// Images shown by the DOM-side animations (milk box sprite sheets etc.).
var imgList = [
    '/threejs/static/img/canvas_milk_out.png',
    '/threejs/static/img/canvas_milk1.png',
    '/threejs/static/img/canvas_milk2.png',
    '/threejs/static/img/canvas_milk3.png',
    '/threejs/static/img/box.png',
    '/threejs/static/img/fly.png'
]
// Pre-load images, then models, then start the scene.
loadAllImage(imgList)
    .then(function(imgData) {
        loadCowGltf()
            .then(function(gltfdata) {
                hideLoading();
                main();
            })
    })

// Entry point: build the scene and start the render loop.
function main() {
    init();
    animate();
}
// Bootstrap -----------------------
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.4.2_03) on Sat May 01 12:13:29 GMT-07:00 2004 --> <TITLE> MiiEditorViewportSizeLayout </TITLE> <META NAME="keywords" CONTENT="com.swfm.mica.MiiEditorViewportSizeLayout interface"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="MiiEditorViewportSizeLayout"; } </SCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=3 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A 
HREF="../../../com/swfm/mica/MiiDraggable.html" title="interface in com.swfm.mica"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../com/swfm/mica/MiiEditorWindowSemanticsManager.html" title="interface in com.swfm.mica"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../index.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="MiiEditorViewportSizeLayout.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> <FONT SIZE="-1"> com.swfm.mica</FONT> <BR> Interface MiiEditorViewportSizeLayout</H2> <DL> <DT><B>All Known Implementing Classes:</B> <DD><A HREF="../../../com/swfm/mica/MiEditorUniverseIncludesAllPartsLayout.html" title="class in com.swfm.mica">MiEditorUniverseIncludesAllPartsLayout</A>, <A HREF="../../../com/swfm/mica/MiEditorViewportSizeIsOneToOneLayout.html" title="class in com.swfm.mica">MiEditorViewportSizeIsOneToOneLayout</A></DD> </DL> <HR> <DL> <DT>public interface <B>MiiEditorViewportSizeLayout</B></DL> <P> <DL> <DT><B>Version:</B></DT> <DD>%I% %G%</DD> <DT><B>Author:</B></DT> <DD>Michael L. 
Davis</DD> </DL> <HR> <P> <!-- ======== NESTED CLASS SUMMARY ======== --> <!-- =========== FIELD SUMMARY =========== --> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TD COLSPAN=2><FONT SIZE="+2"> <B>Method Summary</B></FONT></TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getMinimumSizeOfDevice(com.swfm.mica.MiBounds)">getMinimumSizeOfDevice</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minDevice)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getMinimumSizeOfUniverse(com.swfm.mica.MiBounds)">getMinimumSizeOfUniverse</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minUniverse)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getMinimumSizeOfWorld(com.swfm.mica.MiBounds)">getMinimumSizeOfWorld</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minWorld)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" 
VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getPreferredSizeOfDevice(com.swfm.mica.MiBounds)">getPreferredSizeOfDevice</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefDevice)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getPreferredSizeOfUniverse(com.swfm.mica.MiBounds)">getPreferredSizeOfUniverse</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefUniverse)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#getPreferredSizeOfWorld(com.swfm.mica.MiBounds)">getPreferredSizeOfWorld</A></B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefWorld)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#setTarget(com.swfm.mica.MiEditor)">setTarget</A></B>(<A HREF="../../../com/swfm/mica/MiEditor.html" title="class in com.swfm.mica">MiEditor</A>&nbsp;editor)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> 
<CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../com/swfm/mica/MiiEditorViewportSizeLayout.html#validateLayout()">validateLayout</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp; <P> <!-- ============ FIELD DETAIL =========== --> <!-- ========= CONSTRUCTOR DETAIL ======== --> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TD COLSPAN=1><FONT SIZE="+2"> <B>Method Detail</B></FONT></TD> </TR> </TABLE> <A NAME="setTarget(com.swfm.mica.MiEditor)"><!-- --></A><H3> setTarget</H3> <PRE> public void <B>setTarget</B>(<A HREF="../../../com/swfm/mica/MiEditor.html" title="class in com.swfm.mica">MiEditor</A>&nbsp;editor)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="validateLayout()"><!-- --></A><H3> validateLayout</H3> <PRE> public void <B>validateLayout</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getPreferredSizeOfUniverse(com.swfm.mica.MiBounds)"><!-- --></A><H3> getPreferredSizeOfUniverse</H3> <PRE> public void <B>getPreferredSizeOfUniverse</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefUniverse)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getPreferredSizeOfDevice(com.swfm.mica.MiBounds)"><!-- --></A><H3> getPreferredSizeOfDevice</H3> <PRE> public void <B>getPreferredSizeOfDevice</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefDevice)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getPreferredSizeOfWorld(com.swfm.mica.MiBounds)"><!-- --></A><H3> getPreferredSizeOfWorld</H3> <PRE> public void <B>getPreferredSizeOfWorld</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;prefWorld)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A 
NAME="getMinimumSizeOfUniverse(com.swfm.mica.MiBounds)"><!-- --></A><H3> getMinimumSizeOfUniverse</H3> <PRE> public void <B>getMinimumSizeOfUniverse</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minUniverse)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getMinimumSizeOfDevice(com.swfm.mica.MiBounds)"><!-- --></A><H3> getMinimumSizeOfDevice</H3> <PRE> public void <B>getMinimumSizeOfDevice</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minDevice)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getMinimumSizeOfWorld(com.swfm.mica.MiBounds)"><!-- --></A><H3> getMinimumSizeOfWorld</H3> <PRE> public void <B>getMinimumSizeOfWorld</B>(<A HREF="../../../com/swfm/mica/MiBounds.html" title="class in com.swfm.mica">MiBounds</A>&nbsp;minWorld)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=3 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT 
CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../com/swfm/mica/MiiDraggable.html" title="interface in com.swfm.mica"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../com/swfm/mica/MiiEditorWindowSemanticsManager.html" title="interface in com.swfm.mica"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../index.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="MiiEditorViewportSizeLayout.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
import {vec2} from "gl-matrix";

/**
 * Rotates `vector` by 90 degrees counter-clockwise, in place:
 * (x, y) -> (-y, x). The argument is mutated; nothing is returned.
 */
export function perpendicularize(vector :vec2) {
    const oldX :number = vector[0];
    vector[0] = -vector[1];
    vector[1] = oldX;
}

/**
 * 2D cross product of `w` and `v`: the z-component of the equivalent
 * 3D cross product, i.e. w.x * v.y - w.y * v.x.
 */
export function cross(w: vec2, v: vec2): number {
    return w[0] * v[1] - v[0] * w[1];
}
# -*- coding: utf-8 -*-

from __future__ import unicode_literals

from django.core.exceptions import ValidationError
from django.forms import models
from djanban.apps.hourly_rates.models import HourlyRate
from django import forms


# Hourly rate creation and edition form
class HourlyRateForm(models.ModelForm):
    """ModelForm for creating/editing an HourlyRate.

    Validates that the date interval is well-formed: the rate cannot end
    before it starts. end_date may be left empty ("Until now") for an
    open-ended rate.
    """

    class Meta:
        model = HourlyRate
        fields = ["name", "start_date", "end_date", "amount", "is_active"]
        widgets = {
            'start_date': forms.SelectDateWidget(),
            'end_date': forms.SelectDateWidget(empty_label=u"Until now"),
        }

    def __init__(self, *args, **kwargs):
        super(HourlyRateForm, self).__init__(*args, **kwargs)

    def clean(self):
        """Cross-field validation of the start/end date interval."""
        cleaned_data = super(HourlyRateForm, self).clean()
        start_date = cleaned_data.get("start_date")
        end_date = cleaned_data.get("end_date")
        # Compare only when both dates are present: comparing None with a
        # date would raise a TypeError instead of a validation error
        # (start_date may be missing from cleaned_data if its own field
        # validation failed).
        if start_date and end_date and start_date > end_date:
            raise ValidationError(u"Start date can't be greater than end date")
        return cleaned_data


# Confirmation form shown before deleting an hourly rate; the checkbox is
# required so the user must explicitly acknowledge the action.
class DeleteHourlyRateForm(forms.Form):
    confirmed = forms.BooleanField(label=u"Please confirm you really want to do this action", required=True)
## DEPRECATED: This library and the REST API v1 have been deprecated. More information at https://hs.smsglobal.com/services-update

SMSGlobal Class Library for PHP
===============================

This is a wrapper for the [SMSGlobal](http://www.smsglobal.com/) REST API. Get an API key from SMSGlobal by signing up and viewing the API key page in the MXT platform.

View the [REST API documentation](http://www.smsglobal.com/rest-api/) for a list of available resources.

Quick Start
-----------

This wrapper requires PHP 5.3 or greater, and either the cURL library or the HTTP stream wrapper to be installed and enabled.

To install, add the dependency to your `composer.json` file:

```json
{
    "require": {
        "smsglobal/rest-api-client": "*"
    }
}
```

And install with Composer.

```bash
$ cd path/to/your/project
$ composer install --no-dev
```

Not using Composer?
-------------------

You can install the library by downloading it from Github. Just use a PSR-0 compliant autoloader to load in the classes. To run unit tests or generate documentation, you'll need PHPUnit and phpDocumentor.

Running Unit Tests
------------------

```bash
$ cd path/to/SMSGlobal/rest/api/client
$ composer install
$ phpunit
```

Get documentation
-----------------

Documentation is available on [the SMSGlobal website](http://www.smsglobal.com/docs/rest-api-client-php/), or you can generate it yourself:

```bash
$ cd path/to/SMSGlobal/rest/api/client
$ composer install
$ vendor/phpdocumentor/phpdocumentor/bin/phpdoc.php -c phpdoc.xml
```

Using the library
-----------------

```php
// Include the Composer autoloader or use your own PSR-0 autoloader
require 'vendor/autoload.php';

use Smsglobal\RestApiClient\ApiKey;
use Smsglobal\RestApiClient\Resource\Sms;
use Smsglobal\RestApiClient\RestApiClient;

// Get an API key from SMSGlobal and insert the key and secret
$apiKey = new ApiKey('your-api-key', 'your-api-secret');

// All requests are done via a 'REST API client.'
This abstracts away the REST // API so you can deal with it like you would an ORM $rest = new RestApiClient($apiKey); // Now you can get objects $contact = $rest->get('contact', 1); // Contact resource with ID = 1 // Edit them $contact->setMsisdn('61447100250'); // And save them $rest->save($contact); // Or delete them $rest->delete($contact); // You can also instantiate new resources $sms = new Sms(); $sms->setDestination('61447100250') ->setOrigin('Test') ->setMessage('Hello World'); // And save them $rest->save($sms); // When a new object is saved, the ID gets populated (it was null before) echo $sms->getId(); // integer // For an SMS, saving also sends the message, so you can use a more meaningful // keyword for it $sms->send($rest); // You can get a list of available resources $list = $rest->getList('sms'); foreach ($list->objects as $resource) { // ... } // Pagination data is included echo $list->meta->getTotalPages(); // integer // Lists can be filtered // e.g. contacts belonging to group ID 1 $rest->getList('contact', 0, 20, array('group' => 1)); ``` Notes ----- 1. Requesting the same object twice in one session will return the same instance (even in the resource lists) 2. Exceptions are thrown if you attempt to save an object with invalid data
version https://git-lfs.github.com/spec/v1 oid sha256:012c0c10efb1958941ed2fd9f393df39f1ae6f76369bf56e500e39ade0496295 size 8392
AutoLayoutHelper
=======================

UIView helper to easily create common Auto Layout Constraints for iOS

### The Problem

Using Auto Layout programmatically can either be quite verbose (i.e. building NSLayoutConstraint objects for each rule) or error-prone (e.g. using Visual Format Language strings).

### A Solution

We can make creating common NSLayoutConstraint relations into some reusable methods we can call on any class that subclasses UIView. We also ensure the constraint created gets added to the view's superview for you.

This means you can relate a UIView or subclass you have to another view fairly quickly with the NSLayoutAttribute or NSLayoutRelation you need and in a way that looks part of the view's setup routine and helps us keep the code DRY.

### Dependencies

* [Xcode](https://itunes.apple.com/gb/app/xcode/id497799835?mt=12#)

### Installation

- Add the UIView+LayoutConstraints.h and UIView+LayoutConstraints.m files to your project.
- Import into source files where necessary i.e. <code>#import UIView+LayoutConstraints.h</code>

### Usage

Examples:

Add NSLayoutConstraint relations for a UIView relating its left, top, bottom and right edges to its superview

    // Create the view
    UIView *leftView = [[UIView alloc] initWithFrame:CGRectZero];
    leftView.backgroundColor = [UIColor redColor];
    [self.view addSubview:leftView];
    leftView.translatesAutoresizingMaskIntoConstraints = NO;

    // Note. these methods will create, return and add the constraint to the superview.
[leftView addTopConstraintToView:leftView.superview attribute:NSLayoutAttributeTop relation:NSLayoutRelationEqual constant:10.0]; [leftView addLeftConstraintToView:leftView.superview attribute:NSLayoutAttributeLeft relation:NSLayoutRelationEqual constant:10.0]; [leftView addRightConstraintToView:leftView.superview attribute:NSLayoutAttributeRight relation:NSLayoutRelationEqual constant:-10.0]; [leftView addBottomConstraintToView:leftView.superview attribute:NSLayoutAttributeBottom relation:NSLayoutRelationEqual constant:-10.0]; or shorter assuming top to top, left to left, right to right and bottom to bottom you can omit the attribute: [leftView addTopConstraintToView:leftView.superview relation:NSLayoutRelationEqual constant:10.0]; [leftView addLeftConstraintToView:leftView.superview relation:NSLayoutRelationEqual constant:10.0]; [leftView addRightConstraintToView:leftView.superview relation:NSLayoutRelationEqual constant:-10.0]; [leftView addBottomConstraintToView:leftView.superview relation:NSLayoutRelationEqual constant:-10.0]; or even more succinctly: [leftView fillSuperview:UIEdgeInsetsMake(10.0, 10.0, 10.0, 10.0)]; Add constraints to center a UIView in its superview both vertically (Y) and horizontally (X): [label addCenterXConstraintToView:label.superview relation:NSLayoutRelationEqual constant:0.0]; [label addCenterYConstraintToView:label.superview relation:NSLayoutRelationEqual constant:0.0]; Add constraints for a fixed width and height amount: [view addWidthConstraintWithRelation:NSLayoutRelationEqual constant:100.0]; [view addHeightConstraintWithRelation:NSLayoutRelationEqual constant:80.0]; Modify constraints (since the methods return them to you) // Store the height constraint when its created self.heightConstraint = [view addHeightConstraintWithRelation:NSLayoutRelationEqual constant:80.0]; ... 
// Modify height amount self.heightConstraint.constant = 30.0; // Animate changes [UIView animateWithDuration:0.3 animations:^{ [view layoutIfNeeded]; }]; ### Team * Development: [Shagun Madhikarmi](mailto:shagun@ustwo.com), [Daniela Dias](mailto:daniela@ustwo.com)
package redis.clients.jedis; import static redis.clients.jedis.Protocol.toByteArray; import java.io.Closeable; import java.net.URI; import java.util.AbstractMap; import java.util.AbstractSet; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import redis.clients.jedis.BinaryClient.LIST_POSITION; import redis.clients.jedis.commands.*; import redis.clients.jedis.exceptions.InvalidURIException; import redis.clients.jedis.exceptions.JedisDataException; import redis.clients.jedis.exceptions.JedisException; import redis.clients.jedis.params.set.SetParams; import redis.clients.jedis.params.sortedset.ZAddParams; import redis.clients.jedis.params.sortedset.ZIncrByParams; import redis.clients.util.JedisByteHashMap; import redis.clients.util.JedisURIHelper; import redis.clients.util.SafeEncoder; public class BinaryJedis implements BasicCommands, BinaryJedisCommands, MultiKeyBinaryCommands, AdvancedBinaryJedisCommands, BinaryScriptingCommands, Closeable { protected Client client = null; protected Transaction transaction = null; protected Pipeline pipeline = null; public BinaryJedis() { client = new Client(); } public BinaryJedis(final String host) { URI uri = URI.create(host); if (uri.getScheme() != null && uri.getScheme().equals("redis")) { initializeClientFromURI(uri); } else { client = new Client(host); } } public BinaryJedis(final String host, final int port) { client = new Client(host, port); } public BinaryJedis(final String host, final int port, final int timeout) { client = new Client(host, port); client.setConnectionTimeout(timeout); client.setSoTimeout(timeout); } public BinaryJedis(final String host, final int port, final int connectionTimeout, final int soTimeout) { client = new Client(host, port); client.setConnectionTimeout(connectionTimeout); client.setSoTimeout(soTimeout); } public BinaryJedis(final 
JedisShardInfo shardInfo) { client = new Client(shardInfo.getHost(), shardInfo.getPort()); client.setConnectionTimeout(shardInfo.getConnectionTimeout()); client.setSoTimeout(shardInfo.getSoTimeout()); client.setPassword(shardInfo.getPassword()); client.setDb(shardInfo.getDb()); } public BinaryJedis(URI uri) { initializeClientFromURI(uri); } public BinaryJedis(final URI uri, final int timeout) { initializeClientFromURI(uri); client.setConnectionTimeout(timeout); client.setSoTimeout(timeout); } public BinaryJedis(final URI uri, final int connectionTimeout, final int soTimeout) { initializeClientFromURI(uri); client.setConnectionTimeout(connectionTimeout); client.setSoTimeout(soTimeout); } private void initializeClientFromURI(URI uri) { if (!JedisURIHelper.isValid(uri)) { throw new InvalidURIException(String.format( "Cannot open Redis connection due invalid URI. %s", uri.toString())); } client = new Client(uri.getHost(), uri.getPort()); String password = JedisURIHelper.getPassword(uri); if (password != null) { client.auth(password); client.getStatusCodeReply(); } int dbIndex = JedisURIHelper.getDBIndex(uri); if (dbIndex > 0) { client.select(dbIndex); client.getStatusCodeReply(); client.setDb(dbIndex); } } @Override public String ping() { checkIsInMultiOrPipeline(); client.ping(); return client.getStatusCodeReply(); } /** * Set the string value as value of the key. The string can't be longer than 1073741824 bytes (1 * GB). * <p> * Time complexity: O(1) * @param key * @param value * @return Status code reply */ @Override public String set(final byte[] key, final byte[] value) { checkIsInMultiOrPipeline(); client.set(key, value); return client.getStatusCodeReply(); } /** * Set the string value as value of the key. The string can't be longer than 1073741824 bytes (1 * GB). 
* @param key * @param value * @param params * @return Status code reply */ public String set(final byte[] key, final byte[] value, final SetParams params) { checkIsInMultiOrPipeline(); client.set(key, value, params); return client.getStatusCodeReply(); } /** * Get the value of the specified key. If the key does not exist the special value 'nil' is * returned. If the value stored at key is not a string an error is returned because GET can only * handle string values. * <p> * Time complexity: O(1) * @param key * @return Bulk reply */ @Override public byte[] get(final byte[] key) { checkIsInMultiOrPipeline(); client.get(key); return client.getBinaryBulkReply(); } /** * Ask the server to silently close the connection. */ @Override public String quit() { checkIsInMultiOrPipeline(); client.quit(); String quitReturn = client.getStatusCodeReply(); client.disconnect(); return quitReturn; } /** * Test if the specified keys exist. The command returns the number of keys existed Time * complexity: O(N) * @param keys * @return Integer reply, specifically: an integer greater than 0 if one or more keys existed 0 if * none of the specified keys existed */ public Long exists(final byte[]... keys) { checkIsInMultiOrPipeline(); client.exists(keys); return client.getIntegerReply(); } /** * Test if the specified key exists. The command returns "1" if the key exists, otherwise "0" is * returned. Note that even keys set with an empty string as value will return "1". Time * complexity: O(1) * @param key * @return Boolean reply, true if the key exists, otherwise false */ @Override public Boolean exists(final byte[] key) { checkIsInMultiOrPipeline(); client.exists(key); return client.getIntegerReply() == 1; } /** * Remove the specified keys. If a given key does not exist no operation is performed for this * key. The command returns the number of keys removed. 
Time complexity: O(1) * @param keys * @return Integer reply, specifically: an integer greater than 0 if one or more keys were removed * 0 if none of the specified key existed */ @Override public Long del(final byte[]... keys) { checkIsInMultiOrPipeline(); client.del(keys); return client.getIntegerReply(); } @Override public Long del(final byte[] key) { checkIsInMultiOrPipeline(); client.del(key); return client.getIntegerReply(); } /** * Return the type of the value stored at key in form of a string. The type can be one of "none", * "string", "list", "set". "none" is returned if the key does not exist. Time complexity: O(1) * @param key * @return Status code reply, specifically: "none" if the key does not exist "string" if the key * contains a String value "list" if the key contains a List value "set" if the key * contains a Set value "zset" if the key contains a Sorted Set value "hash" if the key * contains a Hash value */ @Override public String type(final byte[] key) { checkIsInMultiOrPipeline(); client.type(key); return client.getStatusCodeReply(); } /** * Delete all the keys of the currently selected DB. This command never fails. * @return Status code reply */ @Override public String flushDB() { checkIsInMultiOrPipeline(); client.flushDB(); return client.getStatusCodeReply(); } /** * Returns all the keys matching the glob-style pattern as space separated strings. For example if * you have in the database the keys "foo" and "foobar" the command "KEYS foo*" will return * "foo foobar". * <p> * Note that while the time complexity for this operation is O(n) the constant times are pretty * low. For example Redis running on an entry level laptop can scan a 1 million keys database in * 40 milliseconds. <b>Still it's better to consider this one of the slow commands that may ruin * the DB performance if not used with care.</b> * <p> * In other words this command is intended only for debugging and special operations like creating * a script to change the DB schema. 
Don't use it in your normal code. Use Redis Sets in order to * group together a subset of objects. * <p> * Glob style patterns examples: * <ul> * <li>h?llo will match hello hallo hhllo * <li>h*llo will match hllo heeeello * <li>h[ae]llo will match hello and hallo, but not hillo * </ul> * <p> * Use \ to escape special chars if you want to match them verbatim. * <p> * Time complexity: O(n) (with n being the number of keys in the DB, and assuming keys and pattern * of limited length) * @param pattern * @return Multi bulk reply */ @Override public Set<byte[]> keys(final byte[] pattern) { checkIsInMultiOrPipeline(); client.keys(pattern); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Return a randomly selected key from the currently selected DB. * <p> * Time complexity: O(1) * @return Singe line reply, specifically the randomly selected key or an empty string is the * database is empty */ @Override public byte[] randomBinaryKey() { checkIsInMultiOrPipeline(); client.randomKey(); return client.getBinaryBulkReply(); } /** * Atomically renames the key oldkey to newkey. If the source and destination name are the same an * error is returned. If newkey already exists it is overwritten. * <p> * Time complexity: O(1) * @param oldkey * @param newkey * @return Status code repy */ @Override public String rename(final byte[] oldkey, final byte[] newkey) { checkIsInMultiOrPipeline(); client.rename(oldkey, newkey); return client.getStatusCodeReply(); } /** * Rename oldkey into newkey but fails if the destination key newkey already exists. * <p> * Time complexity: O(1) * @param oldkey * @param newkey * @return Integer reply, specifically: 1 if the key was renamed 0 if the target key already exist */ @Override public Long renamenx(final byte[] oldkey, final byte[] newkey) { checkIsInMultiOrPipeline(); client.renamenx(oldkey, newkey); return client.getIntegerReply(); } /** * Return the number of keys in the currently selected database. 
* @return Integer reply */ @Override public Long dbSize() { checkIsInMultiOrPipeline(); client.dbSize(); return client.getIntegerReply(); } /** * Set a timeout on the specified key. After the timeout the key will be automatically deleted by * the server. A key with an associated timeout is said to be volatile in Redis terminology. * <p> * Voltile keys are stored on disk like the other keys, the timeout is persistent too like all the * other aspects of the dataset. Saving a dataset containing expires and stopping the server does * not stop the flow of time as Redis stores on disk the time when the key will no longer be * available as Unix time, and not the remaining seconds. * <p> * Since Redis 2.1.3 you can update the value of the timeout of a key already having an expire * set. It is also possible to undo the expire at all turning the key into a normal key using the * {@link #persist(byte[]) PERSIST} command. * <p> * Time complexity: O(1) * @see <a href="http://redis.io/commands/expire">Expire Command</a> * @param key * @param seconds * @return Integer reply, specifically: 1: the timeout was set. 0: the timeout was not set since * the key already has an associated timeout (this may happen only in Redis versions &lt; * 2.1.3, Redis &gt;= 2.1.3 will happily update the timeout), or the key does not exist. */ @Override public Long expire(final byte[] key, final int seconds) { checkIsInMultiOrPipeline(); client.expire(key, seconds); return client.getIntegerReply(); } /** * EXPIREAT works exctly like {@link #expire(byte[], int) EXPIRE} but instead to get the number of * seconds representing the Time To Live of the key as a second argument (that is a relative way * of specifing the TTL), it takes an absolute one in the form of a UNIX timestamp (Number of * seconds elapsed since 1 Gen 1970). 
* <p> * EXPIREAT was introduced in order to implement the Append Only File persistence mode so that * EXPIRE commands are automatically translated into EXPIREAT commands for the append only file. * Of course EXPIREAT can also used by programmers that need a way to simply specify that a given * key should expire at a given time in the future. * <p> * Since Redis 2.1.3 you can update the value of the timeout of a key already having an expire * set. It is also possible to undo the expire at all turning the key into a normal key using the * {@link #persist(byte[]) PERSIST} command. * <p> * Time complexity: O(1) * @see <a href="http://redis.io/commands/expire">Expire Command</a> * @param key * @param unixTime * @return Integer reply, specifically: 1: the timeout was set. 0: the timeout was not set since * the key already has an associated timeout (this may happen only in Redis versions &lt; * 2.1.3, Redis &gt;= 2.1.3 will happily update the timeout), or the key does not exist. */ @Override public Long expireAt(final byte[] key, final long unixTime) { checkIsInMultiOrPipeline(); client.expireAt(key, unixTime); return client.getIntegerReply(); } /** * The TTL command returns the remaining time to live in seconds of a key that has an * {@link #expire(byte[], int) EXPIRE} set. This introspection capability allows a Redis client to * check how many seconds a given key will continue to be part of the dataset. * @param key * @return Integer reply, returns the remaining time to live in seconds of a key that has an * EXPIRE. If the Key does not exists or does not have an associated expire, -1 is * returned. */ @Override public Long ttl(final byte[] key) { checkIsInMultiOrPipeline(); client.ttl(key); return client.getIntegerReply(); } /** * Select the DB with having the specified zero-based numeric index. For default every new client * connection is automatically selected to DB 0. 
* @param index * @return Status code reply */ @Override public String select(final int index) { checkIsInMultiOrPipeline(); client.select(index); String statusCodeReply = client.getStatusCodeReply(); client.setDb(index); return statusCodeReply; } /** * Move the specified key from the currently selected DB to the specified destination DB. Note * that this command returns 1 only if the key was successfully moved, and 0 if the target key was * already there or if the source key was not found at all, so it is possible to use MOVE as a * locking primitive. * @param key * @param dbIndex * @return Integer reply, specifically: 1 if the key was moved 0 if the key was not moved because * already present on the target DB or was not found in the current DB. */ @Override public Long move(final byte[] key, final int dbIndex) { checkIsInMultiOrPipeline(); client.move(key, dbIndex); return client.getIntegerReply(); } /** * Delete all the keys of all the existing databases, not just the currently selected one. This * command never fails. * @return Status code reply */ @Override public String flushAll() { checkIsInMultiOrPipeline(); client.flushAll(); return client.getStatusCodeReply(); } /** * GETSET is an atomic set this value and return the old value command. Set key to the string * value and return the old value stored at key. The string can't be longer than 1073741824 bytes * (1 GB). * <p> * Time complexity: O(1) * @param key * @param value * @return Bulk reply */ @Override public byte[] getSet(final byte[] key, final byte[] value) { checkIsInMultiOrPipeline(); client.getSet(key, value); return client.getBinaryBulkReply(); } /** * Get the values of all the specified keys. If one or more keys dont exist or is not of type * String, a 'nil' value is returned instead of the value of the specified key, but the operation * never fails. * <p> * Time complexity: O(1) for every key * @param keys * @return Multi bulk reply */ @Override public List<byte[]> mget(final byte[]... 
keys) { checkIsInMultiOrPipeline(); client.mget(keys); return client.getBinaryMultiBulkReply(); } /** * SETNX works exactly like {@link #set(byte[], byte[]) SET} with the only difference that if the * key already exists no operation is performed. SETNX actually means "SET if Not eXists". * <p> * Time complexity: O(1) * @param key * @param value * @return Integer reply, specifically: 1 if the key was set 0 if the key was not set */ @Override public Long setnx(final byte[] key, final byte[] value) { checkIsInMultiOrPipeline(); client.setnx(key, value); return client.getIntegerReply(); } /** * The command is exactly equivalent to the following group of commands: * {@link #set(byte[], byte[]) SET} + {@link #expire(byte[], int) EXPIRE}. The operation is * atomic. * <p> * Time complexity: O(1) * @param key * @param seconds * @param value * @return Status code reply */ @Override public String setex(final byte[] key, final int seconds, final byte[] value) { checkIsInMultiOrPipeline(); client.setex(key, seconds, value); return client.getStatusCodeReply(); } /** * Set the the respective keys to the respective values. MSET will replace old values with new * values, while {@link #msetnx(byte[]...) MSETNX} will not perform any operation at all even if * just a single key already exists. * <p> * Because of this semantic MSETNX can be used in order to set different keys representing * different fields of an unique logic object in a way that ensures that either all the fields or * none at all are set. * <p> * Both MSET and MSETNX are atomic operations. This means that for instance if the keys A and B * are modified, another client talking to Redis can either see the changes to both A and B at * once, or no modification at all. * @see #msetnx(byte[]...) * @param keysvalues * @return Status code reply Basically +OK as MSET can't fail */ @Override public String mset(final byte[]... 
keysvalues) { checkIsInMultiOrPipeline(); client.mset(keysvalues); return client.getStatusCodeReply(); } /** * Set the the respective keys to the respective values. {@link #mset(byte[]...) MSET} will * replace old values with new values, while MSETNX will not perform any operation at all even if * just a single key already exists. * <p> * Because of this semantic MSETNX can be used in order to set different keys representing * different fields of an unique logic object in a way that ensures that either all the fields or * none at all are set. * <p> * Both MSET and MSETNX are atomic operations. This means that for instance if the keys A and B * are modified, another client talking to Redis can either see the changes to both A and B at * once, or no modification at all. * @see #mset(byte[]...) * @param keysvalues * @return Integer reply, specifically: 1 if the all the keys were set 0 if no key was set (at * least one key already existed) */ @Override public Long msetnx(final byte[]... keysvalues) { checkIsInMultiOrPipeline(); client.msetnx(keysvalues); return client.getIntegerReply(); } /** * DECRBY work just like {@link #decr(byte[]) INCR} but instead to decrement by 1 the decrement is * integer. * <p> * INCR commands are limited to 64 bit signed integers. * <p> * Note: this is actually a string operation, that is, in Redis there are not "integer" types. * Simply the string stored at the key is parsed as a base 10 64 bit signed integer, incremented, * and then converted back as a string. * <p> * Time complexity: O(1) * @see #incr(byte[]) * @see #decr(byte[]) * @see #incrBy(byte[], long) * @param key * @param integer * @return Integer reply, this commands will reply with the new value of key after the increment. */ @Override public Long decrBy(final byte[] key, final long integer) { checkIsInMultiOrPipeline(); client.decrBy(key, integer); return client.getIntegerReply(); } /** * Decrement the number stored at key by one. 
If the key does not exist or contains a value of a * wrong type, set the key to the value of "0" before to perform the decrement operation. * <p> * INCR commands are limited to 64 bit signed integers. * <p> * Note: this is actually a string operation, that is, in Redis there are not "integer" types. * Simply the string stored at the key is parsed as a base 10 64 bit signed integer, incremented, * and then converted back as a string. * <p> * Time complexity: O(1) * @see #incr(byte[]) * @see #incrBy(byte[], long) * @see #decrBy(byte[], long) * @param key * @return Integer reply, this commands will reply with the new value of key after the increment. */ @Override public Long decr(final byte[] key) { checkIsInMultiOrPipeline(); client.decr(key); return client.getIntegerReply(); } /** * INCRBY work just like {@link #incr(byte[]) INCR} but instead to increment by 1 the increment is * integer. * <p> * INCR commands are limited to 64 bit signed integers. * <p> * Note: this is actually a string operation, that is, in Redis there are not "integer" types. * Simply the string stored at the key is parsed as a base 10 64 bit signed integer, incremented, * and then converted back as a string. * <p> * Time complexity: O(1) * @see #incr(byte[]) * @see #decr(byte[]) * @see #decrBy(byte[], long) * @param key * @param integer * @return Integer reply, this commands will reply with the new value of key after the increment. */ @Override public Long incrBy(final byte[] key, final long integer) { checkIsInMultiOrPipeline(); client.incrBy(key, integer); return client.getIntegerReply(); } /** * INCRBYFLOAT work just like {@link #incrBy(byte[], long)} INCRBY} but increments by floats * instead of integers. * <p> * INCRBYFLOAT commands are limited to double precision floating point values. * <p> * Note: this is actually a string operation, that is, in Redis there are not "double" types. 
* Simply the string stored at the key is parsed as a base double precision floating point value, * incremented, and then converted back as a string. There is no DECRYBYFLOAT but providing a * negative value will work as expected. * <p> * Time complexity: O(1) * @see #incr(byte[]) * @see #decr(byte[]) * @see #decrBy(byte[], long) * @param key the key to increment * @param integer the value to increment by * @return Integer reply, this commands will reply with the new value of key after the increment. */ @Override public Double incrByFloat(final byte[] key, final double integer) { checkIsInMultiOrPipeline(); client.incrByFloat(key, integer); String dval = client.getBulkReply(); return (dval != null ? new Double(dval) : null); } /** * Increment the number stored at key by one. If the key does not exist or contains a value of a * wrong type, set the key to the value of "0" before to perform the increment operation. * <p> * INCR commands are limited to 64 bit signed integers. * <p> * Note: this is actually a string operation, that is, in Redis there are not "integer" types. * Simply the string stored at the key is parsed as a base 10 64 bit signed integer, incremented, * and then converted back as a string. * <p> * Time complexity: O(1) * @see #incrBy(byte[], long) * @see #decr(byte[]) * @see #decrBy(byte[], long) * @param key * @return Integer reply, this commands will reply with the new value of key after the increment. */ @Override public Long incr(final byte[] key) { checkIsInMultiOrPipeline(); client.incr(key); return client.getIntegerReply(); } /** * If the key already exists and is a string, this command appends the provided value at the end * of the string. If the key does not exist it is created and set as an empty string, so APPEND * will be very similar to SET in this special case. * <p> * Time complexity: O(1). 
The amortized time complexity is O(1) assuming the appended value is * small and the already present value is of any size, since the dynamic string library used by * Redis will double the free space available on every reallocation. * @param key * @param value * @return Integer reply, specifically the total length of the string after the append operation. */ @Override public Long append(final byte[] key, final byte[] value) { checkIsInMultiOrPipeline(); client.append(key, value); return client.getIntegerReply(); } /** * Return a subset of the string from offset start to offset end (both offsets are inclusive). * Negative offsets can be used in order to provide an offset starting from the end of the string. * So -1 means the last char, -2 the penultimate and so forth. * <p> * The function handles out of range requests without raising an error, but just limiting the * resulting range to the actual length of the string. * <p> * Time complexity: O(start+n) (with start being the start index and n the total length of the * requested range). Note that the lookup part of this command is O(1) so for small strings this * is actually an O(1) command. * @param key * @param start * @param end * @return Bulk reply */ @Override public byte[] substr(final byte[] key, final int start, final int end) { checkIsInMultiOrPipeline(); client.substr(key, start, end); return client.getBinaryBulkReply(); } /** * Set the specified hash field to the specified value. * <p> * If key does not exist, a new key holding a hash is created. * <p> * <b>Time complexity:</b> O(1) * @param key * @param field * @param value * @return If the field already exists, and the HSET just produced an update of the value, 0 is * returned, otherwise if a new field is created 1 is returned. 
*/ @Override public Long hset(final byte[] key, final byte[] field, final byte[] value) { checkIsInMultiOrPipeline(); client.hset(key, field, value); return client.getIntegerReply(); } /** * If key holds a hash, retrieve the value associated to the specified field. * <p> * If the field is not found or the key does not exist, a special 'nil' value is returned. * <p> * <b>Time complexity:</b> O(1) * @param key * @param field * @return Bulk reply */ @Override public byte[] hget(final byte[] key, final byte[] field) { checkIsInMultiOrPipeline(); client.hget(key, field); return client.getBinaryBulkReply(); } /** * Set the specified hash field to the specified value if the field not exists. <b>Time * complexity:</b> O(1) * @param key * @param field * @param value * @return If the field already exists, 0 is returned, otherwise if a new field is created 1 is * returned. */ @Override public Long hsetnx(final byte[] key, final byte[] field, final byte[] value) { checkIsInMultiOrPipeline(); client.hsetnx(key, field, value); return client.getIntegerReply(); } /** * Set the respective fields to the respective values. HMSET replaces old values with new values. * <p> * If key does not exist, a new key holding a hash is created. * <p> * <b>Time complexity:</b> O(N) (with N being the number of fields) * @param key * @param hash * @return Always OK because HMSET can't fail */ @Override public String hmset(final byte[] key, final Map<byte[], byte[]> hash) { checkIsInMultiOrPipeline(); client.hmset(key, hash); return client.getStatusCodeReply(); } /** * Retrieve the values associated to the specified fields. * <p> * If some of the specified fields do not exist, nil values are returned. Non existing keys are * considered like empty hashes. * <p> * <b>Time complexity:</b> O(N) (with N being the number of fields) * @param key * @param fields * @return Multi Bulk Reply specifically a list of all the values associated with the specified * fields, in the same order of the request. 
*/ @Override public List<byte[]> hmget(final byte[] key, final byte[]... fields) { checkIsInMultiOrPipeline(); client.hmget(key, fields); return client.getBinaryMultiBulkReply(); } /** * Increment the number stored at field in the hash at key by value. If key does not exist, a new * key holding a hash is created. If field does not exist or holds a string, the value is set to 0 * before applying the operation. Since the value argument is signed you can use this command to * perform both increments and decrements. * <p> * The range of values supported by HINCRBY is limited to 64 bit signed integers. * <p> * <b>Time complexity:</b> O(1) * @param key * @param field * @param value * @return Integer reply The new value at field after the increment operation. */ @Override public Long hincrBy(final byte[] key, final byte[] field, final long value) { checkIsInMultiOrPipeline(); client.hincrBy(key, field, value); return client.getIntegerReply(); } /** * Increment the number stored at field in the hash at key by a double precision floating point * value. If key does not exist, a new key holding a hash is created. If field does not exist or * holds a string, the value is set to 0 before applying the operation. Since the value argument * is signed you can use this command to perform both increments and decrements. * <p> * The range of values supported by HINCRBYFLOAT is limited to double precision floating point * values. * <p> * <b>Time complexity:</b> O(1) * @param key * @param field * @param value * @return Double precision floating point reply The new value at field after the increment * operation. */ @Override public Double hincrByFloat(final byte[] key, final byte[] field, final double value) { checkIsInMultiOrPipeline(); client.hincrByFloat(key, field, value); final String dval = client.getBulkReply(); return (dval != null ? new Double(dval) : null); } /** * Test for existence of a specified field in a hash. 
<b>Time complexity:</b> O(1) * @param key * @param field * @return Return 1 if the hash stored at key contains the specified field. Return 0 if the key is * not found or the field is not present. */ @Override public Boolean hexists(final byte[] key, final byte[] field) { checkIsInMultiOrPipeline(); client.hexists(key, field); return client.getIntegerReply() == 1; } /** * Remove the specified field from an hash stored at key. * <p> * <b>Time complexity:</b> O(1) * @param key * @param fields * @return If the field was present in the hash it is deleted and 1 is returned, otherwise 0 is * returned and no operation is performed. */ @Override public Long hdel(final byte[] key, final byte[]... fields) { checkIsInMultiOrPipeline(); client.hdel(key, fields); return client.getIntegerReply(); } /** * Return the number of items in a hash. * <p> * <b>Time complexity:</b> O(1) * @param key * @return The number of entries (fields) contained in the hash stored at key. If the specified * key does not exist, 0 is returned assuming an empty hash. */ @Override public Long hlen(final byte[] key) { checkIsInMultiOrPipeline(); client.hlen(key); return client.getIntegerReply(); } /** * Return all the fields in a hash. * <p> * <b>Time complexity:</b> O(N), where N is the total number of entries * @param key * @return All the fields names contained into a hash. */ @Override public Set<byte[]> hkeys(final byte[] key) { checkIsInMultiOrPipeline(); client.hkeys(key); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Return all the values in a hash. * <p> * <b>Time complexity:</b> O(N), where N is the total number of entries * @param key * @return All the fields values contained into a hash. */ @Override public List<byte[]> hvals(final byte[] key) { checkIsInMultiOrPipeline(); client.hvals(key); return client.getBinaryMultiBulkReply(); } /** * Return all the fields and associated values in a hash. 
* <p> * <b>Time complexity:</b> O(N), where N is the total number of entries * @param key * @return All the fields and values contained into a hash. */ @Override public Map<byte[], byte[]> hgetAll(final byte[] key) { checkIsInMultiOrPipeline(); client.hgetAll(key); final List<byte[]> flatHash = client.getBinaryMultiBulkReply(); final Map<byte[], byte[]> hash = new JedisByteHashMap(); final Iterator<byte[]> iterator = flatHash.iterator(); while (iterator.hasNext()) { hash.put(iterator.next(), iterator.next()); } return hash; } /** * Add the string value to the head (LPUSH) or tail (RPUSH) of the list stored at key. If the key * does not exist an empty list is created just before the append operation. If the key exists but * is not a List an error is returned. * <p> * Time complexity: O(1) * @see BinaryJedis#rpush(byte[], byte[]...) * @param key * @param strings * @return Integer reply, specifically, the number of elements inside the list after the push * operation. */ @Override public Long rpush(final byte[] key, final byte[]... strings) { checkIsInMultiOrPipeline(); client.rpush(key, strings); return client.getIntegerReply(); } /** * Add the string value to the head (LPUSH) or tail (RPUSH) of the list stored at key. If the key * does not exist an empty list is created just before the append operation. If the key exists but * is not a List an error is returned. * <p> * Time complexity: O(1) * @see BinaryJedis#rpush(byte[], byte[]...) * @param key * @param strings * @return Integer reply, specifically, the number of elements inside the list after the push * operation. */ @Override public Long lpush(final byte[] key, final byte[]... strings) { checkIsInMultiOrPipeline(); client.lpush(key, strings); return client.getIntegerReply(); } /** * Return the length of the list stored at the specified key. If the key does not exist zero is * returned (the same behaviour as for empty lists). If the value stored at key is not a list an * error is returned. 
* <p> * Time complexity: O(1) * @param key * @return The length of the list. */ @Override public Long llen(final byte[] key) { checkIsInMultiOrPipeline(); client.llen(key); return client.getIntegerReply(); } /** * Return the specified elements of the list stored at the specified key. Start and end are * zero-based indexes. 0 is the first element of the list (the list head), 1 the next element and * so on. * <p> * For example LRANGE foobar 0 2 will return the first three elements of the list. * <p> * start and end can also be negative numbers indicating offsets from the end of the list. For * example -1 is the last element of the list, -2 the penultimate element and so on. * <p> * <b>Consistency with range functions in various programming languages</b> * <p> * Note that if you have a list of numbers from 0 to 100, LRANGE 0 10 will return 11 elements, * that is, rightmost item is included. This may or may not be consistent with behavior of * range-related functions in your programming language of choice (think Ruby's Range.new, * Array#slice or Python's range() function). * <p> * LRANGE behavior is consistent with one of Tcl. * <p> * <b>Out-of-range indexes</b> * <p> * Indexes out of range will not produce an error: if start is over the end of the list, or start * &gt; end, an empty list is returned. If end is over the end of the list Redis will threat it * just like the last element of the list. * <p> * Time complexity: O(start+n) (with n being the length of the range and start being the start * offset) * @param key * @param start * @param end * @return Multi bulk reply, specifically a list of elements in the specified range. */ @Override public List<byte[]> lrange(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.lrange(key, start, end); return client.getBinaryMultiBulkReply(); } /** * Trim an existing list so that it will contain only the specified range of elements specified. * Start and end are zero-based indexes. 
0 is the first element of the list (the list head), 1 the * next element and so on. * <p> * For example LTRIM foobar 0 2 will modify the list stored at foobar key so that only the first * three elements of the list will remain. * <p> * start and end can also be negative numbers indicating offsets from the end of the list. For * example -1 is the last element of the list, -2 the penultimate element and so on. * <p> * Indexes out of range will not produce an error: if start is over the end of the list, or start * &gt; end, an empty list is left as value. If end over the end of the list Redis will threat it * just like the last element of the list. * <p> * Hint: the obvious use of LTRIM is together with LPUSH/RPUSH. For example: * <p> * {@code lpush("mylist", "someelement"); ltrim("mylist", 0, 99); * } * <p> * The above two commands will push elements in the list taking care that the list will not grow * without limits. This is very useful when using Redis to store logs for example. It is important * to note that when used in this way LTRIM is an O(1) operation because in the average case just * one element is removed from the tail of the list. * <p> * Time complexity: O(n) (with n being len of list - len of range) * @param key * @param start * @param end * @return Status code reply */ @Override public String ltrim(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.ltrim(key, start, end); return client.getStatusCodeReply(); } /** * Return the specified element of the list stored at the specified key. 0 is the first element, 1 * the second and so on. Negative indexes are supported, for example -1 is the last element, -2 * the penultimate and so on. * <p> * If the value stored at key is not of list type an error is returned. If the index is out of * range a 'nil' reply is returned. * <p> * Note that even if the average time complexity is O(n) asking for the first or the last element * of the list is O(1). 
* <p> * Time complexity: O(n) (with n being the length of the list) * @param key * @param index * @return Bulk reply, specifically the requested element */ @Override public byte[] lindex(final byte[] key, final long index) { checkIsInMultiOrPipeline(); client.lindex(key, index); return client.getBinaryBulkReply(); } /** * Set a new value as the element at index position of the List at key. * <p> * Out of range indexes will generate an error. * <p> * Similarly to other list commands accepting indexes, the index can be negative to access * elements starting from the end of the list. So -1 is the last element, -2 is the penultimate, * and so forth. * <p> * <b>Time complexity:</b> * <p> * O(N) (with N being the length of the list), setting the first or last elements of the list is * O(1). * @see #lindex(byte[], long) * @param key * @param index * @param value * @return Status code reply */ @Override public String lset(final byte[] key, final long index, final byte[] value) { checkIsInMultiOrPipeline(); client.lset(key, index, value); return client.getStatusCodeReply(); } /** * Remove the first count occurrences of the value element from the list. If count is zero all the * elements are removed. If count is negative elements are removed from tail to head, instead to * go from head to tail that is the normal behaviour. So for example LREM with count -2 and hello * as value to remove against the list (a,b,c,hello,x,hello,hello) will have the list * (a,b,c,hello,x). The number of removed elements is returned as an integer, see below for more * information about the returned value. Note that non existing keys are considered like empty * lists by LREM, so LREM against non existing keys will always return 0. 
* <p> * Time complexity: O(N) (with N being the length of the list) * @param key * @param count * @param value * @return Integer Reply, specifically: The number of removed elements if the operation succeeded */ @Override public Long lrem(final byte[] key, final long count, final byte[] value) { checkIsInMultiOrPipeline(); client.lrem(key, count, value); return client.getIntegerReply(); } /** * Atomically return and remove the first (LPOP) or last (RPOP) element of the list. For example * if the list contains the elements "a","b","c" LPOP will return "a" and the list will become * "b","c". * <p> * If the key does not exist or the list is already empty the special value 'nil' is returned. * @see #rpop(byte[]) * @param key * @return Bulk reply */ @Override public byte[] lpop(final byte[] key) { checkIsInMultiOrPipeline(); client.lpop(key); return client.getBinaryBulkReply(); } /** * Atomically return and remove the first (LPOP) or last (RPOP) element of the list. For example * if the list contains the elements "a","b","c" LPOP will return "a" and the list will become * "b","c". * <p> * If the key does not exist or the list is already empty the special value 'nil' is returned. * @see #lpop(byte[]) * @param key * @return Bulk reply */ @Override public byte[] rpop(final byte[] key) { checkIsInMultiOrPipeline(); client.rpop(key); return client.getBinaryBulkReply(); } /** * Atomically return and remove the last (tail) element of the srckey list, and push the element * as the first (head) element of the dstkey list. For example if the source list contains the * elements "a","b","c" and the destination list contains the elements "foo","bar" after an * RPOPLPUSH command the content of the two lists will be "a","b" and "c","foo","bar". * <p> * If the key does not exist or the list is already empty the special value 'nil' is returned. 
If * the srckey and dstkey are the same the operation is equivalent to removing the last element * from the list and pusing it as first element of the list, so it's a "list rotation" command. * <p> * Time complexity: O(1) * @param srckey * @param dstkey * @return Bulk reply */ @Override public byte[] rpoplpush(final byte[] srckey, final byte[] dstkey) { checkIsInMultiOrPipeline(); client.rpoplpush(srckey, dstkey); return client.getBinaryBulkReply(); } /** * Add the specified member to the set value stored at key. If member is already a member of the * set no operation is performed. If key does not exist a new set with the specified member as * sole member is created. If the key exists but does not hold a set value an error is returned. * <p> * Time complexity O(1) * @param key * @param members * @return Integer reply, specifically: 1 if the new element was added 0 if the element was * already a member of the set */ @Override public Long sadd(final byte[] key, final byte[]... members) { checkIsInMultiOrPipeline(); client.sadd(key, members); return client.getIntegerReply(); } /** * Return all the members (elements) of the set value stored at key. This is just syntax glue for * {@link #sinter(byte[]...)} SINTER}. * <p> * Time complexity O(N) * @param key the key of the set * @return Multi bulk reply */ @Override public Set<byte[]> smembers(final byte[] key) { checkIsInMultiOrPipeline(); client.smembers(key); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Remove the specified member from the set value stored at key. If member was not a member of the * set no operation is performed. If key does not hold a set value an error is returned. * <p> * Time complexity O(1) * @param key the key of the set * @param member the set member to remove * @return Integer reply, specifically: 1 if the new element was removed 0 if the new element was * not a member of the set */ @Override public Long srem(final byte[] key, final byte[]... 
member) { checkIsInMultiOrPipeline(); client.srem(key, member); return client.getIntegerReply(); } /** * Remove a random element from a Set returning it as return value. If the Set is empty or the key * does not exist, a nil object is returned. * <p> * The {@link #srandmember(byte[])} command does a similar work but the returned element is not * removed from the Set. * <p> * Time complexity O(1) * @param key * @return Bulk reply */ @Override public byte[] spop(final byte[] key) { checkIsInMultiOrPipeline(); client.spop(key); return client.getBinaryBulkReply(); } @Override public Set<byte[]> spop(final byte[] key, final long count) { checkIsInMultiOrPipeline(); client.spop(key, count); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Move the specified member from the set at srckey to the set at dstkey. This operation is * atomic, in every given moment the element will appear to be in the source or destination set * for accessing clients. * <p> * If the source set does not exist or does not contain the specified element no operation is * performed and zero is returned, otherwise the element is removed from the source set and added * to the destination set. On success one is returned, even if the element was already present in * the destination set. * <p> * An error is raised if the source or destination keys contain a non Set value. * <p> * Time complexity O(1) * @param srckey * @param dstkey * @param member * @return Integer reply, specifically: 1 if the element was moved 0 if the element was not found * on the first set and no operation was performed */ @Override public Long smove(final byte[] srckey, final byte[] dstkey, final byte[] member) { checkIsInMultiOrPipeline(); client.smove(srckey, dstkey, member); return client.getIntegerReply(); } /** * Return the set cardinality (number of elements). If the key does not exist 0 is returned, like * for empty sets. 
* @param key * @return Integer reply, specifically: the cardinality (number of elements) of the set as an * integer. */ @Override public Long scard(final byte[] key) { checkIsInMultiOrPipeline(); client.scard(key); return client.getIntegerReply(); } /** * Return 1 if member is a member of the set stored at key, otherwise 0 is returned. * <p> * Time complexity O(1) * @param key * @param member * @return Integer reply, specifically: 1 if the element is a member of the set 0 if the element * is not a member of the set OR if the key does not exist */ @Override public Boolean sismember(final byte[] key, final byte[] member) { checkIsInMultiOrPipeline(); client.sismember(key, member); return client.getIntegerReply() == 1; } /** * Return the members of a set resulting from the intersection of all the sets hold at the * specified keys. Like in {@link #lrange(byte[], long, long)} LRANGE} the result is sent to the * client as a multi-bulk reply (see the protocol specification for more information). If just a * single key is specified, then this command produces the same result as * {@link #smembers(byte[]) SMEMBERS}. Actually SMEMBERS is just syntax sugar for SINTER. * <p> * Non existing keys are considered like empty sets, so if one of the keys is missing an empty set * is returned (since the intersection with an empty set always is an empty set). * <p> * Time complexity O(N*M) worst case where N is the cardinality of the smallest set and M the * number of sets * @param keys * @return Multi bulk reply, specifically the list of common elements. */ @Override public Set<byte[]> sinter(final byte[]... keys) { checkIsInMultiOrPipeline(); client.sinter(keys); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * This commnad works exactly like {@link #sinter(byte[]...) SINTER} but instead of being returned * the resulting set is sotred as dstkey. 
* <p> * Time complexity O(N*M) worst case where N is the cardinality of the smallest set and M the * number of sets * @param dstkey * @param keys * @return Status code reply */ @Override public Long sinterstore(final byte[] dstkey, final byte[]... keys) { checkIsInMultiOrPipeline(); client.sinterstore(dstkey, keys); return client.getIntegerReply(); } /** * Return the members of a set resulting from the union of all the sets hold at the specified * keys. Like in {@link #lrange(byte[], long, long)} LRANGE} the result is sent to the client as a * multi-bulk reply (see the protocol specification for more information). If just a single key is * specified, then this command produces the same result as {@link #smembers(byte[]) SMEMBERS}. * <p> * Non existing keys are considered like empty sets. * <p> * Time complexity O(N) where N is the total number of elements in all the provided sets * @param keys * @return Multi bulk reply, specifically the list of common elements. */ @Override public Set<byte[]> sunion(final byte[]... keys) { checkIsInMultiOrPipeline(); client.sunion(keys); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * This command works exactly like {@link #sunion(byte[]...) SUNION} but instead of being returned * the resulting set is stored as dstkey. Any existing value in dstkey will be over-written. * <p> * Time complexity O(N) where N is the total number of elements in all the provided sets * @param dstkey * @param keys * @return Status code reply */ @Override public Long sunionstore(final byte[] dstkey, final byte[]... keys) { checkIsInMultiOrPipeline(); client.sunionstore(dstkey, keys); return client.getIntegerReply(); } /** * Return the difference between the Set stored at key1 and all the Sets key2, ..., keyN * <p> * <b>Example:</b> * * <pre> * key1 = [x, a, b, c] * key2 = [c] * key3 = [a, d] * SDIFF key1,key2,key3 =&gt; [x, b] * </pre> * * Non existing keys are considered like empty sets. 
* <p> * <b>Time complexity:</b> * <p> * O(N) with N being the total number of elements of all the sets * @param keys * @return Return the members of a set resulting from the difference between the first set * provided and all the successive sets. */ @Override public Set<byte[]> sdiff(final byte[]... keys) { checkIsInMultiOrPipeline(); client.sdiff(keys); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * This command works exactly like {@link #sdiff(byte[]...) SDIFF} but instead of being returned * the resulting set is stored in dstkey. * @param dstkey * @param keys * @return Status code reply */ @Override public Long sdiffstore(final byte[] dstkey, final byte[]... keys) { checkIsInMultiOrPipeline(); client.sdiffstore(dstkey, keys); return client.getIntegerReply(); } /** * Return a random element from a Set, without removing the element. If the Set is empty or the * key does not exist, a nil object is returned. * <p> * The SPOP command does a similar work but the returned element is popped (removed) from the Set. * <p> * Time complexity O(1) * @param key * @return Bulk reply */ @Override public byte[] srandmember(final byte[] key) { checkIsInMultiOrPipeline(); client.srandmember(key); return client.getBinaryBulkReply(); } @Override public List<byte[]> srandmember(final byte[] key, final int count) { checkIsInMultiOrPipeline(); client.srandmember(key, count); return client.getBinaryMultiBulkReply(); } /** * Add the specified member having the specifeid score to the sorted set stored at key. If member * is already a member of the sorted set the score is updated, and the element reinserted in the * right position to ensure sorting. If key does not exist a new sorted set with the specified * member as sole member is crated. If the key exists but does not hold a sorted set value an * error is returned. * <p> * The score value can be the string representation of a double precision floating point number. 
* <p> * Time complexity O(log(N)) with N being the number of elements in the sorted set * @param key * @param score * @param member * @return Integer reply, specifically: 1 if the new element was added 0 if the element was * already a member of the sorted set and the score was updated */ @Override public Long zadd(final byte[] key, final double score, final byte[] member) { checkIsInMultiOrPipeline(); client.zadd(key, score, member); return client.getIntegerReply(); } @Override public Long zadd(byte[] key, double score, byte[] member, ZAddParams params) { checkIsInMultiOrPipeline(); client.zadd(key, score, member, params); return client.getIntegerReply(); } @Override public Long zadd(final byte[] key, final Map<byte[], Double> scoreMembers) { checkIsInMultiOrPipeline(); client.zadd(key, scoreMembers); return client.getIntegerReply(); } @Override public Long zadd(byte[] key, Map<byte[], Double> scoreMembers, ZAddParams params) { checkIsInMultiOrPipeline(); client.zadd(key, scoreMembers, params); return client.getIntegerReply(); } @Override public Set<byte[]> zrange(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.zrange(key, start, end); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Remove the specified member from the sorted set value stored at key. If member was not a member * of the set no operation is performed. If key does not not hold a set value an error is * returned. * <p> * Time complexity O(log(N)) with N being the number of elements in the sorted set * @param key * @param members * @return Integer reply, specifically: 1 if the new element was removed 0 if the new element was * not a member of the set */ @Override public Long zrem(final byte[] key, final byte[]... 
members) { checkIsInMultiOrPipeline(); client.zrem(key, members); return client.getIntegerReply(); } /** * If member already exists in the sorted set adds the increment to its score and updates the * position of the element in the sorted set accordingly. If member does not already exist in the * sorted set it is added with increment as score (that is, like if the previous score was * virtually zero). If key does not exist a new sorted set with the specified member as sole * member is crated. If the key exists but does not hold a sorted set value an error is returned. * <p> * The score value can be the string representation of a double precision floating point number. * It's possible to provide a negative value to perform a decrement. * <p> * For an introduction to sorted sets check the Introduction to Redis data types page. * <p> * Time complexity O(log(N)) with N being the number of elements in the sorted set * @param key * @param score * @param member * @return The new score */ @Override public Double zincrby(final byte[] key, final double score, final byte[] member) { checkIsInMultiOrPipeline(); client.zincrby(key, score, member); String newscore = client.getBulkReply(); return Double.valueOf(newscore); } @Override public Double zincrby(byte[] key, double score, byte[] member, ZIncrByParams params) { checkIsInMultiOrPipeline(); client.zincrby(key, score, member, params); String newscore = client.getBulkReply(); // with nx / xx options it could return null now if (newscore == null) return null; return Double.valueOf(newscore); } /** * Return the rank (or index) or member in the sorted set at key, with scores being ordered from * low to high. * <p> * When the given member does not exist in the sorted set, the special value 'nil' is returned. * The returned rank (or index) of the member is 0-based for both commands. 
* <p> * <b>Time complexity:</b> * <p> * O(log(N)) * @see #zrevrank(byte[], byte[]) * @param key * @param member * @return Integer reply or a nil bulk reply, specifically: the rank of the element as an integer * reply if the element exists. A nil bulk reply if there is no such element. */ @Override public Long zrank(final byte[] key, final byte[] member) { checkIsInMultiOrPipeline(); client.zrank(key, member); return client.getIntegerReply(); } /** * Return the rank (or index) or member in the sorted set at key, with scores being ordered from * high to low. * <p> * When the given member does not exist in the sorted set, the special value 'nil' is returned. * The returned rank (or index) of the member is 0-based for both commands. * <p> * <b>Time complexity:</b> * <p> * O(log(N)) * @see #zrank(byte[], byte[]) * @param key * @param member * @return Integer reply or a nil bulk reply, specifically: the rank of the element as an integer * reply if the element exists. A nil bulk reply if there is no such element. */ @Override public Long zrevrank(final byte[] key, final byte[] member) { checkIsInMultiOrPipeline(); client.zrevrank(key, member); return client.getIntegerReply(); } @Override public Set<byte[]> zrevrange(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.zrevrange(key, start, end); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<Tuple> zrangeWithScores(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.zrangeWithScores(key, start, end); return getBinaryTupledSet(); } @Override public Set<Tuple> zrevrangeWithScores(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.zrevrangeWithScores(key, start, end); return getBinaryTupledSet(); } /** * Return the sorted set cardinality (number of elements). If the key does not exist 0 is * returned, like for empty sorted sets. 
* <p> * Time complexity O(1) * @param key * @return the cardinality (number of elements) of the set as an integer. */ @Override public Long zcard(final byte[] key) { checkIsInMultiOrPipeline(); client.zcard(key); return client.getIntegerReply(); } /** * Return the score of the specified element of the sorted set at key. If the specified element * does not exist in the sorted set, or the key does not exist at all, a special 'nil' value is * returned. * <p> * <b>Time complexity:</b> O(1) * @param key * @param member * @return the score */ @Override public Double zscore(final byte[] key, final byte[] member) { checkIsInMultiOrPipeline(); client.zscore(key, member); final String score = client.getBulkReply(); return (score != null ? new Double(score) : null); } public Transaction multi() { client.multi(); client.getOne(); // expected OK transaction = new Transaction(client); return transaction; } protected void checkIsInMultiOrPipeline() { if (client.isInMulti()) { throw new JedisDataException( "Cannot use Jedis when in Multi. Please use Transation or reset jedis state."); } else if (pipeline != null && pipeline.hasPipelinedResponse()) { throw new JedisDataException( "Cannot use Jedis when in Pipeline. Please use Pipeline or reset jedis state ."); } } public void connect() { client.connect(); } public void disconnect() { client.disconnect(); } public void resetState() { if (client.isConnected()) { if (transaction != null) { transaction.clear(); } if (pipeline != null) { pipeline.clear(); } if (client.isInWatch()) { unwatch(); } client.resetState(); } transaction = null; pipeline = null; } @Override public String watch(final byte[]... keys) { client.watch(keys); return client.getStatusCodeReply(); } @Override public String unwatch() { client.unwatch(); return client.getStatusCodeReply(); } @Override public void close() { client.close(); } /** * Sort a Set or a List. * <p> * Sort the elements contained in the List, Set, or Sorted Set value at key. 
By default sorting is * numeric with elements being compared as double precision floating point numbers. This is the * simplest form of SORT. * @see #sort(byte[], byte[]) * @see #sort(byte[], SortingParams) * @see #sort(byte[], SortingParams, byte[]) * @param key * @return Assuming the Set/List at key contains a list of numbers, the return value will be the * list of numbers ordered from the smallest to the biggest number. */ @Override public List<byte[]> sort(final byte[] key) { checkIsInMultiOrPipeline(); client.sort(key); return client.getBinaryMultiBulkReply(); } /** * Sort a Set or a List accordingly to the specified parameters. * <p> * <b>examples:</b> * <p> * Given are the following sets and key/values: * * <pre> * x = [1, 2, 3] * y = [a, b, c] * * k1 = z * k2 = y * k3 = x * * w1 = 9 * w2 = 8 * w3 = 7 * </pre> * * Sort Order: * * <pre> * sort(x) or sort(x, sp.asc()) * -&gt; [1, 2, 3] * * sort(x, sp.desc()) * -&gt; [3, 2, 1] * * sort(y) * -&gt; [c, a, b] * * sort(y, sp.alpha()) * -&gt; [a, b, c] * * sort(y, sp.alpha().desc()) * -&gt; [c, a, b] * </pre> * * Limit (e.g. for Pagination): * * <pre> * sort(x, sp.limit(0, 2)) * -&gt; [1, 2] * * sort(y, sp.alpha().desc().limit(1, 2)) * -&gt; [b, a] * </pre> * * Sorting by external keys: * * <pre> * sort(x, sb.by(w*)) * -&gt; [3, 2, 1] * * sort(x, sb.by(w*).desc()) * -&gt; [1, 2, 3] * </pre> * * Getting external keys: * * <pre> * sort(x, sp.by(w*).get(k*)) * -&gt; [x, y, z] * * sort(x, sp.by(w*).get(#).get(k*)) * -&gt; [3, x, 2, y, 1, z] * </pre> * @see #sort(byte[]) * @see #sort(byte[], SortingParams, byte[]) * @param key * @param sortingParameters * @return a list of sorted elements. */ @Override public List<byte[]> sort(final byte[] key, final SortingParams sortingParameters) { checkIsInMultiOrPipeline(); client.sort(key, sortingParameters); return client.getBinaryMultiBulkReply(); } /** * BLPOP (and BRPOP) is a blocking list pop primitive. 
You can see this commands as blocking * versions of LPOP and RPOP able to block if the specified keys don't exist or contain empty * lists. * <p> * The following is a description of the exact semantic. We describe BLPOP but the two commands * are identical, the only difference is that BLPOP pops the element from the left (head) of the * list, and BRPOP pops from the right (tail). * <p> * <b>Non blocking behavior</b> * <p> * When BLPOP is called, if at least one of the specified keys contain a non empty list, an * element is popped from the head of the list and returned to the caller together with the name * of the key (BLPOP returns a two elements array, the first element is the key, the second the * popped value). * <p> * Keys are scanned from left to right, so for instance if you issue BLPOP list1 list2 list3 0 * against a dataset where list1 does not exist but list2 and list3 contain non empty lists, BLPOP * guarantees to return an element from the list stored at list2 (since it is the first non empty * list starting from the left). * <p> * <b>Blocking behavior</b> * <p> * If none of the specified keys exist or contain non empty lists, BLPOP blocks until some other * client performs a LPUSH or an RPUSH operation against one of the lists. * <p> * Once new data is present on one of the lists, the client finally returns with the name of the * key unblocking it and the popped value. * <p> * When blocking, if a non-zero timeout is specified, the client will unblock returning a nil * special value if the specified amount of seconds passed without a push operation against at * least one of the specified keys. * <p> * The timeout argument is interpreted as an integer value. A timeout of zero means instead to * block forever. * <p> * <b>Multiple clients blocking for the same keys</b> * <p> * Multiple clients can block for the same key. 
They are put into a queue, so the first to be * served will be the one that started to wait earlier, in a first-blpopping first-served fashion. * <p> * <b>blocking POP inside a MULTI/EXEC transaction</b> * <p> * BLPOP and BRPOP can be used with pipelining (sending multiple commands and reading the replies * in batch), but it does not make sense to use BLPOP or BRPOP inside a MULTI/EXEC block (a Redis * transaction). * <p> * The behavior of BLPOP inside MULTI/EXEC when the list is empty is to return a multi-bulk nil * reply, exactly what happens when the timeout is reached. If you like science fiction, think at * it like if inside MULTI/EXEC the time will flow at infinite speed :) * <p> * Time complexity: O(1) * @see #brpop(int, byte[]...) * @param timeout * @param keys * @return BLPOP returns a two-elements array via a multi bulk reply in order to return both the * unblocking key and the popped value. * <p> * When a non-zero timeout is specified, and the BLPOP operation timed out, the return * value is a nil multi bulk reply. Most client values will return false or nil * accordingly to the programming language used. */ @Override public List<byte[]> blpop(final int timeout, final byte[]... keys) { return blpop(getArgsAddTimeout(timeout, keys)); } private byte[][] getArgsAddTimeout(int timeout, byte[][] keys) { int size = keys.length; final byte[][] args = new byte[size + 1][]; for (int at = 0; at != size; ++at) { args[at] = keys[at]; } args[size] = Protocol.toByteArray(timeout); return args; } /** * Sort a Set or a List accordingly to the specified parameters and store the result at dstkey. * @see #sort(byte[], SortingParams) * @see #sort(byte[]) * @see #sort(byte[], byte[]) * @param key * @param sortingParameters * @param dstkey * @return The number of elements of the list at dstkey. 
*/ @Override public Long sort(final byte[] key, final SortingParams sortingParameters, final byte[] dstkey) { checkIsInMultiOrPipeline(); client.sort(key, sortingParameters, dstkey); return client.getIntegerReply(); } /** * Sort a Set or a List and Store the Result at dstkey. * <p> * Sort the elements contained in the List, Set, or Sorted Set value at key and store the result * at dstkey. By default sorting is numeric with elements being compared as double precision * floating point numbers. This is the simplest form of SORT. * @see #sort(byte[]) * @see #sort(byte[], SortingParams) * @see #sort(byte[], SortingParams, byte[]) * @param key * @param dstkey * @return The number of elements of the list at dstkey. */ @Override public Long sort(final byte[] key, final byte[] dstkey) { checkIsInMultiOrPipeline(); client.sort(key, dstkey); return client.getIntegerReply(); } /** * BLPOP (and BRPOP) is a blocking list pop primitive. You can see this commands as blocking * versions of LPOP and RPOP able to block if the specified keys don't exist or contain empty * lists. * <p> * The following is a description of the exact semantic. We describe BLPOP but the two commands * are identical, the only difference is that BLPOP pops the element from the left (head) of the * list, and BRPOP pops from the right (tail). * <p> * <b>Non blocking behavior</b> * <p> * When BLPOP is called, if at least one of the specified keys contain a non empty list, an * element is popped from the head of the list and returned to the caller together with the name * of the key (BLPOP returns a two elements array, the first element is the key, the second the * popped value). * <p> * Keys are scanned from left to right, so for instance if you issue BLPOP list1 list2 list3 0 * against a dataset where list1 does not exist but list2 and list3 contain non empty lists, BLPOP * guarantees to return an element from the list stored at list2 (since it is the first non empty * list starting from the left). 
* <p> * <b>Blocking behavior</b> * <p> * If none of the specified keys exist or contain non empty lists, BLPOP blocks until some other * client performs a LPUSH or an RPUSH operation against one of the lists. * <p> * Once new data is present on one of the lists, the client finally returns with the name of the * key unblocking it and the popped value. * <p> * When blocking, if a non-zero timeout is specified, the client will unblock returning a nil * special value if the specified amount of seconds passed without a push operation against at * least one of the specified keys. * <p> * The timeout argument is interpreted as an integer value. A timeout of zero means instead to * block forever. * <p> * <b>Multiple clients blocking for the same keys</b> * <p> * Multiple clients can block for the same key. They are put into a queue, so the first to be * served will be the one that started to wait earlier, in a first-blpopping first-served fashion. * <p> * <b>blocking POP inside a MULTI/EXEC transaction</b> * <p> * BLPOP and BRPOP can be used with pipelining (sending multiple commands and reading the replies * in batch), but it does not make sense to use BLPOP or BRPOP inside a MULTI/EXEC block (a Redis * transaction). * <p> * The behavior of BLPOP inside MULTI/EXEC when the list is empty is to return a multi-bulk nil * reply, exactly what happens when the timeout is reached. If you like science fiction, think at * it like if inside MULTI/EXEC the time will flow at infinite speed :) * <p> * Time complexity: O(1) * @see #blpop(int, byte[]...) * @param timeout * @param keys * @return BLPOP returns a two-elements array via a multi bulk reply in order to return both the * unblocking key and the popped value. * <p> * When a non-zero timeout is specified, and the BLPOP operation timed out, the return * value is a nil multi bulk reply. Most client values will return false or nil * accordingly to the programming language used. 
 */
@Override
public List<byte[]> brpop(final int timeout, final byte[]... keys) {
  return brpop(getArgsAddTimeout(timeout, keys));
}

/**
 * Raw-argument variant of BLPOP: args is the key list followed by the serialized timeout, in the
 * layout produced by getArgsAddTimeout(int, byte[][]).
 * @param args keys plus encoded timeout
 * @return two-element multi bulk reply (key, popped value), or a nil reply on timeout
 */
@Override
public List<byte[]> blpop(byte[]... args) {
  checkIsInMultiOrPipeline();
  client.blpop(args);
  // The server may hold this call until the requested timeout elapses (or forever when the
  // timeout is 0), so the socket read timeout is lifted while waiting for the reply.
  client.setTimeoutInfinite();
  try {
    return client.getBinaryMultiBulkReply();
  } finally {
    // Always restore the configured socket timeout, even if reading the reply failed.
    client.rollbackTimeout();
  }
}

/**
 * Raw-argument variant of BRPOP; see blpop(byte[]...) for the argument layout.
 * @param args keys plus encoded timeout
 * @return two-element multi bulk reply (key, popped value), or a nil reply on timeout
 */
@Override
public List<byte[]> brpop(byte[]... args) {
  checkIsInMultiOrPipeline();
  client.brpop(args);
  // Same blocking semantics as blpop(byte[]...): suspend the read timeout for the wait.
  client.setTimeoutInfinite();
  try {
    return client.getBinaryMultiBulkReply();
  } finally {
    client.rollbackTimeout();
  }
}

/**
 * Request for authentication in a password protected Redis server. A Redis server can be
 * instructed to require a password before to allow clients to issue commands. This is done using
 * the requirepass directive in the Redis configuration file. If the password given by the client
 * is correct the server replies with an OK status code reply and starts accepting commands from
 * the client. Otherwise an error is returned and the clients needs to try a new password. Note
 * that for the high performance nature of Redis it is possible to try a lot of passwords in
 * parallel in very short time, so make sure to generate a strong and very long password so that
 * this attack is infeasible.
* @param password * @return Status code reply */ @Override public String auth(final String password) { checkIsInMultiOrPipeline(); client.auth(password); return client.getStatusCodeReply(); } public Pipeline pipelined() { pipeline = new Pipeline(); pipeline.setClient(client); return pipeline; } @Override public Long zcount(final byte[] key, final double min, final double max) { return zcount(key, toByteArray(min), toByteArray(max)); } @Override public Long zcount(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zcount(key, min, max); return client.getIntegerReply(); } /** * Return the all the elements in the sorted set at key with a score between min and max * (including elements with score equal to min or max). * <p> * The elements having the same score are returned sorted lexicographically as ASCII strings (this * follows from a property of Redis sorted sets and does not involve further computation). * <p> * Using the optional {@link #zrangeByScore(byte[], double, double, int, int) LIMIT} it's possible * to get only a range of the matching elements in an SQL-alike way. Note that if offset is large * the commands needs to traverse the list for offset elements and this adds up to the O(M) * figure. * <p> * The {@link #zcount(byte[], double, double) ZCOUNT} command is similar to * {@link #zrangeByScore(byte[], double, double) ZRANGEBYSCORE} but instead of returning the * actual elements in the specified interval, it just returns the number of matching elements. * <p> * <b>Exclusive intervals and infinity</b> * <p> * min and max can be -inf and +inf, so that you are not required to know what's the greatest or * smallest element in order to take, for instance, elements "up to a given value". 
* <p> * Also while the interval is for default closed (inclusive) it's possible to specify open * intervals prefixing the score with a "(" character, so for instance: * <p> * {@code ZRANGEBYSCORE zset (1.3 5} * <p> * Will return all the values with score &gt; 1.3 and &lt;= 5, while for instance: * <p> * {@code ZRANGEBYSCORE zset (5 (10} * <p> * Will return all the values with score &gt; 5 and &lt; 10 (5 and 10 excluded). * <p> * <b>Time complexity:</b> * <p> * O(log(N))+O(M) with N being the number of elements in the sorted set and M the number of * elements returned by the command, so if M is constant (for instance you always ask for the * first ten elements with LIMIT) you can consider it O(log(N)) * @see #zrangeByScore(byte[], double, double) * @see #zrangeByScore(byte[], double, double, int, int) * @see #zrangeByScoreWithScores(byte[], double, double) * @see #zrangeByScoreWithScores(byte[], double, double, int, int) * @see #zcount(byte[], double, double) * @param key * @param min * @param max * @return Multi bulk reply specifically a list of elements in the specified score range. */ @Override public Set<byte[]> zrangeByScore(final byte[] key, final double min, final double max) { return zrangeByScore(key, toByteArray(min), toByteArray(max)); } @Override public Set<byte[]> zrangeByScore(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zrangeByScore(key, min, max); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Return the all the elements in the sorted set at key with a score between min and max * (including elements with score equal to min or max). * <p> * The elements having the same score are returned sorted lexicographically as ASCII strings (this * follows from a property of Redis sorted sets and does not involve further computation). 
* <p> * Using the optional {@link #zrangeByScore(byte[], double, double, int, int) LIMIT} it's possible * to get only a range of the matching elements in an SQL-alike way. Note that if offset is large * the commands needs to traverse the list for offset elements and this adds up to the O(M) * figure. * <p> * The {@link #zcount(byte[], double, double) ZCOUNT} command is similar to * {@link #zrangeByScore(byte[], double, double) ZRANGEBYSCORE} but instead of returning the * actual elements in the specified interval, it just returns the number of matching elements. * <p> * <b>Exclusive intervals and infinity</b> * <p> * min and max can be -inf and +inf, so that you are not required to know what's the greatest or * smallest element in order to take, for instance, elements "up to a given value". * <p> * Also while the interval is for default closed (inclusive) it's possible to specify open * intervals prefixing the score with a "(" character, so for instance: * <p> * {@code ZRANGEBYSCORE zset (1.3 5} * <p> * Will return all the values with score &gt; 1.3 and &lt;= 5, while for instance: * <p> * {@code ZRANGEBYSCORE zset (5 (10} * <p> * Will return all the values with score &gt; 5 and &lt; 10 (5 and 10 excluded). * <p> * <b>Time complexity:</b> * <p> * O(log(N))+O(M) with N being the number of elements in the sorted set and M the number of * elements returned by the command, so if M is constant (for instance you always ask for the * first ten elements with LIMIT) you can consider it O(log(N)) * @see #zrangeByScore(byte[], double, double) * @see #zrangeByScore(byte[], double, double, int, int) * @see #zrangeByScoreWithScores(byte[], double, double) * @see #zrangeByScoreWithScores(byte[], double, double, int, int) * @see #zcount(byte[], double, double) * @param key * @param min * @param max * @return Multi bulk reply specifically a list of elements in the specified score range. 
*/ @Override public Set<byte[]> zrangeByScore(final byte[] key, final double min, final double max, final int offset, final int count) { return zrangeByScore(key, toByteArray(min), toByteArray(max), offset, count); } @Override public Set<byte[]> zrangeByScore(final byte[] key, final byte[] min, final byte[] max, final int offset, final int count) { checkIsInMultiOrPipeline(); client.zrangeByScore(key, min, max, offset, count); return SetFromList.of(client.getBinaryMultiBulkReply()); } /** * Return the all the elements in the sorted set at key with a score between min and max * (including elements with score equal to min or max). * <p> * The elements having the same score are returned sorted lexicographically as ASCII strings (this * follows from a property of Redis sorted sets and does not involve further computation). * <p> * Using the optional {@link #zrangeByScore(byte[], double, double, int, int) LIMIT} it's possible * to get only a range of the matching elements in an SQL-alike way. Note that if offset is large * the commands needs to traverse the list for offset elements and this adds up to the O(M) * figure. * <p> * The {@link #zcount(byte[], double, double) ZCOUNT} command is similar to * {@link #zrangeByScore(byte[], double, double) ZRANGEBYSCORE} but instead of returning the * actual elements in the specified interval, it just returns the number of matching elements. * <p> * <b>Exclusive intervals and infinity</b> * <p> * min and max can be -inf and +inf, so that you are not required to know what's the greatest or * smallest element in order to take, for instance, elements "up to a given value". 
* <p> * Also while the interval is for default closed (inclusive) it's possible to specify open * intervals prefixing the score with a "(" character, so for instance: * <p> * {@code ZRANGEBYSCORE zset (1.3 5} * <p> * Will return all the values with score &gt; 1.3 and &lt;= 5, while for instance: * <p> * {@code ZRANGEBYSCORE zset (5 (10} * <p> * Will return all the values with score &gt; 5 and &lt; 10 (5 and 10 excluded). * <p> * <b>Time complexity:</b> * <p> * O(log(N))+O(M) with N being the number of elements in the sorted set and M the number of * elements returned by the command, so if M is constant (for instance you always ask for the * first ten elements with LIMIT) you can consider it O(log(N)) * @see #zrangeByScore(byte[], double, double) * @see #zrangeByScore(byte[], double, double, int, int) * @see #zrangeByScoreWithScores(byte[], double, double) * @see #zrangeByScoreWithScores(byte[], double, double, int, int) * @see #zcount(byte[], double, double) * @param key * @param min * @param max * @return Multi bulk reply specifically a list of elements in the specified score range. */ @Override public Set<Tuple> zrangeByScoreWithScores(final byte[] key, final double min, final double max) { return zrangeByScoreWithScores(key, toByteArray(min), toByteArray(max)); } @Override public Set<Tuple> zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zrangeByScoreWithScores(key, min, max); return getBinaryTupledSet(); } /** * Return the all the elements in the sorted set at key with a score between min and max * (including elements with score equal to min or max). * <p> * The elements having the same score are returned sorted lexicographically as ASCII strings (this * follows from a property of Redis sorted sets and does not involve further computation). 
* <p> * Using the optional {@link #zrangeByScore(byte[], double, double, int, int) LIMIT} it's possible * to get only a range of the matching elements in an SQL-alike way. Note that if offset is large * the commands needs to traverse the list for offset elements and this adds up to the O(M) * figure. * <p> * The {@link #zcount(byte[], double, double) ZCOUNT} command is similar to * {@link #zrangeByScore(byte[], double, double) ZRANGEBYSCORE} but instead of returning the * actual elements in the specified interval, it just returns the number of matching elements. * <p> * <b>Exclusive intervals and infinity</b> * <p> * min and max can be -inf and +inf, so that you are not required to know what's the greatest or * smallest element in order to take, for instance, elements "up to a given value". * <p> * Also while the interval is for default closed (inclusive) it's possible to specify open * intervals prefixing the score with a "(" character, so for instance: * <p> * {@code ZRANGEBYSCORE zset (1.3 5} * <p> * Will return all the values with score &gt; 1.3 and &lt;= 5, while for instance: * <p> * {@code ZRANGEBYSCORE zset (5 (10} * <p> * Will return all the values with score &gt; 5 and &lt; 10 (5 and 10 excluded). * <p> * <b>Time complexity:</b> * <p> * O(log(N))+O(M) with N being the number of elements in the sorted set and M the number of * elements returned by the command, so if M is constant (for instance you always ask for the * first ten elements with LIMIT) you can consider it O(log(N)) * @see #zrangeByScore(byte[], double, double) * @see #zrangeByScore(byte[], double, double, int, int) * @see #zrangeByScoreWithScores(byte[], double, double) * @see #zrangeByScoreWithScores(byte[], double, double, int, int) * @see #zcount(byte[], double, double) * @param key * @param min * @param max * @return Multi bulk reply specifically a list of elements in the specified score range. 
*/ @Override public Set<Tuple> zrangeByScoreWithScores(final byte[] key, final double min, final double max, final int offset, final int count) { return zrangeByScoreWithScores(key, toByteArray(min), toByteArray(max), offset, count); } @Override public Set<Tuple> zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max, final int offset, final int count) { checkIsInMultiOrPipeline(); client.zrangeByScoreWithScores(key, min, max, offset, count); return getBinaryTupledSet(); } private Set<Tuple> getBinaryTupledSet() { checkIsInMultiOrPipeline(); List<byte[]> membersWithScores = client.getBinaryMultiBulkReply(); if (membersWithScores.size() == 0) { return Collections.emptySet(); } Set<Tuple> set = new LinkedHashSet<Tuple>(membersWithScores.size() / 2, 1.0f); Iterator<byte[]> iterator = membersWithScores.iterator(); while (iterator.hasNext()) { set.add(new Tuple(iterator.next(), Double.valueOf(SafeEncoder.encode(iterator.next())))); } return set; } @Override public Set<byte[]> zrevrangeByScore(final byte[] key, final double max, final double min) { return zrevrangeByScore(key, toByteArray(max), toByteArray(min)); } @Override public Set<byte[]> zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min) { checkIsInMultiOrPipeline(); client.zrevrangeByScore(key, max, min); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<byte[]> zrevrangeByScore(final byte[] key, final double max, final double min, final int offset, final int count) { return zrevrangeByScore(key, toByteArray(max), toByteArray(min), offset, count); } @Override public Set<byte[]> zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min, final int offset, final int count) { checkIsInMultiOrPipeline(); client.zrevrangeByScore(key, max, min, offset, count); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final byte[] key, final double max, final double min) { return 
zrevrangeByScoreWithScores(key, toByteArray(max), toByteArray(min)); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final byte[] key, final double max, final double min, final int offset, final int count) { return zrevrangeByScoreWithScores(key, toByteArray(max), toByteArray(min), offset, count); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min) { checkIsInMultiOrPipeline(); client.zrevrangeByScoreWithScores(key, max, min); return getBinaryTupledSet(); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min, final int offset, final int count) { checkIsInMultiOrPipeline(); client.zrevrangeByScoreWithScores(key, max, min, offset, count); return getBinaryTupledSet(); } /** * Remove all elements in the sorted set at key with rank between start and end. Start and end are * 0-based with rank 0 being the element with the lowest score. Both start and end can be negative * numbers, where they indicate offsets starting at the element with the highest rank. For * example: -1 is the element with the highest score, -2 the element with the second highest score * and so forth. * <p> * <b>Time complexity:</b> O(log(N))+O(M) with N being the number of elements in the sorted set * and M the number of elements removed by the operation */ @Override public Long zremrangeByRank(final byte[] key, final long start, final long end) { checkIsInMultiOrPipeline(); client.zremrangeByRank(key, start, end); return client.getIntegerReply(); } /** * Remove all the elements in the sorted set at key with a score between min and max (including * elements with score equal to min or max). * <p> * <b>Time complexity:</b> * <p> * O(log(N))+O(M) with N being the number of elements in the sorted set and M the number of * elements removed by the operation * @param key * @param start * @param end * @return Integer reply, specifically the number of elements removed. 
 */
@Override
public Long zremrangeByScore(final byte[] key, final double start, final double end) {
  return zremrangeByScore(key, toByteArray(start), toByteArray(end));
}

@Override
public Long zremrangeByScore(final byte[] key, final byte[] start, final byte[] end) {
  checkIsInMultiOrPipeline();
  client.zremrangeByScore(key, start, end);
  return client.getIntegerReply();
}

/**
 * Creates a union or intersection of N sorted sets given by keys k1 through kN, and stores it at
 * dstkey. It is mandatory to provide the number of input keys N, before passing the input keys
 * and the other (optional) arguments.
 * <p>
 * As the terms imply, the {@link #zinterstore(byte[], byte[]...) ZINTERSTORE} command requires
 * an element to be present in each of the given inputs to be inserted in the result. The
 * {@link #zunionstore(byte[], byte[]...) ZUNIONSTORE} command inserts all elements across all
 * inputs.
 * <p>
 * Using the WEIGHTS option, it is possible to add weight to each input sorted set. This means
 * that the score of each element in the sorted set is first multiplied by this weight before
 * being passed to the aggregation. When this option is not given, all weights default to 1.
 * <p>
 * With the AGGREGATE option, it's possible to specify how the results of the union or
 * intersection are aggregated. This option defaults to SUM, where the score of an element is
 * summed across the inputs where it exists. When this option is set to be either MIN or MAX, the
 * resulting set will contain the minimum or maximum score of an element across the inputs where
 * it exists.
 * <p>
 * <b>Time complexity:</b> O(N) + O(M log(M)) with N being the sum of the sizes of the input
 * sorted sets, and M being the number of elements in the resulting sorted set
 * @see #zunionstore(byte[], byte[]...)
 * @see #zunionstore(byte[], ZParams, byte[]...)
 * @see #zinterstore(byte[], byte[]...)
 * @see #zinterstore(byte[], ZParams, byte[]...)
* @param dstkey * @param sets * @return Integer reply, specifically the number of elements in the sorted set at dstkey */ @Override public Long zunionstore(final byte[] dstkey, final byte[]... sets) { checkIsInMultiOrPipeline(); client.zunionstore(dstkey, sets); return client.getIntegerReply(); } /** * Creates a union or intersection of N sorted sets given by keys k1 through kN, and stores it at * dstkey. It is mandatory to provide the number of input keys N, before passing the input keys * and the other (optional) arguments. * <p> * As the terms imply, the {@link #zinterstore(byte[], byte[]...) ZINTERSTORE} command requires an * element to be present in each of the given inputs to be inserted in the result. The {@link * #zunionstore(byte[], byte[]...) ZUNIONSTORE} command inserts all elements across all inputs. * <p> * Using the WEIGHTS option, it is possible to add weight to each input sorted set. This means * that the score of each element in the sorted set is first multiplied by this weight before * being passed to the aggregation. When this option is not given, all weights default to 1. * <p> * With the AGGREGATE option, it's possible to specify how the results of the union or * intersection are aggregated. This option defaults to SUM, where the score of an element is * summed across the inputs where it exists. When this option is set to be either MIN or MAX, the * resulting set will contain the minimum or maximum score of an element across the inputs where * it exists. * <p> * <b>Time complexity:</b> O(N) + O(M log(M)) with N being the sum of the sizes of the input * sorted sets, and M being the number of elements in the resulting sorted set * @see #zunionstore(byte[], byte[]...) * @see #zunionstore(byte[], ZParams, byte[]...) * @see #zinterstore(byte[], byte[]...) * @see #zinterstore(byte[], ZParams, byte[]...) 
* @param dstkey * @param sets * @param params * @return Integer reply, specifically the number of elements in the sorted set at dstkey */ @Override public Long zunionstore(final byte[] dstkey, final ZParams params, final byte[]... sets) { checkIsInMultiOrPipeline(); client.zunionstore(dstkey, params, sets); return client.getIntegerReply(); } /** * Creates a union or intersection of N sorted sets given by keys k1 through kN, and stores it at * dstkey. It is mandatory to provide the number of input keys N, before passing the input keys * and the other (optional) arguments. * <p> * As the terms imply, the {@link #zinterstore(byte[], byte[]...) ZINTERSTORE} command requires an * element to be present in each of the given inputs to be inserted in the result. The {@link * #zunionstore(byte[], byte[]...) ZUNIONSTORE} command inserts all elements across all inputs. * <p> * Using the WEIGHTS option, it is possible to add weight to each input sorted set. This means * that the score of each element in the sorted set is first multiplied by this weight before * being passed to the aggregation. When this option is not given, all weights default to 1. * <p> * With the AGGREGATE option, it's possible to specify how the results of the union or * intersection are aggregated. This option defaults to SUM, where the score of an element is * summed across the inputs where it exists. When this option is set to be either MIN or MAX, the * resulting set will contain the minimum or maximum score of an element across the inputs where * it exists. * <p> * <b>Time complexity:</b> O(N) + O(M log(M)) with N being the sum of the sizes of the input * sorted sets, and M being the number of elements in the resulting sorted set * @see #zunionstore(byte[], byte[]...) * @see #zunionstore(byte[], ZParams, byte[]...) * @see #zinterstore(byte[], byte[]...) * @see #zinterstore(byte[], ZParams, byte[]...) 
* @param dstkey * @param sets * @return Integer reply, specifically the number of elements in the sorted set at dstkey */ @Override public Long zinterstore(final byte[] dstkey, final byte[]... sets) { checkIsInMultiOrPipeline(); client.zinterstore(dstkey, sets); return client.getIntegerReply(); } /** * Creates a union or intersection of N sorted sets given by keys k1 through kN, and stores it at * dstkey. It is mandatory to provide the number of input keys N, before passing the input keys * and the other (optional) arguments. * <p> * As the terms imply, the {@link #zinterstore(byte[], byte[]...) ZINTERSTORE} command requires an * element to be present in each of the given inputs to be inserted in the result. The {@link * #zunionstore(byte[], byte[]...) ZUNIONSTORE} command inserts all elements across all inputs. * <p> * Using the WEIGHTS option, it is possible to add weight to each input sorted set. This means * that the score of each element in the sorted set is first multiplied by this weight before * being passed to the aggregation. When this option is not given, all weights default to 1. * <p> * With the AGGREGATE option, it's possible to specify how the results of the union or * intersection are aggregated. This option defaults to SUM, where the score of an element is * summed across the inputs where it exists. When this option is set to be either MIN or MAX, the * resulting set will contain the minimum or maximum score of an element across the inputs where * it exists. * <p> * <b>Time complexity:</b> O(N) + O(M log(M)) with N being the sum of the sizes of the input * sorted sets, and M being the number of elements in the resulting sorted set * @see #zunionstore(byte[], byte[]...) * @see #zunionstore(byte[], ZParams, byte[]...) * @see #zinterstore(byte[], byte[]...) * @see #zinterstore(byte[], ZParams, byte[]...) 
* @param dstkey * @param sets * @param params * @return Integer reply, specifically the number of elements in the sorted set at dstkey */ @Override public Long zinterstore(final byte[] dstkey, final ZParams params, final byte[]... sets) { checkIsInMultiOrPipeline(); client.zinterstore(dstkey, params, sets); return client.getIntegerReply(); } @Override public Long zlexcount(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zlexcount(key, min, max); return client.getIntegerReply(); } @Override public Set<byte[]> zrangeByLex(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zrangeByLex(key, min, max); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<byte[]> zrangeByLex(final byte[] key, final byte[] min, final byte[] max, final int offset, final int count) { checkIsInMultiOrPipeline(); client.zrangeByLex(key, min, max, offset, count); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<byte[]> zrevrangeByLex(byte[] key, byte[] max, byte[] min) { checkIsInMultiOrPipeline(); client.zrevrangeByLex(key, max, min); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Set<byte[]> zrevrangeByLex(byte[] key, byte[] max, byte[] min, int offset, int count) { checkIsInMultiOrPipeline(); client.zrevrangeByLex(key, max, min, offset, count); return SetFromList.of(client.getBinaryMultiBulkReply()); } @Override public Long zremrangeByLex(final byte[] key, final byte[] min, final byte[] max) { checkIsInMultiOrPipeline(); client.zremrangeByLex(key, min, max); return client.getIntegerReply(); } /** * Synchronously save the DB on disk. * <p> * Save the whole dataset on disk (this means that all the databases are saved, as well as keys * with an EXPIRE set (the expire is preserved). The server hangs while the saving is not * completed, no connection is served in the meanwhile. 
An OK code is returned when the DB was * fully stored in disk. * <p> * The background variant of this command is {@link #bgsave() BGSAVE} that is able to perform the * saving in the background while the server continues serving other clients. * <p> * @return Status code reply */ @Override public String save() { client.save(); return client.getStatusCodeReply(); } /** * Asynchronously save the DB on disk. * <p> * Save the DB in background. The OK code is immediately returned. Redis forks, the parent * continues to server the clients, the child saves the DB on disk then exit. A client my be able * to check if the operation succeeded using the LASTSAVE command. * @return Status code reply */ @Override public String bgsave() { client.bgsave(); return client.getStatusCodeReply(); } /** * Rewrite the append only file in background when it gets too big. Please for detailed * information about the Redis Append Only File check the <a * href="http://redis.io/topics/persistence#append-only-file">Append Only File Howto</a>. * <p> * BGREWRITEAOF rewrites the Append Only File in background when it gets too big. The Redis Append * Only File is a Journal, so every operation modifying the dataset is logged in the Append Only * File (and replayed at startup). This means that the Append Only File always grows. In order to * rebuild its content the BGREWRITEAOF creates a new version of the append only file starting * directly form the dataset in memory in order to guarantee the generation of the minimal number * of commands needed to rebuild the database. * <p> * @return Status code reply */ @Override public String bgrewriteaof() { client.bgrewriteaof(); return client.getStatusCodeReply(); } /** * Return the UNIX time stamp of the last successfully saving of the dataset on disk. * <p> * Return the UNIX TIME of the last DB save executed with success. 
A client may check if a * {@link #bgsave() BGSAVE} command succeeded reading the LASTSAVE value, then issuing a BGSAVE * command and checking at regular intervals every N seconds if LASTSAVE changed. * @return Integer reply, specifically an UNIX time stamp. */ @Override public Long lastsave() { client.lastsave(); return client.getIntegerReply(); } /** * Synchronously save the DB on disk, then shutdown the server. * <p> * Stop all the clients, save the DB, then quit the server. This commands makes sure that the DB * is switched off without the lost of any data. This is not guaranteed if the client uses simply * {@link #save() SAVE} and then {@link #quit() QUIT} because other clients may alter the DB data * between the two commands. * @return Status code reply on error. On success nothing is returned since the server quits and * the connection is closed. */ @Override public String shutdown() { client.shutdown(); String status; try { status = client.getStatusCodeReply(); } catch (JedisException ex) { status = null; } return status; } /** * Provide information and statistics about the server. * <p> * The info command returns different information and statistics about the server in an format * that's simple to parse by computers and easy to read by humans. * <p> * <b>Format of the returned String:</b> * <p> * All the fields are in the form field:value * * <pre> * edis_version:0.07 * connected_clients:1 * connected_slaves:0 * used_memory:3187 * changes_since_last_save:0 * last_save_time:1237655729 * total_connections_received:1 * total_commands_processed:1 * uptime_in_seconds:25 * uptime_in_days:0 * </pre> * * <b>Notes</b> * <p> * used_memory is returned in bytes, and is the total number of bytes allocated by the program * using malloc. * <p> * uptime_in_days is redundant since the uptime in seconds contains already the full uptime * information, this field is only mainly present for humans. 
* <p> * changes_since_last_save does not refer to the number of key changes, but to the number of * operations that produced some kind of change in the dataset. * <p> * @return Bulk reply */ @Override public String info() { client.info(); return client.getBulkReply(); } @Override public String info(final String section) { client.info(section); return client.getBulkReply(); } /** * Dump all the received requests in real time. * <p> * MONITOR is a debugging command that outputs the whole sequence of commands received by the * Redis server. is very handy in order to understand what is happening into the database. This * command is used directly via telnet. * @param jedisMonitor */ public void monitor(final JedisMonitor jedisMonitor) { client.monitor(); client.getStatusCodeReply(); jedisMonitor.proceed(client); } /** * Change the replication settings. * <p> * The SLAVEOF command can change the replication settings of a slave on the fly. If a Redis * server is arleady acting as slave, the command SLAVEOF NO ONE will turn off the replicaiton * turning the Redis server into a MASTER. In the proper form SLAVEOF hostname port will make the * server a slave of the specific server listening at the specified hostname and port. * <p> * If a server is already a slave of some master, SLAVEOF hostname port will stop the replication * against the old server and start the synchrnonization against the new one discarding the old * dataset. * <p> * The form SLAVEOF no one will stop replication turning the server into a MASTER but will not * discard the replication. So if the old master stop working it is possible to turn the slave * into a master and set the application to use the new master in read/write. Later when the other * Redis server will be fixed it can be configured in order to work as slave. 
* <p> * @param host * @param port * @return Status code reply */ @Override public String slaveof(final String host, final int port) { client.slaveof(host, port); return client.getStatusCodeReply(); } @Override public String slaveofNoOne() { client.slaveofNoOne(); return client.getStatusCodeReply(); } /** * Retrieve the configuration of a running Redis server. Not all the configuration parameters are * supported. * <p> * CONFIG GET returns the current configuration parameters. This sub command only accepts a single * argument, that is glob style pattern. All the configuration parameters matching this parameter * are reported as a list of key-value pairs. * <p> * <b>Example:</b> * * <pre> * $ redis-cli config get '*' * 1. "dbfilename" * 2. "dump.rdb" * 3. "requirepass" * 4. (nil) * 5. "masterauth" * 6. (nil) * 7. "maxmemory" * 8. "0\n" * 9. "appendfsync" * 10. "everysec" * 11. "save" * 12. "3600 1 300 100 60 10000" * * $ redis-cli config get 'm*' * 1. "masterauth" * 2. (nil) * 3. "maxmemory" * 4. "0\n" * </pre> * @param pattern * @return Bulk reply. */ @Override public List<byte[]> configGet(final byte[] pattern) { client.configGet(pattern); return client.getBinaryMultiBulkReply(); } /** * Reset the stats returned by INFO * @return */ @Override public String configResetStat() { client.configResetStat(); return client.getStatusCodeReply(); } /** * Alter the configuration of a running Redis server. Not all the configuration parameters are * supported. * <p> * The list of configuration parameters supported by CONFIG SET can be obtained issuing a * {@link #configGet(byte[]) CONFIG GET *} command. * <p> * The configuration set using CONFIG SET is immediately loaded by the Redis server that will * start acting as specified starting from the next command. 
* <p> * <b>Parameters value format</b> * <p> * The value of the configuration parameter is the same as the one of the same parameter in the * Redis configuration file, with the following exceptions: * <p> * <ul> * <li>The save paramter is a list of space-separated integers. Every pair of integers specify the * time and number of changes limit to trigger a save. For instance the command CONFIG SET save * "3600 10 60 10000" will configure the server to issue a background saving of the RDB file every * 3600 seconds if there are at least 10 changes in the dataset, and every 60 seconds if there are * at least 10000 changes. To completely disable automatic snapshots just set the parameter as an * empty string. * <li>All the integer parameters representing memory are returned and accepted only using bytes * as unit. * </ul> * @param parameter * @param value * @return Status code reply */ @Override public byte[] configSet(final byte[] parameter, final byte[] value) { client.configSet(parameter, value); return client.getBinaryBulkReply(); } public boolean isConnected() { return client.isConnected(); } @Override public Long strlen(final byte[] key) { client.strlen(key); return client.getIntegerReply(); } public void sync() { client.sync(); } @Override public Long lpushx(final byte[] key, final byte[]... string) { client.lpushx(key, string); return client.getIntegerReply(); } /** * Undo a {@link #expire(byte[], int) expire} at turning the expire key into a normal key. * <p> * Time complexity: O(1) * @param key * @return Integer reply, specifically: 1: the key is now persist. 0: the key is not persist (only * happens when key not set). */ @Override public Long persist(final byte[] key) { client.persist(key); return client.getIntegerReply(); } @Override public Long rpushx(final byte[] key, final byte[]... 
string) { client.rpushx(key, string); return client.getIntegerReply(); } @Override public byte[] echo(final byte[] string) { client.echo(string); return client.getBinaryBulkReply(); } @Override public Long linsert(final byte[] key, final LIST_POSITION where, final byte[] pivot, final byte[] value) { client.linsert(key, where, pivot, value); return client.getIntegerReply(); } @Override public String debug(final DebugParams params) { client.debug(params); return client.getStatusCodeReply(); } public Client getClient() { return client; } /** * Pop a value from a list, push it to another list and return it; or block until one is available * @param source * @param destination * @param timeout * @return the element */ @Override public byte[] brpoplpush(byte[] source, byte[] destination, int timeout) { client.brpoplpush(source, destination, timeout); client.setTimeoutInfinite(); try { return client.getBinaryBulkReply(); } finally { client.rollbackTimeout(); } } /** * Sets or clears the bit at offset in the string value stored at key * @param key * @param offset * @param value * @return */ @Override public Boolean setbit(byte[] key, long offset, boolean value) { client.setbit(key, offset, value); return client.getIntegerReply() == 1; } @Override public Boolean setbit(byte[] key, long offset, byte[] value) { client.setbit(key, offset, value); return client.getIntegerReply() == 1; } /** * Returns the bit value at offset in the string value stored at key * @param key * @param offset * @return */ @Override public Boolean getbit(byte[] key, long offset) { client.getbit(key, offset); return client.getIntegerReply() == 1; } public Long bitpos(final byte[] key, final boolean value) { return bitpos(key, value, new BitPosParams()); } public Long bitpos(final byte[] key, final boolean value, final BitPosParams params) { client.bitpos(key, value, params); return client.getIntegerReply(); } @Override public Long setrange(byte[] key, long offset, byte[] value) { client.setrange(key, 
offset, value); return client.getIntegerReply(); } @Override public byte[] getrange(byte[] key, long startOffset, long endOffset) { client.getrange(key, startOffset, endOffset); return client.getBinaryBulkReply(); } @Override public Long publish(byte[] channel, byte[] message) { client.publish(channel, message); return client.getIntegerReply(); } @Override public void subscribe(BinaryJedisPubSub jedisPubSub, byte[]... channels) { client.setTimeoutInfinite(); try { jedisPubSub.proceed(client, channels); } finally { client.rollbackTimeout(); } } @Override public void psubscribe(BinaryJedisPubSub jedisPubSub, byte[]... patterns) { client.setTimeoutInfinite(); try { jedisPubSub.proceedWithPatterns(client, patterns); } finally { client.rollbackTimeout(); } } @Override public int getDB() { return client.getDB(); } /** * Evaluates scripts using the Lua interpreter built into Redis starting from version 2.6.0. * <p> * @return Script result */ @Override public Object eval(byte[] script, List<byte[]> keys, List<byte[]> args) { return eval(script, toByteArray(keys.size()), getParamsWithBinary(keys, args)); } protected static byte[][] getParamsWithBinary(List<byte[]> keys, List<byte[]> args) { final int keyCount = keys.size(); final int argCount = args.size(); byte[][] params = new byte[keyCount + argCount][]; for (int i = 0; i < keyCount; i++) params[i] = keys.get(i); for (int i = 0; i < argCount; i++) params[keyCount + i] = args.get(i); return params; } @Override public Object eval(byte[] script, byte[] keyCount, byte[]... params) { client.setTimeoutInfinite(); try { client.eval(script, keyCount, params); return client.getOne(); } finally { client.rollbackTimeout(); } } @Override public Object eval(byte[] script, int keyCount, byte[]... 
params) { return eval(script, toByteArray(keyCount), params); } @Override public Object eval(byte[] script) { return eval(script, 0); } @Override public Object evalsha(byte[] sha1) { return evalsha(sha1, 1); } @Override public Object evalsha(byte[] sha1, List<byte[]> keys, List<byte[]> args) { return evalsha(sha1, keys.size(), getParamsWithBinary(keys, args)); } @Override public Object evalsha(byte[] sha1, int keyCount, byte[]... params) { client.setTimeoutInfinite(); try { client.evalsha(sha1, keyCount, params); return client.getOne(); } finally { client.rollbackTimeout(); } } @Override public String scriptFlush() { client.scriptFlush(); return client.getStatusCodeReply(); } public Long scriptExists(byte[] sha1) { byte[][] a = new byte[1][]; a[0] = sha1; return scriptExists(a).get(0); } @Override public List<Long> scriptExists(byte[]... sha1) { client.scriptExists(sha1); return client.getIntegerMultiBulkReply(); } @Override public byte[] scriptLoad(byte[] script) { client.scriptLoad(script); return client.getBinaryBulkReply(); } @Override public String scriptKill() { client.scriptKill(); return client.getStatusCodeReply(); } @Override public String slowlogReset() { client.slowlogReset(); return client.getBulkReply(); } @Override public Long slowlogLen() { client.slowlogLen(); return client.getIntegerReply(); } @Override public List<byte[]> slowlogGetBinary() { client.slowlogGet(); return client.getBinaryMultiBulkReply(); } @Override public List<byte[]> slowlogGetBinary(long entries) { client.slowlogGet(entries); return client.getBinaryMultiBulkReply(); } @Override public Long objectRefcount(byte[] key) { client.objectRefcount(key); return client.getIntegerReply(); } @Override public byte[] objectEncoding(byte[] key) { client.objectEncoding(key); return client.getBinaryBulkReply(); } @Override public Long objectIdletime(byte[] key) { client.objectIdletime(key); return client.getIntegerReply(); } @Override public Long bitcount(final byte[] key) { 
client.bitcount(key); return client.getIntegerReply(); } @Override public Long bitcount(final byte[] key, long start, long end) { client.bitcount(key, start, end); return client.getIntegerReply(); } @Override public Long bitop(BitOP op, final byte[] destKey, byte[]... srcKeys) { client.bitop(op, destKey, srcKeys); return client.getIntegerReply(); } public byte[] dump(final byte[] key) { checkIsInMultiOrPipeline(); client.dump(key); return client.getBinaryBulkReply(); } public String restore(final byte[] key, final long ttl, final byte[] serializedValue) { checkIsInMultiOrPipeline(); client.restore(key, ttl, serializedValue); return client.getStatusCodeReply(); } /** * Set a timeout on the specified key. After the timeout the key will be automatically deleted by * the server. A key with an associated timeout is said to be volatile in Redis terminology. * <p> * Voltile keys are stored on disk like the other keys, the timeout is persistent too like all the * other aspects of the dataset. Saving a dataset containing expires and stopping the server does * not stop the flow of time as Redis stores on disk the time when the key will no longer be * available as Unix time, and not the remaining milliseconds. * <p> * Since Redis 2.1.3 you can update the value of the timeout of a key already having an expire * set. It is also possible to undo the expire at all turning the key into a normal key using the * {@link #persist(byte[]) PERSIST} command. * <p> * Time complexity: O(1) * @see <ahref="http://redis.io/commands/pexpire">PEXPIRE Command</a> * @param key * @param milliseconds * @return Integer reply, specifically: 1: the timeout was set. 0: the timeout was not set since * the key already has an associated timeout (this may happen only in Redis versions < * 2.1.3, Redis >= 2.1.3 will happily update the timeout), or the key does not exist. 
*/ @Override public Long pexpire(final byte[] key, final long milliseconds) { checkIsInMultiOrPipeline(); client.pexpire(key, milliseconds); return client.getIntegerReply(); } @Override public Long pexpireAt(final byte[] key, final long millisecondsTimestamp) { checkIsInMultiOrPipeline(); client.pexpireAt(key, millisecondsTimestamp); return client.getIntegerReply(); } public Long pttl(final byte[] key) { checkIsInMultiOrPipeline(); client.pttl(key); return client.getIntegerReply(); } /** * PSETEX works exactly like {@link #setex(byte[], int, byte[])} with the sole difference that the * expire time is specified in milliseconds instead of seconds. Time complexity: O(1) * @param key * @param milliseconds * @param value * @return Status code reply */ public String psetex(final byte[] key, final long milliseconds, final byte[] value) { checkIsInMultiOrPipeline(); client.psetex(key, milliseconds, value); return client.getStatusCodeReply(); } public String clientKill(final byte[] client) { checkIsInMultiOrPipeline(); this.client.clientKill(client); return this.client.getStatusCodeReply(); } public String clientGetname() { checkIsInMultiOrPipeline(); client.clientGetname(); return client.getBulkReply(); } public String clientList() { checkIsInMultiOrPipeline(); client.clientList(); return client.getBulkReply(); } public String clientSetname(final byte[] name) { checkIsInMultiOrPipeline(); client.clientSetname(name); return client.getBulkReply(); } public List<String> time() { checkIsInMultiOrPipeline(); client.time(); return client.getMultiBulkReply(); } public String migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) { checkIsInMultiOrPipeline(); client.migrate(host, port, key, destinationDb, timeout); return client.getStatusCodeReply(); } /** * Syncrhonous replication of Redis as described here: http://antirez.com/news/66 Since Java * Object class has implemented "wait" method, we cannot use it, so I had to change the 
name of * the method. Sorry :S */ @Override public Long waitReplicas(int replicas, long timeout) { checkIsInMultiOrPipeline(); client.waitReplicas(replicas, timeout); return client.getIntegerReply(); } @Override public Long pfadd(final byte[] key, final byte[]... elements) { checkIsInMultiOrPipeline(); client.pfadd(key, elements); return client.getIntegerReply(); } @Override public long pfcount(final byte[] key) { checkIsInMultiOrPipeline(); client.pfcount(key); return client.getIntegerReply(); } @Override public String pfmerge(final byte[] destkey, final byte[]... sourcekeys) { checkIsInMultiOrPipeline(); client.pfmerge(destkey, sourcekeys); return client.getStatusCodeReply(); } @Override public Long pfcount(byte[]... keys) { checkIsInMultiOrPipeline(); client.pfcount(keys); return client.getIntegerReply(); } public ScanResult<byte[]> scan(final byte[] cursor) { return scan(cursor, new ScanParams()); } public ScanResult<byte[]> scan(final byte[] cursor, final ScanParams params) { checkIsInMultiOrPipeline(); client.scan(cursor, params); List<Object> result = client.getObjectMultiBulkReply(); byte[] newcursor = (byte[]) result.get(0); List<byte[]> rawResults = (List<byte[]>) result.get(1); return new ScanResult<byte[]>(newcursor, rawResults); } public ScanResult<Map.Entry<byte[], byte[]>> hscan(final byte[] key, final byte[] cursor) { return hscan(key, cursor, new ScanParams()); } public ScanResult<Map.Entry<byte[], byte[]>> hscan(final byte[] key, final byte[] cursor, final ScanParams params) { checkIsInMultiOrPipeline(); client.hscan(key, cursor, params); List<Object> result = client.getObjectMultiBulkReply(); byte[] newcursor = (byte[]) result.get(0); List<Map.Entry<byte[], byte[]>> results = new ArrayList<Map.Entry<byte[], byte[]>>(); List<byte[]> rawResults = (List<byte[]>) result.get(1); Iterator<byte[]> iterator = rawResults.iterator(); while (iterator.hasNext()) { results.add(new AbstractMap.SimpleEntry<byte[], byte[]>(iterator.next(), iterator.next())); } 
return new ScanResult<Map.Entry<byte[], byte[]>>(newcursor, results); } public ScanResult<byte[]> sscan(final byte[] key, final byte[] cursor) { return sscan(key, cursor, new ScanParams()); } public ScanResult<byte[]> sscan(final byte[] key, final byte[] cursor, final ScanParams params) { checkIsInMultiOrPipeline(); client.sscan(key, cursor, params); List<Object> result = client.getObjectMultiBulkReply(); byte[] newcursor = (byte[]) result.get(0); List<byte[]> rawResults = (List<byte[]>) result.get(1); return new ScanResult<byte[]>(newcursor, rawResults); } public ScanResult<Tuple> zscan(final byte[] key, final byte[] cursor) { return zscan(key, cursor, new ScanParams()); } public ScanResult<Tuple> zscan(final byte[] key, final byte[] cursor, final ScanParams params) { checkIsInMultiOrPipeline(); client.zscan(key, cursor, params); List<Object> result = client.getObjectMultiBulkReply(); byte[] newcursor = (byte[]) result.get(0); List<Tuple> results = new ArrayList<Tuple>(); List<byte[]> rawResults = (List<byte[]>) result.get(1); Iterator<byte[]> iterator = rawResults.iterator(); while (iterator.hasNext()) { results.add(new Tuple(iterator.next(), Double.valueOf(SafeEncoder.encode(iterator.next())))); } return new ScanResult<Tuple>(newcursor, results); } /** * A decorator to implement Set from List. Assume that given List do not contains duplicated * values. The resulting set displays the same ordering, concurrency, and performance * characteristics as the backing list. This class should be used only for Redis commands which * return Set result. 
* @param <E> */ protected static class SetFromList<E> extends AbstractSet<E> { private final List<E> list; private SetFromList(List<E> list) { if (list == null) { throw new NullPointerException("list"); } this.list = list; } @Override public void clear() { list.clear(); } @Override public int size() { return list.size(); } @Override public boolean isEmpty() { return list.isEmpty(); } @Override public boolean contains(Object o) { return list.contains(o); } @Override public boolean remove(Object o) { return list.remove(o); } @Override public boolean add(E e) { return !contains(e) && list.add(e); } @Override public Iterator<E> iterator() { return list.iterator(); } @Override public Object[] toArray() { return list.toArray(); } @Override public <T> T[] toArray(T[] a) { return list.toArray(a); } public String toString() { return list.toString(); } public int hashCode() { return list.hashCode(); } public boolean equals(Object o) { if (o == this) { return true; } if (!(o instanceof Set)) { return false; } Collection<?> c = (Collection<?>) o; if (c.size() != size()) { return false; } return containsAll(c); } @Override public boolean containsAll(Collection<?> c) { return list.containsAll(c); } @Override public boolean removeAll(Collection<?> c) { return list.removeAll(c); } @Override public boolean retainAll(Collection<?> c) { return list.retainAll(c); } protected static <E> SetFromList<E> of(List<E> list) { return new SetFromList<E>(list); } } }
import {NgModule} from '@angular/core'; import {CommonModule} from '@angular/common'; import {MdcListDivider} from './list-divider'; import { MdcList, MdcListGroup, MdcListGroupSubheader } from './list'; import { MdcListItem, MdcListItemGraphic, MdcListItemMeta, MdcListItemSecondary, MdcListItemText } from './list-item'; const LIST_DECLARATIONS = [ MdcList, MdcListDivider, MdcListGroup, MdcListGroupSubheader, MdcListItem, MdcListItemGraphic, MdcListItemMeta, MdcListItemSecondary, MdcListItemText ]; @NgModule({ imports: [CommonModule], exports: LIST_DECLARATIONS, declarations: LIST_DECLARATIONS, }) export class MdcListModule { }
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace CryptopalTests.Properties {
    using System;


    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {

        // Lazily-created ResourceManager shared by all lookups in this class.
        private static global::System.Resources.ResourceManager resourceMan;

        // Optional culture override used for all GetString calls (null = current UI culture).
        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("CryptopalTests.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }

        /// <summary>
        ///   Looks up the "Challenge10Text" string resource — a large multi-line block of
        ///   challenge input data (full value lives in the .resx file; generated preview omitted).
        /// </summary>
        internal static string Challenge10Text {
            get {
                return ResourceManager.GetString("Challenge10Text", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up the "Challenge4Text" string resource — a large multi-line block of
        ///   challenge input data (full value lives in the .resx file; generated preview omitted).
        /// </summary>
        internal static string Challenge4Text {
            get {
                return ResourceManager.GetString("Challenge4Text", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up the "Challenge6Text" string resource — a large multi-line block of
        ///   challenge input data (full value lives in the .resx file; generated preview omitted).
        /// </summary>
        internal static string Challenge6Text {
            get {
                return ResourceManager.GetString("Challenge6Text", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up the "Challenge7Text" string resource — a large multi-line block of
        ///   challenge input data (full value lives in the .resx file; generated preview omitted).
        /// </summary>
        internal static string Challenge7Text {
            get {
                return ResourceManager.GetString("Challenge7Text", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up the "Challenge8Text" string resource — a large multi-line block of
        ///   challenge input data (full value lives in the .resx file; generated preview omitted).
        /// </summary>
        internal static string Challenge8Text {
            get {
                return ResourceManager.GetString("Challenge8Text", resourceCulture);
            }
        }
    }
}
package com.tinkerrocks.process.traversal;

import org.apache.tinkerpop.gremlin.process.traversal.P;

import java.util.function.BiPredicate;

/**
 * TinkerPop {@link P} predicate wrapper providing string-containment matching.
 * Created by ashishn on 8/16/15.
 */
public class Predicate<V> extends P<V> {

    public Predicate(BiPredicate<V, V> biPredicate, V value) {
        super(biPredicate, value);
    }

    /**
     * Builds a predicate that matches when the tested property contains {@code value}
     * as a substring.
     *
     * @param value the substring to look for; must be a {@link String}
     * @param <V>   type of value
     * @return predicate matching "property contains value"
     * @throws IllegalArgumentException if {@code value} is not a String
     */
    @SuppressWarnings("unchecked")
    public static <V> P<V> stringContains(final V value) {
        if (value instanceof String) {
            return new P(StringContains.subString, value);
        }
        throw new IllegalArgumentException("cannot compare String and class: " + value.getClass());
    }
}
#pragma once #include "VoreealActor.h" #include "VoreealPagedVolumeComponent.h" #include "VoreealPagedVolumeActor.generated.h" UCLASS(ComponentWrapperClass) class VOREEAL_API APagedVolumeActor : public AVoreealActor { GENERATED_BODY() public: UPROPERTY(VisibleAnywhere, BlueprintReadWrite, Category = "Voreeal", meta = (ExposeFunctionCategories = "Voreeal,Rendering,Physics", AllowPrivateAccess = "true")) UPagedVolumeComponent* PagedVolumeComponent; public: APagedVolumeActor(const class FObjectInitializer& ObjectInitializer); // Begin AActor Interface #if WITH_EDITOR virtual void CheckForErrors() override; virtual bool GetReferencedContentObjects(TArray<UObject*>& Objects) const override; #endif // End AActor Interface protected: // Begin UObject Interface virtual FString GetDetailedInfoInternal() const override; // End UObject Interface public: UPagedVolumeComponent* GetPagedVolumeComponent() const; };
<?php
/*
 * Copyright 2013 Yurii Ishchenko <ishenkoyv@gmail.com>
 *
 * Licensed under the MIT License (the "License");
 */

namespace Ishenkoyv\Otrs\Client;

use Ishenkoyv\Otrs\AbstractClient;
use Ishenkoyv\Otrs\ClientInterface;

/**
 * SOAP transport for the OTRS RPC interface.
 *
 * @author Yurii Ishchenko <ishenkoyv@gmail.com>
 */
class Soap extends AbstractClient implements ClientInterface
{
    protected $soapUrl;
    protected $username;
    protected $password;
    protected $client;
    protected $_isDebug = false;

    public function __construct($soapUrl, $username, $password)
    {
        $this->soapUrl = $soapUrl;
        $this->username = $username;
        $this->password = $password;

        // RPC/encoded endpoint; "Core" is the OTRS dispatcher URI.
        $options = array(
            'location' => $this->soapUrl,
            'uri' => "Core",
            'trace' => 1,
            'login' => $this->username,
            'password' => $this->password,
            'style' => SOAP_RPC,
            'use' => SOAP_ENCODED,
        );

        $this->client = new \SoapClient(null, $options);
    }

    /**
     * Calls the OTRS "Dispatch" RPC with credentials followed by a flat
     * name/value argument list. Returns false on any failure (best effort).
     */
    public function dispatchCall(array $params = array())
    {
        $callArgs = array($this->username, $this->password);
        foreach ($params as $name => $value) {
            $callArgs[] = $name;
            $callArgs[] = $value;
        }

        $result = false;
        try {
            $result = $this->client->__soapCall('Dispatch', $callArgs);
            if (is_array($result)) {
                $result = $this->parsePairs($result);
            }
        } catch (\Exception $e) {
            // Intentionally swallowed: callers only see the boolean false result.
        }

        return $result;
    }

    public function getLastRequest()
    {
        return $this->client->__getLastRequest();
    }

    public function getLastResponse()
    {
        return $this->client->__getLastResponse();
    }
}
/**
 * Module bootstrap: wires the poke control form and the poke history table
 * to their DOM containers with overridable defaults.
 */
/*global define*/
define(['./poke-control', './poke-history', 'jquery'], function (pokeControl, pokeHistory, $) {
    'use strict';

    // Default configuration for both sub-modules (selectors and HTML templates).
    var defaults = {
        pokeControl: {
            selector: '.poke-control-container',
            template: '<form class="form"><div class="form-group"><label for="bdoChannel">Канал:</label><select class="form-control" name="bdoChannel" id="bdoChannel"></select></div><div class="form-group"><label for="bdoQuest">Квест:</label><select class="form-control" name="bdoQuest" id="bdoQuest"></select></div><button class="btn btn-success">Отправить</button></form>'
        },
        pokeHistory: {
            selector: '.poke-history-container',
            template: '<table id="eventsHistory" class="table table-condensed table-hover table-striped"><thead><tr><th data-column-id="startDate">Начало</th><th data-column-id="channel" data-type="numeric">Канал</th><th data-column-id="event">Событие</th></tr></thead></table>',
            tableSelector: '#eventsHistory'
        }
    };

    /**
     * Merges caller options over the defaults and initializes both widgets.
     * @param {Object} [options] partial overrides for the settings above
     */
    function create(options) {
        defaults = $.extend(defaults, options);
        pokeControl.initialize(defaults.pokeControl);
        pokeHistory.initialize(defaults.pokeHistory);
    }

    return {
        create: create
    };
});
import { Injectable } from 'angular2/core' import { Http, Response } from 'angular2/http'; import { Beer } from './beer' import 'rxjs/Rx'; @Injectable() export class BeerService { private _breweryDbKey = '2d18b2531035b441a50dddc3aed32a1b'; private _beerUrl = "http://localhost:1337/api.brewerydb.com/v2/beers?availableId=1&withBreweries=Y&key=" + this._breweryDbKey; private _searchUrl = "http://localhost:1337/api.brewerydb.com/v2/search?type=beer&withBreweries=Y&key=" + this._breweryDbKey + '&q='; constructor(private http: Http) {} getBeer() { return this.http.get(this._beerUrl) .map((response: Response) => { return response.json().data.map(item => { return new Beer({ name: item.nameDisplay, brewery: item.breweries[0].nameShortDisplay, description: item.description, abv: item.abv, ibu: item.ibu, type: item.style.shortName }); }); }) .catch((err: Response) => console.log(err)); } search(term: string) { return this.http.get(this._searchUrl + term) .map((response: Response) => { return response.json().data.map(item => { return new Beer({ name: item.nameDisplay, brewery: item.breweries[0].nameShortDisplay, description: item.description, abv: item.abv, ibu: item.ibu, type: item.style.shortName }); }) }) .catch((err: Response) => console.log(err)); } }
<?php /* Partial: renders the tag list; expects $tags (iterable of tag objects) in scope. */ ?>
<h4><?php echo __('Tags', array(), 'fzTag') ?></h4>
<ul>
    <?php foreach( $tags as $tag ): ?>
        <li><?php echo link_to( $tag, 'fz_tag_show', $tag) ?></li>
    <?php endforeach; ?>
</ul>
package miquilini.felipe.intermediario.carta;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Truco playing card. Each constant carries a fixed code (0-40), a display
 * name and a base strength; the current "manilha" (trump) round bonus is
 * stored per constant and recomputed by {@link #gerarManilha(Carta)}.
 */
public enum Carta {
    PAUS_4(0, "Quatro de Paus", 1), COPAS_4(1, "Quatro de Copas", 1),
    ESPADAS_4(2, "Quatro de Espadas", 1), OUROS_4(3, "Quatro de Ouros", 1),
    PAUS_5(4, "Cinco de Paus", 2), COPAS_5(5, "Cinco de Copas", 2),
    ESPADAS_5(6, "Cinco de Espadas", 2), OUROS_5(7, "Cinco de Ouros", 2),
    PAUS_6(8, "Seis de Paus", 3), COPAS_6(9, "Seis de Copas", 3),
    ESPADAS_6(10, "Seis de Espadas", 3), OUROS_6(11, "Seis de Ouros", 3),
    PAUS_7(12, "Sete de Paus", 4), COPAS_7(13, "Sete de Copas", 4),
    ESPADAS_7(14, "Sete de Espadas", 4), OUROS_7(15, "Sete de Ouros", 4),
    PAUS_Q(16, "Dama de Paus", 5), COPAS_Q(17, "Dama de Copas", 5),
    ESPADAS_Q(18, "Dama de Espadas", 5), OUROS_Q(19, "Dama de Ouros", 5),
    PAUS_J(20, "Valete de Paus", 6), COPAS_J(21, "Valete de Copas", 6),
    ESPADAS_J(22, "Valete de Espadas", 6), OUROS_J(23, "Valete de Ouros", 6),
    PAUS_K(24, "Rei de Paus", 7), COPAS_K(25, "Rei de Copas", 7),
    ESPADAS_K(26, "Rei de Espadas", 7), OUROS_K(27, "Rei de Ouros", 7),
    PAUS_A(28, "Ás de Paus", 8), COPAS_A(29, "Ás de Copas", 8),
    ESPADAS_A(30, "Ás de Espadas", 8), OUROS_A(31, "Ás de Ouros", 8),
    PAUS_2(32, "Dois de Paus", 9), COPAS_2(33, "Dois de Copas", 9),
    ESPADAS_2(34, "Dois de Espadas", 9), OUROS_2(35, "Dois de Ouros", 9),
    PAUS_3(36, "Três de Paus", 10), COPAS_3(37, "Três de Copas", 10),
    ESPADAS_3(38, "Três de Espadas", 10), OUROS_3(39, "Três de Ouros", 10),
    INCOBERTO(40, "Incoberto", 0);

    private final int codigo;         // stable 0-40 identifier; rank = codigo / 4, suit = codigo % 4
    private final int forcaBruta;     // base strength before any manilha bonus
    private int adicionalManilha;     // per-round trump bonus, 0 when not a manilha
    private final String nome;

    Carta(int codigo, String nome, int forcaBruta) {
        this.codigo = codigo;
        this.nome = nome;
        this.forcaBruta = forcaBruta;
        adicionalManilha = 0;
    }

    /**
     * Recomputes the manilha bonuses for a new round: clears all previous
     * bonuses, then marks the four cards of the rank immediately above the
     * "vira" as manilhas (Paus strongest, then Copas, Espadas, Ouros).
     *
     * @param vira the turned-up card that determines the round's manilha rank
     */
    public static void gerarManilha(Carta vira) {
        // Reset the bonuses from the previous round.
        Carta[] todasCartas = Carta.values();
        for (Carta carta : todasCartas) {
            carta.adicionalManilha = 0;
        }

        // Code of PAUS of the next rank; wraps to the fours (code 0) when vira is a three.
        int codPausDaManilha = 0;
        if (vira.codigo < 36)
            codPausDaManilha = vira.codigo + 4 - (vira.codigo % 4);

        // Bonuses 13..10 so any manilha beats every non-manilha (max base strength is 10).
        for (int i = 0, j = 13; i < 4; i++, j--) {
            todasCartas[codPausDaManilha + i].adicionalManilha = j;
        }
    }

    /** @return effective strength for the current round (base + manilha bonus). */
    public int getForca() {
        return forcaBruta + adicionalManilha;
    }

    /**
     * Returns the subset of the strongest cards (ties included) of the given hand.
     *
     * @param cartas cards to compare; must be non-null and non-empty
     * @return the card(s) with the highest {@link #getForca()}
     * @throws IllegalArgumentException if {@code cartas} is null or empty
     */
    public static Set<Carta> pegarMaisFortes(List<Carta> cartas) throws IllegalArgumentException {
        if (cartas == null || cartas.isEmpty())
            throw new IllegalArgumentException();

        Set<Carta> maisFortes = new HashSet<>();
        for (Carta ca : cartas) {
            if (maisFortes.isEmpty()) {
                maisFortes.add(ca);
                continue;
            }
            int forcaMaisForte = maisFortes.iterator().next().getForca();
            int forcaAtual = ca.getForca();
            if (forcaAtual > forcaMaisForte) {
                // Strictly stronger: it replaces the current leaders.
                maisFortes.clear();
                maisFortes.add(ca);
            } else if (forcaAtual == forcaMaisForte) {
                // Tie: joins the current leaders.
                maisFortes.add(ca);
            }
        }
        assert maisFortes.size() > 0;
        return maisFortes;
    }

    /**
     * Looks a card up by its display name.
     *
     * @param nome the exact display name (e.g. "Quatro de Paus")
     * @return the matching card
     * @throws IllegalArgumentException if no card has that name
     */
    public static Carta getCarta(String nome) throws IllegalArgumentException {
        for (Carta c : values()) {
            if (c.nome.equals(nome)) {
                return c;
            }
        }
        // Bug fix: previously threw IllegalStateException, contradicting the
        // declared/documented IllegalArgumentException contract.
        throw new IllegalArgumentException("Este nome de carta não existe");
    }

    @Override
    public String toString() {
        return nome;
    }
}
// Navigation index for the documentation viewer: section -> subsection -> page title -> doc path (relative, no extension).
// NOTE(review): presumably consumed by the docs page loader elsewhere — keep titles/paths in sync with the actual doc files.
var list = { "Manual": { "Getting Started": { "Creating a scene": "manual/introduction/Creating-a-scene", "Import via modules": "manual/introduction/Import-via-modules", "Browser support": "manual/introduction/Browser-support", "WebGL compatibility check": "manual/introduction/WebGL-compatibility-check", "How to run things locally": "manual/introduction/How-to-run-things-locally", "Drawing Lines": "manual/introduction/Drawing-lines", "Creating Text": "manual/introduction/Creating-text", "Migration Guide": "manual/introduction/Migration-guide", "Code Style Guide": "manual/introduction/Code-style-guide", "FAQ": "manual/introduction/FAQ", "Useful links": "manual/introduction/Useful-links" }, "Next Steps": { "How to update things": "manual/introduction/How-to-update-things", "Matrix transformations": "manual/introduction/Matrix-transformations", "Animation System": "manual/introduction/Animation-system" }, "Build Tools": { "Testing with NPM": "manual/buildTools/Testing-with-NPM" } }, "Reference": { "Animation": { "AnimationAction": "api/animation/AnimationAction", "AnimationClip": "api/animation/AnimationClip", "AnimationMixer": "api/animation/AnimationMixer", "AnimationObjectGroup": "api/animation/AnimationObjectGroup", "AnimationUtils": "api/animation/AnimationUtils", "KeyframeTrack": "api/animation/KeyframeTrack", "PropertyBinding": "api/animation/PropertyBinding", "PropertyMixer": "api/animation/PropertyMixer" }, "Animation / Tracks": { "BooleanKeyframeTrack": "api/animation/tracks/BooleanKeyframeTrack", "ColorKeyframeTrack": "api/animation/tracks/ColorKeyframeTrack", "NumberKeyframeTrack": "api/animation/tracks/NumberKeyframeTrack", "QuaternionKeyframeTrack": "api/animation/tracks/QuaternionKeyframeTrack", "StringKeyframeTrack": "api/animation/tracks/StringKeyframeTrack", "VectorKeyframeTrack": "api/animation/tracks/VectorKeyframeTrack" }, "Audio": { "Audio": "api/audio/Audio", "AudioAnalyser": "api/audio/AudioAnalyser", "AudioContext": "api/audio/AudioContext",
"AudioListener": "api/audio/AudioListener", "PositionalAudio": "api/audio/PositionalAudio" }, "Cameras": { "Camera": "api/cameras/Camera", "CubeCamera": "api/cameras/CubeCamera", "OrthographicCamera": "api/cameras/OrthographicCamera", "PerspectiveCamera": "api/cameras/PerspectiveCamera", "StereoCamera": "api/cameras/StereoCamera" }, "Constants": { "Animation": "api/constants/Animation", "Core": "api/constants/Core", "CustomBlendingEquation": "api/constants/CustomBlendingEquations", "DrawModes": "api/constants/DrawModes", "Materials": "api/constants/Materials", "Renderer": "api/constants/Renderer", "Textures": "api/constants/Textures" }, "Core": { "BufferAttribute": "api/core/BufferAttribute", "BufferGeometry": "api/core/BufferGeometry", "Clock": "api/core/Clock", "DirectGeometry": "api/core/DirectGeometry", "EventDispatcher": "api/core/EventDispatcher", "Face3": "api/core/Face3", "Geometry": "api/core/Geometry", "InstancedBufferAttribute": "api/core/InstancedBufferAttribute", "InstancedBufferGeometry": "api/core/InstancedBufferGeometry", "InstancedInterleavedBuffer": "api/core/InstancedInterleavedBuffer", "InterleavedBuffer": "api/core/InterleavedBuffer", "InterleavedBufferAttribute": "api/core/InterleavedBufferAttribute", "Layers": "api/core/Layers", "Object3D": "api/core/Object3D", "Raycaster": "api/core/Raycaster", "Uniform": "api/core/Uniform" }, "Core / BufferAttributes": { "BufferAttribute Types": "api/core/bufferAttributeTypes/BufferAttributeTypes" }, "Deprecated": { "DeprecatedList": "api/deprecated/DeprecatedList" }, "Extras": { "Earcut": "api/extras/Earcut", "ShapeUtils": "api/extras/ShapeUtils" }, "Extras / Core": { "Curve": "api/extras/core/Curve", "CurvePath": "api/extras/core/CurvePath", "Font": "api/extras/core/Font", "Interpolations": "api/extras/core/Interpolations", "Path": "api/extras/core/Path", "Shape": "api/extras/core/Shape", "ShapePath": "api/extras/core/ShapePath" }, "Extras / Curves": { "ArcCurve": "api/extras/curves/ArcCurve",
"CatmullRomCurve3": "api/extras/curves/CatmullRomCurve3", "CubicBezierCurve": "api/extras/curves/CubicBezierCurve", "CubicBezierCurve3": "api/extras/curves/CubicBezierCurve3", "EllipseCurve": "api/extras/curves/EllipseCurve", "LineCurve": "api/extras/curves/LineCurve", "LineCurve3": "api/extras/curves/LineCurve3", "QuadraticBezierCurve": "api/extras/curves/QuadraticBezierCurve", "QuadraticBezierCurve3": "api/extras/curves/QuadraticBezierCurve3", "SplineCurve": "api/extras/curves/SplineCurve" }, "Extras / Objects": { "ImmediateRenderObject": "api/extras/objects/ImmediateRenderObject", }, "Geometries": { "BoxBufferGeometry": "api/geometries/BoxBufferGeometry", "BoxGeometry": "api/geometries/BoxGeometry", "CircleBufferGeometry": "api/geometries/CircleBufferGeometry", "CircleGeometry": "api/geometries/CircleGeometry", "ConeBufferGeometry": "api/geometries/ConeBufferGeometry", "ConeGeometry": "api/geometries/ConeGeometry", "CylinderBufferGeometry": "api/geometries/CylinderBufferGeometry", "CylinderGeometry": "api/geometries/CylinderGeometry", "DodecahedronBufferGeometry": "api/geometries/DodecahedronBufferGeometry", "DodecahedronGeometry": "api/geometries/DodecahedronGeometry", "EdgesGeometry": "api/geometries/EdgesGeometry", "ExtrudeBufferGeometry": "api/geometries/ExtrudeBufferGeometry", "ExtrudeGeometry": "api/geometries/ExtrudeGeometry", "IcosahedronBufferGeometry": "api/geometries/IcosahedronBufferGeometry", "IcosahedronGeometry": "api/geometries/IcosahedronGeometry", "LatheBufferGeometry": "api/geometries/LatheBufferGeometry", "LatheGeometry": "api/geometries/LatheGeometry", "OctahedronBufferGeometry": "api/geometries/OctahedronBufferGeometry", "OctahedronGeometry": "api/geometries/OctahedronGeometry", "ParametricBufferGeometry": "api/geometries/ParametricBufferGeometry", "ParametricGeometry": "api/geometries/ParametricGeometry", "PlaneBufferGeometry": "api/geometries/PlaneBufferGeometry", "PlaneGeometry": "api/geometries/PlaneGeometry",
"PolyhedronBufferGeometry": "api/geometries/PolyhedronBufferGeometry", "PolyhedronGeometry": "api/geometries/PolyhedronGeometry", "RingBufferGeometry": "api/geometries/RingBufferGeometry", "RingGeometry": "api/geometries/RingGeometry", "ShapeBufferGeometry": "api/geometries/ShapeBufferGeometry", "ShapeGeometry": "api/geometries/ShapeGeometry", "SphereBufferGeometry": "api/geometries/SphereBufferGeometry", "SphereGeometry": "api/geometries/SphereGeometry", "TetrahedronBufferGeometry": "api/geometries/TetrahedronBufferGeometry", "TetrahedronGeometry": "api/geometries/TetrahedronGeometry", "TextBufferGeometry": "api/geometries/TextBufferGeometry", "TextGeometry": "api/geometries/TextGeometry", "TorusBufferGeometry": "api/geometries/TorusBufferGeometry", "TorusGeometry": "api/geometries/TorusGeometry", "TorusKnotBufferGeometry": "api/geometries/TorusKnotBufferGeometry", "TorusKnotGeometry": "api/geometries/TorusKnotGeometry", "TubeBufferGeometry": "api/geometries/TubeBufferGeometry", "TubeGeometry": "api/geometries/TubeGeometry", "WireframeGeometry": "api/geometries/WireframeGeometry" }, "Helpers": { "ArrowHelper": "api/helpers/ArrowHelper", "AxesHelper": "api/helpers/AxesHelper", "BoxHelper": "api/helpers/BoxHelper", "Box3Helper": "api/helpers/Box3Helper", "CameraHelper": "api/helpers/CameraHelper", "DirectionalLightHelper": "api/helpers/DirectionalLightHelper", "FaceNormalsHelper": "api/helpers/FaceNormalsHelper", "GridHelper": "api/helpers/GridHelper", "PolarGridHelper": "api/helpers/PolarGridHelper", "HemisphereLightHelper": "api/helpers/HemisphereLightHelper", "PlaneHelper": "api/helpers/PlaneHelper", "PointLightHelper": "api/helpers/PointLightHelper", "RectAreaLightHelper": "api/helpers/RectAreaLightHelper", "SkeletonHelper": "api/helpers/SkeletonHelper", "SpotLightHelper": "api/helpers/SpotLightHelper", "VertexNormalsHelper": "api/helpers/VertexNormalsHelper" }, "Lights": { "AmbientLight": "api/lights/AmbientLight", "DirectionalLight":
"api/lights/DirectionalLight", "HemisphereLight": "api/lights/HemisphereLight", "Light": "api/lights/Light", "PointLight": "api/lights/PointLight", "RectAreaLight": "api/lights/RectAreaLight", "SpotLight": "api/lights/SpotLight" }, "Lights / Shadows": { "DirectionalLightShadow": "api/lights/shadows/DirectionalLightShadow", "LightShadow": "api/lights/shadows/LightShadow", "SpotLightShadow": "api/lights/shadows/SpotLightShadow" }, "Loaders": { "AnimationLoader": "api/loaders/AnimationLoader", "AudioLoader": "api/loaders/AudioLoader", "BufferGeometryLoader": "api/loaders/BufferGeometryLoader", "Cache": "api/loaders/Cache", "CompressedTextureLoader": "api/loaders/CompressedTextureLoader", "CubeTextureLoader": "api/loaders/CubeTextureLoader", "DataTextureLoader": "api/loaders/DataTextureLoader", "FileLoader": "api/loaders/FileLoader", "FontLoader": "api/loaders/FontLoader", "ImageBitmapLoader": "api/loaders/ImageBitmapLoader", "ImageLoader": "api/loaders/ImageLoader", "JSONLoader": "api/loaders/JSONLoader", "Loader": "api/loaders/Loader", "LoaderUtils": "api/loaders/LoaderUtils", "MaterialLoader": "api/loaders/MaterialLoader", "ObjectLoader": "api/loaders/ObjectLoader", "TextureLoader": "api/loaders/TextureLoader" }, "Loaders / Managers": { "DefaultLoadingManager": "api/loaders/managers/DefaultLoadingManager", "LoadingManager": "api/loaders/managers/LoadingManager" }, "Materials": { "LineBasicMaterial": "api/materials/LineBasicMaterial", "LineDashedMaterial": "api/materials/LineDashedMaterial", "Material": "api/materials/Material", "MeshBasicMaterial": "api/materials/MeshBasicMaterial", "MeshDepthMaterial": "api/materials/MeshDepthMaterial", "MeshLambertMaterial": "api/materials/MeshLambertMaterial", "MeshNormalMaterial": "api/materials/MeshNormalMaterial", "MeshPhongMaterial": "api/materials/MeshPhongMaterial", "MeshPhysicalMaterial": "api/materials/MeshPhysicalMaterial", "MeshStandardMaterial": "api/materials/MeshStandardMaterial", "MeshToonMaterial":
"api/materials/MeshToonMaterial", "PointsMaterial": "api/materials/PointsMaterial", "RawShaderMaterial": "api/materials/RawShaderMaterial", "ShaderMaterial": "api/materials/ShaderMaterial", "ShadowMaterial": "api/materials/ShadowMaterial", "SpriteMaterial": "api/materials/SpriteMaterial" }, "Math": { "Box2": "api/math/Box2", "Box3": "api/math/Box3", "Color": "api/math/Color", "Cylindrical": "api/math/Cylindrical", "Euler": "api/math/Euler", "Frustum": "api/math/Frustum", "Interpolant": "api/math/Interpolant", "Line3": "api/math/Line3", "Math": "api/math/Math", "Matrix3": "api/math/Matrix3", "Matrix4": "api/math/Matrix4", "Plane": "api/math/Plane", "Quaternion": "api/math/Quaternion", "Ray": "api/math/Ray", "Sphere": "api/math/Sphere", "Spherical": "api/math/Spherical", "Triangle": "api/math/Triangle", "Vector2": "api/math/Vector2", "Vector3": "api/math/Vector3", "Vector4": "api/math/Vector4" }, "Math / Interpolants": { "CubicInterpolant": "api/math/interpolants/CubicInterpolant", "DiscreteInterpolant": "api/math/interpolants/DiscreteInterpolant", "LinearInterpolant": "api/math/interpolants/LinearInterpolant", "QuaternionLinearInterpolant": "api/math/interpolants/QuaternionLinearInterpolant" }, "Objects": { "Bone": "api/objects/Bone", "Group": "api/objects/Group", "Line": "api/objects/Line", "LineLoop": "api/objects/LineLoop", "LineSegments": "api/objects/LineSegments", "LOD": "api/objects/LOD", "Mesh": "api/objects/Mesh", "Points": "api/objects/Points", "Skeleton": "api/objects/Skeleton", "SkinnedMesh": "api/objects/SkinnedMesh", "Sprite": "api/objects/Sprite" }, "Renderers": { "WebGLRenderer": "api/renderers/WebGLRenderer", "WebGLRenderTarget": "api/renderers/WebGLRenderTarget", "WebGLRenderTargetCube": "api/renderers/WebGLRenderTargetCube" }, "Renderers / Shaders": { "ShaderChunk": "api/renderers/shaders/ShaderChunk", "ShaderLib": "api/renderers/shaders/ShaderLib", "UniformsLib": "api/renderers/shaders/UniformsLib", "UniformsUtils":
"api/renderers/shaders/UniformsUtils" }, "Scenes": { "Fog": "api/scenes/Fog", "FogExp2": "api/scenes/FogExp2", "Scene": "api/scenes/Scene" }, "Textures": { "CanvasTexture": "api/textures/CanvasTexture", "CompressedTexture": "api/textures/CompressedTexture", "CubeTexture": "api/textures/CubeTexture", "DataTexture": "api/textures/DataTexture", "DepthTexture": "api/textures/DepthTexture", "Texture": "api/textures/Texture", "VideoTexture": "api/textures/VideoTexture" } }, "Examples": { "Controls": { "OrbitControls": "examples/controls/OrbitControls" }, "Geometries": { "ConvexBufferGeometry": "examples/geometries/ConvexBufferGeometry", "ConvexGeometry": "examples/geometries/ConvexGeometry", "DecalGeometry": "examples/geometries/DecalGeometry" }, "Loaders": { "BabylonLoader": "examples/loaders/BabylonLoader", "GLTFLoader": "examples/loaders/GLTFLoader", "MTLLoader": "examples/loaders/MTLLoader", "OBJLoader": "examples/loaders/OBJLoader", "OBJLoader2": "examples/loaders/OBJLoader2", "LoaderSupport": "examples/loaders/LoaderSupport", "PCDLoader": "examples/loaders/PCDLoader", "PDBLoader": "examples/loaders/PDBLoader", "SVGLoader": "examples/loaders/SVGLoader", "TGALoader": "examples/loaders/TGALoader", "PRWMLoader": "examples/loaders/PRWMLoader" }, "Objects": { "Lensflare": "examples/objects/Lensflare", }, "Exporters": { "GLTFExporter": "examples/exporters/GLTFExporter" }, "Plugins": { "LookupTable": "examples/Lut", "SpriteCanvasMaterial": "examples/SpriteCanvasMaterial" }, "QuickHull": { "Face": "examples/quickhull/Face", "HalfEdge": "examples/quickhull/HalfEdge", "QuickHull": "examples/quickhull/QuickHull", "VertexNode": "examples/quickhull/VertexNode", "VertexList": "examples/quickhull/VertexList" }, "Renderers": { "CanvasRenderer": "examples/renderers/CanvasRenderer", "CSS3DRenderer": "examples/renderers/CSS3DRenderer", "CSS2DRenderer": "examples/renderers/CSS2DRenderer" }, "Utils": { "BufferGeometryUtils": "examples/BufferGeometryUtils", "SceneUtils":
"examples/utils/SceneUtils" } }, "Developer Reference": { "Polyfills": { "Polyfills": "api/Polyfills" }, "WebGLRenderer": { "WebGLProgram": "api/renderers/webgl/WebGLProgram", "WebGLShader": "api/renderers/webgl/WebGLShader", "WebGLState": "api/renderers/webgl/WebGLState" }, "WebGLRenderer / Plugins": { "SpritePlugin": "api/renderers/webgl/plugins/SpritePlugin" } } };
require 'socket'

# Tiny TCP greeter: accepts connections on port 8080 forever,
# sends a single greeting line to each client, then closes its socket.
server = TCPServer.open(8080) # reliable connection

loop do
  client = server.accept
  client.puts("Hello This is a test")
  client.close
end
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.microsoft.azure.servicebus; import com.microsoft.azure.servicebus.management.AccessRights; import com.microsoft.azure.servicebus.management.AuthorizationRule; import com.microsoft.azure.servicebus.management.EntityNameHelper; import com.microsoft.azure.servicebus.management.ManagementClientAsync; import com.microsoft.azure.servicebus.management.NamespaceInfo; import com.microsoft.azure.servicebus.management.NamespaceType; import com.microsoft.azure.servicebus.management.QueueDescription; import com.microsoft.azure.servicebus.management.QueueRuntimeInfo; import com.microsoft.azure.servicebus.management.SharedAccessAuthorizationRule; import com.microsoft.azure.servicebus.management.SubscriptionDescription; import com.microsoft.azure.servicebus.management.SubscriptionRuntimeInfo; import com.microsoft.azure.servicebus.management.TopicDescription; import com.microsoft.azure.servicebus.management.TopicRuntimeInfo; import com.microsoft.azure.servicebus.primitives.MessagingEntityAlreadyExistsException; import com.microsoft.azure.servicebus.primitives.MessagingEntityNotFoundException; import com.microsoft.azure.servicebus.primitives.MessagingFactory; import com.microsoft.azure.servicebus.primitives.ServiceBusException; import com.microsoft.azure.servicebus.rules.CorrelationFilter; import com.microsoft.azure.servicebus.rules.FalseFilter; import com.microsoft.azure.servicebus.rules.RuleDescription; import com.microsoft.azure.servicebus.rules.SqlFilter; import com.microsoft.azure.servicebus.rules.SqlRuleAction; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.net.URI; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; public class 
ManagementTests extends TestBase { private ManagementClientAsync managementClientAsync; @Before public void setup() { URI namespaceEndpointURI = TestUtils.getNamespaceEndpointURI(); ClientSettings managementClientSettings = TestUtils.getManagementClientSettings(); managementClientAsync = new ManagementClientAsync(namespaceEndpointURI, managementClientSettings); } @Test public void basicQueueCrudTest() throws InterruptedException, ExecutionException { String queueName = UUID.randomUUID().toString().substring(0, 8); QueueDescription q = new QueueDescription(queueName); q.setAutoDeleteOnIdle(Duration.ofHours(1)); q.setDefaultMessageTimeToLive(Duration.ofDays(2)); q.setDuplicationDetectionHistoryTimeWindow(Duration.ofMinutes(1)); q.setEnableBatchedOperations(false); q.setEnableDeadLetteringOnMessageExpiration(true); q.setEnablePartitioning(false); q.setForwardTo(null); q.setForwardDeadLetteredMessagesTo(null); q.setLockDuration(Duration.ofSeconds(45)); q.setMaxDeliveryCount(8); q.setMaxSizeInMB(2048); q.setRequiresDuplicateDetection(true); q.setRequiresSession(true); q.setUserMetadata("basicQueueCrudTest"); ArrayList<AuthorizationRule> rules = new ArrayList<>(); ArrayList<AccessRights> rights = new ArrayList<>(); rights.add(AccessRights.Send); rights.add(AccessRights.Listen); rights.add(AccessRights.Manage); rules.add(new SharedAccessAuthorizationRule("allClaims", rights)); q.setAuthorizationRules(rules); QueueDescription qCreated = this.managementClientAsync.createQueueAsync(q).get(); Assert.assertEquals(q, qCreated); QueueDescription queue = this.managementClientAsync.getQueueAsync(queueName).get(); Assert.assertEquals(qCreated, queue); queue.setEnableBatchedOperations(false); queue.setMaxDeliveryCount(9); queue.getAuthorizationRules().clear(); rights = new ArrayList<>(); rights.add(AccessRights.Send); rights.add(AccessRights.Listen); queue.getAuthorizationRules().add(new SharedAccessAuthorizationRule("noManage", rights)); QueueDescription updatedQ = 
this.managementClientAsync.updateQueueAsync(queue).get(); Assert.assertEquals(queue, updatedQ); Boolean exists = this.managementClientAsync.queueExistsAsync(queueName).get(); Assert.assertTrue(exists); List<QueueDescription> queues = this.managementClientAsync.getQueuesAsync().get(); Assert.assertTrue(queues.size() > 0); AtomicBoolean found = new AtomicBoolean(false); queues.forEach(queueDescription -> { if (queueDescription.getPath().equalsIgnoreCase(queueName)) { found.set(true); } }); Assert.assertTrue(found.get()); this.managementClientAsync.deleteQueueAsync(queueName).get(); exists = this.managementClientAsync.queueExistsAsync(queueName).get(); Assert.assertFalse(exists); } @Test public void basicTopicCrudTest() throws InterruptedException, ExecutionException { String topicName = UUID.randomUUID().toString().substring(0, 8); TopicDescription td = new TopicDescription(topicName); td.setAutoDeleteOnIdle(Duration.ofHours(1)); td.setDefaultMessageTimeToLive(Duration.ofDays(2)); td.setDuplicationDetectionHistoryTimeWindow(Duration.ofMinutes(1)); td.setEnableBatchedOperations(false); td.setEnablePartitioning(false); td.setMaxSizeInMB(2048); td.setRequiresDuplicateDetection(true); td.setUserMetadata("basicTopicCrudTest"); td.setSupportOrdering(true); ArrayList<AuthorizationRule> rules = new ArrayList<>(); ArrayList<AccessRights> rights = new ArrayList<>(); rights.add(AccessRights.Send); rights.add(AccessRights.Listen); rights.add(AccessRights.Manage); rules.add(new SharedAccessAuthorizationRule("allClaims", rights)); td.setAuthorizationRules(rules); TopicDescription tCreated = this.managementClientAsync.createTopicAsync(td).get(); Assert.assertEquals(td, tCreated); TopicDescription topic = this.managementClientAsync.getTopicAsync(topicName).get(); Assert.assertEquals(tCreated, topic); topic.setEnableBatchedOperations(false); topic.setDefaultMessageTimeToLive(Duration.ofDays(3)); topic.getAuthorizationRules().clear(); rights = new ArrayList<>(); 
rights.add(AccessRights.Send); rights.add(AccessRights.Listen); topic.getAuthorizationRules().add(new SharedAccessAuthorizationRule("noManage", rights)); TopicDescription updatedT = this.managementClientAsync.updateTopicAsync(topic).get(); Assert.assertEquals(topic, updatedT); Boolean exists = this.managementClientAsync.topicExistsAsync(topicName).get(); Assert.assertTrue(exists); List<TopicDescription> topics = this.managementClientAsync.getTopicsAsync().get(); Assert.assertTrue(topics.size() > 0); AtomicBoolean found = new AtomicBoolean(false); topics.forEach(topicDescription -> { if (topicDescription.getPath().equalsIgnoreCase(topicName)) { found.set(true); } }); Assert.assertTrue(found.get()); this.managementClientAsync.deleteTopicAsync(topicName).get(); exists = this.managementClientAsync.topicExistsAsync(topicName).get(); Assert.assertFalse(exists); } @Test public void basicSubscriptionCrudTest() throws InterruptedException, ExecutionException { String topicName = UUID.randomUUID().toString().substring(0, 8); this.managementClientAsync.createTopicAsync(topicName).get(); String subscriptionName = UUID.randomUUID().toString().substring(0, 8); SubscriptionDescription subscriptionDescription = new SubscriptionDescription(topicName, subscriptionName); subscriptionDescription.setAutoDeleteOnIdle(Duration.ofHours(1)); subscriptionDescription.setDefaultMessageTimeToLive(Duration.ofDays(2)); subscriptionDescription.setEnableBatchedOperations(false); subscriptionDescription.setEnableDeadLetteringOnMessageExpiration(true); subscriptionDescription.setEnableDeadLetteringOnFilterEvaluationException(false); subscriptionDescription.setForwardTo(null); subscriptionDescription.setForwardDeadLetteredMessagesTo(null); subscriptionDescription.setLockDuration(Duration.ofSeconds(45)); subscriptionDescription.setMaxDeliveryCount(8); subscriptionDescription.setRequiresSession(true); subscriptionDescription.setUserMetadata("basicSubscriptionCrudTest"); SubscriptionDescription createdS 
= this.managementClientAsync.createSubscriptionAsync(subscriptionDescription).get(); Assert.assertEquals(subscriptionDescription, createdS); SubscriptionDescription getS = this.managementClientAsync.getSubscriptionAsync(topicName, subscriptionName).get(); Assert.assertEquals(createdS, getS); getS.setEnableBatchedOperations(false); getS.setMaxDeliveryCount(9); SubscriptionDescription updatedQ = this.managementClientAsync.updateSubscriptionAsync(getS).get(); Assert.assertEquals(getS, updatedQ); Boolean exists = this.managementClientAsync.subscriptionExistsAsync(topicName, subscriptionName).get(); Assert.assertTrue(exists); List<SubscriptionDescription> subscriptions = this.managementClientAsync.getSubscriptionsAsync(topicName).get(); Assert.assertEquals(1, subscriptions.size()); Assert.assertEquals(subscriptionName, subscriptions.get(0).getSubscriptionName()); this.managementClientAsync.deleteSubscriptionAsync(topicName, subscriptionName).get(); exists = this.managementClientAsync.subscriptionExistsAsync(topicName, subscriptionName).get(); Assert.assertFalse(exists); this.managementClientAsync.deleteTopicAsync(topicName).get(); exists = this.managementClientAsync.subscriptionExistsAsync(topicName, subscriptionName).get(); Assert.assertFalse(exists); } @Test public void basicRulesCrudTest() throws InterruptedException, ExecutionException { String topicName = UUID.randomUUID().toString().substring(0, 8); String subscriptionName = UUID.randomUUID().toString().substring(0, 8); this.managementClientAsync.createTopicAsync(topicName).get(); this.managementClientAsync.createSubscriptionAsync( new SubscriptionDescription(topicName, subscriptionName), new RuleDescription("rule0", new FalseFilter())).get(); //SqlFilter sqlFilter = new SqlFilter("stringValue = @stringParam AND intValue = @intParam AND longValue = @longParam AND dateValue = @dateParam"); SqlFilter sqlFilter = new SqlFilter("1=1"); /* todo sqlFilter.Parameters.Add("@stringParam", "string"); 
sqlFilter.Parameters.Add("@intParam", (int)1); sqlFilter.Parameters.Add("@longParam", (long)12); sqlFilter.Parameters.Add("@dateParam", DateTime.UtcNow); */ RuleDescription rule1 = new RuleDescription("rule1"); rule1.setFilter(sqlFilter); rule1.setAction(new SqlRuleAction("SET a='b'")); this.managementClientAsync.createRuleAsync(topicName, subscriptionName, rule1).get(); CorrelationFilter correlationFilter = new CorrelationFilter(); correlationFilter.setContentType("contentType"); correlationFilter.setCorrelationId("correlationId"); correlationFilter.setLabel("label"); correlationFilter.setMessageId("messageId"); correlationFilter.setReplyTo("replyTo"); correlationFilter.setReplyToSessionId("replyToSessionId"); correlationFilter.setSessionId("sessionId"); correlationFilter.setTo("to"); // todo // correlationFilter.Properties.Add("customKey", "customValue"); RuleDescription rule2 = new RuleDescription("rule2"); rule2.setFilter(correlationFilter); this.managementClientAsync.createRuleAsync(topicName, subscriptionName, rule2).get(); List<RuleDescription> rules = this.managementClientAsync.getRulesAsync(topicName, subscriptionName).get(); Assert.assertEquals(3, rules.size()); Assert.assertEquals("rule0", rules.get(0).getName()); Assert.assertEquals(rule1, rules.get(1)); Assert.assertEquals(rule2, rules.get(2)); ((CorrelationFilter) (rule2.getFilter())).setCorrelationId("correlationIdModified"); RuleDescription updatedRule2 = this.managementClientAsync.updateRuleAsync(topicName, subscriptionName, rule2).get(); Assert.assertEquals(rule2, updatedRule2); RuleDescription defaultRule = this.managementClientAsync.getRuleAsync(topicName, subscriptionName, "rule0").get(); Assert.assertNotNull(defaultRule); this.managementClientAsync.deleteRuleAsync(topicName, subscriptionName, "rule0").get(); try { this.managementClientAsync.getRuleAsync(topicName, subscriptionName, "rule0").get(); Assert.fail("Get rule0 should have thrown."); } catch (Exception ex) { Assert.assertTrue(ex 
instanceof ExecutionException); Throwable cause = ex.getCause(); Assert.assertTrue(cause instanceof MessagingEntityNotFoundException); } Assert.assertFalse(this.managementClientAsync.ruleExistsAsync(topicName, subscriptionName, "rule0").get()); this.managementClientAsync.deleteTopicAsync(topicName).get(); } @Test public void getQueueRuntimeInfoTest() throws ExecutionException, InterruptedException, ServiceBusException { String queueName = UUID.randomUUID().toString().substring(0, 8); // Setting created time QueueDescription qd = this.managementClientAsync.createQueueAsync(queueName).get(); // Changing last updated time qd.setAutoDeleteOnIdle(Duration.ofHours(2)); QueueDescription updatedQd = this.managementClientAsync.updateQueueAsync(qd).get(); // Populating 1 active, 1 dead and 1 scheduled message. // Changing last accessed time. MessagingFactory factory = MessagingFactory.createFromNamespaceEndpointURI(TestUtils.getNamespaceEndpointURI(), TestUtils.getClientSettings()); IMessageSender sender = ClientFactory.createMessageSenderFromEntityPath(factory, queueName); IMessageReceiver receiver = ClientFactory.createMessageReceiverFromEntityPath(factory, queueName); sender.send(new Message("m1")); sender.send(new Message("m2")); sender.scheduleMessage(new Message("m3"), Instant.now().plusSeconds(1000)); IMessage msg = receiver.receive(); receiver.deadLetter(msg.getLockToken()); QueueRuntimeInfo runtimeInfo = this.managementClientAsync.getQueueRuntimeInfoAsync(queueName).get(); Assert.assertEquals(queueName, runtimeInfo.getPath()); Assert.assertTrue(runtimeInfo.getCreatedAt().isBefore(runtimeInfo.getUpdatedAt())); Assert.assertTrue(runtimeInfo.getUpdatedAt().isBefore(runtimeInfo.getAccessedAt())); Assert.assertEquals(1, runtimeInfo.getMessageCountDetails().getActiveMessageCount()); Assert.assertEquals(1, runtimeInfo.getMessageCountDetails().getDeadLetterMessageCount()); Assert.assertEquals(1, runtimeInfo.getMessageCountDetails().getScheduledMessageCount()); 
Assert.assertEquals(3, runtimeInfo.getMessageCount()); Assert.assertTrue(runtimeInfo.getSizeInBytes() > 0); this.managementClientAsync.deleteQueueAsync(queueName).get(); receiver.close(); sender.close(); factory.close(); } @Test public void getTopicAndSubscriptionRuntimeInfoTest() throws ExecutionException, InterruptedException, ServiceBusException { String topicName = UUID.randomUUID().toString().substring(0, 8); String subscriptionName = UUID.randomUUID().toString().substring(0, 8); // Setting created time TopicDescription td = this.managementClientAsync.createTopicAsync(topicName).get(); // Changing last updated time td.setAutoDeleteOnIdle(Duration.ofHours(2)); TopicDescription updatedTd = this.managementClientAsync.updateTopicAsync(td).get(); SubscriptionDescription sd = this.managementClientAsync.createSubscriptionAsync(topicName, subscriptionName).get(); // Changing Last updated time for subscription. sd.setAutoDeleteOnIdle(Duration.ofHours(2)); SubscriptionDescription updatedSd = this.managementClientAsync.updateSubscriptionAsync(sd).get(); // Populating 1 active, 1 dead and 1 scheduled message. // Changing last accessed time. 
MessagingFactory factory = MessagingFactory.createFromNamespaceEndpointURI(TestUtils.getNamespaceEndpointURI(), TestUtils.getClientSettings()); IMessageSender sender = ClientFactory.createMessageSenderFromEntityPath(factory, topicName); IMessageReceiver receiver = ClientFactory.createMessageReceiverFromEntityPath(factory, EntityNameHelper.formatSubscriptionPath(topicName, subscriptionName)); sender.send(new Message("m1")); sender.send(new Message("m2")); sender.scheduleMessage(new Message("m3"), Instant.now().plusSeconds(1000)); IMessage msg = receiver.receive(); receiver.deadLetter(msg.getLockToken()); TopicRuntimeInfo topicRuntimeInfo = this.managementClientAsync.getTopicRuntimeInfoAsync(topicName).get(); SubscriptionRuntimeInfo subscriptionRuntimeInfo = this.managementClientAsync.getSubscriptionRuntimeInfoAsync(topicName, subscriptionName).get(); Assert.assertEquals(topicName, topicRuntimeInfo.getPath()); Assert.assertEquals(topicName, subscriptionRuntimeInfo.getTopicPath()); Assert.assertEquals(subscriptionName, subscriptionRuntimeInfo.getSubscriptionName()); Assert.assertEquals(0, topicRuntimeInfo.getMessageCountDetails().getActiveMessageCount()); Assert.assertEquals(0, topicRuntimeInfo.getMessageCountDetails().getDeadLetterMessageCount()); Assert.assertEquals(1, topicRuntimeInfo.getMessageCountDetails().getScheduledMessageCount()); Assert.assertEquals(1, subscriptionRuntimeInfo.getMessageCountDetails().getActiveMessageCount()); Assert.assertEquals(1, subscriptionRuntimeInfo.getMessageCountDetails().getDeadLetterMessageCount()); Assert.assertEquals(0, subscriptionRuntimeInfo.getMessageCountDetails().getScheduledMessageCount()); Assert.assertEquals(2, subscriptionRuntimeInfo.getMessageCount()); Assert.assertEquals(1, topicRuntimeInfo.getSubscriptionCount()); Assert.assertTrue(topicRuntimeInfo.getSizeInBytes() > 0); Assert.assertTrue(topicRuntimeInfo.getCreatedAt().isBefore(topicRuntimeInfo.getUpdatedAt())); 
Assert.assertTrue(topicRuntimeInfo.getUpdatedAt().isBefore(topicRuntimeInfo.getAccessedAt())); Assert.assertTrue(subscriptionRuntimeInfo.getCreatedAt().isBefore(subscriptionRuntimeInfo.getUpdatedAt())); Assert.assertTrue(subscriptionRuntimeInfo.getUpdatedAt().isBefore(subscriptionRuntimeInfo.getAccessedAt())); Assert.assertTrue(topicRuntimeInfo.getUpdatedAt().isBefore(subscriptionRuntimeInfo.getUpdatedAt())); this.managementClientAsync.deleteSubscriptionAsync(topicName, subscriptionName).get(); this.managementClientAsync.deleteTopicAsync(topicName).get(); receiver.close(); sender.close(); factory.close(); } @Test public void messagingEntityNotFoundExceptionTest() throws ServiceBusException, InterruptedException, ExecutionException { try { Utils.completeFuture(this.managementClientAsync.getQueueAsync("NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.getTopicAsync("NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.getSubscriptionAsync("NonExistingTopic", "NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.updateQueueAsync(new QueueDescription("NonExistingPath"))); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.updateTopicAsync(new TopicDescription("NonExistingPath"))); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.updateSubscriptionAsync(new SubscriptionDescription("NonExistingTopic", "NonExistingPath"))); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.deleteQueueAsync("NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.deleteTopicAsync("NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } try { 
Utils.completeFuture(this.managementClientAsync.deleteSubscriptionAsync("nonExistingTopic", "NonExistingPath")); } catch (MessagingEntityNotFoundException e) { } String queueName = UUID.randomUUID().toString().substring(0, 8); String topicName = UUID.randomUUID().toString().substring(0, 8); this.managementClientAsync.createQueueAsync(queueName).get(); this.managementClientAsync.createTopicAsync(topicName).get(); try { Utils.completeFuture(this.managementClientAsync.getQueueAsync(topicName)); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.getTopicAsync(queueName)); } catch (MessagingEntityNotFoundException e) { } try { Utils.completeFuture(this.managementClientAsync.getSubscriptionAsync(topicName, "NonExistingSubscription")); } catch (MessagingEntityNotFoundException e) { } this.managementClientAsync.deleteQueueAsync(queueName).get(); this.managementClientAsync.deleteTopicAsync(topicName).get(); } @Test public void messagingEntityAlreadyExistsExceptionTest() throws ServiceBusException, InterruptedException, ExecutionException { String queueName = UUID.randomUUID().toString().substring(0, 8); String topicName = UUID.randomUUID().toString().substring(0, 8); String subscriptionName = UUID.randomUUID().toString().substring(0, 8); this.managementClientAsync.createQueueAsync(queueName).get(); this.managementClientAsync.createTopicAsync(topicName).get(); this.managementClientAsync.createSubscriptionAsync(topicName, subscriptionName).get(); try { Utils.completeFuture(this.managementClientAsync.createQueueAsync(queueName)); } catch (MessagingEntityAlreadyExistsException e) { } try { Utils.completeFuture(this.managementClientAsync.createTopicAsync(topicName)); } catch (MessagingEntityAlreadyExistsException e) { } try { Utils.completeFuture(this.managementClientAsync.createSubscriptionAsync(topicName, subscriptionName)); } catch (MessagingEntityAlreadyExistsException e) { } 
this.managementClientAsync.deleteQueueAsync(queueName).get(); this.managementClientAsync.deleteSubscriptionAsync(topicName, subscriptionName).get(); this.managementClientAsync.deleteTopicAsync(topicName).get(); } @Test public void forwardingEntitySetupTest() throws ServiceBusException, InterruptedException { // queueName -- fwdTo --> destinationName -- fwd dlqTo --> dlqDestinationName String queueName = UUID.randomUUID().toString().substring(0, 8); String destinationName = UUID.randomUUID().toString().substring(0, 8); String dlqDestinationName = UUID.randomUUID().toString().substring(0, 8); QueueDescription dqlDestinationQ = Utils.completeFuture(this.managementClientAsync.createQueueAsync(dlqDestinationName)); QueueDescription destinationQToCreate = new QueueDescription(destinationName); destinationQToCreate.setForwardDeadLetteredMessagesTo(dlqDestinationName); QueueDescription destinationQ = Utils.completeFuture(this.managementClientAsync.createQueueAsync(destinationQToCreate)); QueueDescription qd = new QueueDescription(queueName); qd.setForwardTo(destinationName); QueueDescription baseQ = Utils.completeFuture(this.managementClientAsync.createQueueAsync(qd)); MessagingFactory factory = MessagingFactory.createFromNamespaceEndpointURI(TestUtils.getNamespaceEndpointURI(), TestUtils.getClientSettings()); IMessageSender sender = ClientFactory.createMessageSenderFromEntityPath(factory, queueName); IMessage message = new Message(); message.setMessageId("mid"); sender.send(message); sender.close(); IMessageReceiver receiver = ClientFactory.createMessageReceiverFromEntityPath(factory, destinationName); IMessage msg = receiver.receive(); Assert.assertNotNull(msg); Assert.assertEquals("mid", msg.getMessageId()); receiver.deadLetter(msg.getLockToken()); receiver.close(); receiver = ClientFactory.createMessageReceiverFromEntityPath(factory, dlqDestinationName); msg = receiver.receive(); Assert.assertNotNull(msg); Assert.assertEquals("mid", msg.getMessageId()); 
receiver.complete(msg.getLockToken()); receiver.close(); this.managementClientAsync.deleteQueueAsync(queueName); this.managementClientAsync.deleteQueueAsync(destinationName); this.managementClientAsync.deleteQueueAsync(dlqDestinationName); } @Test public void authRulesEqualityCheckTest() { QueueDescription qd = new QueueDescription("a"); SharedAccessAuthorizationRule rule1 = new SharedAccessAuthorizationRule("sendListen", new ArrayList<>(Arrays.asList(AccessRights.Listen, AccessRights.Send))); SharedAccessAuthorizationRule rule2 = new SharedAccessAuthorizationRule("manage", new ArrayList<>(Arrays.asList(AccessRights.Listen, AccessRights.Send, AccessRights.Manage))); qd.setAuthorizationRules(new ArrayList<>(Arrays.asList(rule1, rule2))); QueueDescription qd2 = new QueueDescription("a"); AuthorizationRule rule11 = new SharedAccessAuthorizationRule(rule2.getKeyName(), rule2.getPrimaryKey(), rule2.getSecondaryKey(), rule2.getRights()); AuthorizationRule rule22 = new SharedAccessAuthorizationRule(rule1.getKeyName(), rule1.getPrimaryKey(), rule1.getSecondaryKey(), rule1.getRights()); qd2.setAuthorizationRules(new ArrayList<>(Arrays.asList(rule11, rule22))); Assert.assertTrue(qd.equals(qd2)); } @Test public void getNamespaceInfoTest() throws ExecutionException, InterruptedException { NamespaceInfo nsInfo = this.managementClientAsync.getNamespaceInfoAsync().get(); Assert.assertNotNull(nsInfo); Assert.assertEquals(NamespaceType.ServiceBus, nsInfo.getNamespaceType()); } }
@font-face { font-family: 'footable'; src: url('fonts/footable.eot'); src: url('fonts/footable.eot%3F') format('embedded-opentype'), url('fonts/footable.woff') format('woff'), url('fonts/footable.ttf') format('truetype'), url('fonts/footable.svg') format('svg'); font-weight: normal; font-style: normal; } @media screen and (-webkit-min-device-pixel-ratio: 0) { @font-face { font-family: 'footable'; src: url('fonts/footable.svg') format('svg'); font-weight: normal; font-style: normal; } } .footable { width: 100%; /** SORTING **/ /** PAGINATION **/ } .footable.breakpoint > tbody > tr.footable-detail-show > td { border-bottom: none; } .footable.breakpoint > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e001"; } .footable.breakpoint > tbody > tr:hover:not(.footable-row-detail) { cursor: pointer; } .footable.breakpoint > tbody > tr > td.footable-cell-detail { background: #eee; border-top: none; } .footable.breakpoint > tbody > tr > td > span.footable-toggle { display: inline-block; font-family: 'footable'; speak: none; font-style: normal; font-weight: normal; font-variant: normal; text-transform: none; -webkit-font-smoothing: antialiased; padding-right: 5px; font-size: 14px; color: #888888; } .footable.breakpoint > tbody > tr > td > span.footable-toggle:before { content: "\e000"; } .footable.breakpoint.toggle-circle > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e005"; } .footable.breakpoint.toggle-circle > tbody > tr > td > span.footable-toggle:before { content: "\e004"; } .footable.breakpoint.toggle-circle-filled > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e003"; } .footable.breakpoint.toggle-circle-filled > tbody > tr > td > span.footable-toggle:before { content: "\e002"; } .footable.breakpoint.toggle-square > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e007"; } .footable.breakpoint.toggle-square > tbody > tr > td > 
span.footable-toggle:before { content: "\e006"; } .footable.breakpoint.toggle-square-filled > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e009"; } .footable.breakpoint.toggle-square-filled > tbody > tr > td > span.footable-toggle:before { content: "\e008"; } .footable.breakpoint.toggle-arrow > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e00f"; } .footable.breakpoint.toggle-arrow > tbody > tr > td > span.footable-toggle:before { content: "\e011"; } .footable.breakpoint.toggle-arrow-small > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e013"; } .footable.breakpoint.toggle-arrow-small > tbody > tr > td > span.footable-toggle:before { content: "\e015"; } .footable.breakpoint.toggle-arrow-circle > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e01b"; } .footable.breakpoint.toggle-arrow-circle > tbody > tr > td > span.footable-toggle:before { content: "\e01d"; } .footable.breakpoint.toggle-arrow-circle-filled > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e00b"; } .footable.breakpoint.toggle-arrow-circle-filled > tbody > tr > td > span.footable-toggle:before { content: "\e00d"; } .footable.breakpoint.toggle-arrow-tiny > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e01f"; } .footable.breakpoint.toggle-arrow-tiny > tbody > tr > td > span.footable-toggle:before { content: "\e021"; } .footable.breakpoint.toggle-arrow-alt > tbody > tr.footable-detail-show > td > span.footable-toggle:before { content: "\e017"; } .footable.breakpoint.toggle-arrow-alt > tbody > tr > td > span.footable-toggle:before { content: "\e019"; } .footable.breakpoint.toggle-medium > tbody > tr > td > span.footable-toggle { font-size: 18px; } .footable.breakpoint.toggle-large > tbody > tr > td > span.footable-toggle { font-size: 24px; } .footable > thead > tr > th { -webkit-touch-callout: none; 
-webkit-user-select: none; -khtml-user-select: none; -moz-user-select: -moz-none; -ms-user-select: none; user-select: none; } .footable > thead > tr > th.footable-sortable:hover { cursor: pointer; } .footable > thead > tr > th.footable-sorted > span.footable-sort-indicator:before { content: "\e013"; } .footable > thead > tr > th.footable-sorted-desc > span.footable-sort-indicator:before { content: "\e012"; } .footable > thead > tr > th > span.footable-sort-indicator { display: inline-block; font-family: 'footable'; speak: none; font-style: normal; font-weight: normal; font-variant: normal; text-transform: none; -webkit-font-smoothing: antialiased; padding-left: 5px; } .footable > thead > tr > th > span.footable-sort-indicator:before { content: "\e022"; } .footable > tfoot .pagination { margin: 0; } .footable.no-paging .hide-if-no-paging { display: none; } .footable-row-detail-inner { display: table; } .footable-row-detail-row { display: table-row; line-height: 1.5em; } .footable-row-detail-group { display: block; line-height: 2em; font-size: 1.2em; font-weight: bold; } .footable-row-detail-name { display: table-cell; font-weight: bold; padding-right: 0.5em; } .footable-row-detail-value { display: table-cell; } .footable-odd { background-color: #f7f7f7; }
#region PDFsharp Charting - A .NET charting library based on PDFsharp // // Authors: // Niklas Schneider // // Copyright (c) 2005-2016 empira Software GmbH, Cologne Area (Germany) // // http://www.pdfsharp.com // http://sourceforge.net/projects/pdfsharp // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. #endregion namespace PdfSharp.Charting { /// <summary> /// The Pdf-Sharp-Charting-String-Resources. /// </summary> // ReSharper disable once InconsistentNaming internal class PSCSR { internal static string InvalidChartTypeForCombination(ChartType chartType) { return string.Format("ChartType '{0}' not valid for combination of charts.", chartType.ToString()); } internal static string PercentNotSupportedByColumnDataLabel { get { return "Column data label cannot be set to 'Percent'"; } } } }
#!/usr/bin/env python # -*- coding: utf-8 -*- """ The daemon that calls auto_copy.py uppon optical disc insertion """ import signal import sys import time sys.path.append('/usr/local/bin') import auto_copy SIGNAL_RECEIVED = False def run_daemon(config): """ Run the damon config: configParser object """ signal.signal(signal.SIGUSR1, signal_handler) while True: time.sleep(1) global SIGNAL_RECEIVED if SIGNAL_RECEIVED: auto_copy.auto_copy(config) SIGNAL_RECEIVED = False def signal_handler(dump1, dump2): global SIGNAL_RECEIVED SIGNAL_RECEIVED = True if __name__ == "__main__": main_config = auto_copy.read_config('/etc/auto_copy.yml') auto_copy.setup_logging(main_config) run_daemon(main_config)
db.groups.update( {lname: "marrasputki"}, {$set:{users: ["Jörö"], description: "Marrasputki 2018"}})
<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> <title>Page Not Found :(</title> <style> ::-moz-selection { background: #fe57a1; color: #fff; text-shadow: none; } ::selection { background: #fe57a1; color: #fff; text-shadow: none; } html { padding: 30px 10px; font-size: 20px; line-height: 1.4; color: #737373; background: #f0f0f0; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; } html, input { font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; } body { max-width: 500px; _width: 500px; padding: 30px 20px 50px; border: 1px solid #b3b3b3; border-radius: 4px; margin: 0 auto; box-shadow: 0 1px 10px #a7a7a7, inset 0 1px 0 #fff; background: #fcfcfc; } h1 { margin: 0 10px; font-size: 50px; text-align: center; } h1 span { color: #bbb; } h3 { margin: 1.5em 0 0.5em; } p { margin: 1em 0; } ul { padding: 0 0 0 40px; margin: 1em 0; } .container { max-width: 380px; _width: 380px; margin: 0 auto; } /* google search */ #goog-fixurl ul { list-style: none; padding: 0; margin: 0; } #goog-fixurl form { margin: 0; } #goog-wm-qt, #goog-wm-sb { border: 1px solid #bbb; font-size: 16px; line-height: normal; vertical-align: top; color: #444; border-radius: 2px; } #goog-wm-qt { width: 220px; height: 20px; padding: 5px; margin: 5px 10px 0 0; box-shadow: inset 0 1px 1px #ccc; } #goog-wm-sb { display: inline-block; height: 32px; padding: 0 10px; margin: 5px 0 0; white-space: nowrap; cursor: pointer; background-color: #f5f5f5; background-image: -webkit-linear-gradient(rgba(255,255,255,0), #f1f1f1); background-image: -moz-linear-gradient(rgba(255,255,255,0), #f1f1f1); background-image: -ms-linear-gradient(rgba(255,255,255,0), #f1f1f1); background-image: -o-linear-gradient(rgba(255,255,255,0), #f1f1f1); -webkit-appearance: none; -moz-appearance: none; appearance: none; *overflow: visible; *display: inline; *zoom: 1; } #goog-wm-sb:hover, #goog-wm-sb:focus { border-color: #aaa; box-shadow: 0 1px 1px rgba(0, 0, 0, 0.1); background-color: #f8f8f8; } #goog-wm-qt:focus, 
#goog-wm-sb:focus { border-color: #105cb6; outline: 0; color: #222; } input::-moz-focus-inner { padding: 0; border: 0; } </style> </head> <body> <div class="container"> {{ template:body }} <script> var GOOG_FIXURL_LANG = (navigator.language || '').slice(0,2),GOOG_FIXURL_SITE = location.host; </script> <script src="http://linkhelp.clients.google.com/tbproxy/lh/wm/fixurl.js"></script> </div> </body> </html>
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

import json
import os
import unittest

from monty.json import MontyDecoder

from pymatgen.apps.battery.conversion_battery import ConversionElectrode
from pymatgen.apps.battery.insertion_battery import InsertionElectrode
from pymatgen.apps.battery.plotter import VoltageProfilePlotter
from pymatgen.core.composition import Composition
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.util.testing import PymatgenTest


class VoltageProfilePlotterTest(unittest.TestCase):
    """Tests VoltageProfilePlotter against one insertion electrode (LiTiO2)
    and one conversion electrode (FeF3), both built from bundled fixtures."""

    def setUp(self):
        # Reference Li metal entry used as the working-ion reference for the
        # insertion electrode.
        entry_Li = ComputedEntry("Li", -1.90753119)

        # Insertion electrode from the LiTiO2 fixture entries.
        with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "LiTiO2_batt.json")) as f:
            entries_LTO = json.load(f, cls=MontyDecoder)
            self.ie_LTO = InsertionElectrode.from_entries(entries_LTO, entry_Li)

        # Conversion electrode from the FeF3 fixture entries.
        with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "FeF3_batt.json")) as fid:
            entries = json.load(fid, cls=MontyDecoder)
            self.ce_FF = ConversionElectrode.from_composition_and_entries(Composition("FeF3"), entries)

    def testName(self):
        """Both electrode kinds should yield plot data after being added."""
        plotter = VoltageProfilePlotter(xaxis="frac_x")
        plotter.add_electrode(self.ie_LTO, "LTO insertion")
        plotter.add_electrode(self.ce_FF, "FeF3 conversion")
        self.assertIsNotNone(plotter.get_plot_data(self.ie_LTO))
        self.assertIsNotNone(plotter.get_plot_data(self.ce_FF))

    def testPlotly(self):
        """The plotly x-axis title should adapt to the xaxis mode and to the
        mix of electrodes that have been added."""
        plotter = VoltageProfilePlotter(xaxis="frac_x")
        plotter.add_electrode(self.ie_LTO, "LTO insertion")
        plotter.add_electrode(self.ce_FF, "FeF3 conversion")
        fig = plotter.get_plotly_figure()
        self.assertEqual(fig.layout.xaxis.title.text, "Atomic Fraction of Li")

        # With a single electrode in x_form mode the title names that formula.
        plotter = VoltageProfilePlotter(xaxis="x_form")
        plotter.add_electrode(self.ce_FF, "FeF3 conversion")
        fig = plotter.get_plotly_figure()
        self.assertEqual(fig.layout.xaxis.title.text, "x in Li<sub>x</sub>FeF3")

        # Mixing electrode types falls back to a generic per-host-F.U. label.
        plotter.add_electrode(self.ie_LTO, "LTO insertion")
        fig = plotter.get_plotly_figure()
        self.assertEqual(fig.layout.xaxis.title.text, "x Workion Ion per Host F.U.")


if __name__ == "__main__":
    unittest.main()
// // XiuXiuViewController.h // AiyoyouCocoapods // // Created by aiyoyou on 2017/6/30. // Copyright © 2017年 zoenet. All rights reserved. // #import "BaseViewController.h" @interface XiuXiuViewController : BaseViewController @end
# Be sure to restart your server when you modify this file. # Your secret key is used for verifying the integrity of signed cookies. # If you change this key, all old signed cookies will become invalid! # Make sure the secret is at least 30 characters and all random, # no regular words or you'll be exposed to dictionary attacks. # You can use `rake secret` to generate a secure secret key. # Make sure your secret_key_base is kept private # if you're sharing your code publicly. Cheersee::Application.config.secret_key_base = '5318197a09a060234e6733cfc9b60a20a1d1f22f37926b862dc6c0ab8dffb975e21dd10e9e3f2bd4593ea0013c8e020df30c60c78b74446c41f53cea2e420733'
// Copyright (c) 2014 Xiaomi.com, Inc. All Rights Reserved // @file logger.go // @author 王靖 (wangjing1@xiaomi.com) // @date 14-11-25 20:02:50 // @version $Revision: 1.0 $ // @brief package log import ( "crypto/rand" "fmt" "math/big" "path/filepath" "runtime" "strconv" "strings" "sync" ) // SysLog 系统Log var SysLog *ProxyLogger = nil // AppLog 应用Log var AppLog *ProxyLogger = nil // Logger the log.Logger wrapper type ProxyLogger struct { l *Logger } func logidGenerator() string { if i, err := rand.Int(rand.Reader, big.NewInt(1<<30-1)); err != nil { return "0" } else { return i.String() } } func comMessage(strfmt string, args ...interface{}) map[string]string { pc, file, line, ok := runtime.Caller(2) if !ok { file = "?" line = 0 } fn := runtime.FuncForPC(pc) var fnName string if fn == nil { fnName = "?()" } else { dotName := filepath.Ext(fn.Name()) fnName = strings.TrimLeft(dotName, ".") + "()" } ret := map[string]string{ "file": filepath.Base(file) + ":" + strconv.Itoa(line), "func": fnName, "msg": fmt.Sprintf(strfmt, args...), } return ret } // Notice print notice message to logfile func (lg *ProxyLogger) Notice(strfmt string, args ...interface{}) { lg.l.Notice(comMessage(strfmt, args...), logidGenerator()) } // Debug print debug message to logfile func (lg *ProxyLogger) Debug(strfmt string, args ...interface{}) { lg.l.Debug(comMessage(strfmt, args...), logidGenerator()) } // Warn print warning message to logfile func (lg *ProxyLogger) Warn(strfmt string, args ...interface{}) { lg.l.Warn(comMessage(strfmt, args...), logidGenerator()) } // Fatal print fatal message to logfile func (lg *ProxyLogger) Fatal(strfmt string, args ...interface{}) { lg.l.Fatal(comMessage(strfmt, args...), logidGenerator()) } // Config Config of One Log Instance type Config struct { FilePath string LogLevel int AppTag string } func init() { realInit(&Config{FilePath: "/dev/stdout", LogLevel: 0}, &Config{FilePath: "/dev/stdout", LogLevel: 3}) } var once sync.Once func Init(syslog, applog 
*Config) { f := func() { realInit(syslog, applog) } once.Do(f) } func realInit(syslog, applog *Config) { SysLog = &ProxyLogger{ l: NewLogger(syslog.FilePath), } SysLog.l.SetLevel(syslog.LogLevel) SysLog.l.SetAppTag(defaultAppTag()) AppLog = &ProxyLogger{ l: NewLogger(applog.FilePath), } AppLog.l.SetLevel(applog.LogLevel) AppLog.l.SetAppTag(defaultAppTag()) } func defaultAppTag() string { return "mysql-proxy" } /* vim: set expandtab ts=4 sw=4 */
//============== IV: Multiplayer - http://code.iv-multiplayer.com ============== // // File: CMutex.cpp // Project: Shared // Author(s): jenksta // License: See LICENSE in root directory // //============================================================================== #include "CMutex.h" #include <SharedUtility.h> CMutex::CMutex() { // Create the mutex #ifdef WIN32 #ifdef USE_CRITICAL_SECTION InitializeCriticalSection(&m_criticalSection); #else m_hMutex = CreateMutex(NULL, FALSE, NULL); #endif #else pthread_mutex_init(&m_mutex, NULL); #endif // Set the lock count to its default value m_iLockCount = 0; } CMutex::~CMutex() { // Delete the mutex #ifdef WIN32 #ifdef USE_CRITICAL_SECTION DeleteCriticalSection(&m_criticalSection); #else CloseHandle(m_hMutex); #endif #else pthread_mutex_destroy(&m_mutex); #endif } void CMutex::Lock() { // Lock the mutex #ifdef WIN32 #ifdef USE_CRITICAL_SECTION EnterCriticalSection(&m_criticalSection); #else WaitForSingleObject(m_hMutex, INFINITE); #endif #else pthread_mutex_lock(&m_mutex); #endif // Increment the lock count m_iLockCount++; } bool CMutex::TryLock(unsigned int uiTimeOutMilliseconds) { // Attempt to lock the mutex bool bLocked = false; #if defined(WIN32) && !defined(USE_CRITICAL_SECTION) bLocked = (WaitForSingleObject(m_hMutex, uiTimeOutMilliseconds) == 0); #else if(uiTimeOutMilliseconds == 0) { #ifdef WIN32 bLocked = (TryEnterCriticalSection(&m_criticalSection) != 0); #else bLocked = pthread_mutex_trylock(&m_mutex); #endif } else { unsigned long ulEndTime = (SharedUtility::GetTime() + uiTimeOutMilliseconds); while(SharedUtility::GetTime() < ulEndTime) { #ifdef WIN32 if(TryEnterCriticalSection(&m_criticalSection)) #else if(pthread_mutex_trylock(&m_mutex)) #endif { bLocked = true; break; } } } #endif // Did the mutex lock successfully? 
if(bLocked) { // Increment the lock count m_iLockCount++; } return bLocked; } void CMutex::Unlock() { // Decrement the lock count m_iLockCount--; // Unlock the mutex #ifdef WIN32 #ifdef USE_CRITICAL_SECTION LeaveCriticalSection(&m_criticalSection); #else ReleaseMutex(m_hMutex); #endif #else pthread_mutex_unlock(&m_mutex); #endif }
<!DOCTYPE html> <html class="light page-post"> <head> <meta charset="utf-8"> <title>产品岗面试题 | Cain</title> <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> <meta name="keywords" content="面试,产品,题目," /> <meta name="description" content="面试产品岗的问题 先介绍下自己,试着用别人能记住的方式。(考察PM抓重点的能力) 讲讲你做的这些项目,以及你在中间扮演的角色。 重点讲一个你最有成就的项目。 如果这个项目再复盘的话,你会怎么做? 你一般从哪里看数据?最近比较重要的数据变化是什么?有什么结论? 讲一下你每天从上班都下班的典型工作流程是什么样子的。(考察产品经理基础技能、表达能力、逻辑思维能力) 最近用哪个新的 App,讲讲这个 Ap"> <meta property="og:type" content="article"> <meta property="og:title" content="产品岗面试题"> <meta property="og:url" content="http://10000mile.com/2016/05/17/Interview Questions/index.html"> <meta property="og:site_name" content="Cain"> <meta property="og:description" content="面试产品岗的问题 先介绍下自己,试着用别人能记住的方式。(考察PM抓重点的能力) 讲讲你做的这些项目,以及你在中间扮演的角色。 重点讲一个你最有成就的项目。 如果这个项目再复盘的话,你会怎么做? 你一般从哪里看数据?最近比较重要的数据变化是什么?有什么结论? 讲一下你每天从上班都下班的典型工作流程是什么样子的。(考察产品经理基础技能、表达能力、逻辑思维能力) 最近用哪个新的 App,讲讲这个 Ap"> <meta property="og:updated_time" content="2017-01-11T03:18:30.000Z"> <meta name="twitter:card" content="summary"> <meta name="twitter:title" content="产品岗面试题"> <meta name="twitter:description" content="面试产品岗的问题 先介绍下自己,试着用别人能记住的方式。(考察PM抓重点的能力) 讲讲你做的这些项目,以及你在中间扮演的角色。 重点讲一个你最有成就的项目。 如果这个项目再复盘的话,你会怎么做? 你一般从哪里看数据?最近比较重要的数据变化是什么?有什么结论? 
讲一下你每天从上班都下班的典型工作流程是什么样子的。(考察产品经理基础技能、表达能力、逻辑思维能力) 最近用哪个新的 App,讲讲这个 Ap"> <link rel="icon" href="/favicon.ico"> <link href="/css/styles.css?v=d671a41f" rel="stylesheet"> <link rel="stylesheet" href="/css/personal-style.css"> <script type="text/javascript"> var _hmt = _hmt || []; (function() { var hm = document.createElement("script"); hm.src = "//hm.baidu.com/hm.js?b09bba9c7c6bde427475af64a114525f"; var s = document.getElementsByTagName("script")[0]; s.parentNode.insertBefore(hm, s); })(); </script> <script async src="https://dn-lbstatics.qbox.me/busuanzi/2.3/busuanzi.pure.mini.js"></script> <link rel="stylesheet" href="//cdn.bootcss.com/font-awesome/4.3.0/css/font-awesome.min.css"> </head> <body> <span id="toolbox-mobile" class="toolbox-mobile">魔方</span> <div class="post-header CENTER"> <div class="toolbox"> <a class="toolbox-entry" href="/"> <span class="toolbox-entry-text">魔方</span> <i class="icon-angle-down"></i> <i class="icon-home"></i> </a> <ul class="list-toolbox"> <li class="item-toolbox"> <a class="CIRCLE" href="/archives/" rel="noopener noreferrer" target="_self" > 博客 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/tag/" rel="noopener noreferrer" target="_self" > 标签 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/link/" rel="noopener noreferrer" target="_self" > 链接 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/about/" rel="noopener noreferrer" target="_self" > 关于 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/search/" rel="noopener noreferrer" target="_self" > 搜索 </a> </li> </ul> </div> </div> <div id="toc" class="toc-article"> <strong class="toc-title">Posts List</strong> <ol class="toc"><li class="toc-item toc-level-1"><a class="toc-link" href="#面试产品岗的问题"><span class="toc-text">面试产品岗的问题</span></a></li></ol> </div> <div class="content content-post CENTER"> <article id="post-Interview Questions" class="article article-type-post" itemprop="blogPost"> <header class="article-header"> <h1 
class="post-title">产品岗面试题</h1> <div class="article-meta"> <span> <i class="icon-calendar"></i> <span>2016.05.17</span> </span> <span class="article-author"> <i class="icon-user"></i> <span>Cain</span> </span> <i class="fa fa-eye"></i> <span id="busuanzi_container_page_pv"> &nbsp热度 <span id="busuanzi_value_page_pv"> <i class="fa fa-spinner fa-spin"></i></span>℃ </span> <i class="icon-comment"></i> <span class="ds-thread-count" data-thread-key="post-Interview Questions"><i class="fa fa-spinner fa-spin"></i></span> 条评论 </div> </header> <div class="article-content"> <h1 id="面试产品岗的问题"><a href="#面试产品岗的问题" class="headerlink" title="面试产品岗的问题"></a>面试产品岗的问题</h1><ol> <li>先介绍下自己,试着用别人能记住的方式。(考察PM抓重点的能力)</li> <li>讲讲你做的这些项目,以及你在中间扮演的角色。</li> <li>重点讲一个你最有成就的项目。</li> <li>如果这个项目再复盘的话,你会怎么做?</li> <li>你一般从哪里看数据?最近比较重要的数据变化是什么?有什么结论?</li> <li>讲一下你每天从上班都下班的典型工作流程是什么样子的。(考察产品经理基础技能、表达能力、逻辑思维能力)</li> <li>最近用哪个新的 App,讲讲这个 App 的特点、逻辑、细节、模式等等</li> <li>说一下你理解的完整的产品流程是什么样子的?</li> <li>当你的意见和上级的意见不一致的时候,你会怎么做?</li> <li>介意看一下您手机上都装了什么 App 吗?(考察好奇心、产品视角,对产品完整流程的理解、执行力和向上的管理能力)</li> <li>说一个你自己接触或者知道的 PM,他身上有什么特质异于其他人,导致他在从事这个职业时显得出类拔萃。</li> <li>你认为产品经理的核心竞争力是什么?(考察好学习能力,对产品经理职业理解的深度)</li> <li>你的优势是什么?</li> <li>你有什么长期保持的兴趣爱好,可以深入聊聊?(考察对自己的认知程度,以及对某一个事情挖的深浅程度。</li> </ol> </div> </article> </div> <div class="text-center donation"> <div class="inner-donation"> <span class="btn-donation">支持一下</span> <div class="donation-body"> <div class="tip text-center">扫一扫,支持Cain</div> <ul class="theme.donation.items.length"> <li class="item"> <img src="/images/qr-wechat.png" alt=""> </li> <li class="item"> <img src="/images/qr-alipay.png" alt=""> </li> </ul> </div> </div> </div> <a id="backTop" class="back-top"> <i class="icon-angle-up"></i> </a> <div class="modal" id="modal"> <span id="cover" class="cover hide"></span> <div id="modal-dialog" class="modal-dialog hide-dialog"> <div class="modal-header"> <span id="close" class="btn-close">Close</span> </div> <hr> <div class="modal-body"> <ul class="list-toolbox"> <li 
class="item-toolbox"> <a class="CIRCLE" href="/archives/" rel="noopener noreferrer" target="_self" > 博客 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/tag/" rel="noopener noreferrer" target="_self" > 标签 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/link/" rel="noopener noreferrer" target="_self" > 链接 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/about/" rel="noopener noreferrer" target="_self" > 关于 </a> </li> <li class="item-toolbox"> <a class="CIRCLE" href="/search/" rel="noopener noreferrer" target="_self" > 搜索 </a> </li> </ul> </div> </div> </div> <div class="fexo-comments comments-post"> <section class="duoshuo-comments"> <!-- 多说评论框 start --> <div class="ds-thread" data-thread-key="post-Interview Questions" data-title="产品岗面试题" data-url="http://10000mile.com/2016/05/17/Interview Questions/index.html"></div> <!-- 多说评论框 end --> </section> <script type="text/javascript"> var duoshuoQuery = {short_name:"cain"}; (function() { var ds = document.createElement('script'); ds.type = 'text/javascript';ds.async = true; ds.src = (document.location.protocol == 'https:' ? 'https:' : 'http:') + '//static.duoshuo.com/embed.js'; ds.charset = 'UTF-8'; (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(ds); })(); </script> </div> <script type="text/javascript"> function loadScript(url, callback) { var script = document.createElement('script') script.type = 'text/javascript'; if (script.readyState) { //IE script.onreadystatechange = function() { if (script.readyState == 'loaded' || script.readyState == 'complete') { script.onreadystatechange = null; callback(); } }; } else { //Others script.onload = function() { callback(); }; } script.src = url; document.getElementsByTagName('head')[0].appendChild(script); } window.onload = function() { loadScript('/js/bundle.js?235683', function() { // load success }); } </script> </body> </html>
/** * Automatically generated file. Please do not edit. * @author Highcharts Config Generator by Karasiq * @see [[http://api.highcharts.com/highcharts]] */ package com.highcharts.config import scalajs.js, js.`|` import com.highcharts.CleanJsObject import com.highcharts.HighchartsUtils._ /** * @note JavaScript name: <code>plotOptions-line-states-hover-marker-states-hover-animation</code> */ @js.annotation.ScalaJSDefined class PlotOptionsLineStatesHoverMarkerStatesHoverAnimation extends com.highcharts.HighchartsGenericObject { val duration: js.UndefOr[Double] = js.undefined } object PlotOptionsLineStatesHoverMarkerStatesHoverAnimation { /** */ def apply(duration: js.UndefOr[Double] = js.undefined): PlotOptionsLineStatesHoverMarkerStatesHoverAnimation = { val durationOuter: js.UndefOr[Double] = duration com.highcharts.HighchartsGenericObject.toCleanObject(new PlotOptionsLineStatesHoverMarkerStatesHoverAnimation { override val duration: js.UndefOr[Double] = durationOuter }) } }
# Build/test orchestration for a Babel-transpiled Node.js project.
# Tool paths ($(BABEL), $(ESLINT), $(MOCHA), $(NCU), ...) and
# $(BUILD_DIRECTORY) are expected to be defined in the included .env file.
include ./.env

.PHONY: build check clean help lint install start test test-coverage
.DEFAULT_GOAL := help

build: ## transpile the files from ES6 to JS
	@$(MAKE) -s lint
	@$(MAKE) -s clean
	@echo " > Building the project in $(BUILD_DIRECTORY)"
	@$(BABEL) -q ./src -d $(BUILD_DIRECTORY)

check: ## check dependencies
	@echo " > Checking dependencies"
	@$(NCU)

clean: ## clean artifacts
	@echo " > Cleaning $(BUILD_DIRECTORY)"
	@rm -rf $(BUILD_DIRECTORY)

# The help target lists every target carrying a "##" description comment.
help: ## provide help to you
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "$$(grep -hE '^\S+:.*##' $(MAKEFILE_LIST) | sed -e 's/:.*##\s*/:/' -e 's/^\(.\+\):\(.*\)/\1:\2/' | column -c2 -t -s :)"

lint: ## check the quality code and ES6 integration
	@echo " > Linting the source"
	@$(ESLINT) ./src

install: ## install dependencies
	@echo " > Installing the project"
	@npm install

start: ## start the web server
	@echo " > Starting the project"
	@$(MAKE) -s build
	@export NODE_ENV=dev && \
	node $(BUILD_DIRECTORY)/index.js

test: ## launch tests
	@echo " > Testing the project"
	@$(MAKE) -s build
	@export PORT=0 && \
	export NODE_ENV=test && \
	$(MOCHA) --require babel-core/register --recursive --exit

# NOTE(review): $(MOCHA_) below (trailing underscore) differs from the
# $(MOCHA) used by `test`; presumably both are defined in .env — confirm
# it is not a typo.
test-coverage: ## launch tests with coverage
	@echo " > Testing with coverage"
	@$(MAKE) -s build
	@export PORT=0 && \
	export NODE_ENV=test && \
	$(BABEL_NODE) $(BABEL_ISTANBUL) cover $(MOCHA_) --report html --report text --check-coverage -- --recursive --exit
#include <zmq.h>
#include <zlib.h>
#include <czmq.h>
#include <zframe.h>
#include "Crowbar.h"
#include "boost/thread.hpp"
#include "g2log.hpp"
#include "Death.h"

/**
 * Construct a crowbar for beating things at the binding location
 *
 * @param binding
 *   A std::string description of a ZMQ socket
 */
Crowbar::Crowbar(const std::string& binding) : mContext(NULL), mBinding(binding), mTip(NULL), mOwnsContext(true) {
}

/**
 * Construct a crowbar for beating the specific headcrab
 *
 * @param target
 *   A living(initialized) headcrab
 */
Crowbar::Crowbar(const Headcrab& target) : mContext(target.GetContext()), mBinding(target.GetBinding()), mTip(NULL),
mOwnsContext(false) {
   // If the headcrab has no context yet, this crowbar will create (and own) one.
   if (mContext == NULL) {
      mOwnsContext = true;
   }
}

/**
 * Construct a crowbar for beating things at binding with the given context
 * @param binding
 *   The binding of the bound socket for the given context
 * @param context
 *   A working context
 */
Crowbar::Crowbar(const std::string& binding, zctx_t* context) : mContext(context), mBinding(binding), mTip(NULL),
mOwnsContext(false) {
}

/**
 * Default deconstructor
 */
Crowbar::~Crowbar() {
   // Only destroy the context if this instance created it.
   if (mOwnsContext && mContext != NULL) {
      zctx_destroy(&mContext);
   }
}

/**
 * Get the high water mark for socket sends
 *
 * @return
 *   the high water mark
 */
int Crowbar::GetHighWater() {
   return 1024;
}

/**
 * Get the "tip" socket used to hit things
 *
 * @return
 *   A pointer to a zmq socket (or NULL in a failure)
 */
void* Crowbar::GetTip() {
   void* tip = zsocket_new(mContext, ZMQ_REQ);
   if (!tip) {
      return NULL;
   }
   zsocket_set_sndhwm(tip, GetHighWater());
   zsocket_set_rcvhwm(tip, GetHighWater());
   zsocket_set_linger(tip, 0);
   int connectRetries = 100;
   // Retry the connect until it succeeds, the retries are exhausted, or the
   // process is interrupted; zsocket_connect returns 0 on success.
   while (zsocket_connect(tip, mBinding.c_str()) != 0 && connectRetries-- > 0 && !zctx_interrupted) {
      boost::this_thread::interruption_point();
      int err = zmq_errno();
      if (err == ETERM) {
         // The context has been terminated; this socket can never connect.
         zsocket_destroy(mContext, tip);
         return NULL;
      }
      std::string error(zmq_strerror(err));
      LOG(WARNING) << "Could not connect to " << mBinding << ":" << error;
      zclock_sleep(100);
   }
   // NOTE(review): this cleanup event is registered even when the connect
   // loop failed and NULL is returned below — confirm that is intended.
   Death::Instance().RegisterDeathEvent(&Death::DeleteIpcFiles, mBinding);
   if (zctx_interrupted) {
      LOG(INFO) << "Caught Interrupt Signal";
   }
   if (connectRetries <= 0) {
      zsocket_destroy(mContext, tip);
      return NULL;
   }
   return tip;
}

/**
 * Lazily create the context (when this instance owns one) and the REQ socket.
 * Safe to call repeatedly; existing context/socket are reused.
 *
 * @return
 *   true when both the context and the socket are ready for use
 */
bool Crowbar::Wield() {
   if (!mContext) {
      mContext = zctx_new();
      zctx_set_linger(mContext, 0); // linger for a millisecond on close
      zctx_set_sndhwm(mContext, GetHighWater());
      zctx_set_rcvhwm(mContext, GetHighWater()); // HWM on internal thread communication
      zctx_set_iothreads(mContext, 1);
   }
   if (!mTip) {
      mTip = GetTip();
      // Failed to obtain a socket: tear down an owned context so a later
      // Wield() starts from a clean slate.
      if (!mTip && mOwnsContext) {
         zctx_destroy(&mContext);
         mContext = NULL;
      }
   }
   return ((mContext != NULL) && (mTip != NULL));
}

/**
 * Send a single string to the bound socket (one-element Flurry).
 *
 * @param hit
 *   the payload to send
 * @return
 *   true when the message was handed to ZMQ successfully
 */
bool Crowbar::Swing(const std::string& hit) {
   //std::cout << "sending " << hit << std::endl;
   std::vector<std::string> hits;
   hits.push_back(hit);
   return Flurry(hits);
}

/**
 * Poll to see if the other side of the socket is ready
 * (non-blocking: zero timeout)
 * @return
 */
bool Crowbar::PollForReady() {
   zmq_pollitem_t item;
   if (!mTip) {
      return false;
   }
   item.socket = mTip;
   item.events = ZMQ_POLLOUT;
   int returnVal = zmq_poll(&item, 1, 0);
   if (returnVal < 0) {
      LOG(WARNING) << "Socket error: " << zmq_strerror(zmq_errno());
   }
   return (returnVal >= 1);
}

/**
 * Send a bunch of strings to a socket
 * (each string becomes one frame of a single multipart message)
 * @param hits
 * @return
 */
bool Crowbar::Flurry(std::vector<std::string>& hits) {
   if (!mTip) {
      LOG(WARNING) << "Cannot send, not Wielded";
      return false;
   }
   if (!PollForReady()) {
      LOG(WARNING) << "Cannot send, no listener ready";
      return false;
   }
   zmsg_t* message = zmsg_new();
   for (auto it = hits.begin(); it != hits.end(); it++) {
      zmsg_addmem(message, &((*it)[0]), it->size());
   }
   bool success = true;
   //std::cout << "Sending message with " << zmsg_size(message) << " " << hits.size() << std::endl;
   // zmsg_send nulls the message pointer on success; on failure we still own it.
   if (zmsg_send(&message, mTip) != 0) {
      LOG(WARNING) << "zmsg_send returned non-zero exit " << zmq_strerror(zmq_errno());
      success = false;
   }
   if (message) {
      zmsg_destroy(&message);
   }
   return success;
}

/**
 * Blocking receive; returns only the first frame of the reply.
 *
 * @param guts
 *   receives the first frame's contents
 * @return
 *   true when a non-empty reply arrived
 */
bool Crowbar::BlockForKill(std::string& guts) {
   std::vector<std::string> allReplies;
   if (BlockForKill(allReplies) && !allReplies.empty()) {
      guts = allReplies[0];
      return true;
   }
   return false;
}

/**
 * Blocking receive of a full multipart reply; each frame becomes one string.
 *
 * @param guts
 *   cleared, then filled with one string per frame
 * @return
 *   true when a message was received
 */
bool Crowbar::BlockForKill(std::vector<std::string>& guts) {
   if (!mTip) {
      return false;
   }
   zmsg_t* message = zmsg_recv(mTip);
   if (!message) {
      return false;
   }
   guts.clear();
   int msgSize = zmsg_size(message);
   for (int i = 0; i < msgSize; i++) {
      zframe_t* frame = zmsg_pop(message);
      std::string aString;
      aString.insert(0, reinterpret_cast<const char*> (zframe_data(frame)), zframe_size(frame));
      guts.push_back(aString);
      zframe_destroy(&frame);
      //std::cout << guts[0] << " found " << aString << std::endl;
   }
   zmsg_destroy(&message);
   return true;
}

/**
 * Timed receive; returns only the first frame of the reply.
 *
 * @param guts
 *   receives the first frame's contents
 * @param timeout
 *   how long to wait (zsocket_poll units)
 * @return
 *   true when a non-empty reply arrived in time
 */
bool Crowbar::WaitForKill(std::string& guts, const int timeout) {
   std::vector<std::string> allReplies;
   if (WaitForKill(allReplies, timeout) && !allReplies.empty()) {
      guts = allReplies[0];
      return true;
   }
   return false;
}

/**
 * Timed receive of a full multipart reply.
 *
 * @param guts
 *   cleared, then filled with one string per frame
 * @param timeout
 *   how long to wait (zsocket_poll units)
 * @return
 *   true when a message was received in time
 */
bool Crowbar::WaitForKill(std::vector<std::string>& guts, const int timeout) {
   if (!mTip) {
      return false;
   }
   if (zsocket_poll(mTip, timeout)) {
      return BlockForKill(guts);
   }
   return false;
}

/**
 * Accessor for the underlying ZMQ context (may be NULL before Wield()).
 */
zctx_t* Crowbar::GetContext() {
   return mContext;
}
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::KeyVault::V2015_06_01 module Models # # The certificate issuer list result # class CertificateIssuerListResult include MsRestAzure include MsRest::JSONable # @return [Array<CertificateIssuerItem>] A response message containing a # list of certificate issuers in the vault along with a link to the next # page of certificate issuers attr_accessor :value # @return [String] The URL to get the next set of certificate issuers. attr_accessor :next_link # return [Proc] with next page method call. attr_accessor :next_method # # Gets the rest of the items for the request, enabling auto-pagination. # # @return [Array<CertificateIssuerItem>] operation results. # def get_all_items items = @value page = self while page.next_link != nil && !page.next_link.strip.empty? do page = page.get_next_page items.concat(page.value) end items end # # Gets the next page of results. # # @return [CertificateIssuerListResult] with next page content. # def get_next_page response = @next_method.call(@next_link).value! unless @next_method.nil? unless response.nil? @next_link = response.body.next_link @value = response.body.value self end end # # Mapper for CertificateIssuerListResult class as Ruby Hash. # This will be used for serialization/deserialization. 
# def self.mapper() { client_side_validation: true, required: false, serialized_name: 'CertificateIssuerListResult', type: { name: 'Composite', class_name: 'CertificateIssuerListResult', model_properties: { value: { client_side_validation: true, required: false, read_only: true, serialized_name: 'value', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'CertificateIssuerItemElementType', type: { name: 'Composite', class_name: 'CertificateIssuerItem' } } } }, next_link: { client_side_validation: true, required: false, read_only: true, serialized_name: 'nextLink', type: { name: 'String' } } } } } end end end end
#pragma once #include <zlib.h> #include "istream.h" namespace nano { /// /// \brief zlib-based streaming of gzip-compressed binary data. /// struct NANO_PUBLIC zlib_istream_t final : public istream_t { explicit zlib_istream_t(istream_t& istream, const std::streamsize max_num_bytes = max_streamsize()); ~zlib_istream_t() override; io_status advance(const std::streamsize num_bytes, buffer_t& buffer) override; private: // attributes istream_t& m_istream; ///< input stream std::streamsize m_max_num_bytes; ///< maximum number of bytes to read from the input stream z_stream m_zstream; ///< zlib stream }; }
//
//   ______ _____ _____
//  |  ____/ ____/ ____|
//  | |__ | |   | |  __
//  |  __|| |   | | |_ |
//  | |___| |___| |__| |
//  |______\_____\_____|
//
//
//  NSObject+Swizzling.h
//  SnapEcgDoctor
//
//  Created by tan on 2017/2/14.
//  Copyright © 2017年 baotiao ni. All rights reserved.
//

#import <Foundation/Foundation.h>

/// Category exposing method swizzling on every NSObject subclass.
@interface NSObject (Swizzling)

/// Exchanges the implementations of two instance methods on the receiving
/// class (classic runtime swizzling); typically invoked from +load.
///
/// @param originalSelector the selector whose implementation is replaced
/// @param swizzledSelector the selector providing the replacement implementation
/// NOTE(review): the implementation is not visible here — presumably it uses
/// class_getInstanceMethod/method_exchangeImplementations; confirm in the .m.
+ (void)swizzleSelector:(SEL)originalSelector withSwizzledSelector:(SEL)swizzledSelector;

@end
# Chef cookbook metadata for the "meteor" cookbook.
name 'meteor'
maintainer 'Logan Koester'
maintainer_email 'logan@logankoester.com'
license 'MIT'
description 'Install the Meteor JavaScript App Platform'
# The long description is taken verbatim from the cookbook's README.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.3'
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter } from "react-router-dom";
import App, {serviceWorkCallbacks} from './App';
import Globals from './Globals';
import * as serviceWorker from './serviceWorkerRegistration';

// Enable at the very start for logging most messages.
Globals.enableAppLog();

// Mount the application under a browser-history router at #root.
ReactDOM.render(<BrowserRouter><App /></BrowserRouter>, document.getElementById('root'));

// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.register({
  // Called when the service worker has precached the app for offline use.
  onSuccess: (registration: ServiceWorkerRegistration) => {
    console.log('Precache app loaded!');
    serviceWorkCallbacks.onSuccess(registration);
  },
  // Called when a new version of the app has been fetched and is waiting.
  onUpdate: (registration: ServiceWorkerRegistration) => {
    console.log('Found app updated!');
    serviceWorkCallbacks.onUpdate(registration);
  },
});
from abc import ABCMeta, abstractmethod


class AbstractAuthenticator(metaclass=ABCMeta):
    """Abstract base class for transaction authenticators.

    Concrete subclasses implement :meth:`authorise_transaction` to decide
    whether a given customer's pending transaction should go through.
    """

    def __init__(self):
        """
        Initialise the authenticator.

        The base class holds no state of its own; it only cooperates with
        multiple inheritance via ``super().__init__()``.
        """
        super().__init__()

    @abstractmethod
    def authorise_transaction(self, customer):
        """
        Decide whether to authorise transaction. Note that all relevant
        information can be obtained from the customer.

        :param customer: the customer making a transaction
        :return: boolean, whether or not to authorise the transaction
        """
using System.Diagnostics; namespace ChainUtils.BouncyCastle.Math.EC.Custom.Sec { internal class SecP192K1Field { // 2^192 - 2^32 - 2^12 - 2^8 - 2^7 - 2^6 - 2^3 - 1 internal static readonly uint[] P = new uint[]{ 0xFFFFEE37, 0xFFFFFFFE, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; internal static readonly uint[] PExt = new uint[]{ 0x013C4FD1, 0x00002392, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0xFFFFDC6E, 0xFFFFFFFD, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; private static readonly uint[] PExtInv = new uint[]{ 0xFEC3B02F, 0xFFFFDC6D, 0xFFFFFFFE, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00002391, 0x00000002 }; private const uint P5 = 0xFFFFFFFF; private const uint PExt11 = 0xFFFFFFFF; private const uint PInv33 = 0x11C9; public static void Add(uint[] x, uint[] y, uint[] z) { var c = Nat192.Add(x, y, z); if (c != 0 || (z[5] == P5 && Nat192.Gte(z, P))) { Nat.Add33To(6, PInv33, z); } } public static void AddExt(uint[] xx, uint[] yy, uint[] zz) { var c = Nat.Add(12, xx, yy, zz); if (c != 0 || (zz[11] == PExt11 && Nat.Gte(12, zz, PExt))) { if (Nat.AddTo(PExtInv.Length, PExtInv, zz) != 0) { Nat.IncAt(12, zz, PExtInv.Length); } } } public static void AddOne(uint[] x, uint[] z) { var c = Nat.Inc(6, x, z); if (c != 0 || (z[5] == P5 && Nat192.Gte(z, P))) { Nat.Add33To(6, PInv33, z); } } public static uint[] FromBigInteger(BigInteger x) { var z = Nat192.FromBigInteger(x); if (z[5] == P5 && Nat192.Gte(z, P)) { Nat192.SubFrom(P, z); } return z; } public static void Half(uint[] x, uint[] z) { if ((x[0] & 1) == 0) { Nat.ShiftDownBit(6, x, 0, z); } else { var c = Nat192.Add(x, P, z); Nat.ShiftDownBit(6, z, c); } } public static void Multiply(uint[] x, uint[] y, uint[] z) { var tt = Nat192.CreateExt(); Nat192.Mul(x, y, tt); Reduce(tt, z); } public static void MultiplyAddToExt(uint[] x, uint[] y, uint[] zz) { var c = Nat192.MulAddTo(x, y, zz); if (c != 0 || (zz[11] == PExt11 && Nat.Gte(12, zz, PExt))) { if (Nat.AddTo(PExtInv.Length, PExtInv, zz) != 0) { Nat.IncAt(12, 
zz, PExtInv.Length); } } } public static void Negate(uint[] x, uint[] z) { if (Nat192.IsZero(x)) { Nat192.Zero(z); } else { Nat192.Sub(P, x, z); } } public static void Reduce(uint[] xx, uint[] z) { var cc = Nat192.Mul33Add(PInv33, xx, 6, xx, 0, z, 0); var c = Nat192.Mul33DWordAdd(PInv33, cc, z, 0); Debug.Assert(c == 0 || c == 1); if (c != 0 || (z[5] == P5 && Nat192.Gte(z, P))) { Nat.Add33To(6, PInv33, z); } } public static void Reduce32(uint x, uint[] z) { if ((x != 0 && Nat192.Mul33WordAdd(PInv33, x, z, 0) != 0) || (z[5] == P5 && Nat192.Gte(z, P))) { Nat.Add33To(6, PInv33, z); } } public static void Square(uint[] x, uint[] z) { var tt = Nat192.CreateExt(); Nat192.Square(x, tt); Reduce(tt, z); } public static void SquareN(uint[] x, int n, uint[] z) { Debug.Assert(n > 0); var tt = Nat192.CreateExt(); Nat192.Square(x, tt); Reduce(tt, z); while (--n > 0) { Nat192.Square(z, tt); Reduce(tt, z); } } public static void Subtract(uint[] x, uint[] y, uint[] z) { var c = Nat192.Sub(x, y, z); if (c != 0) { Nat.Sub33From(6, PInv33, z); } } public static void SubtractExt(uint[] xx, uint[] yy, uint[] zz) { var c = Nat.Sub(12, xx, yy, zz); if (c != 0) { if (Nat.SubFrom(PExtInv.Length, PExtInv, zz) != 0) { Nat.DecAt(12, zz, PExtInv.Length); } } } public static void Twice(uint[] x, uint[] z) { var c = Nat.ShiftUpBit(6, x, 0, z); if (c != 0 || (z[5] == P5 && Nat192.Gte(z, P))) { Nat.Add33To(6, PInv33, z); } } } }
// Translate arrow/P keystrokes into game actions and redraw.
document.body.onkeydown = function (event) {
    // Map of keyCode -> game action name.
    var actionByKeyCode = {
        37: 'left',
        39: 'right',
        40: 'down',
        38: 'rotate',
        80: 'pause'
    };

    var action = actionByKeyCode[event.keyCode];
    if (action !== undefined) {
        keyPress(action);
        render();
    }
};
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Sigil;
using System.Reflection;
using Jil.Common;

namespace Jil.Serialize
{
    // Per-type cache of a delegate that serializes a boxed object as T.
    //
    // JSON.Serialize<T> is generic; when all we have is a System.Object we
    // cannot call it directly. This thunk is built once per closed type (in
    // the static constructor) and unboxes/casts the object before forwarding
    // to the real generic Serialize method.
    class SerializeDynamicThunk<T>
    {
        // Cached delegate: (boxed value, output writer, options) -> void.
        public static Action<object, TextWriter, Options> Thunk;

        static SerializeDynamicThunk()
        {
            // Locate the void-returning generic JSON.Serialize and close it over T.
            var serializeGenMtd = typeof(JSON).GetMethods(BindingFlags.Public | BindingFlags.Static).Single(s => s.Name == "Serialize" && s.ReturnType == typeof(void));

            var t = typeof(T);
            var serialize = serializeGenMtd.MakeGenericMethod(t);

            // Emit IL: convert the boxed argument to T, then call JSON.Serialize<T>.
            var emit = Emit<Action<object, TextWriter, Options>>.NewDynamicMethod();
            emit.LoadArgument(0);           // obj

            if (t._IsValueType())
            {
                emit.UnboxAny<T>();         // T (value types are unboxed)
            }
            else
            {
                emit.CastClass(t);          // T (reference types are downcast)
            }

            emit.LoadArgument(1);           // writer
            emit.LoadArgument(2);           // options
            emit.Call(serialize);
            emit.Return();

            Thunk = emit.CreateDelegate(Utils.DelegateOptimizationOptions);
        }
    }
}
# Setup

We recommend you use a virtual environment to run the examples.

    pip install -r requirements.txt

# Run

    python helloworld.py YOUR_NAME
limitTextLength
===============

jQuery-dependent function to limit the length of a text string inside a DOM element. It doesn't trim words.

It inserts "..." after the maximum number of characters and provides callbacks for when the mouse hovers over them and when the mouse leaves. The default behaviour is to replace the "..." with the remainder of the text when the mouse hovers, and to restore the original state when the mouse leaves.

Usage
-----

```javascript
limitTextLength({
    element: jQuery("selector"), //element is necessary. Can be a jQuery element, dom node or jQuery selector
    maxLength: 25 //Max string length (not including ...).
});
```

Optional parameters:

enter: Callback function when the mouse enters the span containing the "...". The callback "this" is set to the span, and gets passed the trimmed text and the original text as first and second parameters.

leave: Same as enter but triggered on mouseleave.

class: CSS class for custom styling the span. Empty by default.

css: An object containing CSS directives to be added to the span.

Working example: http://jsfiddle.net/K8KgR/4/
# frozen_string_literal: true

module Gitlab
  module GithubImport
    # HTTP client for interacting with the GitHub API.
    #
    # This class is basically a fancy wrapper around Octokit while adding some
    # functionality to deal with rate limiting and parallel imports. Usage is
    # mostly the same as Octokit, for example:
    #
    #     client = GithubImport::Client.new('hunter2')
    #
    #     client.labels.each do |label|
    #       puts label.name
    #     end
    class Client
      include ::Gitlab::Utils::StrongMemoize

      attr_reader :octokit

      # A single page of data and the corresponding page number.
      Page = Struct.new(:objects, :number)

      # The minimum number of requests we want to keep available.
      #
      # We don't use a value of 0 as multiple threads may be using the same
      # token in parallel. This could result in all of them hitting the GitHub
      # rate limit at once. The threshold is put in place to not hit the limit
      # in most cases.
      RATE_LIMIT_THRESHOLD = 50

      # token - The GitHub API token to use.
      #
      # per_page - The number of objects that should be displayed per page.
      #
      # parallel - When set to true hitting the rate limit will result in a
      #            dedicated error being raised. When set to `false` we will
      #            instead just `sleep()` until the rate limit is reset. Setting
      #            this value to `true` for parallel importing is crucial as
      #            otherwise hitting the rate limit will result in a thread
      #            being blocked in a `sleep()` call for up to an hour.
      def initialize(token, per_page: 100, parallel: true)
        @octokit = ::Octokit::Client.new(
          access_token: token,
          per_page: per_page,
          api_endpoint: api_endpoint
        )

        @octokit.connection_options[:ssl] = { verify: verify_ssl }

        @parallel = parallel
      end

      def parallel?
        @parallel
      end

      # Returns the details of a GitHub user.
      #
      # username - The username of the user.
      def user(username)
        with_rate_limit { octokit.user(username) }
      end

      # Returns the details of a GitHub repository.
      #
      # name - The path (in the form `owner/repository`) of the repository.
      def repository(name)
        with_rate_limit { octokit.repo(name) }
      end

      def labels(*args)
        each_object(:labels, *args)
      end

      def milestones(*args)
        each_object(:milestones, *args)
      end

      def releases(*args)
        each_object(:releases, *args)
      end

      # Fetches data from the GitHub API and yields a Page object for every page
      # of data, without loading all of them into memory.
      #
      # method - The Octokit method to use for getting the data.
      # args - Arguments to pass to the Octokit method.
      #
      # rubocop: disable GitlabSecurity/PublicSend
      def each_page(method, *args, &block)
        return to_enum(__method__, method, *args) unless block_given?

        # Start from the page requested by the caller (if any) so the reported
        # page numbers line up with the API's pagination.
        page =
          if args.last.is_a?(Hash) && args.last[:page]
            args.last[:page]
          else
            1
          end

        collection = with_rate_limit { octokit.public_send(method, *args) }
        next_url = octokit.last_response.rels[:next]

        yield Page.new(collection, page)

        # Follow the hypermedia "next" links instead of computing page URLs
        # ourselves; stops when GitHub stops handing out a next link.
        while next_url
          response = with_rate_limit { next_url.get }
          next_url = response.rels[:next]

          yield Page.new(response.data, page += 1)
        end
      end

      # Iterates over all of the objects for the given method (e.g. `:labels`).
      #
      # method - The method to send to Octokit for querying data.
      # args - Any arguments to pass to the Octokit method.
      def each_object(method, *args, &block)
        return to_enum(__method__, method, *args) unless block_given?

        each_page(method, *args) do |page|
          page.objects.each do |object|
            yield object
          end
        end
      end

      # Yields the supplied block, responding to any rate limit errors.
      #
      # The exact strategy used for handling rate limiting errors depends on
      # whether we are running in parallel mode or not. For more information see
      # `#raise_or_wait_for_rate_limit`.
      def with_rate_limit
        return yield unless rate_limiting_enabled?

        request_count_counter.increment

        # Pre-emptively back off before we actually run out of requests.
        raise_or_wait_for_rate_limit unless requests_remaining?

        begin
          yield
        rescue ::Octokit::TooManyRequests
          raise_or_wait_for_rate_limit

          # This retry will only happen when running in sequential mode as we'll
          # raise an error in parallel mode.
          retry
        end
      end

      # Returns `true` if we're still allowed to perform API calls.
      def requests_remaining?
        remaining_requests > RATE_LIMIT_THRESHOLD
      end

      # Number of API requests left in the current rate-limit window.
      def remaining_requests
        octokit.rate_limit.remaining
      end

      # In parallel mode raises RateLimitError; in sequential mode sleeps until
      # the rate limit has been reset. Always bumps the rate-limit-hit counter.
      def raise_or_wait_for_rate_limit
        rate_limit_counter.increment

        if parallel?
          raise RateLimitError
        else
          sleep(rate_limit_resets_in)
        end
      end

      def rate_limit_resets_in
        # We add a few seconds to the rate limit so we don't _immediately_
        # resume when the rate limit resets as this may result in us performing
        # a request before GitHub has a chance to reset the limit.
        octokit.rate_limit.resets_in + 5
      end

      # Rate limiting only applies to github.com; GitHub Enterprise instances
      # are assumed not to enforce it. Memoized for the client's lifetime.
      def rate_limiting_enabled?
        strong_memoize(:rate_limiting_enabled) do
          api_endpoint.include?('.github.com')
        end
      end

      def api_endpoint
        custom_api_endpoint || default_api_endpoint
      end

      # Endpoint configured in the GitHub omniauth provider settings, if any.
      def custom_api_endpoint
        github_omniauth_provider.dig('args', 'client_options', 'site')
      end

      def default_api_endpoint
        OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
      end

      def verify_ssl
        # Default to verifying SSL certificates unless explicitly disabled.
        github_omniauth_provider.fetch('verify_ssl', true)
      end

      def github_omniauth_provider
        @github_omniauth_provider ||=
          Gitlab::Auth::OAuth::Provider.config_for('github').to_h
      end

      # Prometheus counter for the number of times we hit the rate limit.
      def rate_limit_counter
        @rate_limit_counter ||= Gitlab::Metrics.counter(
          :github_importer_rate_limit_hits,
          'The number of times we hit the GitHub rate limit when importing projects'
        )
      end

      # Prometheus counter for the total number of API calls performed.
      def request_count_counter
        @request_counter ||= Gitlab::Metrics.counter(
          :github_importer_request_count,
          'The number of GitHub API calls performed when importing projects'
        )
      end
    end
  end
end
# frozen_string_literal: true

require 'slack-notifier'

module ChatMessage
  # Abstract base class for chat notification messages (Slack-style).
  #
  # Subclasses must implement `#message`, `#attachments` and `#activity`.
  # Construction accepts either the nested hook-payload shape
  # (`params[:project][:web_url]` etc.) or the flat legacy keys
  # (`params[:project_url]` etc.).
  class BaseMessage
    attr_reader :markdown
    attr_reader :user_full_name
    attr_reader :user_name
    attr_reader :user_avatar
    attr_reader :project_name
    attr_reader :project_url
    attr_reader :commit_message_html

    def initialize(params)
      @markdown = params[:markdown] || false
      @project_name = params.dig(:project, :path_with_namespace) || params[:project_name]
      @project_url = params.dig(:project, :web_url) || params[:project_url]
      @user_full_name = params.dig(:user, :name) || params[:user_full_name]
      @user_name = params.dig(:user, :username) || params[:user_name]
      @user_avatar = params.dig(:user, :avatar_url) || params[:user_avatar]
      @commit_message_html = params[:commit_message_html] || false
    end

    # "Full Name (username)" when a full name is known, otherwise just the
    # username.
    def user_combined_name
      if user_full_name.present?
        "#{user_full_name} (#{user_name})"
      else
        user_name
      end
    end

    # The message, Slack-link-formatted unless markdown output was requested.
    def summary
      return message if markdown

      format(message)
    end

    def pretext
      summary
    end

    def fallback
      format(message)
    end

    def attachments
      raise NotImplementedError
    end

    def activity
      raise NotImplementedError
    end

    private

    def message
      raise NotImplementedError
    end

    # Converts "[text](url)" markdown links into Slack's link syntax.
    def format(string)
      Slack::Notifier::LinkFormatter.format(string)
    end

    def attachment_color
      '#345'
    end

    def link(text, url)
      "[#{text}](#{url})"
    end

    # Formats a duration in seconds as "MM:SS", or "HH:MM:SS" for durations of
    # an hour or more.
    def pretty_duration(seconds)
      # Fix: the original referenced an undefined local `duration` here, which
      # raised NameError whenever this helper was called.
      parse_string =
        if seconds < 1.hour
          '%M:%S'
        else
          '%H:%M:%S'
        end

      Time.at(seconds).utc.strftime(parse_string)
    end
  end
end
module.exports = { 'plugins': { 'local': { 'browsers': [ 'chrome', 'firefox' ] } } }
<?php
/**
 * Copyright (c) 2017 Yohei Yoshikawa
 *
 * CLI script: shows (and optionally executes) the SQL diff between the VO
 * models and the PostgreSQL schema. Pass "1" as the first CLI argument to
 * actually execute the generated SQL; any other invocation is a dry run.
 */
$lang = 'ja';
require_once dirname(__FILE__).'/../../lib/Controller.php';

// Fix: guard $argv[1] with isset() and always initialize $is_excute_sql so a
// plain invocation no longer triggers undefined-index / undefined-variable
// notices (and $is_excute_sql is always a boolean below).
$is_excute_sql = isset($argv[1]) && $argv[1] == 1;

// Default host; isset() avoids an undefined-variable notice when no host was
// configured by the included Controller.
if (!isset($host) || !$host) $host = 'localhost';

if ($is_excute_sql) {
    echo('--- Mode: excute SQL ---').PHP_EOL;
} else {
    echo('--- Mode: Do not excute SQL ---').PHP_EOL;
}
echo("host: {$host}").PHP_EOL;

$pgsql = new PwPgsql();
$pgsql->is_excute_sql = $is_excute_sql;
$pgsql->diffFromVoModel();
# Build the Ring MySQL extension as a shared library (libring_mysql.so).

# Compile the VM binding as position-independent code, against the Ring
# language headers and the system MySQL client headers.
gcc -c -fpic -O2 ring_vmmysql.c -I $PWD/../../language/include -I /usr/include/mysql

# Link the object into the Ring lib directory, against libring and
# libmysqlclient (32-bit Linux library paths).
gcc -shared -o $PWD/../../lib/libring_mysql.so ring_vmmysql.o -L $PWD/../../lib -lring -L /usr/lib/i386-linux-gnu -L /usr/lib/mysql/lib -lmysqlclient
<!-- Event details view (AngularJS template): edit button, back link,
     title / lecturers / tags header, HTML description, and attendee list.
     Expects `details` (the event) and `global.data.users` (user lookup by id)
     on the scope. -->
<a href="/edit/{{_id}}" class="btn btn-success pull-right">Edit</a>
<p><a href="/">&lt; Back to Calendar</a></p>
<hgroup>
    <h2 data-ng-bind="details.title"></h2>
    <!-- Comma-separated lecturer links, resolved to full names via the user map. -->
    <h3><span data-ng-repeat="user in details.lector"><a href="user/{{user}}" data-ng-bind="global.data.users[user].full_name"></a><i ng-if="!$last">, </i></span></h3>
    <!-- Comma-separated tag links. -->
    <h4><span data-ng-repeat="tag in details.tags"><a href="tag/{{tag}}" data-ng-bind="tag"></a><i ng-if="!$last">, </i></span></h4>
</hgroup>
<!-- Description is rendered as HTML; ng-bind-html sanitizes via $sce. -->
<div data-ng-bind-html="details.description"></div>
<!-- Attendee list. -->
<div class="well">
    <span data-ng-repeat="user in details.attendees"><a href="user/{{user}}">{{global.data.users[user].full_name}} </a><i ng-if="!$last">, </i></span>
</div>
package sales.bucket.domain; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import sales.users.domain.User; import javax.persistence.*; import java.util.List; /** * Created by taras on 13.08.15. */ @Entity @Table(name="buckets") public class Bucket { @Id @GeneratedValue(strategy = GenerationType.AUTO) @JsonProperty private Long id; @ManyToOne(targetEntity = User.class) @JoinColumn(name = "client", referencedColumnName = "id") @JsonProperty private User client; @OneToMany(cascade = CascadeType.ALL, mappedBy="bucket", fetch = FetchType.EAGER) @JsonProperty private List<GoodInBucket> goodsInBucket; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public User getClient() { return client; } public void setClient(User client) { this.client = client; } }
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta name="description" content="">
    <link rel="shortcut icon" href="img/favicon.png">
    <title>FreeMarket</title>

    <!-- Bootstrap core CSS -->
    <link href="css/bootstrap.min.css" rel="stylesheet">
    <link href="css/bootstrap-reset.css" rel="stylesheet">
    <!--external css-->
    <link href="assets/font-awesome/css/font-awesome.min.css" rel="stylesheet" />
    <!-- Custom styles for this template -->
    <link href="css/style.css" rel="stylesheet">
    <link href="css/style-responsive.css" rel="stylesheet" />
    <link href="css/spinner.css" rel="stylesheet">
    <link href="assets/datatables/css/demo_page.css" rel="stylesheet" />
    <link href="assets/datatables/css/demo_table.css" rel="stylesheet" />
    <link href="assets/datatables/DT_bootstrap.css" rel="stylesheet" />

    <!-- HTML5 shim and Respond.js IE8 support of HTML5 tooltips and media queries -->
    <!--[if lt IE 9]>
    <script src="js/html5shiv.js"></script>
    <script src="js/respond.min.js"></script>
    <![endif]-->
</head>
<!-- NOTE(review): this <script> sits between </head> and <body>, which is
     invalid HTML; browsers tolerate it, but it should live inside <head>. -->
<script>
    // Redirect the user if the user is not already logged in
    function checkLogin() {
        var loginStatus = sessionStorage.getItem("loginStatus");
        if (loginStatus == 'notloggedin') {
            localStorage.setItem("lastpage", "ViewYourActiveItems.html"); // set the page for the Continue button on Login Notice page
            var fmlang = localStorage.getItem("fmlang");
            window.location.href = 'login_notice.html' + '?setLng=' + fmlang;
        }
    };
</script>

<body class="full-width" onload="setAccountNumber(); checkLogin(); setLanguage(); setLanguageQueryStrings();">
    <section id="container" class="">
        <!--include main menu-->
        <script type="text/javascript" src="js/menu.js">
        </script>
        <!--end include main menu-->
        <!--sidebar start-->
        <!--sidebar end-->
        <!--main content start-->
        <section id="main-content">
            <section class="wrapper site-min-height">
                <!-- page start-->
                <div class="row">
                    <div class="col-lg-12">
                        <section class="panel">
                            <header class="panel-heading">
                                <span data-i18n="header_all_your_active_items">All Your Active Items</span>
                            </header>
                            <div class="panel-body">
                                <div id="error-message">
                                    <div class="adv-table">
                                        <!-- DataTable shell; rows are filled in by the inline
                                             script below via the ownListings API. -->
                                        <table class="display table table-bordered table-striped" id="example">
                                            <thead>
                                                <tr>
                                                    <th><span data-i18n="image">Image</span></th>
                                                    <th><span data-i18n="item_id">Item ID</span></th>
                                                    <th><span data-i18n="item_title">Title</span></th>
                                                    <th class="hidden-phone"><span data-i18n="price">Price</span></th>
                                                    <th class="hidden-phone"><span data-i18n="status">Status</span></th>
                                                </tr>
                                            </thead>
                                            <tfoot>
                                                <tr>
                                                    <th><span data-i18n="image">Image</span></th>
                                                    <th><span data-i18n="item_id">Item ID</span></th>
                                                    <th><span data-i18n="item_title">Title</span></th>
                                                    <th class="hidden-phone"><span data-i18n="price">Price</span></th>
                                                    <th class="hidden-phone"><span data-i18n="status">Status</span></th>
                                                </tr>
                                            </tfoot>
                                        </table>
                                    </div>
                                </div>
                            </div>
                        </section>
                    </div>
                </div>
                <!-- page end-->
            </section>
        </section>
        <!--main content end-->
        <!-- js placed at the end of the document so the pages load faster -->
        <script src="js/jquery-1.11.1.min.js"></script>
        <script src="js/bootstrap.min.js"></script>
        <script src="js/jquery.dcjqaccordion.2.7.min.js"></script>
        <script src="js/bootstrap-hover-dropdown.min.js"></script>
        <script src="js/respond.min.js"></script>
        <!--common script for all pages-->
        <script src="js/constants.js"></script>
        <script src="js/freemarket_scripts.js"></script>
        <script src="js/common-scripts.js"></script>
        <script src="js/i18next-1.7.4.min.js"></script>
        <script src="js/freemarket_localization_scripts.js"></script>
        <script src="assets/datatables/jquery.dataTables2.min.js"></script>
        <script src="assets/datatables/DT_bootstrap.js"></script>
        <script src="assets/datatables/dataTables.fixedHeader.min.js"></script>
        <script src="js/moment.min.js"></script>
        <script src="js/pending_sales_notice.js"></script>
        <script>
            ///HACK: do not forget to have jquery and scripts above our calls
            $(document).ready(function()
            {
                // Populate the Active Items DataTable from the ownListings API,
                // filtering out expired, fully-pending and canceled listings.
                var current_lang = getSearchMenuLocalised();
                var loggedInAccount = sessionStorage.getItem("RSaccountNumber");
                var table = $('#example').DataTable({
                    "language": current_lang,
                    "aaSorting": [
                        [0, "asc"]
                    ],
                    "stateSave": true,
                    "iDisplayLength": 50,
                    "fnInitComplete": function() {
                        $("#example_filter input").focus();
                    }
                });
                $.post(Constants.nxtpassApiUrl, {
                    requestType: 'ownListings',
                    account: loggedInAccount
                }, function(data) {
                    if (data.query_status == "bad") {
                        document.getElementById("error-message").innerHTML = "<p class=\"validation_message\">Error: Please ensure that NXT is running.</p><p></p>";
                    } else {
                        for (var i = 0; i < data['items'].length; i++) {
                            // Gather some data about the item for calculation below
                            var pending_count = data.items[i].pending_count; // The pending count
                            var originalQuantity = parseInt(data.items[i].quantity); // The quantity initially available
                            sessionStorage.setItem("startQuantity", originalQuantity); // In case we need this in session storage
                            // Expired items do not belong in the Active Items view - there is a separate view for that, so exclude them
                            // Calculate expired or not expired status, since the item_statuses API does not report expired status
                            // Get end datestamp
                            var endDate = escapeHtml(data.items[i].end_timestamp);
                            // Get the current datestamp for comparison
                            var currentTime = Date.now();
                            // Use moment library to convert current time to NXT timestamp
                            // NOTE(review): moment's add('seconds', n) argument order is
                            // deprecated (should be add(n, 'seconds')) — works with the
                            // bundled moment build, but verify before upgrading moment.
                            var unixEpoch = moment("1970-01-01T00:00:00.0Z");
                            var nxtEpoch = moment(unixEpoch).add('seconds', 1385294400);
                            var recalculatedEndDate = moment(nxtEpoch).add('seconds', endDate);
                            // Compare the item's end date to the current time
                            if (recalculatedEndDate < currentTime) {
                                // Note this fact in the console, and that is it.
                                console.log("This item is expired: " + data.items[i].listing_id);
                            } else {
                                // If item is NOT expired, continue on...
                                // Now see if this item has any Pending sales, and if so, do some extra processing.
                                // We only want to include an item in the Active Items view if it is still available.
                                // So if the pending count is less than the original quantity available, then we display it.
                                // Otherwise, if the all of the items are pending, we do not display it.
                                if (pending_count >= originalQuantity) {
                                    // All items are pending, so no need to display as an Active Item
                                    // Yell at the console
                                    var listingid = data.items[i].listing_id;
                                    var quantityStillAvailable = (originalQuantity - pending_count);
                                    console.log("Pending item found: the listingid is " + listingid + " the quantity still for sale is " + quantityStillAvailable + ".");
                                    console.log("If the quantity still for sale is anything other than zero, then there is a bug. Please report.");
                                } // End if all items are pending
                                else {
                                    // Otherwise, check for Available status. If Available, list it; if not (Canceled), do not list it.
                                    // Clean listing id field
                                    // NOTE(review): cleanListingID is a function-scoped `var`
                                    // captured by the async $.post callback below; by the time
                                    // the response arrives it may hold a LATER iteration's id
                                    // (classic var-in-loop closure). The callback mostly uses
                                    // item.listing_id, but the "Canceled item" log line uses
                                    // cleanListingID and can log the wrong id — confirm and
                                    // consider `let` or an IIFE.
                                    var cleanListingID = escapeHtml(data.items[i].listing_id);
                                    $.post(Constants.nxtpassApiUrl, {
                                        requestType: 'getItemPublic',
                                        listing_id: cleanListingID
                                    }, function(item) {
                                        // This is to get the status, which should be either Available or Canceled at this point
                                        var itemID = item.listing_id;
                                        console.log("getItemPublic running for listing ID " + itemID + ".");
                                        var status = item.item_status;
                                        if (status === "Available") {
                                            console.log("Listing ID " + itemID + " has status of Available.");
                                            var displayStatus = 'Available';
                                            // Set the currency
                                            var displayCurrency = 'NXT';
                                            var displayPrice = item.price / 100000000 + " " + displayCurrency;
                                            if (item.currency == Constants.coinoUsdAssetName) {
                                                displayCurrency = 'CoinoUSD';
                                                displayPrice = item.price / 100 + " " + displayCurrency;
                                            }
                                            var linkStatus = '<a href="ViewYourItemSingle.html';
                                            // Clean title field
                                            var cleanTitle = escapeHtml(item.item_title);
                                            var fmlang = localStorage.getItem("fmlang");
                                            // NOTE(review): setItem returns undefined, so
                                            // searchstorage is always undefined (unused).
                                            var searchstorage = localStorage.setItem("searchpage", "ViewYourActiveItems.html");
                                            var listImage = '<i class="fa fa-picture-o fa-3x"></i>';
                                            // String comparison: truthy only for a non-empty URL.
                                            if (item.main_image_url > "") {
                                                // Clean image URL
                                                var cleanURL = escapeHtml(item.main_image_url);
                                                listImage = '<img src="' + cleanURL + '" alt="Item Thumbnail Image" height="40" width="40">';
                                            }
                                            table.rows.add([
                                                [
                                                    listImage,
                                                    linkStatus + '?setLng=' + fmlang + '&itemid=' + itemID + '">' + itemID + '</a>',
                                                    linkStatus + '?setLng=' + fmlang + '&itemid=' + itemID + '">' + cleanTitle + '</a>',
                                                    displayPrice,
                                                    displayStatus,
                                                    itemID
                                                ],
                                            ]).draw();
                                        } else {
                                            // Yell at the console
                                            console.log("Canceled item found, listing ID: " + cleanListingID + ".");
                                        }
                                    }, "json"); // Specifies JSON as the expected result
                                } // End of else for non-Pending items
                            } // End of else for non-expired items
                        }; // Close for loop
                        // NOTE(review): the row-count check below runs before the async
                        // getItemPublic callbacks above have added any rows — confirm
                        // this only reflects state-saved rows from a previous visit.
                        var rows = table.rows({
                            page: "current"
                        }).data();
                        if (rows.length > 0) {
                            //fetch the status for initial page
                            fetchStatuses();
                        } else {
                            // No rows: rebuild the table so the localized
                            // "zero records" text is shown as the empty-table message.
                            var oTable = $('#example').dataTable();
                            oTable.fnDestroy();
                            current_lang.sEmptyTable = current_lang.sZeroRecords;
                            table = $('#example').DataTable({
                                "language": current_lang,
                                "aaSorting": [
                                    [0, "asc"]
                                ],
                                "stateSave": true,
                                "iDisplayLength": 50,
                                "fnInitComplete": function() {
                                    $("#example_filter input").focus();
                                }
                            });
                        }
                    } // Close else statement for non-bad query
                }, "json"); // Specifies JSON as the expected result
            });
        </script>
    </section>
</body>

</html>
Elixir agents — used for keeping state.
/*
Copyright 2014 GitHub Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/* GitHub syntax-highlighting ("prettylights") light theme: maps the .pl-*
   token classes emitted by the highlighter to colors; the comment after each
   selector names the TextMate scope(s) it covers. */

.pl-c /* comment */ {
  color: #969896;
}

.pl-c1 /* constant, markup.raw, meta.diff.header, meta.module-reference, meta.property-name, support, support.constant, support.variable, variable.other.constant */,
.pl-s .pl-v /* string variable */ {
  color: #0086b3;
}

.pl-e /* entity */,
.pl-en /* entity.name */ {
  color: #795da3;
}

.pl-s .pl-s1 /* string source */,
.pl-smi /* storage.modifier.import, storage.modifier.package, storage.type.java, variable.other, variable.parameter.function */ {
  color: #333;
}

.pl-ent /* entity.name.tag */ {
  color: #63a35c;
}

.pl-k /* keyword, storage, storage.type */ {
  color: #a71d5d;
}

.pl-pds /* punctuation.definition.string, string.regexp.character-class */,
.pl-s /* string */,
.pl-s .pl-pse .pl-s1 /* string punctuation.section.embedded source */,
.pl-sr /* string.regexp */,
.pl-sr .pl-cce /* string.regexp constant.character.escape */,
.pl-sr .pl-sra /* string.regexp string.regexp.arbitrary-repitition */,
.pl-sr .pl-sre /* string.regexp source.ruby.embedded */ {
  color: #183691;
}

.pl-v /* variable */ {
  color: #ed6a43;
}

.pl-id /* invalid.deprecated */ {
  color: #b52a1d;
}

.pl-ii /* invalid.illegal */ {
  background-color: #b52a1d;
  color: #f8f8f8;
}

/* More specific than the .pl-sr group above, so escapes inside regexps win. */
.pl-sr .pl-cce /* string.regexp constant.character.escape */ {
  color: #63a35c;
  font-weight: bold;
}

.pl-ml /* markup.list */ {
  color: #693a17;
}

.pl-mh /* markup.heading */,
.pl-mh .pl-en /* markup.heading entity.name */,
.pl-ms /* meta.separator */ {
  color: #1d3e81;
  font-weight: bold;
}

.pl-mq /* markup.quote */ {
  color: #008080;
}

.pl-mi /* markup.italic */ {
  color: #333;
  font-style: italic;
}

.pl-mb /* markup.bold */ {
  color: #333;
  font-weight: bold;
}

.pl-md /* markup.deleted, meta.diff.header.from-file */ {
  background-color: #ffecec;
  color: #bd2c00;
}

.pl-mi1 /* markup.inserted, meta.diff.header.to-file */ {
  background-color: #eaffea;
  color: #55a532;
}

.pl-mdr /* meta.diff.range */ {
  color: #795da3;
  font-weight: bold;
}

.pl-mo /* meta.output */ {
  color: #1d3e81;
}
import React, { useEffect, useMemo, useRef } from 'react'
import styled from 'styled-components'
import {
  Box,
  Button,
  Dialog,
  extend,
  Paragraph,
  Progress,
  RefreshIcon,
  themeGet,
} from '../../components'
import { useFullSizeMode } from '../FullSizeView'
import { useIntersection } from '../IntersectionObserver'
import { usePages } from '../Pages'
import { Nav, useRouteActions } from '../Router'
import { Overlay, ScrollSpy } from '../ScrollSpy'
import { isUgoira } from '../Ugoira'
import { PADDING, StandardImg } from './StandardImg'
import { StandardUgoira } from './StandardUgoira'
import { LazyLoadingObserver } from './useLazyLoad'

interface Props {
  illustId: string
}

interface SuspenseProps extends Props {
  children?: React.ReactNode
}

interface SuccessProps {
  pages: Pixiv.Pages
}

/**
 * Scrollable standard (non-full-size) viewer for a single illustration.
 * Loads the illust's pages via Suspense and hides itself while full-size
 * mode is active (kept mounted so scroll state survives).
 */
export const StandardView = ({ illustId, children }: SuspenseProps) => {
  const root = useRef<HTMLDivElement>(null)
  const { unset } = useRouteActions()
  const [isFullSize, setFullSize] = useFullSizeMode()

  // When navigating to another illust: leave full-size mode, scroll back
  // to the top and focus the root container.
  useEffect(() => {
    setFullSize(false)

    const node = root.current
    if (!node) return
    node.scroll(0, 0)
    node.focus()
  }, [illustId, setFullSize])

  // When returning from full-size mode, refocus the root.
  useEffect(() => {
    if (isFullSize) return
    const node = root.current
    if (!node) return
    node.focus()
  }, [isFullSize])

  return (
    <Root ref={root} tabIndex={0} hidden={isFullSize}>
      <Box sx={{ userSelect: 'none', position: 'relative' }}>
        {/* Clicking the image area closes the viewer (unsets the route). */}
        <span onClick={unset}>
          <React.Suspense fallback={<Loading />}>
            <Loader illustId={illustId} />
          </React.Suspense>
        </span>
        <Nav />
      </Box>
      {children}
    </Root>
  )
}

// Resolves the pages for the illust; falsy result means the request failed.
const Loader = ({ illustId }: Props) => {
  const pages = usePages(illustId)
  if (!pages) return <Failure illustId={illustId} />
  return <Success pages={pages} />
}

const Loading = () => (
  <ImageBox>
    <Progress />
  </ImageBox>
)

// Error dialog with a retry button; retry evicts the cached request so the
// Suspense loader refetches.
const Failure = ({ illustId }: Props) => (
  <ImageBox>
    <Dialog onClick={(e) => e.stopPropagation()}>
      <Dialog.Content>
        <Paragraph>リクエストに失敗しました[illustId: {illustId}]</Paragraph>
      </Dialog.Content>
      <Dialog.Footer>
        <Button onClick={() => usePages.remove(illustId)}>
          <RefreshIcon width={18} height={18} sx={{ mr: 2 }} />
          再取得
        </Button>
      </Dialog.Footer>
    </Dialog>
  </ImageBox>
)

// Renders every page (image or ugoira animation) wrapped in scroll-spy items,
// and starts the shared lazy-loading IntersectionObserver.
const Success = ({ pages }: SuccessProps) => {
  const isMultiple = pages.length > 1
  const imgs = useMemo(() => {
    // Ugoira status is a property of the work, so checking page 0 decides
    // the renderer for all pages.
    const ugoira = isUgoira(pages[0])
    return pages.map((page, index) => (
      <ScrollSpy.SpyItem key={page.urls.original} index={index}>
        <ImageBox tabIndex={0}>
          {!ugoira && <StandardImg {...page} />}
          {ugoira && <StandardUgoira {...page} />}
        </ImageBox>
      </ScrollSpy.SpyItem>
    ))
  }, [pages])

  const observer = useIntersection()

  useEffect(() => {
    observer.start()
  }, [observer])

  return (
    <LazyLoadingObserver.Provider value={observer}>
      {imgs}
      <ScrollSpy.SpyItemLast />
      {isMultiple && <Overlay pages={pages} />}
    </LazyLoadingObserver.Provider>
  )
}

const Root = styled.section(
  extend({
    '--caption-height': '56px',
    pointerEvents: 'auto',
    outline: 'none',
    position: 'relative',
    overflow: 'auto',
    width: '100%',
    height: '100vh',
    // Stay in the layout while hidden so scroll position is preserved.
    '&[hidden]': {
      display: 'block',
      opacity: 0,
    },
  } as any)
)

const ImageBox = styled.div(
  extend({
    outline: 'none',
    position: 'relative',
    display: 'flex',
    flexDirection: 'column',
    width: '100%',
    height: 'calc(100vh - var(--caption-height))',
    p: PADDING,
  })
)

const Action = styled.div(
  extend({
    pointerEvents: 'none',
    position: 'absolute',
    top: 0,
    left: 0,
    display: 'flex',
    justifyContent: 'space-between',
    width: '100%',
    height: '100%',
  })
)

const Circle = styled.div(
  extend({
    pointerEvents: 'auto',
    position: 'sticky',
    top: 'calc(50vh - var(--caption-height))',
    width: '48px',
    height: '48px',
    mx: 2,
    borderRadius: '50%',
    bg: 'surface',
    opacity: themeGet('opacities.inactive'),
    transform: 'translateY(-50%)',
    ':hover': {
      opacity: 1,
    },
  })
)

if (__DEV__) {
  Loader.displayName = 'StandardView.Loader'
  Loading.displayName = 'StandardView.Loading'
  Success.displayName = 'StandardView.Success'
  Failure.displayName = 'StandardView.Failure'
  Root.displayName = 'StandardView.Root'
  ImageBox.displayName = 'StandardView.ImageBox'
  Action.displayName = 'StandardView.Action'
  Circle.displayName = 'StandardView.Circle'
}
// JScript File
// Remembers the container's border while an editor is active so
// editorOff() can restore it later.
var borderstyle;

// Activate editing for the element with id `divid`: hide the edit icon in
// the container's last child and temporarily remove the container's border.
function editorOn(divid) {
    var container = $('#' + divid).parent().parent();
    container.find(' >*:last-child img').css('visibility', 'hidden');
    borderstyle = container.css('border');
    container.css('border', '');
}

// Deactivate editing: show the edit icon again and restore the border that
// editorOn() saved.
function editorOff(divid) {
    var container = $('#' + divid).parent().parent();
    container.find(' >*:last-child img').css('visibility', '');
    container.css('border', borderstyle);
}
#pragma once /* Copyright (c) 2013, Phil Vachon <phil@cowpig.ca> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#ifdef __cplusplus
extern "C" {
#endif /* defined(__cplusplus) */

#include <tsl/errors.h>

#include <stddef.h>

/* Forward declare opaque structures */
struct cpu_mask;
struct config;

/**
 * Create a new, empty CPU mask
 * \param mask reference to receive a pointer to the new mask
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_new(struct cpu_mask **mask);

/**
 * Clear a single node in the CPU mask
 * \param mask mask to update
 * \param cpu_id the CPU to clear
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_clear(struct cpu_mask *mask, size_t cpu_id);

/**
 * Set a single node in the CPU mask
 * \param mask mask to update
 * \param cpu_id the CPU to set
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_set(struct cpu_mask *mask, size_t cpu_id);

/**
 * Clear entire CPU mask
 * \param mask mask to update
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_clear_all(struct cpu_mask *mask);

/**
 * Set entire CPU mask
 * \param mask mask to update
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_set_all(struct cpu_mask *mask);

/**
 * Test if a CPU is in the set
 * \param mask Mask to check
 * \param cpu_id The ID number of the CPU in question
 * \param value Reference to an integer to return the result in
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_test(struct cpu_mask *mask, size_t cpu_id, int *value);

/**
 * Clone a CPU mask
 * \param _new Reference to a new CPU mask
 * \param orig Original mask to clone
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_clone(struct cpu_mask **_new, const struct cpu_mask *orig);

/**
 * Free a CPU mask
 * \param mask Reference to a pointer to existing CPU mask
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_delete(struct cpu_mask **mask);

/**
 * Apply the CPU mask as the affinity of the current thread.
 * \param mask The CPU mask to be applied.
 * \return A_OK on success, an error code otherwise
 */
aresult_t cpu_mask_apply(struct cpu_mask *mask);

#ifdef __cplusplus
} /* extern "C" */
#endif /* defined(__cplusplus) */
<?php

namespace LaravelDoctrine\Extensions\Uploadable;

use Doctrine\Common\Annotations\Reader;
use Doctrine\Common\EventManager;
use Doctrine\ORM\EntityManagerInterface;
use Gedmo\Uploadable\UploadableListener;
use LaravelDoctrine\ORM\Extensions\Extension;

/**
 * Doctrine extension that wires Gedmo's Uploadable listener into the
 * entity manager's event system.
 */
class UploadableExtension implements Extension
{
    /**
     * Fix: this property was previously assigned without being declared,
     * i.e. a dynamic property (deprecated as of PHP 8.2).
     *
     * @var UploadableListener
     */
    protected $uploadableListener;

    /**
     * @param UploadableListener $uploadableListener
     */
    public function __construct(UploadableListener $uploadableListener)
    {
        $this->uploadableListener = $uploadableListener;
    }

    /**
     * Registers the uploadable listener (configured with the annotation
     * reader) on the given event manager.
     *
     * @param EventManager           $manager
     * @param EntityManagerInterface $em
     * @param Reader                 $reader
     */
    public function addSubscribers(EventManager $manager, EntityManagerInterface $em, Reader $reader = null)
    {
        $this->uploadableListener->setAnnotationReader($reader);

        $manager->addEventSubscriber($this->uploadableListener);
    }

    /**
     * This extension registers no query filters.
     *
     * @return array
     */
    public function getFilters()
    {
        return [];
    }
}