prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>DateOfBirthImpl.java<|end_file_name|><|fim▁begin|>/* * Copyright 2010-2011 Nabeel Mukhtar * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ <|fim▁hole|> package com.google.code.linkedinapi.schema.impl; import java.io.Serializable; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import com.google.code.linkedinapi.schema.Adapter1; import com.google.code.linkedinapi.schema.DateOfBirth; @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "year", "month", "day" }) @XmlRootElement(name = "date-of-birth") public class DateOfBirthImpl implements Serializable, DateOfBirth { private final static long serialVersionUID = 2461660169443089969L; @XmlElement(required = true, type = String.class) @XmlJavaTypeAdapter(Adapter1 .class) protected Long year; @XmlElement(required = true, type = String.class) @XmlJavaTypeAdapter(Adapter1 .class) protected Long month; @XmlElement(required = true, type = String.class) @XmlJavaTypeAdapter(Adapter1 .class) protected Long day; public Long getYear() { return year; } public void setYear(Long value) { this.year = value; } public Long getMonth() { return month; } public void setMonth(Long value) { this.month = value; } public Long getDay() { return day; } public void setDay(Long 
value) { this.day = value; } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__import__("pkg_resources").declare_namespace(__name__) from infi.clickhouse_orm.database import * from infi.clickhouse_orm.engines import *<|fim▁hole|>from infi.clickhouse_orm.models import * from infi.clickhouse_orm.query import * from infi.clickhouse_orm.system_models import * from inspect import isclass __all__ = [c.__name__ for c in locals().values() if isclass(c)]<|fim▁end|>
from infi.clickhouse_orm.fields import * from infi.clickhouse_orm.funcs import * from infi.clickhouse_orm.migrations import *
<|file_name|>tag-variant-cast-non-nullary.rs<|end_file_name|><|fim▁begin|>// run-rustfix #![allow(dead_code, unused_variables)] enum NonNullary { Nullary, Other(isize), } impl From<NonNullary> for isize { fn from(val: NonNullary) -> isize { match val { NonNullary::Nullary => 0,<|fim▁hole|>} fn main() { let v = NonNullary::Nullary; let val = v as isize; //~ ERROR non-primitive cast: `NonNullary` as `isize` [E0605] }<|fim▁end|>
NonNullary::Other(i) => i, } }
<|file_name|>JsonAction.java<|end_file_name|><|fim▁begin|>/* * Copyright 2020 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.controller.actions; import com.thoughtworks.go.config.validation.GoConfigValidity; import com.thoughtworks.go.server.web.JsonView; import com.thoughtworks.go.server.web.SimpleJsonView; import com.thoughtworks.go.serverhealth.ServerHealthState; import com.thoughtworks.go.util.GoConstants; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletResponse; import java.util.LinkedHashMap; import java.util.Map; import static com.thoughtworks.go.util.GoConstants.ERROR_FOR_JSON; import static com.thoughtworks.go.util.GoConstants.RESPONSE_CHARSET_JSON; import static javax.servlet.http.HttpServletResponse.*; public class JsonAction implements RestfulAction { private final int status; private final Object json; public static JsonAction from(ServerHealthState serverHealthState) { if (serverHealthState.isSuccess()) { return jsonCreated(new LinkedHashMap()); } Map<String, Object> jsonLog = new LinkedHashMap<>(); jsonLog.put(ERROR_FOR_JSON, serverHealthState.getDescription()); return new JsonAction(serverHealthState.getType().getHttpCode(), jsonLog); } public static JsonAction jsonCreated(Object json) { return new JsonAction(SC_CREATED, json); } public static JsonAction jsonFound(Object json) { return new JsonAction(SC_OK, json); } public static JsonAction jsonOK() { return 
jsonOK(new LinkedHashMap()); } public static JsonAction jsonNotAcceptable(Object json) { return new JsonAction(SC_NOT_ACCEPTABLE, json); } public static JsonAction jsonForbidden() { return new JsonAction(SC_FORBIDDEN, new LinkedHashMap()); } public static JsonAction jsonForbidden(String message) { Map<String, Object> map = new LinkedHashMap<>(); map.put(ERROR_FOR_JSON, message); return new JsonAction(SC_FORBIDDEN, map); } public static JsonAction jsonForbidden(Exception e) { return jsonForbidden(e.getMessage()); } public static JsonAction jsonBadRequest(Object json) { return new JsonAction(SC_BAD_REQUEST, json); } public static JsonAction jsonNotFound(Object json) { return new JsonAction(SC_NOT_FOUND, json); } public static JsonAction jsonConflict(Object json) { return new JsonAction(SC_CONFLICT, json); } public static JsonAction jsonByValidity(Object json, GoConfigValidity.InvalidGoConfig configValidity) { return (configValidity.isType(GoConfigValidity.VT_CONFLICT) || configValidity.isType(GoConfigValidity.VT_MERGE_OPERATION_ERROR) || configValidity.isType(GoConfigValidity.VT_MERGE_POST_VALIDATION_ERROR) || configValidity.isType(GoConfigValidity.VT_MERGE_PRE_VALIDATION_ERROR)) ? 
jsonConflict(json) : jsonNotFound(json); } /** * @deprecated replace with createView */ @Override<|fim▁hole|> public ModelAndView respond(HttpServletResponse response) { return new JsonModelAndView(response, json, status); } private JsonAction(int status, Object json) { this.status = status; this.json = json; } public ModelAndView createView() { SimpleJsonView view = new SimpleJsonView(status, json); return new ModelAndView(view, JsonView.asMap(json)); } public static JsonAction jsonOK(Map jsonMap) { return new JsonAction(SC_OK, jsonMap); } private class JsonModelAndView extends ModelAndView { @Override public String getViewName() { return "jsonView"; } public JsonModelAndView(HttpServletResponse response, Object json, int status) { super(new JsonView(), JsonView.asMap(json)); // In IE, there's a problem with caching. We want to cache if we can. // This will force the browser to clear the cache only for this page. // If any other pages need to clear the cache, we might want to move this // logic to an intercepter. response.addHeader("Cache-Control", GoConstants.CACHE_CONTROL); response.setStatus(status); response.setContentType(RESPONSE_CHARSET_JSON); } } }<|fim▁end|>
<|file_name|>gen.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build ignore // This program generates internet protocol constatns and tables by // reading IANA protocol registries. // // Usage of this program: // go run gen.go > iana.go package main import ( "bytes" "encoding/xml" "fmt" "go/format" "io" "net/http" "os" "strconv" "strings" ) var registries = []struct { url string parse func(io.Writer, io.Reader) error }{ { "http://www.iana.org/assignments/icmp-parameters", parseICMPv4Parameters, }, { "http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xml", parseProtocolNumbers, }, } func main() { var bb bytes.Buffer fmt.Fprintf(&bb, "// go run gen.go\n") fmt.Fprintf(&bb, "// GENERATED BY THE COMMAND ABOVE; DO NOT EDIT\n\n") fmt.Fprintf(&bb, "package ipv4\n\n") for _, r := range registries { resp, err := http.Get(r.url) if err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { fmt.Fprintf(os.Stderr, "got HTTP status code %v for %v\n", resp.StatusCode, r.url) os.Exit(1) } if err := r.parse(&bb, resp.Body); err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) } fmt.Fprintf(&bb, "\n") } b, err := format.Source(bb.Bytes()) if err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) } os.Stdout.Write(b) } func parseICMPv4Parameters(w io.Writer, r io.Reader) error { dec := xml.NewDecoder(r) var icp icmpv4Parameters if err := dec.Decode(&icp); err != nil { return err } prs := icp.escape(0) fmt.Fprintf(w, "// %s, Updated: %s\n", icp.Title, icp.Updated) fmt.Fprintf(w, "const (\n") for _, pr := range prs { if pr.Descr == "" { continue } fmt.Fprintf(w, "ICMPType%s ICMPType = %d", pr.Descr, pr.Value) fmt.Fprintf(w, "// %s\n", pr.OrigDescr) } fmt.Fprintf(w, ")\n\n") fmt.Fprintf(w, "// %s, Updated: %s\n", icp.Title, icp.Updated)<|fim▁hole|> continue } 
fmt.Fprintf(w, "%d: %q,\n", pr.Value, strings.ToLower(pr.OrigDescr)) } fmt.Fprintf(w, "}\n") return nil } type icmpv4Parameters struct { XMLName xml.Name `xml:"registry"` Title string `xml:"title"` Updated string `xml:"updated"` Registries []icmpv4ParamRegistry `xml:"registry"` } type icmpv4ParamRegistry struct { Title string `xml:"title"` Records []icmpv4ParamRecord `xml:"record"` } type icmpv4ParamRecord struct { Value string `xml:"value"` Descr string `xml:"description"` } type canonICMPv4ParamRecord struct { OrigDescr string Descr string Value int } func (icp *icmpv4Parameters) escape(id int) []canonICMPv4ParamRecord { prs := make([]canonICMPv4ParamRecord, len(icp.Registries[id].Records)) sr := strings.NewReplacer( "Messages", "", "Message", "", "ICMP", "", "+", "P", "-", "", "/", "", ".", "", " ", "", ) for i, pr := range icp.Registries[id].Records { if strings.Contains(pr.Descr, "Reserved") || strings.Contains(pr.Descr, "Unassigned") || strings.Contains(pr.Descr, "Deprecated") || strings.Contains(pr.Descr, "Experiment") || strings.Contains(pr.Descr, "experiment") { continue } ss := strings.Split(pr.Descr, "\n") if len(ss) > 1 { prs[i].Descr = strings.Join(ss, " ") } else { prs[i].Descr = ss[0] } s := strings.TrimSpace(prs[i].Descr) prs[i].OrigDescr = s prs[i].Descr = sr.Replace(s) prs[i].Value, _ = strconv.Atoi(pr.Value) } return prs } func parseProtocolNumbers(w io.Writer, r io.Reader) error { dec := xml.NewDecoder(r) var pn protocolNumbers if err := dec.Decode(&pn); err != nil { return err } prs := pn.escape() prs = append([]canonProtocolRecord{{ Name: "IP", Descr: "IPv4 encapsulation, pseudo protocol number", Value: 0, }}, prs...) 
fmt.Fprintf(w, "// %s, Updated: %s\n", pn.Title, pn.Updated) fmt.Fprintf(w, "const (\n") for _, pr := range prs { if pr.Name == "" { continue } fmt.Fprintf(w, "ianaProtocol%s = %d", pr.Name, pr.Value) s := pr.Descr if s == "" { s = pr.OrigName } fmt.Fprintf(w, "// %s\n", s) } fmt.Fprintf(w, ")\n") return nil } type protocolNumbers struct { XMLName xml.Name `xml:"registry"` Title string `xml:"title"` Updated string `xml:"updated"` RegTitle string `xml:"registry>title"` Note string `xml:"registry>note"` Records []protocolRecord `xml:"registry>record"` } type protocolRecord struct { Value string `xml:"value"` Name string `xml:"name"` Descr string `xml:"description"` } type canonProtocolRecord struct { OrigName string Name string Descr string Value int } func (pn *protocolNumbers) escape() []canonProtocolRecord { prs := make([]canonProtocolRecord, len(pn.Records)) sr := strings.NewReplacer( "-in-", "in", "-within-", "within", "-over-", "over", "+", "P", "-", "", "/", "", ".", "", " ", "", ) for i, pr := range pn.Records { prs[i].OrigName = pr.Name s := strings.TrimSpace(pr.Name) switch pr.Name { case "ISIS over IPv4": prs[i].Name = "ISIS" case "manet": prs[i].Name = "MANET" default: prs[i].Name = sr.Replace(s) } ss := strings.Split(pr.Descr, "\n") for i := range ss { ss[i] = strings.TrimSpace(ss[i]) } if len(ss) > 1 { prs[i].Descr = strings.Join(ss, " ") } else { prs[i].Descr = ss[0] } prs[i].Value, _ = strconv.Atoi(pr.Value) } return prs }<|fim▁end|>
fmt.Fprintf(w, "var icmpTypes = map[ICMPType]string{\n") for _, pr := range prs { if pr.Descr == "" {
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright 2018-2021 Polyaxon, Inc. #<|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from polyaxon import settings from polyaxon.proxies.schemas.base import clean_config from polyaxon.proxies.schemas.buffering import get_buffering_config from polyaxon.proxies.schemas.charset import get_charset_config from polyaxon.proxies.schemas.error_page import get_error_page_config from polyaxon.proxies.schemas.gzip import get_gzip_config from polyaxon.proxies.schemas.listen import get_listen_config from polyaxon.proxies.schemas.locations import get_streams_locations_config from polyaxon.proxies.schemas.logging import get_logging_config from polyaxon.proxies.schemas.streams.gunicorn import ( get_gunicorn_config, get_k8s_auth_config, ) from polyaxon.proxies.schemas.streams.k8s import get_k8s_root_location_config from polyaxon.proxies.schemas.timeout import get_timeout_config def get_base_config(): config = [ get_listen_config( is_proxy=False, port=settings.PROXIES_CONFIG.streams_target_port ) ] config += [ get_logging_config(), get_gzip_config(), get_charset_config(), get_buffering_config(), get_timeout_config(), get_gunicorn_config(), get_k8s_auth_config(), get_error_page_config(), get_streams_locations_config(), get_k8s_root_location_config(), ] return clean_config(config)<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>fastq.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2018 Johannes Köster, Henning Timm. // Licensed under the MIT license (http://opensource.org/licenses/MIT) // This file may not be copied, modified, or distributed // except according to those terms. //! Structs and trait to read and write files in FASTQ format. //! //! # Example //! //! ## Read //! //! In this example, we parse a fastq file from stdin and compute some statistics //! //! ``` //! use bio::io::fastq; //! use std::io; //! let mut reader = fastq::Reader::new(io::stdin()); //! //! let mut nb_reads = 0; //! let mut nb_bases = 0; //! //! for result in reader.records() { //! let record = result.expect("Error during fastq record parsing"); //! //! nb_reads += 1; //! nb_bases += record.seq().len(); //! } //! //! println!("Number of reads: {}", nb_reads); //! println!("Number of bases: {}", nb_bases); //! ``` //! //! We can also use a `while` loop to iterate over records //! ``` //! use bio::io::fastq; //! use std::io; //! let mut records = fastq::Reader::new(io::stdin()).records(); //! //! let mut nb_reads = 0; //! let mut nb_bases = 0; //! //! while let Some(Ok(record)) = records.next() { //! nb_reads += 1; //! nb_bases += record.seq().len(); //! } //! //! println!("Number of reads: {}", nb_reads); //! println!("Number of bases: {}", nb_bases); //! ``` //! //! ## Write //! //! In this example we generate 10 random sequences with length 100 and write them to stdout. //! //! ``` //! use std::io; //! use bio::io::fastq; //! //! let mut seed = 42; //! //! let nucleotides = [b'A', b'C', b'G', b'T']; //! //! let mut writer = fastq::Writer::new(io::stdout()); //! //! for _ in 0..10 { //! let seq = (0..100).map(|_| { //! seed = ((seed ^ seed << 13) ^ seed >> 7) ^ seed << 17; // don't use this random generator //! nucleotides[seed % 4] //! }).collect::<Vec<u8>>(); //! //! let qual = (0..100).map(|_| b'!').collect::<Vec<u8>>(); //! //! 
writer.write("random", None, seq.as_slice(), qual.as_slice()); //! } //! ``` //! //! ## Read and Write //! //! In this example we filter reads from stdin on mean quality (Phred + 33) and write them to stdout //! //! ``` //! use bio::io::fastq; //! use bio::io::fastq::FastqRead; //! use std::io; //! //! let mut reader = fastq::Reader::new(io::stdin()); //! let mut writer = fastq::Writer::new(io::stdout()); //! let mut record = fastq::Record::new(); //! //! while let Ok(()) = reader.read(&mut record) { //! if record.is_empty() { //! let check = record.check(); //! break; //! } //! //! let mut sum_qual = record.qual().iter().sum::<u8>() as f64; //! //! if (sum_qual / record.seq().len() as f64 - 33.0) > 30.0 { //! writer.write_record(&record); //! } //! } //! ``` use anyhow::Context; use std::convert::AsRef; use std::fmt; use std::fs; use std::io; use std::io::prelude::*; use std::path::{Path, PathBuf}; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("expected '@' at record start")] MissingAt, #[error("can't open {path} file: {source}")] FileOpen { path: PathBuf, source: io::Error }, #[error("can't read input")] ReadError(#[from] io::Error), #[error("Incomplete record. Each FastQ record has to consist of 4 lines: header, sequence, separator and qualities.")] IncompleteRecord, } pub type Result<T, E = Error> = std::result::Result<T, E>; use bio_types::sequence::SequenceRead; use crate::utils::TextSlice; /// Trait for FastQ readers. pub trait FastqRead { fn read(&mut self, record: &mut Record) -> Result<()>; } /// A FastQ reader. #[derive(Debug)] pub struct Reader<B> { reader: B, line_buffer: String, } impl Reader<io::BufReader<fs::File>> { /// Read from a given file. 
pub fn from_file<P: AsRef<Path> + std::fmt::Debug>(path: P) -> anyhow::Result<Self> { fs::File::open(path.as_ref()) .map_err(|e| Error::FileOpen { path: path.as_ref().to_owned(), source: e, }) .map(Reader::new) .with_context(|| format!("Failed to read fastq from {:#?}", path)) } } impl<R: io::Read> Reader<io::BufReader<R>> { /// Read from a given [`io::Read`](https://doc.rust-lang.org/std/io/trait.Read.html). pub fn new(reader: R) -> Self { Reader { reader: io::BufReader::new(reader), line_buffer: String::new(), } } /// Create a new Fastq reader given a capacity and an instance of `io::Read`. pub fn with_capacity(capacity: usize, reader: R) -> Self { Reader { reader: io::BufReader::with_capacity(capacity, reader), line_buffer: String::new(), } } } impl<B> Reader<B> where B: io::BufRead, { /// Create a new Fastq reader with an object that implements `io::BufReader`. pub fn from_bufread(bufreader: B) -> Self { Reader { reader: bufreader, line_buffer: String::new(), } } /// Return an iterator over the records of this FastQ file. /// /// # Errors /// /// This function will return an error if a record is incomplete /// or syntax is violated. /// /// # Example /// /// ```rust /// use bio::io::fastq; /// /// let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; /// let records = fastq::Reader::new(fq) /// .records() /// .map(|record| record.unwrap()); /// for record in records { /// assert!(record.check().is_ok()) /// } /// ``` pub fn records(self) -> Records<B> { Records { reader: self } } } impl<B> FastqRead for Reader<B> where B: io::BufRead, { /// Read the next FastQ entry into the given [`Record`](#Record). /// An empty record indicates that no more records can be read. /// /// This method is useful when you want to read records as fast as /// possible because it allows the reuse of a `Record` allocation. /// /// A more ergonomic approach to reading FastQ records is the /// [records](Reader::records) iterator. 
/// /// FastQ files with wrapped sequence and quality strings are allowed. /// /// # Errors /// /// This function will return an error if the record is incomplete, /// syntax is violated or any form of I/O error is encountered. /// Additionally, if the FastQ file has line-wrapped records, and the wrapping is not /// consistent between the sequence and quality string for a record, parsing will fail. /// /// # Example /// /// ```rust /// use bio::io::fastq::Record; /// use bio::io::fastq::{FastqRead, Reader}; /// const FASTQ_FILE: &'static [u8] = b"@id desc /// AAAA /// + /// IIII /// "; /// let mut reader = Reader::new(FASTQ_FILE); /// let mut record = Record::new(); /// /// reader.read(&mut record).unwrap(); /// /// assert_eq!(record.id(), "id"); /// assert_eq!(record.desc().unwrap(), "desc"); /// assert_eq!(record.seq().to_vec(), b"AAAA"); /// assert_eq!(record.qual().to_vec(), b"IIII"); /// ``` fn read(&mut self, record: &mut Record) -> Result<()> { record.clear(); self.line_buffer.clear(); self.reader.read_line(&mut self.line_buffer)?; if !self.line_buffer.is_empty() { if !self.line_buffer.starts_with('@') { return Err(Error::MissingAt); } let mut header_fields = self.line_buffer[1..].trim_end().splitn(2, ' '); record.id = header_fields.next().unwrap_or_default().to_owned(); record.desc = header_fields.next().map(|s| s.to_owned()); self.line_buffer.clear(); self.reader.read_line(&mut self.line_buffer)?; let mut lines_read = 0; while !self.line_buffer.is_empty() && !self.line_buffer.starts_with('+') { record.seq.push_str(self.line_buffer.trim_end()); self.line_buffer.clear(); self.reader.read_line(&mut self.line_buffer)?; lines_read += 1; } for _ in 0..lines_read { self.line_buffer.clear(); self.reader .read_line(&mut self.line_buffer) .map_err(Error::ReadError)?; record.qual.push_str(self.line_buffer.trim_end()); } if record.qual.is_empty() { return Err(Error::IncompleteRecord); } } Ok(()) } } /// A FastQ record. 
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] pub struct Record { id: String, desc: Option<String>, seq: String, qual: String, } impl Record { /// Create a new, empty FastQ record. pub fn new() -> Self { Record { id: String::new(), desc: None, seq: String::new(), qual: String::new(), } } /// Create a new FastQ record from given attributes. /// /// # Example /// /// ```rust /// use bio::io::fastq::Record; /// /// let record = Record::with_attrs("id_str", Some("desc"), b"ATGCGGG", b"QQQQQQQ"); /// assert_eq!(record.id(), "id_str"); /// assert_eq!(record.desc(), Some("desc")); /// assert_eq!(record.seq(), b"ATGCGGG"); /// assert_eq!(record.qual(), b"QQQQQQQ"); /// ``` pub fn with_attrs(id: &str, desc: Option<&str>, seq: TextSlice<'_>, qual: &[u8]) -> Self { let desc = desc.map(|desc| desc.to_owned()); Record { id: id.to_owned(), desc, seq: String::from_utf8(seq.to_vec()).unwrap(), qual: String::from_utf8(qual.to_vec()).unwrap(), } } /// Check if a record is empty. /// /// # Example /// /// ```rust /// use bio::io::fastq::Record; /// /// let mut record = Record::new(); /// assert!(record.is_empty()); /// /// record = Record::with_attrs("id_str", Some("desc"), b"ATGCGGG", b"QQQQQQQ"); /// assert!(!record.is_empty()); /// ``` pub fn is_empty(&self) -> bool { self.id.is_empty() && self.desc.is_none() && self.seq.is_empty() && self.qual.is_empty() } /// Check the validity of a FastQ record. /// /// # Errors /// This function will return an `Err` if one of the following conditions is met: /// - The record identifier is empty. /// - There is a non-ASCII character found in either the sequence or quality strings. /// - The sequence and quality strings do not have the same length. 
/// /// # Example /// /// ```rust /// use bio::io::fastq::Record; /// /// let mut record = Record::with_attrs("id", None, "Prüfung".as_ref(), b"!!!!!!!"); /// let actual = record.check().unwrap_err(); /// let expected = "Non-ascii character found in sequence."; /// assert_eq!(actual, expected); /// /// record = Record::with_attrs("id_str", Some("desc"), b"ATGCGGG", b"QQQQQQQ"); /// assert!(record.check().is_ok()); /// ``` pub fn check(&self) -> Result<(), &str> { if self.id().is_empty() { return Err("Expecting id for FastQ record."); } if !self.seq.is_ascii() { return Err("Non-ascii character found in sequence."); } if !self.qual.is_ascii() { return Err("Non-ascii character found in qualities."); } if self.seq().len() != self.qual().len() { return Err("Unequal length of sequence an qualities."); } Ok(()) } /// Return the id of the record. pub fn id(&self) -> &str { self.id.as_ref() } /// Return descriptions if present. pub fn desc(&self) -> Option<&str> { match self.desc.as_ref() { Some(desc) => Some(desc), None => None, } } /// Return the sequence of the record. pub fn seq(&self) -> TextSlice<'_> { self.seq.trim_end().as_bytes() } /// Return the base qualities of the record. pub fn qual(&self) -> &[u8] { self.qual.trim_end().as_bytes() } /// Clear the record. fn clear(&mut self) { self.id.clear(); self.desc = None; self.seq.clear(); self.qual.clear(); } } impl fmt::Display for Record { /// Allows for using `Record` in a given formatter `f`. In general this is for /// creating a `String` representation of a `Record` and, optionally, writing it to /// a file. /// /// # Errors /// Returns [`std::fmt::Error`](https://doc.rust-lang.org/std/fmt/struct.Error.html) /// if there is an issue formatting to the stream. /// /// # Examples /// /// Read in a Fastq `Record` and create a `String` representation of it. 
/// /// ```rust /// use bio::io::fastq::Reader; /// use std::fmt::Write; /// // create a "fake" fastq file /// let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; /// let mut records = Reader::new(fq).records().map(|r| r.unwrap()); /// let record = records.next().unwrap(); /// /// let mut actual = String::new(); /// // populate `actual` with a string representation of our record /// write!(actual, "{}", record).unwrap(); /// /// let expected = std::str::from_utf8(fq).unwrap(); /// /// assert_eq!(actual, expected) /// ``` fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { let header = match self.desc() { Some(d) => format!("{} {}", self.id().to_owned(), d), None => self.id().to_owned(), }; write!( f, "@{}\n{}\n+\n{}\n", header, std::str::from_utf8(self.seq()).unwrap(), std::str::from_utf8(self.qual()).unwrap() ) } } impl SequenceRead for Record { fn name(&self) -> &[u8] { self.id.as_bytes() } fn base(&self, i: usize) -> u8 { self.seq()[i] } fn base_qual(&self, i: usize) -> u8 { self.qual()[i] } fn len(&self) -> usize { self.seq().len() } } /// An iterator over the records of a FastQ file. #[derive(Debug)] pub struct Records<R: io::Read> { reader: Reader<R>, } impl<B> Iterator for Records<B> where B: io::BufRead, { type Item = Result<Record>; fn next(&mut self) -> Option<Result<Record>> { let mut record = Record::new(); match self.reader.read(&mut record) { Ok(()) if record.is_empty() => None, Ok(()) => Some(Ok(record)), Err(err) => Some(Err(err)), } } } /// A FastQ writer. #[derive(Debug)] pub struct Writer<W: io::Write> { writer: io::BufWriter<W>, } impl Writer<fs::File> { /// Write to a given file path. 
#[allow(clippy::wrong_self_convention)] pub fn to_file<P: AsRef<Path>>(path: P) -> io::Result<Self> { fs::File::create(path).map(Writer::new) } /// Write to the given file path and a buffer capacity pub fn to_file_with_capacity<P: AsRef<Path>>(capacity: usize, path: P) -> io::Result<Self> { fs::File::create(path).map(|file| Writer::with_capacity(capacity, file)) } } impl<W: io::Write> Writer<W> { /// Write to a given `io::Write`. pub fn new(writer: W) -> Self { Writer { writer: io::BufWriter::new(writer), } } /// Create a new Fastq writer with a capacity of write buffer pub fn with_capacity(capacity: usize, writer: W) -> Self { Writer { writer: io::BufWriter::with_capacity(capacity, writer), } } /// Create a new Fastq writer with a given BufWriter pub fn from_bufwriter(bufwriter: io::BufWriter<W>) -> Self { Writer { writer: bufwriter } } /// Directly write a FastQ record. pub fn write_record(&mut self, record: &Record) -> io::Result<()> { self.write(record.id(), record.desc(), record.seq(), record.qual()) } /// Write a FastQ record with given id, optional description, sequence and qualities. pub fn write( &mut self, id: &str, desc: Option<&str>, seq: TextSlice<'_>, qual: &[u8], ) -> io::Result<()> { self.writer.write_all(b"@")?; self.writer.write_all(id.as_bytes())?; if let Some(desc) = desc { self.writer.write_all(b" ")?; self.writer.write_all(desc.as_bytes())?; } self.writer.write_all(b"\n")?; self.writer.write_all(seq)?; self.writer.write_all(b"\n+\n")?; self.writer.write_all(qual)?; self.writer.write_all(b"\n")?; Ok(()) } /// Flush the writer, ensuring that everything is written. 
pub fn flush(&mut self) -> io::Result<()> { self.writer.flush() } } #[cfg(test)] mod tests { use super::*; use std::fmt::Write as FmtWrite; use std::io; const FASTQ_FILE: &[u8] = b"@id desc ACCGTAGGCTGA + IIIIIIJJJJJJ "; #[test] fn test_reader() { let reader = Reader::new(FASTQ_FILE); let records: Vec<Result<Record>> = reader.records().collect(); assert_eq!(records.len(), 1); for res in records { let record = res.unwrap(); assert_eq!(record.check(), Ok(())); assert_eq!(record.id(), "id"); assert_eq!(record.desc(), Some("desc")); assert_eq!(record.seq(), b"ACCGTAGGCTGA"); assert_eq!(record.qual(), b"IIIIIIJJJJJJ"); } let reader = Reader::with_capacity(100, FASTQ_FILE); let records: Vec<Result<Record>> = reader.records().collect(); assert_eq!(records.len(), 1); for res in records { let record = res.unwrap(); assert_eq!(record.check(), Ok(())); assert_eq!(record.id(), "id"); assert_eq!(record.desc(), Some("desc")); assert_eq!(record.seq(), b"ACCGTAGGCTGA"); assert_eq!(record.qual(), b"IIIIIIJJJJJJ"); } let reader = Reader::from_bufread(io::BufReader::new(FASTQ_FILE)); let records: Vec<Result<Record>> = reader.records().collect(); assert_eq!(records.len(), 1); for res in records { let record = res.unwrap(); assert_eq!(record.check(), Ok(())); assert_eq!(record.id(), "id"); assert_eq!(record.desc(), Some("desc")); assert_eq!(record.seq(), b"ACCGTAGGCTGA"); assert_eq!(record.qual(), b"IIIIIIJJJJJJ"); } } #[test] fn test_display_record_no_desc_id_without_space_after() { let fq: &'static [u8] = b"@id\nACGT\n+\n!!!!\n"; let mut records = Reader::new(fq).records().map(|r| r.unwrap()); let record = records.next().unwrap(); let mut actual = String::new(); write!(actual, "{}", record).unwrap(); let expected = std::str::from_utf8(fq).unwrap(); assert_eq!(actual, expected) } #[test] fn test_display_record_with_desc_id_has_space_between_id_and_desc() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut records = Reader::new(fq).records().map(|r| r.unwrap()); let 
record = records.next().unwrap(); let mut actual = String::new(); write!(actual, "{}", record).unwrap(); let expected = std::str::from_utf8(fq).unwrap(); assert_eq!(actual, expected) } #[test] fn test_fqread_trait() { let path = "reads.fq.gz"; let mut fq_reader: Box<dyn FastqRead> = match path.ends_with(".gz") { true => Box::new(Reader::new(io::BufReader::new(FASTQ_FILE))), false => Box::new(Reader::new(FASTQ_FILE)), }; // The read method can be called, since it is implemented by // `Read`. Right now, the records method would not work. let mut record = Record::new(); fq_reader.read(&mut record).unwrap(); // Check if the returned result is correct. assert_eq!(record.check(), Ok(())); assert_eq!(record.id(), "id"); assert_eq!(record.desc(), Some("desc")); assert_eq!(record.seq(), b"ACCGTAGGCTGA"); assert_eq!(record.qual(), b"IIIIIIJJJJJJ"); } #[test] fn test_record_with_attrs() { let record = Record::with_attrs("id_str", Some("desc"), b"ATGCGGG", b"QQQQQQQ"); assert_eq!(record.id(), "id_str"); assert_eq!(record.desc(), Some("desc")); assert_eq!(record.seq(), b"ATGCGGG"); assert_eq!(record.qual(), b"QQQQQQQ"); } #[test] fn test_writer() { let mut writer = Writer::new(Vec::new()); writer .write("id", Some("desc"), b"ACCGTAGGCTGA", b"IIIIIIJJJJJJ") .expect("Expected successful write"); writer.flush().expect("Expected successful write"); assert_eq!(writer.writer.get_ref(), &FASTQ_FILE); let mut writer = Writer::with_capacity(100, Vec::new()); writer .write("id", Some("desc"), b"ACCGTAGGCTGA", b"IIIIIIJJJJJJ") .expect("Expected successful write"); writer.flush().expect("Expected successful write"); assert_eq!(writer.writer.get_ref(), &FASTQ_FILE); let mut writer = Writer::from_bufwriter(std::io::BufWriter::with_capacity(100, Vec::new())); writer .write("id", Some("desc"), b"ACCGTAGGCTGA", b"IIIIIIJJJJJJ") .expect("Expected successful write"); writer.flush().expect("Expected successful write"); assert_eq!(writer.writer.get_ref(), &FASTQ_FILE); } #[test] fn 
test_check_record_id_is_empty_raises_err() { let record = Record::with_attrs("", None, b"ACGT", b"!!!!"); let actual = record.check().unwrap_err(); let expected = "Expecting id for FastQ record."; assert_eq!(actual, expected) } #[test] fn test_check_record_seq_is_not_ascii_raises_err() { let record = Record::with_attrs("id", None, "Prüfung".as_ref(), b"!!!!"); let actual = record.check().unwrap_err(); let expected = "Non-ascii character found in sequence."; assert_eq!(actual, expected) } #[test] fn test_check_record_quality_is_not_ascii_raises_err() { let record = Record::with_attrs("id", None, b"ACGT", "Qualität".as_ref()); let actual = record.check().unwrap_err(); let expected = "Non-ascii character found in qualities."; assert_eq!(actual, expected) } #[test] fn test_check_record_quality_and_seq_diff_len_raises_err() { let record = Record::with_attrs("id", None, b"ACGT", b"!!!"); let actual = record.check().unwrap_err(); let expected = "Unequal length of sequence an qualities."; assert_eq!(actual, expected) } #[test] fn test_check_valid_record() { let record = Record::with_attrs("id", None, b"ACGT", b"!!!!"); assert!(record.check().is_ok()) } #[test] fn test_read_header_does_not_start_with_correct_char_raises_err() { let fq: &'static [u8] = b">id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); let error = reader.read(&mut record).unwrap_err(); assert!(matches!(error, Error::MissingAt)) } #[test] fn test_read_quality_is_empty_raises_err() { let fq: &'static [u8] = b"@id description\nACGT\n+\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); let error = reader.read(&mut record).unwrap_err(); assert!(matches!(error, Error::IncompleteRecord)) } #[test] fn test_read_sequence_and_quality_are_wrapped_is_handled_with_one_sequence() { let fq: &'static [u8] = b"@id description\nACGT\nGGGG\nC\n+\n@@@@\n!!!!\n$\n"; let mut reader = Reader::new(fq); let mut actual = Record::new(); reader.read(&mut 
actual).unwrap(); let expected = Record::with_attrs("id", Some("description"), b"ACGTGGGGC", b"@@@@!!!!$"); assert_eq!(actual, expected) } #[test] fn test_read_sequence_and_quality_are_wrapped_is_handled_with_three_sequences() { let fq: &'static [u8] = b"@id description\nACGT\nGGGG\nC\n+\n@@@@\n!!!!\n$\n@id2 description\nACGT\nGGGG\nC\n+\n@@@@\n!!!!\n$\n@id3 desc1 desc2\nAAA\nAAA\nAA\n+\n^^^\n^^^\n^^\n"; let mut reader = Reader::new(fq); let mut actual = Record::new(); reader.read(&mut actual).unwrap(); let expected = Record::with_attrs("id", Some("description"), b"ACGTGGGGC", b"@@@@!!!!$"); assert_eq!(actual, expected); reader.read(&mut actual).unwrap(); let expected = Record::with_attrs("id2", Some("description"), b"ACGTGGGGC", b"@@@@!!!!$"); assert_eq!(actual, expected); reader.read(&mut actual).unwrap(); let expected = Record::with_attrs("id3", Some("desc1 desc2"), b"AAAAAAAA", b"^^^^^^^^"); assert_eq!(actual, expected) } #[test] fn test_read_wrapped_record_with_inconsistent_wrapping_errors() { let fq: &'static [u8] = b"@id description\nACGT\nGGGG\nC\n+\n@@@@\n!!!!$\n@id2 description\nACGT\nGGGG\nC\n+\n@@@@\n!!!!\n$\n@id3 desc1 desc2\nAAA\nAAA\nAA\n+\n^^^\n^^^\n^^\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); let error = reader.read(&mut record).unwrap_err(); assert!(matches!(error, Error::MissingAt)) } #[test] fn test_record_iterator_next_read_returns_err_causes_next_to_return_some_err() { let fq: &'static [u8] = b"@id description\nACGT\n+\n"; let mut records = Reader::new(fq).records(); let error = records.next().unwrap().unwrap_err(); assert!(matches!(error, Error::IncompleteRecord)); } #[test] fn test_reader_from_file_path_doesnt_exist_returns_err() { let path = Path::new("/I/dont/exist.fq"); let error = Reader::from_file(path) .unwrap_err() .downcast::<String>() .unwrap(); assert_eq!(&error, "Failed to read fastq from \"/I/dont/exist.fq\"") } #[test] fn 
test_reader_from_file_path_exists_returns_ok() { let path = Path::new("Cargo.toml"); assert!(Reader::from_file(path).is_ok()) } #[test] fn test_sequence_read_for_record_trait_method_name() { let record = Record::with_attrs("id", None, b"ACGT", b"!!!!"); let actual = record.name(); let expected = b"id"; assert_eq!(actual, expected) } #[test] fn test_sequence_read_for_record_trait_method_base_idx_in_range() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); let idx = 2; let actual = record.base(idx); let expected = b'G'; assert_eq!(actual, expected) } #[test] #[should_panic] fn test_sequence_read_for_record_trait_method_base_idx_out_of_range() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); // idx 4 is where the newline character would be - we dont want that included let idx = 4; record.base(idx); } #[test] fn test_sequence_read_for_record_trait_method_base_qual_idx_in_range() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); let idx = 2; let actual = record.base_qual(idx); let expected = b'!'; assert_eq!(actual, expected) } #[test] #[should_panic] fn test_sequence_read_for_record_trait_method_base_qual_idx_out_of_range() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); // idx 4 is where the newline character would be - we dont want that included let idx = 4; record.base_qual(idx); } #[test] fn test_sequence_read_for_record_trait_method_len() { let fq: &'static [u8] = b"@id description\nACGT\n+\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); reader.read(&mut record).unwrap(); let actual = 
record.len(); let expected = 4; assert_eq!(actual, expected) } #[test] fn test_read_with_missing_plus() { let fq: &'static [u8] = b"@id description\nACGT\n*\n!!!!\n"; let mut reader = Reader::new(fq); let mut record = Record::new(); let err = reader.read(&mut record).unwrap_err(); assert!(matches!(err, Error::IncompleteRecord)) } #[test] fn test_writer_to_file_dir_doesnt_exist_returns_err() { let path = Path::new("/I/dont/exist.fq");<|fim▁hole|> assert_eq!(actual.kind(), expected.kind()); } #[test] fn test_writer_to_file_dir_exists_returns_ok() { let file = tempfile::NamedTempFile::new().expect("Could not create temp file"); let path = file.path(); assert!(Writer::to_file(path).is_ok()) } #[test] fn test_write_record() { let path = Path::new("test.fq"); let file = fs::File::create(path).unwrap(); { let handle = io::BufWriter::new(file); let mut writer = Writer { writer: handle }; let record = Record::with_attrs("id", Some("desc"), b"ACGT", b"!!!!"); let write_result = writer.write_record(&record); assert!(write_result.is_ok()); } let actual = fs::read_to_string(path).unwrap(); let expected = "@id desc\nACGT\n+\n!!!!\n"; assert!(fs::remove_file(path).is_ok()); assert_eq!(actual, expected) } }<|fim▁end|>
let actual = Writer::to_file(path).unwrap_err(); let expected = io::Error::new(io::ErrorKind::NotFound, "foo");
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|> # Register your models here. from learning_logs.models import Topic, Entry admin.site.register(Topic) admin.site.register(Entry)<|fim▁end|>
from django.contrib import admin
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use bidir_map::BidirMap; use serde_json; use serde_json::Value; use serde_json::value::Map; use error::*; use types::Currency; use types::Pair; use types::Pair::*; lazy_static! { static ref PAIRS_STRING: BidirMap<Pair, &'static str> = { let mut m = BidirMap::new(); m.insert(BCH_EUR, "BCHEUR"); m.insert(BCH_USD, "BCHUSD"); m.insert(BCH_BTC, "BCHXBT"); m.insert(DASH_EUR, "DASHEUR"); m.insert(DASH_USD, "DASHUSD"); m.insert(DASH_BTC, "DASHXBT"); m.insert(EOS_ETH, "EOSETH"); m.insert(EOS_BTC, "EOSXBT"); m.insert(GNO_ETH, "GNOETH"); m.insert(GNO_BTC, "GNOXBT"); m.insert(USDT_USD, "USDTZUSD"); m.insert(ETC_ETH, "XETCXETH"); m.insert(ETC_BTC, "XETCXXBT"); m.insert(ETC_EUR, "XETCZEUR"); m.insert(ETC_USD, "XETCZUSD"); m.insert(ETH_BTC, "XETHXXBT"); m.insert(ETH_BTC_d, "XETHXXBT.d"); m.insert(ETH_CAD, "XETHZCAD"); m.insert(ETH_CAD_d, "XETHZCAD.d"); m.insert(ETH_EUR, "XETHZEUR"); m.insert(ETH_EUR_d, "XETHZEUR.d"); m.insert(ETH_GBP, "XETHZGBP"); m.insert(ETH_GBP_d, "XETHZGBP.d"); m.insert(ETH_JPY, "XETHZJPY"); m.insert(ETH_JPY_d, "XETHZJPY.d"); m.insert(ETH_USD, "XETHZUSD"); m.insert(ETH_USD_d, "XETHZUSD.d"); m.insert(ICN_ETH, "XICNXETH"); m.insert(ICN_BTC, "XICNXXBT"); m.insert(LTC_BTC, "XLTCXXBT"); m.insert(LTC_EUR, "XLTCZEUR"); m.insert(LTC_USD, "XLTCZUSD"); m.insert(MLN_ETH, "XMLNXETH"); m.insert(MLN_BTC, "XMLNXXBT"); m.insert(REP_ETH, "XREPXETH"); m.insert(REP_BTC, "XREPXXBT"); m.insert(REP_EUR, "XREPZEUR"); m.insert(BTC_CAD, "XXBTZCAD"); m.insert(BTC_CAD_d, "XXBTZCAD.d"); m.insert(BTC_EUR, "XXBTZEUR"); m.insert(BTC_EUR_d, "XXBTZEUR.d"); m.insert(BTC_GBP, "XXBTZGBP"); m.insert(BTC_GBP_d, "XXBTZGBP.d"); m.insert(BTC_JPY, "XXBTZJPY"); m.insert(BTC_JPY_d, "XXBTZJPY.d"); m.insert(BTC_USD, "XXBTZUSD"); m.insert(BTC_USD_d, "XXBTZUSD.d"); m.insert(XDG_BTC, "XXDGXXBT"); m.insert(XLM_BTC, "XXLMXXBT"); m.insert(XMR_BTC, "XXMRXXBT"); m.insert(XMR_EUR, "XXMRZEUR"); m.insert(XMR_USD, "XXMRZUSD"); m.insert(XRP_BTC, "XXRPXXBT"); 
m.insert(XRP_EUR, "XXRPZEUR"); m.insert(XRP_USD, "XXRPZUSD"); m.insert(ZEC_BTC, "XZECXXBT"); m.insert(ZEC_EUR, "XZECZEUR");<|fim▁hole|> m.insert(ZEC_USD, "XZECZUSD"); m }; } /// Return the name associated to pair used by Kraken /// If the Pair is not supported, None is returned. pub fn get_pair_string(pair: &Pair) -> Option<&&str> { PAIRS_STRING.get_by_first(pair) } /// Return the Pair enum associated to the string used by Kraken /// If the Pair is not supported, None is returned. pub fn get_pair_enum(pair: &str) -> Option<&Pair> { PAIRS_STRING.get_by_second(&pair) } pub fn deserialize_json(json_string: &str) -> Result<Map<String, Value>> { let data: Value = match serde_json::from_str(json_string) { Ok(data) => data, Err(_) => return Err(ErrorKind::BadParse.into()), }; match data.as_object() { Some(value) => Ok(value.clone()), None => Err(ErrorKind::BadParse.into()), } } /// If error array is null, return the result (encoded in a json object) /// else return the error string found in array pub fn parse_result(response: &Map<String, Value>) -> Result<Map<String, Value>> { let error_array = match response.get("error") { Some(array) => { array .as_array() .ok_or_else(|| ErrorKind::InvalidFieldFormat("error".to_string()))? } None => return Err(ErrorKind::BadParse.into()), }; if error_array.is_empty() { return Ok(response .get("result") .ok_or_else(|| ErrorKind::MissingField("result".to_string()))? .as_object() .ok_or_else(|| ErrorKind::InvalidFieldFormat("result".to_string()))? .clone()); } let error_msg = error_array[0] .as_str() .ok_or_else(|| ErrorKind::InvalidFieldFormat(error_array[0].to_string()))? .to_string(); //TODO: Parse correctly the reason for "EService:Unavailable". 
match error_msg.as_ref() { "EService:Unavailable" => { Err(ErrorKind::ServiceUnavailable("Unknown...".to_string()).into()) } "EAPI:Invalid key" => Err(ErrorKind::BadCredentials.into()), "EAPI:Invalid nonce" => Err(ErrorKind::InvalidNonce.into()), "EOrder:Rate limit exceeded" => Err(ErrorKind::RateLimitExceeded.into()), "EQuery:Unknown asset pair" => Err(ErrorKind::PairUnsupported.into()), "EGeneral:Invalid arguments" => Err(ErrorKind::InvalidArguments.into()), "EGeneral:Permission denied" => Err(ErrorKind::PermissionDenied.into()), "EOrder:Insufficient funds" => Err(ErrorKind::InsufficientFunds.into()), "EOrder:Order minimum not met" => Err(ErrorKind::InsufficientOrderSize.into()), other => Err(ErrorKind::ExchangeSpecificError(other.to_string()).into()), } } /// Return the currency enum associated with the /// string used by Kraken. If no currency is found, /// return None /// # Examples /// /// ``` /// use coinnect::kraken::utils::get_currency_enum; /// use coinnect::types::Currency; /// /// let currency = get_currency_enum("ZUSD"); /// assert_eq!(Some(Currency::USD), currency); /// ``` pub fn get_currency_enum(currency: &str) -> Option<Currency> { match currency { "ZEUR" => Some(Currency::EUR), "ZCAD" => Some(Currency::CAD), "ZGBP" => Some(Currency::GBP), "ZJPY" => Some(Currency::JPY), "ZUSD" => Some(Currency::USD), "XDASH" => Some(Currency::DASH), "XETC" => Some(Currency::ETC), "XETH" => Some(Currency::ETH), "XGNO" => Some(Currency::GNO), "XICN" => Some(Currency::ICN), "XLTC" => Some(Currency::LTC), "XMLN" => Some(Currency::MLN), "XREP" => Some(Currency::REP), "XUSDT" => Some(Currency::USDT), "XXBT" => Some(Currency::BTC), "XXDG" => Some(Currency::XDG), "XXLM" => Some(Currency::XLM), "XXMR" => Some(Currency::XMR), "XXRP" => Some(Currency::XRP), "XZEC" => Some(Currency::ZEC), _ => None, } } /// Return the currency String associated with the /// string used by Kraken. 
If no currency is found, /// return None /// # Examples /// /// ``` /// use coinnect::kraken::utils::get_currency_string; /// use coinnect::types::Currency; /// /// let currency = get_currency_string(Currency::BTC); /// assert_eq!(currency, Some("XXBT".to_string())); /// ``` pub fn get_currency_string(currency: Currency) -> Option<String> { match currency { Currency::EUR => Some("ZEUR".to_string()), Currency::CAD => Some("ZCAD".to_string()), Currency::GBP => Some("ZGBP".to_string()), Currency::JPY => Some("ZJPY".to_string()), Currency::USD => Some("ZUSD".to_string()), Currency::DASH => Some("XDASH".to_string()), Currency::ETC => Some("XETC".to_string()), Currency::ETH => Some("XETH".to_string()), Currency::GNO => Some("XGNO".to_string()), Currency::ICN => Some("XICN".to_string()), Currency::LTC => Some("XLTC".to_string()), Currency::MLN => Some("XMLN".to_string()), Currency::REP => Some("XREP".to_string()), Currency::USDT => Some("XUSDT".to_string()), Currency::BTC => Some("XXBT".to_string()), Currency::XDG => Some("XXDG".to_string()), Currency::XLM => Some("XXLM".to_string()), Currency::XMR => Some("XXMR".to_string()), Currency::XRP => Some("XXRP".to_string()), Currency::ZEC => Some("XZEC".to_string()), _ => None, } }<|fim▁end|>
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use std::mem; use std::net::Ipv4Addr; use std::path::Path; use std::thread; use net_util::{MacAddress, TapT}; use base::{error, warn, AsRawDescriptor, Event, RawDescriptor, Tube}; use vhost::NetT as VhostNetT; use virtio_sys::virtio_net; use vm_memory::GuestMemory; use super::control_socket::*; use super::worker::Worker; use super::{Error, Result}; use crate::pci::MsixStatus; use crate::virtio::{Interrupt, Queue, VirtioDevice, TYPE_NET}; const QUEUE_SIZE: u16 = 256; const NUM_QUEUES: usize = 2; const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE; NUM_QUEUES]; pub struct Net<T: TapT, U: VhostNetT<T>> { workers_kill_evt: Option<Event>, kill_evt: Event, worker_thread: Option<thread::JoinHandle<(Worker<U>, T)>>, tap: Option<T>, vhost_net_handle: Option<U>, vhost_interrupt: Option<Vec<Event>>, avail_features: u64, acked_features: u64, request_tube: Tube, response_tube: Option<Tube>, } impl<T, U> Net<T, U> where T: TapT, U: VhostNetT<T>, { /// Create a new virtio network device with the given IP address and /// netmask. pub fn new( vhost_net_device_path: &Path, base_features: u64, ip_addr: Ipv4Addr, netmask: Ipv4Addr, mac_addr: MacAddress, ) -> Result<Net<T, U>> { let kill_evt = Event::new().map_err(Error::CreateKillEvent)?; let tap: T = T::new(true, false).map_err(Error::TapOpen)?; tap.set_ip_addr(ip_addr).map_err(Error::TapSetIp)?; tap.set_netmask(netmask).map_err(Error::TapSetNetmask)?; tap.set_mac_address(mac_addr) .map_err(Error::TapSetMacAddress)?; // Set offload flags to match the virtio features below. tap.set_offload( net_sys::TUN_F_CSUM | net_sys::TUN_F_UFO | net_sys::TUN_F_TSO4 | net_sys::TUN_F_TSO6, ) .map_err(Error::TapSetOffload)?; // We declare VIRTIO_NET_F_MRG_RXBUF, so set the vnet hdr size to match. 
let vnet_hdr_size = mem::size_of::<virtio_net::virtio_net_hdr_mrg_rxbuf>() as i32; tap.set_vnet_hdr_size(vnet_hdr_size) .map_err(Error::TapSetVnetHdrSize)?; tap.enable().map_err(Error::TapEnable)?; let vhost_net_handle = U::new(vhost_net_device_path).map_err(Error::VhostOpen)?; let avail_features = base_features | 1 << virtio_net::VIRTIO_NET_F_GUEST_CSUM | 1 << virtio_net::VIRTIO_NET_F_CSUM | 1 << virtio_net::VIRTIO_NET_F_GUEST_TSO4 | 1 << virtio_net::VIRTIO_NET_F_GUEST_UFO | 1 << virtio_net::VIRTIO_NET_F_HOST_TSO4 | 1 << virtio_net::VIRTIO_NET_F_HOST_UFO | 1 << virtio_net::VIRTIO_NET_F_MRG_RXBUF | 1 << virtio_sys::vhost::VIRTIO_RING_F_INDIRECT_DESC | 1 << virtio_sys::vhost::VIRTIO_RING_F_EVENT_IDX | 1 << virtio_sys::vhost::VIRTIO_F_NOTIFY_ON_EMPTY; let mut vhost_interrupt = Vec::new(); for _ in 0..NUM_QUEUES { vhost_interrupt.push(Event::new().map_err(Error::VhostIrqCreate)?); } let (request_tube, response_tube) = Tube::pair().map_err(Error::CreateTube)?; Ok(Net { workers_kill_evt: Some(kill_evt.try_clone().map_err(Error::CloneKillEvent)?), kill_evt, worker_thread: None, tap: Some(tap), vhost_net_handle: Some(vhost_net_handle), vhost_interrupt: Some(vhost_interrupt), avail_features, acked_features: 0u64, request_tube, response_tube: Some(response_tube), }) } } impl<T, U> Drop for Net<T, U> where T: TapT, U: VhostNetT<T>, { fn drop(&mut self) { // Only kill the child if it claimed its event. if self.workers_kill_evt.is_none() { // Ignore the result because there is nothing we can do about it. 
let _ = self.kill_evt.write(1); } if let Some(worker_thread) = self.worker_thread.take() { let _ = worker_thread.join(); } } } impl<T, U> VirtioDevice for Net<T, U> where T: TapT + 'static, U: VhostNetT<T> + 'static, { fn keep_rds(&self) -> Vec<RawDescriptor> { let mut keep_rds = Vec::new(); if let Some(tap) = &self.tap { keep_rds.push(tap.as_raw_descriptor()); } if let Some(vhost_net_handle) = &self.vhost_net_handle { keep_rds.push(vhost_net_handle.as_raw_descriptor()); } if let Some(vhost_interrupt) = &self.vhost_interrupt { for vhost_int in vhost_interrupt.iter() { keep_rds.push(vhost_int.as_raw_descriptor()); } } if let Some(workers_kill_evt) = &self.workers_kill_evt { keep_rds.push(workers_kill_evt.as_raw_descriptor()); } keep_rds.push(self.kill_evt.as_raw_descriptor()); keep_rds.push(self.request_tube.as_raw_descriptor()); if let Some(response_tube) = &self.response_tube { keep_rds.push(response_tube.as_raw_descriptor()); } keep_rds } fn device_type(&self) -> u32 { TYPE_NET } fn queue_max_sizes(&self) -> &[u16] { QUEUE_SIZES } fn features(&self) -> u64 { self.avail_features } fn ack_features(&mut self, value: u64) { let mut v = value; // Check if the guest is ACK'ing a feature that we didn't claim to have. let unrequested_features = v & !self.avail_features; if unrequested_features != 0 { warn!("net: virtio net got unknown feature ack: {:x}", v); // Don't count these features as acked. 
v &= !unrequested_features; } self.acked_features |= v; } fn activate( &mut self, mem: GuestMemory, interrupt: Interrupt, queues: Vec<Queue>, queue_evts: Vec<Event>, ) { if queues.len() != NUM_QUEUES || queue_evts.len() != NUM_QUEUES { error!("net: expected {} queues, got {}", NUM_QUEUES, queues.len()); return; } if let Some(vhost_net_handle) = self.vhost_net_handle.take() { if let Some(tap) = self.tap.take() { if let Some(vhost_interrupt) = self.vhost_interrupt.take() { if let Some(kill_evt) = self.workers_kill_evt.take() { let acked_features = self.acked_features; let socket = if self.response_tube.is_some() {<|fim▁hole|> self.response_tube.take() } else { None }; let mut worker = Worker::new( queues, vhost_net_handle, vhost_interrupt, interrupt, acked_features, kill_evt, socket, ); let activate_vqs = |handle: &U| -> Result<()> { for idx in 0..NUM_QUEUES { handle .set_backend(idx, Some(&tap)) .map_err(Error::VhostNetSetBackend)?; } Ok(()) }; let result = worker.init(mem, queue_evts, QUEUE_SIZES, activate_vqs); if let Err(e) = result { error!("net worker thread exited with error: {}", e); } let worker_result = thread::Builder::new() .name("vhost_net".to_string()) .spawn(move || { let cleanup_vqs = |handle: &U| -> Result<()> { for idx in 0..NUM_QUEUES { handle .set_backend(idx, None) .map_err(Error::VhostNetSetBackend)?; } Ok(()) }; let result = worker.run(cleanup_vqs); if let Err(e) = result { error!("net worker thread exited with error: {}", e); } (worker, tap) }); match worker_result { Err(e) => { error!("failed to spawn vhost_net worker: {}", e); return; } Ok(join_handle) => { self.worker_thread = Some(join_handle); } } } } } } } fn on_device_sandboxed(&mut self) { // ignore the error but to log the error. We don't need to do // anything here because when activate, the other vhost set up // will be failed to stop the activate thread. 
if let Some(vhost_net_handle) = &self.vhost_net_handle { match vhost_net_handle.set_owner() { Ok(_) => {} Err(e) => error!("{}: failed to set owner: {:?}", self.debug_label(), e), } } } fn control_notify(&self, behavior: MsixStatus) { if self.worker_thread.is_none() { return; } match behavior { MsixStatus::EntryChanged(index) => { if let Err(e) = self .request_tube .send(&VhostDevRequest::MsixEntryChanged(index)) { error!( "{} failed to send VhostMsixEntryChanged request for entry {}: {:?}", self.debug_label(), index, e ); return; } if let Err(e) = self.request_tube.recv::<VhostDevResponse>() { error!( "{} failed to receive VhostMsixEntryChanged response for entry {}: {:?}", self.debug_label(), index, e ); } } MsixStatus::Changed => { if let Err(e) = self.request_tube.send(&VhostDevRequest::MsixChanged) { error!( "{} failed to send VhostMsixChanged request: {:?}", self.debug_label(), e ); return; } if let Err(e) = self.request_tube.recv::<VhostDevResponse>() { error!( "{} failed to receive VhostMsixChanged response {:?}", self.debug_label(), e ); } } _ => {} } } fn reset(&mut self) -> bool { // Only kill the child if it claimed its event. 
if self.workers_kill_evt.is_none() && self.kill_evt.write(1).is_err() { error!("{}: failed to notify the kill event", self.debug_label()); return false; } if let Some(worker_thread) = self.worker_thread.take() { match worker_thread.join() { Err(_) => { error!("{}: failed to get back resources", self.debug_label()); return false; } Ok((worker, tap)) => { self.vhost_net_handle = Some(worker.vhost_handle); self.tap = Some(tap); self.vhost_interrupt = Some(worker.vhost_interrupt); self.workers_kill_evt = Some(worker.kill_evt); self.response_tube = worker.response_tube; return true; } } } false } } #[cfg(test)] pub mod tests { use super::*; use crate::virtio::base_features; use crate::virtio::VIRTIO_MSI_NO_VECTOR; use hypervisor::ProtectionType; use net_util::fakes::FakeTap; use std::path::PathBuf; use std::result; use std::sync::atomic::AtomicUsize; use std::sync::Arc; use vhost::net::fakes::FakeNet; use vm_memory::{GuestAddress, GuestMemory, GuestMemoryError}; fn create_guest_memory() -> result::Result<GuestMemory, GuestMemoryError> { let start_addr1 = GuestAddress(0x0); let start_addr2 = GuestAddress(0x1000); GuestMemory::new(&[(start_addr1, 0x1000), (start_addr2, 0x4000)]) } fn create_net_common() -> Net<FakeTap, FakeNet<FakeTap>> { let features = base_features(ProtectionType::Unprotected); Net::<FakeTap, FakeNet<FakeTap>>::new( &PathBuf::from(""), features, Ipv4Addr::new(127, 0, 0, 1), Ipv4Addr::new(255, 255, 255, 0), "de:21:e8:47:6b:6a".parse().unwrap(), ) .unwrap() } #[test] fn create_net() { create_net_common(); } #[test] fn keep_rds() { let net = create_net_common(); let fds = net.keep_rds(); assert!(!fds.is_empty(), "We should have gotten at least one fd"); } #[test] fn features() { let net = create_net_common(); assert_eq!(net.features(), 5117103235); } #[test] fn ack_features() { let mut net = create_net_common(); // Just testing that we don't panic, for now net.ack_features(1); net.ack_features(1 << 32); } #[test] fn activate() { let mut net = 
create_net_common(); let guest_memory = create_guest_memory().unwrap(); // Just testing that we don't panic, for now net.activate( guest_memory, Interrupt::new( Arc::new(AtomicUsize::new(0)), Event::new().unwrap(), Event::new().unwrap(), None, VIRTIO_MSI_NO_VECTOR, ), vec![Queue::new(1)], vec![Event::new().unwrap()], ); } }<|fim▁end|>
<|file_name|>SpecializeModuleTest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2010 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.jscomp.SpecializeModule.SpecializationState; import com.google.javascript.rhino.Node; /** * Tests for {@link SpecializeModule}. * * @author dcc@google.com (Devin Coughlin) */ public class SpecializeModuleTest extends CompilerTestCase { private static final String SHARED_EXTERNS = "var alert = function() {}"; public SpecializeModuleTest() { super(SHARED_EXTERNS); } private PassFactory inlineFunctions = new PassFactory("inlineFunctions", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new InlineFunctions(compiler, compiler.getUniqueNameIdSupplier(), true, false, true); } }; private PassFactory removeUnusedPrototypeProperties = new PassFactory("removeUnusedPrototypeProperties", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new RemoveUnusedPrototypeProperties(compiler, false, false); } }; private PassFactory devirtualizePrototypeMethods = new PassFactory("devirtualizePrototypeMethods", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new DevirtualizePrototypeMethods(compiler); } }; @Override protected 
CompilerPass getProcessor(final Compiler compiler) { final SpecializeModule specializeModule = new SpecializeModule(compiler, devirtualizePrototypeMethods, inlineFunctions, removeUnusedPrototypeProperties); return new CompilerPass() { public void process(Node externs, Node root) { specializeModule.process(externs, root); /* Make sure variables are declared before used */ new VarCheck(compiler).process(externs, root); } }; } @Override public void setUp() throws Exception { super.setUp(); enableNormalize(); } public void testSpecializeInline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return 6};" + "A();", // m2 "A();" + "B();" + "B = function() {return 7};" + "A();" + "B();" ); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "A();" + "B();" + "B = function() {return 7};" + "A();" + "B();" }); } public void testSpecializeCascadedInline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return C()};" + "var C = function() {return 6};" + "A();", // m2 "B = function() {return 7};" + "A();"); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B, C;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return C()};" + /* Removed from m1, so add to m2 */ "C = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "A();" }); } public void testSpecializeInlineWithMultipleDependents() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return 6};" + 
"A();", // m2 "B = function() {return 7};" + "A();", // m3 "A();" ); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "A();", "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "A();", }); } public void testSpecializeInlineWithNamespaces() { JSModule[] modules = createModuleStar(<|fim▁hole|> /* Recursion in A() prevents inline of A*/ "ns.A = function() {alert(B());ns.A()};" + "var B = function() {return 6};" + "ns.A();", // m2 "B = function() {return 7};" + "ns.A();"); test(modules, new String[] { // m1 "var ns = {};" + "ns.A = function() {alert(6);ns.A()};" + /* Specialized A */ "ns.A();" + "var B;", // m2 "ns.A = function() {alert(B());ns.A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "ns.A();" }); } public void testSpecializeInlineWithRegularFunctions() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "A();", // m2 "B = function() {return 7};" + "A();"); test(modules, new String[] { // m1 "function A() {alert(6);A()}" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ /* Start of original m2 */ "B = function() {return 7};" + "A();" }); } public void testDontSpecializeLocalNonAnonymousFunctions() { /* normalize result, but not expected */ enableNormalize(false); JSModule[] modules = createModuleStar( // m1 "(function(){var noSpecialize = " + "function() {alert(6)};noSpecialize()})()", // m2 ""); test(modules, new String[] { // m1 "(function(){var noSpecialize = " + 
"function() {alert(6)};noSpecialize()})()", // m2 "" }); } public void testAddDummyVarsForRemovedFunctions() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B() + C());A()};" + "var B = function() {return 6};" + "var C = function() {return 8};" + "A();", // m2 "" + "A();"); test(modules, new String[] { // m1 "var A = function() {alert(6 + 8);A()};" + /* Specialized A */ "A();" + "var B, C;", // m2 "A = function() {alert(B() + C());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "C = function() {return 8};" + /* Removed from m1, so add to m2 */ "A();" }); } public void testSpecializeRemoveUnusedProperties() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "Foo.prototype.c = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "var aliasA = Foo.prototype.a;" + "var x = new Foo();" + "x.a();", // m2 "Foo.prototype.b = function() {return 6};" + "Foo.prototype.c = function() {return 7};" }); } public void testDontSpecializeAliasedFunctions_inline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "var aliasA = A;" + "A();", // m2 "B = function() {return 7};" + "B();"); test(modules, new String[] { // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "var aliasA = A;" + "A();", // m2 "B = function() {return 7};" + "B();" }); } public void testDontSpecializeAliasedFunctions_remove_unused_properties() { JSModule[] 
modules = createModuleStar( // m1 "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "var aliasB = Foo.prototype.b;" + "Foo.prototype.c = function() {return 7};" + "Foo.prototype.d = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();" + "var aliasC = (new Foo).c", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "var aliasB = Foo.prototype.b;" + "Foo.prototype.c = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();" + "var aliasC = (new Foo).c", // m2 "Foo.prototype.d = function() {return 7};" }); } public void testSpecializeDevirtualizePrototypeMethods() { JSModule[] modules = createModuleStar( // m1 "/** @constructor */" + "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a();return 7};" + "Foo.prototype.b = function() {this.a()};" + "var x = new Foo();" + "x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "var JSCompiler_StaticMethods_a =" + "function(JSCompiler_StaticMethods_a$self) {" + "JSCompiler_StaticMethods_a(JSCompiler_StaticMethods_a$self);" + "return 7" + "};" + "var x = new Foo();" + "JSCompiler_StaticMethods_a(x);", // m2 "Foo.prototype.a = function() {this.a();return 7};" + "Foo.prototype.b = function() {this.a()};" }); } public void testSpecializeDevirtualizePrototypeMethodsWithInline() { JSModule[] modules = createModuleStar( // m1 "/** @constructor */" + "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {return 7};" + "var x = new Foo();" + "var z = x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "var x = new Foo();" + "var z = 
7;", // m2 "Foo.prototype.a = function() {return 7};" }); } /** * Tests for {@link SpecializeModule.SpecializationState}. */ public static class SpecializeModuleSpecializationStateTest extends CompilerTestCase { Compiler lastCompiler; SpecializationState lastState; @Override public CompilerPass getProcessor(final Compiler compiler) { lastCompiler = compiler; return new CompilerPass() { public void process(Node externs, Node root) { SimpleDefinitionFinder defFinder = new SimpleDefinitionFinder(compiler); defFinder.process(externs, root); SimpleFunctionAliasAnalysis functionAliasAnalysis = new SimpleFunctionAliasAnalysis(); functionAliasAnalysis.analyze(defFinder); lastState = new SpecializationState(functionAliasAnalysis); } }; } public void testRemovedFunctions() { testSame("function F(){}\nvar G = function(a){};"); assertEquals(ImmutableSet.of(), lastState.getRemovedFunctions()); Node functionF = findFunction("F"); lastState.reportRemovedFunction(functionF, functionF.getParent()); assertEquals(ImmutableSet.of(functionF), lastState.getRemovedFunctions()); Node functionG = findFunction("F"); lastState.reportRemovedFunction(functionG, functionF.getParent()); assertEquals(ImmutableSet.of(functionF, functionG), lastState.getRemovedFunctions()); assertEquals(ImmutableSet.of(), lastState.getSpecializedFunctions()); } public void testSpecializedFunctions() { testSame("function F(){}\nvar G = function(a){};"); assertEquals(ImmutableSet.of(), lastState.getSpecializedFunctions()); Node functionF = findFunction("F"); lastState.reportSpecializedFunction(functionF); assertEquals(ImmutableSet.of(functionF), lastState.getSpecializedFunctions()); Node functionG = findFunction("F"); lastState.reportSpecializedFunction(functionG); assertEquals(ImmutableSet.of(functionF, functionG), lastState.getSpecializedFunctions()); assertEquals(ImmutableSet.of(), lastState.getRemovedFunctions()); } public void testCanFixupFunction() { testSame("function F(){}\n" + "var G = function(a){};\n" + 
"var ns = {};" + "ns.H = function(){};" + "var ns2 = {I : function anon1(){}};" + "(function anon2(){})();"); assertTrue(lastState.canFixupFunction(findFunction("F"))); assertTrue(lastState.canFixupFunction(findFunction("G"))); assertTrue(lastState.canFixupFunction(findFunction("ns.H"))); assertFalse(lastState.canFixupFunction(findFunction("anon1"))); assertFalse(lastState.canFixupFunction(findFunction("anon2"))); // Can't guarantee safe fixup for aliased functions testSame("function A(){}\n" + "var aliasA = A;\n"); assertFalse(lastState.canFixupFunction(findFunction("A"))); } private Node findFunction(String name) { FunctionFinder f = new FunctionFinder(name); new NodeTraversal(lastCompiler, f).traverse(lastCompiler.jsRoot); assertNotNull("Couldn't find " + name, f.found); return f.found; } /** * Quick Traversal to find a given function in the AST. */ private class FunctionFinder extends AbstractPostOrderCallback { Node found = null; final String target; FunctionFinder(String target) { this.target = target; } public void visit(NodeTraversal t, Node n, Node parent) { if (NodeUtil.isFunction(n) && target.equals(NodeUtil.getFunctionName(n))) { found = n; } } } } }<|fim▁end|>
// m1 "var ns = {};" +
<|file_name|>vec.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A growable list type, written `Vec<T>` but pronounced 'vector.' //! //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end). use core::prelude::*; use alloc::boxed::Box; use alloc::heap::{EMPTY, allocate, reallocate, deallocate}; use core::cmp::max; use core::default::Default; use core::fmt; use core::kinds::marker::{ContravariantLifetime, InvariantType}; use core::mem; use core::num; use core::ops; use core::ptr; use core::raw::Slice as RawSlice; use core::uint; use {Mutable, MutableSeq}; use slice::{MutableOrdSlice, MutableSliceAllocating, CloneableVector}; use slice::{Items, MutItems}; /// An owned, growable vector. 
/// /// # Examples /// /// ``` /// let mut vec = Vec::new(); /// vec.push(1i); /// vec.push(2i); /// /// assert_eq!(vec.len(), 2); /// assert_eq!(vec[0], 1); /// /// assert_eq!(vec.pop(), Some(2)); /// assert_eq!(vec.len(), 1); /// /// *vec.get_mut(0) = 7i; /// assert_eq!(vec[0], 7); /// /// vec.push_all([1, 2, 3]); /// /// for x in vec.iter() { /// println!("{}", x); /// } /// assert_eq!(vec, vec![7i, 1, 2, 3]); /// ``` /// /// The `vec!` macro is provided to make initialization more convenient: /// /// ``` /// let mut vec = vec![1i, 2i, 3i]; /// vec.push(4); /// assert_eq!(vec, vec![1, 2, 3, 4]); /// ``` /// /// Use a `Vec` as an efficient stack: /// /// ``` /// let mut stack = Vec::new(); /// /// stack.push(1i); /// stack.push(2i); /// stack.push(3i); /// /// loop { /// let top = match stack.pop() { /// None => break, // empty /// Some(x) => x, /// }; /// // Prints 3, 2, 1 /// println!("{}", top); /// } /// ``` /// /// # Capacity and reallocation /// /// The capacity of a vector is the amount of space allocated for any future /// elements that will be added onto the vector. This is not to be confused /// with the *length* of a vector, which specifies the number of actual /// elements within the vector. If a vector's length exceeds its capacity, /// its capacity will automatically be increased, but its elements will /// have to be reallocated. /// /// For example, a vector with capacity 10 and length 0 would be an empty /// vector with space for 10 more elements. Pushing 10 or fewer elements onto /// the vector will not change its capacity or cause reallocation to occur. /// However, if the vector's length is increased to 11, it will have to /// reallocate, which can be slow. For this reason, it is recommended /// to use `Vec::with_capacity` whenever possible to specify how big the vector /// is expected to get. #[unsafe_no_drop_flag] #[stable] pub struct Vec<T> { len: uint, cap: uint, ptr: *mut T } impl<T> Vec<T> { /// Constructs a new, empty `Vec`. 
/// /// The vector will not allocate until elements are pushed onto it. /// /// # Example /// /// ``` /// let mut vec: Vec<int> = Vec::new(); /// ``` #[inline] #[stable] pub fn new() -> Vec<T> { // We want ptr to never be NULL so instead we set it to some arbitrary // non-null value which is fine since we never call deallocate on the ptr // if cap is 0. The reason for this is because the pointer of a slice // being NULL would break the null pointer optimization for enums. Vec { len: 0, cap: 0, ptr: EMPTY as *mut T } } /// Constructs a new, empty `Vec` with the specified capacity. /// /// The vector will be able to hold exactly `capacity` elements without /// reallocating. If `capacity` is 0, the vector will not allocate. /// /// It is important to note that this function does not specify the /// *length* of the returned vector, but only the *capacity*. (For an /// explanation of the difference between length and capacity, see /// the main `Vec` docs above, 'Capacity and reallocation'.) To create /// a vector of a given length, use `Vec::from_elem` or `Vec::from_fn`. /// /// # Example /// /// ``` /// let mut vec: Vec<int> = Vec::with_capacity(10); /// /// // The vector contains no items, even though it has capacity for more /// assert_eq!(vec.len(), 0); /// /// // These are all done without reallocating... /// for i in range(0i, 10) { /// vec.push(i); /// } /// /// // ...but this may make the vector reallocate /// vec.push(11); /// ``` #[inline] #[stable] pub fn with_capacity(capacity: uint) -> Vec<T> { if mem::size_of::<T>() == 0 { Vec { len: 0, cap: uint::MAX, ptr: EMPTY as *mut T } } else if capacity == 0 { Vec::new() } else { let size = capacity.checked_mul(&mem::size_of::<T>()) .expect("capacity overflow"); let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) }; Vec { len: 0, cap: capacity, ptr: ptr as *mut T } } } /// Creates and initializes a `Vec`. 
/// /// Creates a `Vec` of size `length` and initializes the elements to the /// value returned by the closure `op`. /// /// # Example /// /// ``` /// let vec = Vec::from_fn(3, |idx| idx * 2); /// assert_eq!(vec, vec![0, 2, 4]); /// ``` #[inline] #[unstable = "the naming is uncertain as well as this migrating to unboxed \ closures in the future"] pub fn from_fn(length: uint, op: |uint| -> T) -> Vec<T> { unsafe { let mut xs = Vec::with_capacity(length); while xs.len < length { let len = xs.len; ptr::write(xs.as_mut_slice().unsafe_mut(len), op(len)); xs.len += 1; } xs } } /// Creates a `Vec<T>` directly from the raw constituents. /// /// This is highly unsafe: /// /// - if `ptr` is null, then `length` and `capacity` should be 0 /// - `ptr` must point to an allocation of size `capacity` /// - there must be `length` valid instances of type `T` at the /// beginning of that allocation /// - `ptr` must be allocated by the default `Vec` allocator /// /// # Example /// /// ``` /// use std::ptr; /// use std::mem; /// /// fn main() { /// let mut v = vec![1i, 2, 3]; /// /// // Pull out the various important pieces of information about `v` /// let p = v.as_mut_ptr(); /// let len = v.len(); /// let cap = v.capacity(); /// /// unsafe { /// // Cast `v` into the void: no destructor run, so we are in /// // complete control of the allocation to which `p` points. /// mem::forget(v); /// /// // Overwrite memory with 4, 5, 6 /// for i in range(0, len as int) { /// ptr::write(p.offset(i), 4 + i); /// } /// /// // Put everything back together into a Vec /// let rebuilt = Vec::from_raw_parts(len, cap, p); /// assert_eq!(rebuilt, vec![4i, 5i, 6i]); /// } /// } /// ``` #[experimental] pub unsafe fn from_raw_parts(length: uint, capacity: uint, ptr: *mut T) -> Vec<T> { Vec { len: length, cap: capacity, ptr: ptr } } /// Consumes the `Vec`, partitioning it based on a predicate. 
/// /// Partitions the `Vec` into two `Vec`s `(A,B)`, where all elements of `A` /// satisfy `f` and all elements of `B` do not. The order of elements is /// preserved. /// /// # Example /// /// ``` /// let vec = vec![1i, 2i, 3i, 4i]; /// let (even, odd) = vec.partition(|&n| n % 2 == 0); /// assert_eq!(even, vec![2, 4]); /// assert_eq!(odd, vec![1, 3]); /// ``` #[inline] #[experimental] pub fn partition(self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) { let mut lefts = Vec::new(); let mut rights = Vec::new(); for elt in self.into_iter() { if f(&elt) { lefts.push(elt); } else { rights.push(elt); } } (lefts, rights) } } impl<T: Clone> Vec<T> { /// Deprecated, call `extend` instead. #[inline] #[deprecated = "this function has been deprecated in favor of extend()"] pub fn append(mut self, second: &[T]) -> Vec<T> { self.push_all(second); self } /// Deprecated, call `to_vec()` instead #[inline] #[deprecated = "this function has been deprecated in favor of to_vec()"] pub fn from_slice(values: &[T]) -> Vec<T> { values.to_vec() } /// Constructs a `Vec` with copies of a value. /// /// Creates a `Vec` with `length` copies of `value`. /// /// # Example /// ``` /// let vec = Vec::from_elem(3, "hi"); /// println!("{}", vec); // prints [hi, hi, hi] /// ``` #[inline] #[unstable = "this functionality may become more generic over all collections"] pub fn from_elem(length: uint, value: T) -> Vec<T> { unsafe { let mut xs = Vec::with_capacity(length); while xs.len < length { let len = xs.len; ptr::write(xs.as_mut_slice().unsafe_mut(len), value.clone()); xs.len += 1; } xs } } /// Appends all elements in a slice to the `Vec`. /// /// Iterates over the slice `other`, clones each element, and then appends /// it to this `Vec`. The `other` vector is traversed in-order. 
/// /// # Example /// /// ``` /// let mut vec = vec![1i]; /// vec.push_all([2i, 3, 4]); /// assert_eq!(vec, vec![1, 2, 3, 4]); /// ``` #[inline] #[experimental] pub fn push_all(&mut self, other: &[T]) { self.reserve_additional(other.len()); for i in range(0, other.len()) { let len = self.len(); // Unsafe code so this can be optimised to a memcpy (or something similarly // fast) when T is Copy. LLVM is easily confused, so any extra operations // during the loop can prevent this optimisation. unsafe { ptr::write( self.as_mut_slice().unsafe_mut(len), other.unsafe_get(i).clone()); self.set_len(len + 1); } } } /// Grows the `Vec` in-place. /// /// Adds `n` copies of `value` to the `Vec`. /// /// # Example /// /// ``` /// let mut vec = vec!["hello"]; /// vec.grow(2, "world"); /// assert_eq!(vec, vec!["hello", "world", "world"]); /// ``` #[stable] pub fn grow(&mut self, n: uint, value: T) { self.reserve_additional(n); let mut i: uint = 0u; while i < n { self.push(value.clone()); i += 1u; } } /// Sets the value of a vector element at a given index, growing the vector /// as needed. /// /// Sets the element at position `index` to `value`. If `index` is past the /// end of the vector, expands the vector by replicating `initval` to fill /// the intervening space. /// /// # Example /// /// ``` /// # #![allow(deprecated)] /// let mut vec = vec!["a", "b", "c"]; /// vec.grow_set(1, &("fill"), "d"); /// vec.grow_set(4, &("fill"), "e"); /// assert_eq!(vec, vec!["a", "d", "c", "fill", "e"]); /// ``` #[deprecated = "call .grow() and .push() manually instead"] pub fn grow_set(&mut self, index: uint, initval: &T, value: T) { let l = self.len(); if index >= l { self.grow(index - l + 1u, initval.clone()); } *self.get_mut(index) = value; } /// Partitions a vector based on a predicate. /// /// Clones the elements of the vector, partitioning them into two `Vec`s /// `(a, b)`, where all elements of `a` satisfy `f` and all elements of `b` /// do not. The order of elements is preserved. 
/// /// # Example /// /// ``` /// let vec = vec![1i, 2, 3, 4]; /// let (even, odd) = vec.partitioned(|&n| n % 2 == 0); /// assert_eq!(even, vec![2i, 4]); /// assert_eq!(odd, vec![1i, 3]); /// ``` #[experimental] pub fn partitioned(&self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) { let mut lefts = Vec::new(); let mut rights = Vec::new(); for elt in self.iter() { if f(elt) { lefts.push(elt.clone()); } else { rights.push(elt.clone()); } } (lefts, rights) } } #[unstable] impl<T:Clone> Clone for Vec<T> { fn clone(&self) -> Vec<T> { self.as_slice().to_vec() } fn clone_from(&mut self, other: &Vec<T>) { // drop anything in self that will not be overwritten if self.len() > other.len() { self.truncate(other.len()) } // reuse the contained values' allocations/resources. for (place, thing) in self.iter_mut().zip(other.iter()) { place.clone_from(thing) } // self.len <= other.len due to the truncate above, so the // slice here is always in-bounds. let slice = other[self.len()..]; self.push_all(slice); } } #[experimental = "waiting on Index stability"] impl<T> Index<uint,T> for Vec<T> { #[inline] #[allow(deprecated)] // allow use of get fn index<'a>(&'a self, index: &uint) -> &'a T { self.get(*index) } } #[cfg(not(stage0))] impl<T> IndexMut<uint,T> for Vec<T> { #[inline] fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut T { self.get_mut(*index) } } impl<T> ops::Slice<uint, [T]> for Vec<T> { #[inline] fn as_slice_<'a>(&'a self) -> &'a [T] { self.as_slice() } #[inline] fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] { self.as_slice().slice_from_or_fail(start) } #[inline] fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] { self.as_slice().slice_to_or_fail(end) } #[inline] fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] { self.as_slice().slice_or_fail(start, end) } } impl<T> ops::SliceMut<uint, [T]> for Vec<T> { #[inline] fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] { self.as_mut_slice() } #[inline] fn slice_from_or_fail_mut<'a>(&'a mut 
self, start: &uint) -> &'a mut [T] { self.as_mut_slice().slice_from_or_fail_mut(start) } #[inline] fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] { self.as_mut_slice().slice_to_or_fail_mut(end) } #[inline] fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] { self.as_mut_slice().slice_or_fail_mut(start, end) } } #[experimental = "waiting on FromIterator stability"] impl<T> FromIterator<T> for Vec<T> { #[inline] fn from_iter<I:Iterator<T>>(mut iterator: I) -> Vec<T> { let (lower, _) = iterator.size_hint(); let mut vector = Vec::with_capacity(lower); for element in iterator { vector.push(element) } vector } } #[experimental = "waiting on Extendable stability"] impl<T> Extendable<T> for Vec<T> { #[inline] fn extend<I: Iterator<T>>(&mut self, mut iterator: I) { let (lower, _) = iterator.size_hint(); self.reserve_additional(lower); for element in iterator { self.push(element) } } } #[unstable = "waiting on PartialEq stability"] impl<T: PartialEq> PartialEq for Vec<T> { #[inline] fn eq(&self, other: &Vec<T>) -> bool { self.as_slice() == other.as_slice() } } #[unstable = "waiting on PartialOrd stability"] impl<T: PartialOrd> PartialOrd for Vec<T> { #[inline] fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> { self.as_slice().partial_cmp(&other.as_slice()) } } #[unstable = "waiting on Eq stability"] impl<T: Eq> Eq for Vec<T> {} #[experimental] impl<T: PartialEq, V: AsSlice<T>> Equiv<V> for Vec<T> { #[inline] fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() } } #[unstable = "waiting on Ord stability"] impl<T: Ord> Ord for Vec<T> { #[inline] fn cmp(&self, other: &Vec<T>) -> Ordering { self.as_slice().cmp(&other.as_slice()) } } #[experimental = "waiting on Collection stability"] impl<T> Collection for Vec<T> { #[inline] #[stable] fn len(&self) -> uint { self.len } } // FIXME: #13996: need a way to mark the return value as `noalias` #[inline(never)] unsafe fn alloc_or_realloc<T>(ptr: *mut T, 
old_size: uint, size: uint) -> *mut T { if old_size == 0 { allocate(size, mem::min_align_of::<T>()) as *mut T } else { reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T } } #[inline] unsafe fn dealloc<T>(ptr: *mut T, len: uint) { if mem::size_of::<T>() != 0 { deallocate(ptr as *mut u8, len * mem::size_of::<T>(), mem::min_align_of::<T>()) } } impl<T> Vec<T> { /// Returns the number of elements the vector can hold without /// reallocating. /// /// # Example /// /// ``` /// let vec: Vec<int> = Vec::with_capacity(10); /// assert_eq!(vec.capacity(), 10); /// ``` #[inline] #[stable] pub fn capacity(&self) -> uint { self.cap } /// Reserves capacity for at least `n` additional elements in the given /// vector. /// /// # Failure /// /// Fails if the new capacity overflows `uint`. /// /// # Example /// /// ``` /// let mut vec: Vec<int> = vec![1i]; /// vec.reserve_additional(10); /// assert!(vec.capacity() >= 11); /// ``` pub fn reserve_additional(&mut self, extra: uint) { if self.cap - self.len < extra { match self.len.checked_add(&extra) { None => fail!("Vec::reserve_additional: `uint` overflow"), Some(new_cap) => self.reserve(new_cap) } } } /// Reserves capacity for at least `n` elements in the given vector. /// /// This function will over-allocate in order to amortize the allocation /// costs in scenarios where the caller may need to repeatedly reserve /// additional space. /// /// If the capacity for `self` is already equal to or greater than the /// requested capacity, then no action is taken. /// /// # Example /// /// ``` /// let mut vec = vec![1i, 2, 3]; /// vec.reserve(10); /// assert!(vec.capacity() >= 10); /// ``` pub fn reserve(&mut self, capacity: uint) { if capacity > self.cap { self.reserve_exact(num::next_power_of_two(capacity)) } } /// Reserves capacity for exactly `capacity` elements in the given vector. /// /// If the capacity for `self` is already equal to or greater than the /// requested capacity, then no action is taken. 
/// /// # Example /// /// ``` /// let mut vec: Vec<int> = Vec::with_capacity(10); /// vec.reserve_exact(11); /// assert_eq!(vec.capacity(), 11); /// ``` pub fn reserve_exact(&mut self, capacity: uint) { if mem::size_of::<T>() == 0 { return } if capacity > self.cap { let size = capacity.checked_mul(&mem::size_of::<T>()) .expect("capacity overflow"); unsafe { self.ptr = alloc_or_realloc(self.ptr, self.cap * mem::size_of::<T>(), size); } self.cap = capacity; } } /// Shrinks the capacity of the vector as much as possible. /// /// # Example /// /// ``` /// let mut vec = vec![1i, 2, 3]; /// vec.shrink_to_fit(); /// ``` #[stable] pub fn shrink_to_fit(&mut self) { if mem::size_of::<T>() == 0 { return } if self.len == 0 { if self.cap != 0 { unsafe { dealloc(self.ptr, self.cap) } self.cap = 0; } } else { unsafe { // Overflow check is unnecessary as the vector is already at // least this large. self.ptr = reallocate(self.ptr as *mut u8, self.cap * mem::size_of::<T>(), self.len * mem::size_of::<T>(), mem::min_align_of::<T>()) as *mut T; } self.cap = self.len; } } /// Convert the vector into Box<[T]>. /// /// Note that this will drop any excess capacity. Calling this and converting back to a vector /// with `into_vec()` is equivalent to calling `shrink_to_fit()`. #[experimental] pub fn into_boxed_slice(mut self) -> Box<[T]> { self.shrink_to_fit(); unsafe { let xs: Box<[T]> = mem::transmute(self.as_mut_slice()); mem::forget(self); xs } } /// Deprecated, call `push` instead #[inline] #[deprecated = "call .push() instead"] pub fn append_one(mut self, x: T) -> Vec<T> { self.push(x); self } /// Shorten a vector, dropping excess elements. /// /// If `len` is greater than the vector's current length, this has no /// effect. 
/// /// # Example /// /// ``` /// let mut vec = vec![1i, 2, 3, 4]; /// vec.truncate(2); /// assert_eq!(vec, vec![1, 2]); /// ``` #[unstable = "waiting on failure semantics"] pub fn truncate(&mut self, len: uint) { unsafe { // drop any extra elements while len < self.len { // decrement len before the read(), so a failure on Drop doesn't // re-drop the just-failed value. self.len -= 1; ptr::read(self.as_slice().unsafe_get(self.len)); } } } /// Returns a mutable slice of the elements of `self`. /// /// # Example /// /// ``` /// fn foo(slice: &mut [int]) {} /// /// let mut vec = vec![1i, 2]; /// foo(vec.as_mut_slice()); /// ``` #[inline] #[stable] pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] { unsafe { mem::transmute(RawSlice { data: self.as_mut_ptr() as *const T, len: self.len, }) } } /// Deprecated: use `into_iter`. #[deprecated = "use into_iter"] pub fn move_iter(self) -> MoveItems<T> { self.into_iter() } /// Creates a consuming iterator, that is, one that moves each /// value out of the vector (from start to end). The vector cannot /// be used after calling this. /// /// # Example /// /// ``` /// let v = vec!["a".to_string(), "b".to_string()]; /// for s in v.into_iter() { /// // s has type String, not &String /// println!("{}", s); /// } /// ``` #[inline] pub fn into_iter(self) -> MoveItems<T> { unsafe { let ptr = self.ptr; let cap = self.cap; let begin = self.ptr as *const T; let end = if mem::size_of::<T>() == 0 { (ptr as uint + self.len()) as *const T } else { ptr.offset(self.len() as int) as *const T }; mem::forget(self); MoveItems { allocation: ptr, cap: cap, ptr: begin, end: end } } } /// Sets the length of a vector. /// /// This will explicitly set the size of the vector, without actually /// modifying its buffers, so it is up to the caller to ensure that the /// vector is actually the specified size. 
/// /// # Example /// /// ``` /// let mut v = vec![1u, 2, 3, 4]; /// unsafe { /// v.set_len(1); /// } /// ``` #[inline] #[stable] pub unsafe fn set_len(&mut self, len: uint) { self.len = len; } /// Returns a reference to the value at index `index`. /// /// # Failure /// /// Fails if `index` is out of bounds /// /// # Example /// /// ``` /// #![allow(deprecated)] /// /// let vec = vec![1i, 2, 3]; /// assert!(vec.get(1) == &2); /// ``` #[deprecated="prefer using indexing, e.g., vec[0]"] #[inline] pub fn get<'a>(&'a self, index: uint) -> &'a T { &self.as_slice()[index] } /// Returns a mutable reference to the value at index `index`. /// /// # Failure /// /// Fails if `index` is out of bounds /// /// # Example /// /// ``` /// let mut vec = vec![1i, 2, 3]; /// *vec.get_mut(1) = 4; /// assert_eq!(vec, vec![1i, 4, 3]); /// ``` #[inline] #[unstable = "this is likely to be moved to actual indexing"] pub fn get_mut<'a>(&'a mut self, index: uint) -> &'a mut T { &mut self.as_mut_slice()[index] } /// Returns an iterator over references to the elements of the vector in /// order. /// /// # Example /// /// ``` /// let vec = vec![1i, 2, 3]; /// for num in vec.iter() { /// println!("{}", *num); /// } /// ``` #[inline] pub fn iter<'a>(&'a self) -> Items<'a,T> { self.as_slice().iter() } /// Deprecated: use `iter_mut`. #[deprecated = "use iter_mut"] pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a,T> { self.iter_mut() } /// Returns an iterator over mutable references to the elements of the /// vector in order. /// /// # Example /// /// ``` /// let mut vec = vec![1i, 2, 3]; /// for num in vec.iter_mut() { /// *num = 0; /// } /// ``` #[inline] pub fn iter_mut<'a>(&'a mut self) -> MutItems<'a,T> { self.as_mut_slice().iter_mut() } /// Sorts the vector, in place, using `compare` to compare elements. /// /// This sort is `O(n log n)` worst-case and stable, but allocates /// approximately `2 * n`, where `n` is the length of `self`. 
    ///
    /// # Example
    ///
    /// ```
    /// let mut v = vec![5i, 4, 1, 3, 2];
    /// v.sort_by(|a, b| a.cmp(b));
    /// assert_eq!(v, vec![1i, 2, 3, 4, 5]);
    ///
    /// // reverse sorting
    /// v.sort_by(|a, b| b.cmp(a));
    /// assert_eq!(v, vec![5i, 4, 3, 2, 1]);
    /// ```
    #[inline]
    pub fn sort_by(&mut self, compare: |&T, &T| -> Ordering) {
        // Delegates to the slice sort, which performs the actual merge sort.
        self.as_mut_slice().sort_by(compare)
    }

    /// Returns a slice of self spanning the interval [`start`, `end`).
    ///
    /// # Failure
    ///
    /// Fails when the slice (or part of it) is outside the bounds of self, or when
    /// `start` > `end`.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3, 4];
    /// assert!(vec[0..2] == [1, 2]);
    /// ```
    #[inline]
    pub fn slice<'a>(&'a self, start: uint, end: uint) -> &'a [T] {
        self[start..end]
    }

    /// Returns a slice containing all but the first element of the vector.
    ///
    /// # Failure
    ///
    /// Fails when the vector is empty.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3];
    /// assert!(vec.tail() == [2, 3]);
    /// ```
    #[inline]
    pub fn tail<'a>(&'a self) -> &'a [T] {
        self[].tail()
    }

    /// Returns all but the first `n` elements of a vector.
    ///
    /// # Failure
    ///
    /// Fails when there are fewer than `n` elements in the vector.
    ///
    /// # Example
    ///
    /// ```
    /// #![allow(deprecated)]
    /// let vec = vec![1i, 2, 3, 4];
    /// assert!(vec.tailn(2) == [3, 4]);
    /// ```
    #[inline]
    #[deprecated = "use slice_from"]
    pub fn tailn<'a>(&'a self, n: uint) -> &'a [T] {
        self[n..]
    }

    /// Returns a reference to the last element of a vector, or `None` if it is
    /// empty.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3];
    /// assert!(vec.last() == Some(&3));
    /// ```
    #[inline]
    pub fn last<'a>(&'a self) -> Option<&'a T> {
        self[].last()
    }

    /// Deprecated: use `last_mut`.
    #[deprecated = "use last_mut"]
    pub fn mut_last<'a>(&'a mut self) -> Option<&'a mut T> {
        self.last_mut()
    }

    /// Returns a mutable reference to the last element of a vector, or `None`
    /// if it is empty.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3];
    /// *vec.last_mut().unwrap() = 4;
    /// assert_eq!(vec, vec![1i, 2, 4]);
    /// ```
    #[inline]
    pub fn last_mut<'a>(&'a mut self) -> Option<&'a mut T> {
        self.as_mut_slice().last_mut()
    }

    /// Removes an element from anywhere in the vector and return it, replacing
    /// it with the last element. This does not preserve ordering, but is O(1).
    ///
    /// Returns `None` if `index` is out of bounds.
    ///
    /// # Example
    /// ```
    /// let mut v = vec!["foo".to_string(), "bar".to_string(),
    ///                  "baz".to_string(), "qux".to_string()];
    ///
    /// assert_eq!(v.swap_remove(1), Some("bar".to_string()));
    /// assert_eq!(v, vec!["foo".to_string(), "qux".to_string(), "baz".to_string()]);
    ///
    /// assert_eq!(v.swap_remove(0), Some("foo".to_string()));
    /// assert_eq!(v, vec!["baz".to_string(), "qux".to_string()]);
    ///
    /// assert_eq!(v.swap_remove(2), None);
    /// ```
    #[inline]
    #[unstable = "the naming of this function may be altered"]
    pub fn swap_remove(&mut self, index: uint) -> Option<T> {
        let length = self.len();
        if length > 0 && index < length - 1 {
            // Move the target element to the back so pop() can return it.
            self.as_mut_slice().swap(index, length - 1);
        } else if index >= length {
            // Out of bounds: leave the vector untouched.
            return None
        }
        // index == length - 1 falls through: the target is already last.
        self.pop()
    }

    /// Prepends an element to the vector.
    ///
    /// # Warning
    ///
    /// This is an O(n) operation as it requires copying every element in the
    /// vector.
    ///
    /// # Example
    ///
    /// ```ignore
    /// let mut vec = vec![1i, 2, 3];
    /// vec.unshift(4);
    /// assert_eq!(vec, vec![4, 1, 2, 3]);
    /// ```
    #[inline]
    #[deprecated = "use insert(0, ...)"]
    pub fn unshift(&mut self, element: T) {
        self.insert(0, element)
    }

    /// Removes the first element from a vector and returns it, or `None` if
    /// the vector is empty.
    ///
    /// # Warning
    ///
    /// This is an O(n) operation as it requires copying every element in the
    /// vector.
    ///
    /// # Example
    ///
    /// ```
    /// #![allow(deprecated)]
    /// let mut vec = vec![1i, 2, 3];
    /// assert!(vec.shift() == Some(1));
    /// assert_eq!(vec, vec![2, 3]);
    /// ```
    #[inline]
    #[deprecated = "use remove(0)"]
    pub fn shift(&mut self) -> Option<T> {
        self.remove(0)
    }

    /// Inserts an element at position `index` within the vector, shifting all
    /// elements after position `i` one position to the right.
    ///
    /// # Failure
    ///
    /// Fails if `index` is not between `0` and the vector's length (both
    /// bounds inclusive).
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3];
    /// vec.insert(1, 4);
    /// assert_eq!(vec, vec![1, 4, 2, 3]);
    /// vec.insert(4, 5);
    /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
    /// ```
    #[unstable = "failure semantics need settling"]
    pub fn insert(&mut self, index: uint, element: T) {
        let len = self.len();
        assert!(index <= len);
        // space for the new element
        // NOTE: in this era `reserve` takes the desired *total* capacity.
        self.reserve(len + 1);

        unsafe { // infallible
            // The spot to put the new value
            {
                let p = self.as_mut_ptr().offset(index as int);
                // Shift everything over to make space. (Duplicating the
                // `index`th element into two consecutive places.)
                ptr::copy_memory(p.offset(1), &*p, len - index);
                // Write it in, overwriting the first copy of the `index`th
                // element.
                ptr::write(&mut *p, element);
            }
            self.set_len(len + 1);
        }
    }

    /// Removes and returns the element at position `index` within the vector,
    /// shifting all elements after position `index` one position to the left.
    /// Returns `None` if `i` is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// let mut v = vec![1i, 2, 3];
    /// assert_eq!(v.remove(1), Some(2));
    /// assert_eq!(v, vec![1, 3]);
    ///
    /// assert_eq!(v.remove(4), None);
    /// // v is unchanged:
    /// assert_eq!(v, vec![1, 3]);
    /// ```
    #[unstable = "failure semantics need settling"]
    pub fn remove(&mut self, index: uint) -> Option<T> {
        let len = self.len();
        if index < len {
            unsafe { // infallible
                let ret;
                {
                    // the place we are taking from.
                    let ptr = self.as_mut_ptr().offset(index as int);
                    // copy it out, unsafely having a copy of the value on
                    // the stack and in the vector at the same time.
                    ret = Some(ptr::read(ptr as *const T));

                    // Shift everything down to fill in that spot.
                    ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
                }
                self.set_len(len - 1);
                ret
            }
        } else {
            None
        }
    }

    /// Takes ownership of the vector `other`, moving all elements into
    /// the current vector. This does not copy any elements, and it is
    /// illegal to use the `other` vector after calling this method
    /// (because it is moved here).
    ///
    /// # Example
    ///
    /// ```
    /// # #![allow(deprecated)]
    /// let mut vec = vec![box 1i];
    /// vec.push_all_move(vec![box 2, box 3, box 4]);
    /// assert_eq!(vec, vec![box 1, box 2, box 3, box 4]);
    /// ```
    #[inline]
    #[deprecated = "use .extend(other.into_iter())"]
    pub fn push_all_move(&mut self, other: Vec<T>) {
        self.extend(other.into_iter());
    }

    /// Deprecated: use `slice_mut`.
    #[deprecated = "use slice_mut"]
    pub fn mut_slice<'a>(&'a mut self, start: uint, end: uint)
                         -> &'a mut [T] {
        self[mut start..end]
    }

    /// Returns a mutable slice of `self` between `start` and `end`.
    ///
    /// # Failure
    ///
    /// Fails when `start` or `end` point outside the bounds of `self`, or when
    /// `start` > `end`.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3, 4];
    /// assert!(vec[mut 0..2] == [1, 2]);
    /// ```
    #[inline]
    pub fn slice_mut<'a>(&'a mut self, start: uint, end: uint)
                         -> &'a mut [T] {
        self[mut start..end]
    }

    /// Deprecated: use "slice_from_mut".
    #[deprecated = "use slice_from_mut"]
    pub fn mut_slice_from<'a>(&'a mut self, start: uint) -> &'a mut [T] {
        self[mut start..]
    }

    /// Returns a mutable slice of `self` from `start` to the end of the `Vec`.
    ///
    /// # Failure
    ///
    /// Fails when `start` points outside the bounds of self.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3, 4];
    /// assert!(vec[mut 2..] == [3, 4]);
    /// ```
    #[inline]
    pub fn slice_from_mut<'a>(&'a mut self, start: uint) -> &'a mut [T] {
        self[mut start..]
    }

    /// Deprecated: use `slice_to_mut`.
    #[deprecated = "use slice_to_mut"]
    pub fn mut_slice_to<'a>(&'a mut self, end: uint) -> &'a mut [T] {
        self[mut ..end]
    }

    /// Returns a mutable slice of `self` from the start of the `Vec` to `end`.
    ///
    /// # Failure
    ///
    /// Fails when `end` points outside the bounds of self.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3, 4];
    /// assert!(vec[mut ..2] == [1, 2]);
    /// ```
    #[inline]
    pub fn slice_to_mut<'a>(&'a mut self, end: uint) -> &'a mut [T] {
        self[mut ..end]
    }

    /// Deprecated: use `split_at_mut`.
    #[deprecated = "use split_at_mut"]
    pub fn mut_split_at<'a>(&'a mut self, mid: uint) -> (&'a mut [T], &'a mut [T]) {
        self.split_at_mut(mid)
    }

    /// Returns a pair of mutable slices that divides the `Vec` at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Failure
    ///
    /// Fails if `mid > len`.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3, 4, 5, 6];
    ///
    /// // scoped to restrict the lifetime of the borrows
    /// {
    ///    let (left, right) = vec.split_at_mut(0);
    ///    assert!(left == &mut []);
    ///    assert!(right == &mut [1, 2, 3, 4, 5, 6]);
    /// }
    ///
    /// {
    ///     let (left, right) = vec.split_at_mut(2);
    ///     assert!(left == &mut [1, 2]);
    ///     assert!(right == &mut [3, 4, 5, 6]);
    /// }
    ///
    /// {
    ///     let (left, right) = vec.split_at_mut(6);
    ///     assert!(left == &mut [1, 2, 3, 4, 5, 6]);
    ///     assert!(right == &mut []);
    /// }
    /// ```
    #[inline]
    pub fn split_at_mut<'a>(&'a mut self, mid: uint) -> (&'a mut [T], &'a mut [T]) {
        self[mut].split_at_mut(mid)
    }

    /// Reverses the order of elements in a vector, in place.
    ///
    /// # Example
    ///
    /// ```
    /// let mut v = vec![1i, 2, 3];
    /// v.reverse();
    /// assert_eq!(v, vec![3i, 2, 1]);
    /// ```
    #[inline]
    pub fn reverse(&mut self) {
        self[mut].reverse()
    }

    /// Returns a slice of `self` from `start` to the end of the vec.
    ///
    /// # Failure
    ///
    /// Fails when `start` points outside the bounds of self.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3];
    /// assert!(vec[1..] == [2, 3]);
    /// ```
    #[inline]
    pub fn slice_from<'a>(&'a self, start: uint) -> &'a [T] {
        self[start..]
    }

    /// Returns a slice of self from the start of the vec to `end`.
    ///
    /// # Failure
    ///
    /// Fails when `end` points outside the bounds of self.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3, 4];
    /// assert!(vec[..2] == [1, 2]);
    /// ```
    #[inline]
    pub fn slice_to<'a>(&'a self, end: uint) -> &'a [T] {
        self[..end]
    }

    /// Returns a slice containing all but the last element of the vector.
    ///
    /// # Failure
    ///
    /// Fails if the vector is empty
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3];
    /// assert!(vec.init() == [1, 2]);
    /// ```
    #[inline]
    pub fn init<'a>(&'a self) -> &'a [T] {
        // Underflows (and thus fails) on an empty vector, per the doc above.
        self[0..self.len() - 1]
    }

    /// Returns an unsafe pointer to the vector's buffer.
    ///
    /// The caller must ensure that the vector outlives the pointer this
    /// function returns, or else it will end up pointing to garbage.
    ///
    /// Modifying the vector may cause its buffer to be reallocated, which
    /// would also make any pointers to it invalid.
    ///
    /// # Example
    ///
    /// ```
    /// let v = vec![1i, 2, 3];
    /// let p = v.as_ptr();
    /// unsafe {
    ///     // Examine each element manually
    ///     assert_eq!(*p, 1i);
    ///     assert_eq!(*p.offset(1), 2i);
    ///     assert_eq!(*p.offset(2), 3i);
    /// }
    /// ```
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.ptr as *const T
    }

    /// Returns a mutable unsafe pointer to the vector's buffer.
    ///
    /// The caller must ensure that the vector outlives the pointer this
    /// function returns, or else it will end up pointing to garbage.
    ///
    /// Modifying the vector may cause its buffer to be reallocated, which
    /// would also make any pointers to it invalid.
    ///
    /// # Example
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let mut v = vec![1i, 2, 3];
    /// let p = v.as_mut_ptr();
    /// unsafe {
    ///     ptr::write(p, 9i);
    ///     ptr::write(p.offset(2), 5i);
    /// }
    /// assert_eq!(v, vec![9i, 2, 5]);
    /// ```
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.ptr
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` such that `f(&e)` returns false.
    /// This method operates in place and preserves the order of the retained elements.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 3, 4];
    /// vec.retain(|x| x%2 == 0);
    /// assert_eq!(vec, vec![2, 4]);
    /// ```
    #[unstable = "the closure argument may become an unboxed closure"]
    pub fn retain(&mut self, f: |&T| -> bool) {
        let len = self.len();
        // `del` counts rejected elements seen so far; each kept element is
        // swapped back over them, compacting survivors to the front in order.
        let mut del = 0u;
        {
            let v = self.as_mut_slice();

            for i in range(0u, len) {
                if !f(&v[i]) {
                    del += 1;
                } else if del > 0 {
                    v.swap(i-del, i);
                }
            }
        }
        if del > 0 {
            // The rejected elements are now the trailing `del` slots; drop them.
            self.truncate(len - del);
        }
    }

    /// Expands a vector in place, initializing the new elements to the result of a function.
    ///
    /// The vector is grown by `n` elements. The i-th new element are initialized to the value
    /// returned by `f(i)` where `i` is in the range [0, n).
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![0u, 1];
    /// vec.grow_fn(3, |i| i);
    /// assert_eq!(vec, vec![0, 1, 0, 1, 2]);
    /// ```
    #[unstable = "this function may be renamed or change to unboxed closures"]
    pub fn grow_fn(&mut self, n: uint, f: |uint| -> T) {
        // Pre-reserve so the pushes below cannot reallocate more than once.
        self.reserve_additional(n);
        for i in range(0u, n) {
            self.push(f(i));
        }
    }
}

impl<T:Ord> Vec<T> {
    /// Sorts the vector in place.
    ///
    /// This sort is `O(n log n)` worst-case and stable, but allocates
    /// approximately `2 * n`, where `n` is the length of `self`.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![3i, 1, 2];
    /// vec.sort();
    /// assert_eq!(vec, vec![1, 2, 3]);
    /// ```
    pub fn sort(&mut self) {
        self.as_mut_slice().sort()
    }
}

#[experimental = "waiting on Mutable stability"]
impl<T> Mutable for Vec<T> {
    #[inline]
    #[stable]
    fn clear(&mut self) {
        // Dropping every element via truncate(0); capacity is kept.
        self.truncate(0)
    }
}

impl<T: PartialEq> Vec<T> {
    /// Returns true if a vector contains an element equal to the given value.
    ///
    /// # Example
    ///
    /// ```
    /// let vec = vec![1i, 2, 3];
    /// assert!(vec.contains(&1));
    /// ```
    #[inline]
    pub fn contains(&self, x: &T) -> bool {
        self.as_slice().contains(x)
    }

    /// Removes consecutive repeated elements in the vector.
    ///
    /// If the vector is sorted, this removes all duplicates.
    ///
    /// # Example
    ///
    /// ```
    /// let mut vec = vec![1i, 2, 2, 3, 2];
    /// vec.dedup();
    /// assert_eq!(vec, vec![1i, 2, 3, 2]);
    /// ```
    #[unstable = "this function may be renamed"]
    pub fn dedup(&mut self) {
        unsafe {
            // Although we have a mutable reference to `self`, we cannot make
            // *arbitrary* changes. The `PartialEq` comparisons could fail, so we
            // must ensure that the vector is in a valid state at all time.
            //
            // The way that we handle this is by using swaps; we iterate
            // over all the elements, swapping as we go so that at the end
            // the elements we wish to keep are in the front, and those we
            // wish to reject are at the back. We can then truncate the
            // vector. This operation is still O(n).
            //
            // Example: We start in this state, where `r` represents "next
            // read" and `w` represents "next_write`.
            //
            //           r
            //     +---+---+---+---+---+---+
            //     | 0 | 1 | 1 | 2 | 3 | 3 |
            //     +---+---+---+---+---+---+
            //           w
            //
            // Comparing self[r] against self[w-1], this is not a duplicate, so
            // we swap self[r] and self[w] (no effect as r==w) and then increment both
            // r and w, leaving us with:
            //
            //               r
            //     +---+---+---+---+---+---+
            //     | 0 | 1 | 1 | 2 | 3 | 3 |
            //     +---+---+---+---+---+---+
            //               w
            //
            // Comparing self[r] against self[w-1], this value is a duplicate,
            // so we increment `r` but leave everything else unchanged:
            //
            //                   r
            //     +---+---+---+---+---+---+
            //     | 0 | 1 | 1 | 2 | 3 | 3 |
            //     +---+---+---+---+---+---+
            //               w
            //
            // Comparing self[r] against self[w-1], this is not a duplicate,
            // so swap self[r] and self[w] and advance r and w:
            //
            //                       r
            //     +---+---+---+---+---+---+
            //     | 0 | 1 | 2 | 1 | 3 | 3 |
            //     +---+---+---+---+---+---+
            //                   w
            //
            // Not a duplicate, repeat:
            //
            //                           r
            //     +---+---+---+---+---+---+
            //     | 0 | 1 | 2 | 3 | 1 | 3 |
            //     +---+---+---+---+---+---+
            //                       w
            //
            // Duplicate, advance r. End of vec. Truncate to w.

            let ln = self.len();
            if ln < 1 { return; }

            // Avoid bounds checks by using unsafe pointers.
            let p = self.as_mut_slice().as_mut_ptr();
            let mut r = 1;
            let mut w = 1;

            while r < ln {
                let p_r = p.offset(r as int);
                let p_wm1 = p.offset((w - 1) as int);
                if *p_r != *p_wm1 {
                    if r != w {
                        let p_w = p_wm1.offset(1);
                        mem::swap(&mut *p_r, &mut *p_w);
                    }
                    w += 1;
                }
                r += 1;
            }

            self.truncate(w);
        }
    }
}

impl<T> AsSlice<T> for Vec<T> {
    /// Returns a slice into `self`.
    ///
    /// # Example
    ///
    /// ```
    /// fn foo(slice: &[int]) {}
    ///
    /// let vec = vec![1i, 2];
    /// foo(vec.as_slice());
    /// ```
    #[inline]
    #[stable]
    fn as_slice<'a>(&'a self) -> &'a [T] {
        unsafe {
            // Reinterpret (ptr, len) as an immutable slice.
            mem::transmute(RawSlice {
                data: self.as_ptr(),
                len: self.len
            })
        }
    }
}

impl<T: Clone, V: AsSlice<T>> Add<V, Vec<T>> for Vec<T> {
    /// Concatenation: clones the elements of `self` followed by those of
    /// `rhs` into a freshly allocated vector.
    #[inline]
    fn add(&self, rhs: &V) -> Vec<T> {
        let mut res = Vec::with_capacity(self.len() + rhs.as_slice().len());
        res.push_all(self.as_slice());
        res.push_all(rhs.as_slice());
        res
    }
}

#[unsafe_destructor]
impl<T> Drop for Vec<T> {
    fn drop(&mut self) {
        // This is (and should always remain) a no-op if the fields are
        // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
        if self.cap != 0 {
            unsafe {
                // ptr::read moves each element out so its destructor runs;
                // the buffer itself is then freed in one dealloc call.
                for x in self.as_mut_slice().iter() {
                    ptr::read(x);
                }
                dealloc(self.ptr, self.cap)
            }
        }
    }
}

#[stable]
impl<T> Default for Vec<T> {
    fn default() -> Vec<T> {
        Vec::new()
    }
}

#[experimental = "waiting on Show stability"]
impl<T:fmt::Show> fmt::Show for Vec<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.as_slice().fmt(f)
    }
}

#[experimental = "waiting on MutableSeq stability"]
impl<T> MutableSeq<T> for Vec<T> {
    /// Appends an element to the back of a collection.
    ///
    /// # Failure
    ///
    /// Fails if the number of elements in the vector overflows a `uint`.
    ///
    /// # Example
    ///
    /// ```rust
    /// let mut vec = vec!(1i, 2);
    /// vec.push(3);
    /// assert_eq!(vec, vec!(1, 2, 3));
    /// ```
    #[inline]
    #[stable]
    fn push(&mut self, value: T) {
        if mem::size_of::<T>() == 0 {
            // zero-size types consume no memory, so we can't rely on the
            // address space running out
            self.len = self.len.checked_add(&1).expect("length overflow");
            unsafe { mem::forget(value); }
            return
        }
        if self.len == self.cap {
            // Grow by doubling (minimum capacity 4 bytes' worth of elements).
            let old_size = self.cap * mem::size_of::<T>();
            let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
            if old_size > size { fail!("capacity overflow") }
            unsafe {
                self.ptr = alloc_or_realloc(self.ptr, old_size, size);
            }
            self.cap = max(self.cap, 2) * 2;
        }

        unsafe {
            let end = (self.ptr as *const T).offset(self.len as int) as *mut T;
            ptr::write(&mut *end, value);
            self.len += 1;
        }
    }

    #[inline]
    #[stable]
    fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            unsafe {
                // Decrement first so the popped slot is no longer "owned"
                // by the vector before it is read out.
                self.len -= 1;
                Some(ptr::read(self.as_slice().unsafe_get(self.len())))
            }
        }
    }

}

/// An iterator that moves out of a vector.
pub struct MoveItems<T> {
    allocation: *mut T, // the block of memory allocated for the vector
    cap: uint, // the capacity of the vector
    ptr: *const T,
    end: *const T
}

impl<T> MoveItems<T> {
    #[inline]
    /// Drops all items that have not yet been moved and returns the empty vector.
    pub fn unwrap(mut self) -> Vec<T> {
        unsafe {
            // Exhaust the iterator to drop the remaining elements.
            for _x in self { }
            // NOTE(review): this destructures `self` and then forgets it so
            // the MoveItems destructor does not free the buffer being handed
            // back to the Vec — confirm this pattern is accepted by the
            // target compiler, as destructuring a Drop type is suspicious.
            let MoveItems { allocation, cap, ptr: _ptr, end: _end } = self;
            mem::forget(self);
            Vec { ptr: allocation, cap: cap, len: 0 }
        }
    }
}

impl<T> Iterator<T> for MoveItems<T> {
    #[inline]
    fn next<'a>(&'a mut self) -> Option<T> {
        unsafe {
            if self.ptr == self.end {
                None
            } else {
                if mem::size_of::<T>() == 0 {
                    // purposefully don't use 'ptr.offset' because for
                    // vectors with 0-size elements this would return the
                    // same pointer.
                    self.ptr = mem::transmute(self.ptr as uint + 1);

                    // Use a non-null pointer value
                    Some(ptr::read(mem::transmute(1u)))
                } else {
                    let old = self.ptr;
                    self.ptr = self.ptr.offset(1);

                    Some(ptr::read(old))
                }
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (uint, Option<uint>) {
        // Remaining byte span divided by element size (ZSTs count bytes 1:1,
        // see `next` above).
        let diff = (self.end as uint) - (self.ptr as uint);
        let size = mem::size_of::<T>();
        let exact = diff / (if size == 0 {1} else {size});
        (exact, Some(exact))
    }
}

impl<T> DoubleEndedIterator<T> for MoveItems<T> {
    #[inline]
    fn next_back<'a>(&'a mut self) -> Option<T> {
        unsafe {
            if self.end == self.ptr {
                None
            } else {
                if mem::size_of::<T>() == 0 {
                    // See above for why 'ptr.offset' isn't used
                    self.end = mem::transmute(self.end as uint - 1);

                    // Use a non-null pointer value
                    Some(ptr::read(mem::transmute(1u)))
                } else {
                    self.end = self.end.offset(-1);

                    Some(ptr::read(mem::transmute(self.end)))
                }
            }
        }
    }
}

impl<T> ExactSize<T> for MoveItems<T> {}

#[unsafe_destructor]
impl<T> Drop for MoveItems<T> {
    fn drop(&mut self) {
        // destroy the remaining elements
        if self.cap != 0 {
            for _x in *self {}
            unsafe {
                dealloc(self.allocation, self.cap);
            }
        }
    }
}

/// Converts an iterator of pairs into a pair of vectors.
///
/// Returns a tuple containing two vectors where the i-th element of the first
/// vector contains the first element of the i-th tuple of the input iterator,
/// and the i-th element of the second vector contains the second element
/// of the i-th tuple of the input iterator.
#[unstable = "this functionality may become more generic over time"]
pub fn unzip<T, U, V: Iterator<(T, U)>>(mut iter: V) -> (Vec<T>, Vec<U>) {
    // Use the iterator's lower size bound to presize both outputs.
    let (lo, _) = iter.size_hint();
    let mut ts = Vec::with_capacity(lo);
    let mut us = Vec::with_capacity(lo);
    for (t, u) in iter {
        ts.push(t);
        us.push(u);
    }
    (ts, us)
}

/// Wrapper type providing a `&Vec<T>` reference via `Deref`.
#[experimental]
pub struct DerefVec<'a, T> {
    // A Vec built over borrowed storage; `l` ties it to the source lifetime.
    x: Vec<T>,
    l: ContravariantLifetime<'a>
}

impl<'a, T> Deref<Vec<T>> for DerefVec<'a, T> {
    fn deref<'b>(&'b self) -> &'b Vec<T> {
        &self.x
    }
}

// Prevent the inner `Vec<T>` from attempting to deallocate memory.
#[unsafe_destructor]
impl<'a, T> Drop for DerefVec<'a, T> {
    fn drop(&mut self) {
        // Zeroing len/cap makes Vec's destructor a no-op (it never owned
        // the borrowed buffer in the first place).
        self.x.len = 0;
        self.x.cap = 0;
    }
}

/// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
#[experimental]
pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
    unsafe {
        DerefVec {
            x: Vec::from_raw_parts(x.len(), x.len(), x.as_ptr() as *mut T),
            l: ContravariantLifetime::<'a>
        }
    }
}

/// Unsafe vector operations.
#[unstable]
pub mod raw {
    use super::Vec;
    use core::ptr;

    /// Constructs a vector from an unsafe pointer to a buffer.
    ///
    /// The elements of the buffer are copied into the vector without cloning,
    /// as if `ptr::read()` were called on them.
    #[inline]
    #[unstable]
    pub unsafe fn from_buf<T>(ptr: *const T, elts: uint) -> Vec<T> {
        let mut dst = Vec::with_capacity(elts);
        dst.set_len(elts);
        ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
        dst
    }
}

/// An owned, partially type-converted vector of elements with non-zero size.
///
/// `T` and `U` must have the same, non-zero size. They must also have the same
/// alignment.
///
/// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
/// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
/// destructed. Additionally the underlying storage of `vec` will be freed.
struct PartialVecNonZeroSized<T,U> {
    vec: Vec<T>,

    start_u: *mut U,
    end_u: *mut U,
    start_t: *mut T,
    end_t: *mut T,
}

/// An owned, partially type-converted vector of zero-sized elements.
///
/// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
/// are destructed.
struct PartialVecZeroSized<T,U> {
    num_t: uint,
    num_u: uint,
    // Invariance markers: the struct logically owns `T`s and `U`s.
    marker_t: InvariantType<T>,
    marker_u: InvariantType<U>,
}

#[unsafe_destructor]
impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
    fn drop(&mut self) {
        unsafe {
            // `vec` hasn't been modified until now. As it has a length
            // currently, this would run destructors of `T`s which might not be
            // there. So at first, set `vec`s length to `0`. This must be done
            // at first to remain memory-safe as the destructors of `U` or `T`
            // might cause unwinding where `vec`s destructor would be executed.
            self.vec.set_len(0);

            // We have instances of `U`s and `T`s in `vec`. Destruct them.
            while self.start_u != self.end_u {
                let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
                self.start_u = self.start_u.offset(1);
            }
            while self.start_t != self.end_t {
                let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
                self.start_t = self.start_t.offset(1);
            }
            // After this destructor ran, the destructor of `vec` will run,
            // deallocating the underlying memory.
        }
    }
}

#[unsafe_destructor]
impl<T,U> Drop for PartialVecZeroSized<T,U> {
    fn drop(&mut self) {
        unsafe {
            // Destruct the instances of `T` and `U` this struct owns.
            // Zero-sized values carry no data, so a fresh uninitialized
            // value stands in for each owned instance when running drops.
            while self.num_t != 0 {
                let _: T = mem::uninitialized(); // Run a `T` destructor.
                self.num_t -= 1;
            }
            while self.num_u != 0 {
                let _: U = mem::uninitialized(); // Run a `U` destructor.
                self.num_u -= 1;
            }
        }
    }
}

impl<T> Vec<T> {
    /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
    /// size and in case they are not zero-sized the same minimal alignment.
    ///
    /// # Failure
    ///
    /// Fails if `T` and `U` have differing sizes or are not zero-sized and
    /// have differing minimal alignments.
/// /// # Example /// /// ``` /// let v = vec![0u, 1, 2]; /// let w = v.map_in_place(|i| i + 3); /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice()); /// /// #[deriving(PartialEq, Show)] /// struct Newtype(u8); /// let bytes = vec![0x11, 0x22]; /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x)); /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice()); /// ``` pub fn map_in_place<U>(self, f: |T| -> U) -> Vec<U> { // FIXME: Assert statically that the types `T` and `U` have the same // size. assert!(mem::size_of::<T>() == mem::size_of::<U>()); let mut vec = self; if mem::size_of::<T>() != 0 { // FIXME: Assert statically that the types `T` and `U` have the // same minimal alignment in case they are not zero-sized. // These asserts are necessary because the `min_align_of` of the // types are passed to the allocator by `Vec`. assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>()); // This `as int` cast is safe, because the size of the elements of the // vector is not 0, and: // // 1) If the size of the elements in the vector is 1, the `int` may // overflow, but it has the correct bit pattern so that the // `.offset()` function will work. // // Example: // Address space 0x0-0xF. // `u8` array at: 0x1. // Size of `u8` array: 0x8. // Calculated `offset`: -0x8. // After `array.offset(offset)`: 0x9. // (0x1 + 0x8 = 0x1 - 0x8) // // 2) If the size of the elements in the vector is >1, the `uint` -> // `int` conversion can't overflow. let offset = vec.len() as int; let start = vec.as_mut_ptr(); let mut pv = PartialVecNonZeroSized { vec: vec, start_t: start, // This points inside the vector, as the vector has length // `offset`. 
end_t: unsafe { start.offset(offset) }, start_u: start as *mut U, end_u: start as *mut U, }; // start_t // start_u // | // +-+-+-+-+-+-+ // |T|T|T|...|T| // +-+-+-+-+-+-+ // | | // end_u end_t while pv.end_u as *mut T != pv.end_t { unsafe { // start_u start_t // | | // +-+-+-+-+-+-+-+-+-+ // |U|...|U|T|T|...|T| // +-+-+-+-+-+-+-+-+-+ // | | // end_u end_t let t = ptr::read(pv.start_t as *const T); // start_u start_t // | | // +-+-+-+-+-+-+-+-+-+ // |U|...|U|X|T|...|T| // +-+-+-+-+-+-+-+-+-+ // | | // end_u end_t // We must not fail here, one cell is marked as `T` // although it is not `T`. pv.start_t = pv.start_t.offset(1); // start_u start_t // | | // +-+-+-+-+-+-+-+-+-+ // |U|...|U|X|T|...|T| // +-+-+-+-+-+-+-+-+-+ // | | // end_u end_t // We may fail again. // The function given by the user might fail. let u = f(t); ptr::write(pv.end_u, u); // start_u start_t // | | // +-+-+-+-+-+-+-+-+-+ // |U|...|U|U|T|...|T| // +-+-+-+-+-+-+-+-+-+ // | | // end_u end_t // We should not fail here, because that would leak the `U` // pointed to by `end_u`. pv.end_u = pv.end_u.offset(1); // start_u start_t // | | // +-+-+-+-+-+-+-+-+-+ // |U|...|U|U|T|...|T| // +-+-+-+-+-+-+-+-+-+ // | | // end_u end_t // We may fail again. } } // start_u start_t // | | // +-+-+-+-+-+-+ // |U|...|U|U|U| // +-+-+-+-+-+-+ // | // end_t // end_u // Extract `vec` and prevent the destructor of // `PartialVecNonZeroSized` from running. Note that none of the // function calls can fail, thus no resources can be leaked (as the // `vec` member of `PartialVec` is the only one which holds // allocations -- and it is returned from this function. None of // this can fail. unsafe { let vec_len = pv.vec.len(); let vec_cap = pv.vec.capacity(); let vec_ptr = pv.vec.as_mut_ptr() as *mut U; mem::forget(pv); Vec::from_raw_parts(vec_len, vec_cap, vec_ptr) } } else { // Put the `Vec` into the `PartialVecZeroSized` structure and // prevent the destructor of the `Vec` from running. 
Since the // `Vec` contained zero-sized objects, it did not allocate, so we // are not leaking memory here. let mut pv = PartialVecZeroSized::<T,U> { num_t: vec.len(), num_u: 0, marker_t: InvariantType, marker_u: InvariantType, }; unsafe { mem::forget(vec); } while pv.num_t != 0 { unsafe { // Create a `T` out of thin air and decrement `num_t`. This // must not fail between these steps, as otherwise a // destructor of `T` which doesn't exist runs. let t = mem::uninitialized(); pv.num_t -= 1; // The function given by the user might fail. let u = f(t); // Forget the `U` and increment `num_u`. This increment // cannot overflow the `uint` as we only do this for a // number of times that fits into a `uint` (and start with // `0`). Again, we should not fail between these steps. mem::forget(u); pv.num_u += 1; } } // Create a `Vec` from our `PartialVecZeroSized` and make sure the // destructor of the latter will not run. None of this can fail. let mut result = Vec::new(); unsafe { result.set_len(pv.num_u); } result } } } #[cfg(test)] mod tests { extern crate test; use std::prelude::*; use std::mem::size_of; use test::Bencher; use super::{as_vec, unzip, raw, Vec}; use MutableSeq; struct DropCounter<'a> { count: &'a mut int } #[unsafe_destructor] impl<'a> Drop for DropCounter<'a> { fn drop(&mut self) { *self.count += 1; } } #[test] fn test_as_vec() { let xs = [1u8, 2u8, 3u8]; assert_eq!(as_vec(xs).as_slice(), xs.as_slice()); } #[test] fn test_as_vec_dtor() { let (mut count_x, mut count_y) = (0, 0); { let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }]; assert_eq!(as_vec(xs).len(), 2); } assert_eq!(count_x, 1); assert_eq!(count_y, 1); } #[test] fn test_small_vec_struct() { assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3); } #[test] fn test_double_drop() { struct TwoVec<T> { x: Vec<T>, y: Vec<T> } let (mut count_x, mut count_y) = (0, 0); { let mut tv = TwoVec { x: Vec::new(), y: Vec::new() }; tv.x.push(DropCounter {count: &mut count_x}); 
tv.y.push(DropCounter {count: &mut count_y}); // If Vec had a drop flag, here is where it would be zeroed. // Instead, it should rely on its internal state to prevent // doing anything significant when dropped multiple times. drop(tv.x); // Here tv goes out of scope, tv.y should be dropped, but not tv.x. } assert_eq!(count_x, 1); assert_eq!(count_y, 1); } #[test] fn test_reserve_additional() { let mut v = Vec::new(); assert_eq!(v.capacity(), 0); v.reserve_additional(2); assert!(v.capacity() >= 2); for i in range(0i, 16) { v.push(i); } assert!(v.capacity() >= 16); v.reserve_additional(16); assert!(v.capacity() >= 32); v.push(16); v.reserve_additional(16); assert!(v.capacity() >= 33) } #[test] fn test_extend() { let mut v = Vec::new(); let mut w = Vec::new(); v.extend(range(0i, 3)); for i in range(0i, 3) { w.push(i) } assert_eq!(v, w); v.extend(range(3i, 10)); for i in range(3i, 10) { w.push(i) } assert_eq!(v, w); } #[test] fn test_slice_from_mut() { let mut values = vec![1u8,2,3,4,5]; { let slice = values.slice_from_mut(2); assert!(slice == [3, 4, 5]); for p in slice.iter_mut() { *p += 2; } } assert!(values.as_slice() == [1, 2, 5, 6, 7]); } #[test] fn test_slice_to_mut() { let mut values = vec![1u8,2,3,4,5]; { let slice = values.slice_to_mut(2); assert!(slice == [1, 2]); for p in slice.iter_mut() { *p += 1; } } assert!(values.as_slice() == [2, 3, 3, 4, 5]); } #[test] fn test_split_at_mut() { let mut values = vec![1u8,2,3,4,5]; { let (left, right) = values.split_at_mut(2); { let left: &[_] = left; assert!(left[0..left.len()] == [1, 2]); } for p in left.iter_mut() { *p += 1; } { let right: &[_] = right; assert!(right[0..right.len()] == [3, 4, 5]); } for p in right.iter_mut() { *p += 2; } } assert!(values == vec![2u8, 3, 5, 6, 7]); } #[test] fn test_clone() { let v: Vec<int> = vec!(); let w = vec!(1i, 2, 3); assert_eq!(v, v.clone()); let z = w.clone(); assert_eq!(w, z); // they should be disjoint in memory. 
assert!(w.as_ptr() != z.as_ptr()) } #[test] fn test_clone_from() { let mut v = vec!(); let three = vec!(box 1i, box 2, box 3); let two = vec!(box 4i, box 5); // zero, long v.clone_from(&three); assert_eq!(v, three); // equal v.clone_from(&three); assert_eq!(v, three); // long, short v.clone_from(&two); assert_eq!(v, two); // short, long v.clone_from(&three); assert_eq!(v, three) } #[test] fn test_grow_fn() { let mut v = vec![0u, 1]; v.grow_fn(3, |i| i); assert!(v == vec![0u, 1, 0, 1, 2]); } #[test] fn test_retain() { let mut vec = vec![1u, 2, 3, 4]; vec.retain(|x| x%2 == 0); assert!(vec == vec![2u, 4]); } #[test] fn zero_sized_values() { let mut v = Vec::new(); assert_eq!(v.len(), 0); v.push(()); assert_eq!(v.len(), 1); v.push(()); assert_eq!(v.len(), 2); assert_eq!(v.pop(), Some(())); assert_eq!(v.pop(), Some(())); assert_eq!(v.pop(), None); assert_eq!(v.iter().count(), 0); v.push(()); assert_eq!(v.iter().count(), 1); v.push(()); assert_eq!(v.iter().count(), 2); for &() in v.iter() {} assert_eq!(v.iter_mut().count(), 2); v.push(()); assert_eq!(v.iter_mut().count(), 3); v.push(()); assert_eq!(v.iter_mut().count(), 4); for &() in v.iter_mut() {} unsafe { v.set_len(0); } assert_eq!(v.iter_mut().count(), 0); } #[test] fn test_partition() { assert_eq!(vec![].partition(|x: &int| *x < 3), (vec![], vec![])); assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![])); assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 2), (vec![1], vec![2, 3])); assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3])); } #[test] fn test_partitioned() { assert_eq!(vec![].partitioned(|x: &int| *x < 3), (vec![], vec![])) assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 4), (vec![1, 2, 3], vec![])); assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 2), (vec![1], vec![2, 3])); assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 0), (vec![], vec![1, 2, 3])); } #[test] fn test_zip_unzip() { let z1 = vec![(1i, 4i), (2, 5), (3, 6)]; let (left, 
right) = unzip(z1.iter().map(|&x| x)); let (left, right) = (left.as_slice(), right.as_slice()); assert_eq!((1, 4), (left[0], right[0])); assert_eq!((2, 5), (left[1], right[1])); assert_eq!((3, 6), (left[2], right[2])); } #[test] fn test_unsafe_ptrs() { unsafe { // Test on-stack copy-from-buf. let a = [1i, 2, 3]; let ptr = a.as_ptr(); let b = raw::from_buf(ptr, 3u); assert_eq!(b, vec![1, 2, 3]); // Test on-heap copy-from-buf. let c = vec![1i, 2, 3, 4, 5]; let ptr = c.as_ptr(); let d = raw::from_buf(ptr, 5u); assert_eq!(d, vec![1, 2, 3, 4, 5]); } } #[test] fn test_vec_truncate_drop() { static mut drops: uint = 0; struct Elem(int); impl Drop for Elem { fn drop(&mut self) { unsafe { drops += 1; } } } let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)]; assert_eq!(unsafe { drops }, 0); v.truncate(3); assert_eq!(unsafe { drops }, 2); v.truncate(0); assert_eq!(unsafe { drops }, 5); } #[test] #[should_fail] fn test_vec_truncate_fail() { struct BadElem(int); impl Drop for BadElem { fn drop(&mut self) { let BadElem(ref mut x) = *self; if *x == 0xbadbeef { fail!("BadElem failure: 0xbadbeef") } } } let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)]; v.truncate(0); } #[test] fn test_index() { let vec = vec!(1i, 2, 3); assert!(vec[1] == 2); } #[test] #[should_fail] fn test_index_out_of_bounds() { let vec = vec!(1i, 2, 3); let _ = vec[3]; } #[test] #[should_fail] fn test_slice_out_of_bounds_1() { let x: Vec<int> = vec![1, 2, 3, 4, 5]; x[-1..]; } #[test] #[should_fail] fn test_slice_out_of_bounds_2() { let x: Vec<int> = vec![1, 2, 3, 4, 5]; x[..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_3() { let x: Vec<int> = vec![1, 2, 3, 4, 5]; x[-1..4]; } #[test] #[should_fail] fn test_slice_out_of_bounds_4() { let x: Vec<int> = vec![1, 2, 3, 4, 5]; x[1..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_5() { let x: Vec<int> = vec![1, 2, 3, 4, 5]; x[3..2]; } #[test] fn test_swap_remove_empty() { let mut vec: Vec<uint> = vec!(); 
assert_eq!(vec.swap_remove(0), None); } #[test] fn test_move_iter_unwrap() { let mut vec: Vec<uint> = Vec::with_capacity(7); vec.push(1); vec.push(2); let ptr = vec.as_ptr(); vec = vec.into_iter().unwrap(); assert_eq!(vec.as_ptr(), ptr); assert_eq!(vec.capacity(), 7); assert_eq!(vec.len(), 0); } #[test] #[should_fail] fn test_map_in_place_incompatible_types_fail() { let v = vec![0u, 1, 2]; v.map_in_place(|_| ()); } #[test] fn test_map_in_place() { let v = vec![0u, 1, 2]; assert_eq!(v.map_in_place(|i: uint| i as int - 1).as_slice(), [-1i, 0, 1].as_slice()); } #[test] fn test_map_in_place_zero_sized() { let v = vec![(), ()]; #[deriving(PartialEq, Show)] struct ZeroSized; assert_eq!(v.map_in_place(|_| ZeroSized).as_slice(), [ZeroSized, ZeroSized].as_slice()); } #[test] fn test_move_items() { let vec = vec![1, 2, 3]; let mut vec2 : Vec<i32> = vec![]; for i in vec.into_iter() { vec2.push(i); } assert!(vec2 == vec![1, 2, 3]); } #[test] fn test_move_items_reverse() { let vec = vec![1, 2, 3]; let mut vec2 : Vec<i32> = vec![]; for i in vec.into_iter().rev() { vec2.push(i); } assert!(vec2 == vec![3, 2, 1]); } #[test] fn test_move_items_zero_sized() { let vec = vec![(), (), ()]; let mut vec2 : Vec<()> = vec![]; for i in vec.into_iter() { vec2.push(i); } assert!(vec2 == vec![(), (), ()]); } #[test] fn test_into_boxed_slice() { let xs = vec![1u, 2, 3]; let ys = xs.into_boxed_slice(); assert_eq!(ys.as_slice(), [1u, 2, 3].as_slice()); } #[bench] fn bench_new(b: &mut Bencher) { b.iter(|| { let v: Vec<uint> = Vec::new(); assert_eq!(v.len(), 0); assert_eq!(v.capacity(), 0); }) } fn do_bench_with_capacity(b: &mut Bencher, src_len: uint) { b.bytes = src_len as u64; b.iter(|| { let v: Vec<uint> = Vec::with_capacity(src_len); assert_eq!(v.len(), 0); assert_eq!(v.capacity(), src_len); }) } #[bench] fn bench_with_capacity_0000(b: &mut Bencher) { do_bench_with_capacity(b, 0) } #[bench] fn bench_with_capacity_0010(b: &mut Bencher) { do_bench_with_capacity(b, 10) } #[bench] fn 
bench_with_capacity_0100(b: &mut Bencher) { do_bench_with_capacity(b, 100) } #[bench] fn bench_with_capacity_1000(b: &mut Bencher) { do_bench_with_capacity(b, 1000) } fn do_bench_from_fn(b: &mut Bencher, src_len: uint) { b.bytes = src_len as u64; b.iter(|| { let dst = Vec::from_fn(src_len, |i| i); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }) } #[bench] fn bench_from_fn_0000(b: &mut Bencher) { do_bench_from_fn(b, 0) } #[bench] fn bench_from_fn_0010(b: &mut Bencher) { do_bench_from_fn(b, 10) } #[bench] fn bench_from_fn_0100(b: &mut Bencher) { do_bench_from_fn(b, 100) } #[bench] fn bench_from_fn_1000(b: &mut Bencher) { do_bench_from_fn(b, 1000) } fn do_bench_from_elem(b: &mut Bencher, src_len: uint) { b.bytes = src_len as u64; b.iter(|| { let dst: Vec<uint> = Vec::from_elem(src_len, 5); assert_eq!(dst.len(), src_len); assert!(dst.iter().all(|x| *x == 5)); }) } #[bench] fn bench_from_elem_0000(b: &mut Bencher) { do_bench_from_elem(b, 0) } #[bench] fn bench_from_elem_0010(b: &mut Bencher) { do_bench_from_elem(b, 10) } #[bench] fn bench_from_elem_0100(b: &mut Bencher) { do_bench_from_elem(b, 100) } #[bench] fn bench_from_elem_1000(b: &mut Bencher) { do_bench_from_elem(b, 1000) } fn do_bench_from_slice(b: &mut Bencher, src_len: uint) { let src: Vec<uint> = FromIterator::from_iter(range(0, src_len)); b.bytes = src_len as u64; b.iter(|| { let dst = src.clone().as_slice().to_vec(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_from_slice_0000(b: &mut Bencher) { do_bench_from_slice(b, 0) } #[bench] fn bench_from_slice_0010(b: &mut Bencher) { do_bench_from_slice(b, 10) } #[bench] fn bench_from_slice_0100(b: &mut Bencher) { do_bench_from_slice(b, 100) } #[bench] fn bench_from_slice_1000(b: &mut Bencher) { do_bench_from_slice(b, 1000) } fn do_bench_from_iter(b: &mut Bencher, src_len: uint) { let src: Vec<uint> = FromIterator::from_iter(range(0, src_len)); b.bytes = 
src_len as u64; b.iter(|| { let dst: Vec<uint> = FromIterator::from_iter(src.clone().into_iter()); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_from_iter_0000(b: &mut Bencher) { do_bench_from_iter(b, 0) } #[bench] fn bench_from_iter_0010(b: &mut Bencher) { do_bench_from_iter(b, 10) } #[bench] fn bench_from_iter_0100(b: &mut Bencher) { do_bench_from_iter(b, 100) } #[bench] fn bench_from_iter_1000(b: &mut Bencher) { do_bench_from_iter(b, 1000) } fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) { let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len)); let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len)); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend(src.clone().into_iter()); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_extend_0000_0000(b: &mut Bencher) { do_bench_extend(b, 0, 0) } #[bench] fn bench_extend_0000_0010(b: &mut Bencher) { do_bench_extend(b, 0, 10) } #[bench] fn bench_extend_0000_0100(b: &mut Bencher) { do_bench_extend(b, 0, 100) } #[bench] fn bench_extend_0000_1000(b: &mut Bencher) { do_bench_extend(b, 0, 1000) } #[bench] fn bench_extend_0010_0010(b: &mut Bencher) { do_bench_extend(b, 10, 10) } #[bench] fn bench_extend_0100_0100(b: &mut Bencher) { do_bench_extend(b, 100, 100) } #[bench] fn bench_extend_1000_1000(b: &mut Bencher) { do_bench_extend(b, 1000, 1000) } fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) { let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len)); let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len)); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.push_all(src.as_slice()); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_push_all_0000_0000(b: &mut Bencher) { 
do_bench_push_all(b, 0, 0) } #[bench] fn bench_push_all_0000_0010(b: &mut Bencher) { do_bench_push_all(b, 0, 10) } #[bench] fn bench_push_all_0000_0100(b: &mut Bencher) { do_bench_push_all(b, 0, 100) } #[bench] fn bench_push_all_0000_1000(b: &mut Bencher) { do_bench_push_all(b, 0, 1000) } #[bench] fn bench_push_all_0010_0010(b: &mut Bencher) { do_bench_push_all(b, 10, 10) } #[bench] fn bench_push_all_0100_0100(b: &mut Bencher) { do_bench_push_all(b, 100, 100) } #[bench] fn bench_push_all_1000_1000(b: &mut Bencher) { do_bench_push_all(b, 1000, 1000) } fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) { let dst: Vec<uint> = FromIterator::from_iter(range(0u, dst_len)); let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len)); b.bytes = src_len as u64; b.iter(|| { let mut dst = dst.clone(); dst.extend(src.clone().into_iter()); assert_eq!(dst.len(), dst_len + src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_push_all_move_0000_0000(b: &mut Bencher) { do_bench_push_all_move(b, 0, 0) } #[bench] fn bench_push_all_move_0000_0010(b: &mut Bencher) { do_bench_push_all_move(b, 0, 10) } #[bench] fn bench_push_all_move_0000_0100(b: &mut Bencher) { do_bench_push_all_move(b, 0, 100) } #[bench] fn bench_push_all_move_0000_1000(b: &mut Bencher) { do_bench_push_all_move(b, 0, 1000) } #[bench] fn bench_push_all_move_0010_0010(b: &mut Bencher) { do_bench_push_all_move(b, 10, 10) } #[bench] fn bench_push_all_move_0100_0100(b: &mut Bencher) { do_bench_push_all_move(b, 100, 100) } <|fim▁hole|> fn do_bench_clone(b: &mut Bencher, src_len: uint) { let src: Vec<uint> = FromIterator::from_iter(range(0, src_len)); b.bytes = src_len as u64; b.iter(|| { let dst = src.clone(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); } #[bench] fn bench_clone_0000(b: &mut Bencher) { do_bench_clone(b, 0) } #[bench] fn bench_clone_0010(b: &mut Bencher) { do_bench_clone(b, 10) } 
#[bench] fn bench_clone_0100(b: &mut Bencher) { do_bench_clone(b, 100) } #[bench] fn bench_clone_1000(b: &mut Bencher) { do_bench_clone(b, 1000) } fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) { let dst: Vec<uint> = FromIterator::from_iter(range(0, src_len)); let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len)); b.bytes = (times * src_len) as u64; b.iter(|| { let mut dst = dst.clone(); for _ in range(0, times) { dst.clone_from(&src); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x)); } }); } #[bench] fn bench_clone_from_01_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 0) } #[bench] fn bench_clone_from_01_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 10) } #[bench] fn bench_clone_from_01_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 100) } #[bench] fn bench_clone_from_01_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 0, 1000) } #[bench] fn bench_clone_from_01_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 10) } #[bench] fn bench_clone_from_01_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 100) } #[bench] fn bench_clone_from_01_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 1000) } #[bench] fn bench_clone_from_01_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 100) } #[bench] fn bench_clone_from_01_0100_1000(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 1000) } #[bench] fn bench_clone_from_01_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 1, 10, 0) } #[bench] fn bench_clone_from_01_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 1, 100, 10) } #[bench] fn bench_clone_from_01_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 1, 1000, 100) } #[bench] fn bench_clone_from_10_0000_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 0) } #[bench] fn bench_clone_from_10_0000_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 10) } #[bench] fn 
bench_clone_from_10_0000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 100) } #[bench] fn bench_clone_from_10_0000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 0, 1000) } #[bench] fn bench_clone_from_10_0010_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 10) } #[bench] fn bench_clone_from_10_0100_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 100) } #[bench] fn bench_clone_from_10_1000_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 1000) } #[bench] fn bench_clone_from_10_0010_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 100) } #[bench] fn bench_clone_from_10_0100_1000(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 1000) } #[bench] fn bench_clone_from_10_0010_0000(b: &mut Bencher) { do_bench_clone_from(b, 10, 10, 0) } #[bench] fn bench_clone_from_10_0100_0010(b: &mut Bencher) { do_bench_clone_from(b, 10, 100, 10) } #[bench] fn bench_clone_from_10_1000_0100(b: &mut Bencher) { do_bench_clone_from(b, 10, 1000, 100) } }<|fim▁end|>
#[bench] fn bench_push_all_move_1000_1000(b: &mut Bencher) { do_bench_push_all_move(b, 1000, 1000) }
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os import shutil import sys import datetime from invoke import task from invoke.util import cd from pelican.server import ComplexHTTPRequestHandler, RootedHTTPServer CONFIG = { # Local path configuration (can be absolute or relative to tasks.py) 'deploy_path': '..', # Github Pages configuration 'github_pages_branch': 'gh-pages', 'commit_message': "'Publish site on {}'".format(datetime.date.today().isoformat()), # Port for `serve` 'port': 8000, } @task def clean(c): """Remove generated files""" if os.path.isdir(CONFIG['deploy_path']): shutil.rmtree(CONFIG['deploy_path']) os.makedirs(CONFIG['deploy_path']) @task def build(c): """Build local version of site""" c.run('pelican -s pelicanconf.py') @task def rebuild(c): """`build` with the delete switch""" c.run('pelican -d -s pelicanconf.py') @task def regenerate(c): """Automatically regenerate site upon file modification""" c.run('pelican -r -s pelicanconf.py') @task def serve(c): """Serve site at http://localhost:8000/""" class AddressReuseTCPServer(RootedHTTPServer): allow_reuse_address = True server = AddressReuseTCPServer( CONFIG['deploy_path'], ('', CONFIG['port']), ComplexHTTPRequestHandler) sys.stderr.write('Serving on port {port} ...\n'.format(**CONFIG)) server.serve_forever() @task def reserve(c): """`build`, then `serve`""" build(c) serve(c) @task def preview(c): """Build production version of site""" c.run('pelican -s publishconf.py') @task def publish(c): """Publish to production via rsync""" c.run('pelican -s publishconf.py') c.run( 'rsync --delete --exclude ".DS_Store" -pthrvz -c '<|fim▁hole|>@task def gh_pages(c): """Publish to GitHub Pages""" preview(c) c.run('ghp-import -b {github_pages_branch} ' '-m {commit_message} ' '{deploy_path} -p'.format(**CONFIG))<|fim▁end|>
'{} {production}:{dest_path}'.format( CONFIG['deploy_path'].rstrip('/') + '/', **CONFIG))
<|file_name|>validation.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package validation import ( "reflect" "strings" "github.com/GoogleCloudPlatform/kubernetes/pkg/api" errs "github.com/GoogleCloudPlatform/kubernetes/pkg/api/errors" "github.com/GoogleCloudPlatform/kubernetes/pkg/capabilities" "github.com/GoogleCloudPlatform/kubernetes/pkg/labels" "github.com/GoogleCloudPlatform/kubernetes/pkg/util" ) func validateVolumes(volumes []api.Volume) (util.StringSet, errs.ErrorList) { allErrs := errs.ErrorList{} allNames := util.StringSet{} for i := range volumes { vol := &volumes[i] // so we can set default values el := errs.ErrorList{} if vol.Source == nil { // TODO: Enforce that a source is set once we deprecate the implied form. vol.Source = &api.VolumeSource{ EmptyDir: &api.EmptyDir{}, } } el = validateSource(vol.Source).Prefix("source") if len(vol.Name) == 0 { el = append(el, errs.NewFieldRequired("name", vol.Name)) } else if !util.IsDNSLabel(vol.Name) { el = append(el, errs.NewFieldInvalid("name", vol.Name)) } else if allNames.Has(vol.Name) { el = append(el, errs.NewFieldDuplicate("name", vol.Name)) } if len(el) == 0 { allNames.Insert(vol.Name) } else { allErrs = append(allErrs, el.PrefixIndex(i)...) 
} } return allNames, allErrs } func validateSource(source *api.VolumeSource) errs.ErrorList { numVolumes := 0 allErrs := errs.ErrorList{} if source.HostDir != nil { numVolumes++ allErrs = append(allErrs, validateHostDir(source.HostDir).Prefix("hostDirectory")...) } if source.EmptyDir != nil { numVolumes++ //EmptyDirs have nothing to validate } if source.GCEPersistentDisk != nil { numVolumes++ allErrs = append(allErrs, validateGCEPersistentDisk(source.GCEPersistentDisk)...) } if numVolumes != 1 { allErrs = append(allErrs, errs.NewFieldInvalid("", source)) } return allErrs } func validateHostDir(hostDir *api.HostDir) errs.ErrorList { allErrs := errs.ErrorList{} if hostDir.Path == "" { allErrs = append(allErrs, errs.NewNotFound("path", hostDir.Path)) } return allErrs } var supportedPortProtocols = util.NewStringSet(string(api.ProtocolTCP), string(api.ProtocolUDP)) func validateGCEPersistentDisk(PD *api.GCEPersistentDisk) errs.ErrorList { allErrs := errs.ErrorList{} if PD.PDName == "" { allErrs = append(allErrs, errs.NewFieldInvalid("PD.PDName", PD.PDName)) } if PD.FSType == "" { allErrs = append(allErrs, errs.NewFieldInvalid("PD.FSType", PD.FSType)) } if PD.Partition < 0 || PD.Partition > 255 { allErrs = append(allErrs, errs.NewFieldInvalid("PD.Partition", PD.Partition)) } return allErrs } func validatePorts(ports []api.Port) errs.ErrorList { allErrs := errs.ErrorList{} allNames := util.StringSet{} for i := range ports { pErrs := errs.ErrorList{} port := &ports[i] // so we can set default values if len(port.Name) > 0 { if len(port.Name) > 63 || !util.IsDNSLabel(port.Name) { pErrs = append(pErrs, errs.NewFieldInvalid("name", port.Name)) } else if allNames.Has(port.Name) { pErrs = append(pErrs, errs.NewFieldDuplicate("name", port.Name)) } else { allNames.Insert(port.Name) } } if port.ContainerPort == 0 { pErrs = append(pErrs, errs.NewFieldRequired("containerPort", port.ContainerPort)) } else if !util.IsValidPortNum(port.ContainerPort) { pErrs = append(pErrs, 
errs.NewFieldInvalid("containerPort", port.ContainerPort)) } if port.HostPort != 0 && !util.IsValidPortNum(port.HostPort) { pErrs = append(pErrs, errs.NewFieldInvalid("hostPort", port.HostPort)) } if len(port.Protocol) == 0 { port.Protocol = "TCP" } else if !supportedPortProtocols.Has(strings.ToUpper(string(port.Protocol))) { pErrs = append(pErrs, errs.NewFieldNotSupported("protocol", port.Protocol)) } allErrs = append(allErrs, pErrs.PrefixIndex(i)...) } return allErrs } func validateEnv(vars []api.EnvVar) errs.ErrorList { allErrs := errs.ErrorList{} for i := range vars { vErrs := errs.ErrorList{} ev := &vars[i] // so we can set default values if len(ev.Name) == 0 { vErrs = append(vErrs, errs.NewFieldRequired("name", ev.Name)) } if !util.IsCIdentifier(ev.Name) { vErrs = append(vErrs, errs.NewFieldInvalid("name", ev.Name)) } allErrs = append(allErrs, vErrs.PrefixIndex(i)...) } return allErrs } func validateVolumeMounts(mounts []api.VolumeMount, volumes util.StringSet) errs.ErrorList { allErrs := errs.ErrorList{} for i := range mounts { mErrs := errs.ErrorList{} mnt := &mounts[i] // so we can set default values if len(mnt.Name) == 0 { mErrs = append(mErrs, errs.NewFieldRequired("name", mnt.Name)) } else if !volumes.Has(mnt.Name) { mErrs = append(mErrs, errs.NewNotFound("name", mnt.Name)) } if len(mnt.MountPath) == 0 { mErrs = append(mErrs, errs.NewFieldRequired("mountPath", mnt.MountPath)) } allErrs = append(allErrs, mErrs.PrefixIndex(i)...) } return allErrs } // AccumulateUniquePorts runs an extraction function on each Port of each Container, // accumulating the results and returning an error if any ports conflict. 
func AccumulateUniquePorts(containers []api.Container, accumulator map[int]bool, extract func(*api.Port) int) errs.ErrorList { allErrs := errs.ErrorList{} for ci := range containers { cErrs := errs.ErrorList{} ctr := &containers[ci] for pi := range ctr.Ports { port := extract(&ctr.Ports[pi]) if port == 0 { continue } if accumulator[port] { cErrs = append(cErrs, errs.NewFieldDuplicate("port", port)) } else { accumulator[port] = true } } allErrs = append(allErrs, cErrs.PrefixIndex(ci)...) } return allErrs } // checkHostPortConflicts checks for colliding Port.HostPort values across // a slice of containers. func checkHostPortConflicts(containers []api.Container) errs.ErrorList { allPorts := map[int]bool{} return AccumulateUniquePorts(containers, allPorts, func(p *api.Port) int { return p.HostPort }) } func validateExecAction(exec *api.ExecAction) errs.ErrorList { allErrors := errs.ErrorList{} if len(exec.Command) == 0 { allErrors = append(allErrors, errs.NewFieldRequired("command", exec.Command)) } return allErrors } func validateHTTPGetAction(http *api.HTTPGetAction) errs.ErrorList { allErrors := errs.ErrorList{} if len(http.Path) == 0 { allErrors = append(allErrors, errs.NewFieldRequired("path", http.Path)) } return allErrors } func validateHandler(handler *api.Handler) errs.ErrorList { allErrors := errs.ErrorList{} if handler.Exec != nil { allErrors = append(allErrors, validateExecAction(handler.Exec).Prefix("exec")...) } else if handler.HTTPGet != nil { allErrors = append(allErrors, validateHTTPGetAction(handler.HTTPGet).Prefix("httpGet")...) } else { allErrors = append(allErrors, errs.NewFieldInvalid("", handler)) } return allErrors } func validateLifecycle(lifecycle *api.Lifecycle) errs.ErrorList { allErrs := errs.ErrorList{} if lifecycle.PostStart != nil { allErrs = append(allErrs, validateHandler(lifecycle.PostStart).Prefix("postStart")...) } if lifecycle.PreStop != nil { allErrs = append(allErrs, validateHandler(lifecycle.PreStop).Prefix("preStop")...) 
} return allErrs } func validateContainers(containers []api.Container, volumes util.StringSet) errs.ErrorList { allErrs := errs.ErrorList{} allNames := util.StringSet{} for i := range containers { cErrs := errs.ErrorList{} ctr := &containers[i] // so we can set default values capabilities := capabilities.Get() if len(ctr.Name) == 0 { cErrs = append(cErrs, errs.NewFieldRequired("name", ctr.Name)) } else if !util.IsDNSLabel(ctr.Name) { cErrs = append(cErrs, errs.NewFieldInvalid("name", ctr.Name)) } else if allNames.Has(ctr.Name) { cErrs = append(cErrs, errs.NewFieldDuplicate("name", ctr.Name)) } else if ctr.Privileged && !capabilities.AllowPrivileged { cErrs = append(cErrs, errs.NewFieldForbidden("privileged", ctr.Privileged)) } else { allNames.Insert(ctr.Name) } if len(ctr.Image) == 0 { cErrs = append(cErrs, errs.NewFieldRequired("image", ctr.Image)) } if ctr.Lifecycle != nil { cErrs = append(cErrs, validateLifecycle(ctr.Lifecycle).Prefix("lifecycle")...) } cErrs = append(cErrs, validatePorts(ctr.Ports).Prefix("ports")...) cErrs = append(cErrs, validateEnv(ctr.Env).Prefix("env")...) cErrs = append(cErrs, validateVolumeMounts(ctr.VolumeMounts, volumes).Prefix("volumeMounts")...) allErrs = append(allErrs, cErrs.PrefixIndex(i)...) } // Check for colliding ports across all containers. // TODO(thockin): This really is dependent on the network config of the host (IP per pod?) // and the config of the new manifest. But we have not specced that out yet, so we'll just // make some assumptions for now. As of now, pods share a network namespace, which means that // every Port.HostPort across the whole pod must be unique. allErrs = append(allErrs, checkHostPortConflicts(containers)...) return allErrs } var supportedManifestVersions = util.NewStringSet("v1beta1", "v1beta2") // ValidateManifest tests that the specified ContainerManifest has valid data. // This includes checking formatting and uniqueness. 
It also canonicalizes the // structure by setting default values and implementing any backwards-compatibility // tricks. func ValidateManifest(manifest *api.ContainerManifest) errs.ErrorList { allErrs := errs.ErrorList{} if len(manifest.Version) == 0 { allErrs = append(allErrs, errs.NewFieldRequired("version", manifest.Version)) } else if !supportedManifestVersions.Has(strings.ToLower(manifest.Version)) { allErrs = append(allErrs, errs.NewFieldNotSupported("version", manifest.Version)) } allVolumes, vErrs := validateVolumes(manifest.Volumes) allErrs = append(allErrs, vErrs.Prefix("volumes")...) allErrs = append(allErrs, validateContainers(manifest.Containers, allVolumes).Prefix("containers")...) allErrs = append(allErrs, validateRestartPolicy(&manifest.RestartPolicy).Prefix("restartPolicy")...) return allErrs } func validateRestartPolicy(restartPolicy *api.RestartPolicy) errs.ErrorList { numPolicies := 0 allErrors := errs.ErrorList{} if restartPolicy.Always != nil { numPolicies++ } if restartPolicy.OnFailure != nil { numPolicies++ } if restartPolicy.Never != nil { numPolicies++ } if numPolicies == 0 { restartPolicy.Always = &api.RestartPolicyAlways{} } if numPolicies > 1 { allErrors = append(allErrors, errs.NewFieldInvalid("", restartPolicy)) } return allErrors } func ValidatePodState(podState *api.PodState) errs.ErrorList { allErrs := errs.ErrorList(ValidateManifest(&podState.Manifest)).Prefix("manifest") return allErrs } // ValidatePod tests if required fields in the pod are set. func ValidatePod(pod *api.Pod) errs.ErrorList { allErrs := errs.ErrorList{} if len(pod.ID) == 0 { allErrs = append(allErrs, errs.NewFieldRequired("id", pod.ID)) } if !util.IsDNSSubdomain(pod.Namespace) { allErrs = append(allErrs, errs.NewFieldInvalid("namespace", pod.Namespace)) } allErrs = append(allErrs, ValidatePodState(&pod.DesiredState).Prefix("desiredState")...) 
return allErrs } // ValidatePodUpdate tests to see if the update is legal func ValidatePodUpdate(newPod, oldPod *api.Pod) errs.ErrorList { allErrs := errs.ErrorList{} if newPod.ID != oldPod.ID { allErrs = append(allErrs, errs.NewFieldInvalid("ID", newPod.ID)) } if len(newPod.DesiredState.Manifest.Containers) != len(oldPod.DesiredState.Manifest.Containers) { allErrs = append(allErrs, errs.NewFieldInvalid("DesiredState.Manifest.Containers", newPod.DesiredState.Manifest.Containers)) return allErrs } pod := *newPod<|fim▁hole|> pod.TypeMeta.ResourceVersion = oldPod.TypeMeta.ResourceVersion // Tricky, we need to copy the container list so that we don't overwrite the update var newContainers []api.Container for ix, container := range pod.DesiredState.Manifest.Containers { container.Image = oldPod.DesiredState.Manifest.Containers[ix].Image newContainers = append(newContainers, container) } pod.DesiredState.Manifest.Containers = newContainers if !reflect.DeepEqual(pod.DesiredState.Manifest, oldPod.DesiredState.Manifest) { allErrs = append(allErrs, errs.NewFieldInvalid("DesiredState.Manifest.Containers", newPod.DesiredState.Manifest.Containers)) } return allErrs } // ValidateService tests if required fields in the service are set. 
func ValidateService(service *api.Service) errs.ErrorList { allErrs := errs.ErrorList{} if len(service.ID) == 0 { allErrs = append(allErrs, errs.NewFieldRequired("id", service.ID)) } else if !util.IsDNS952Label(service.ID) { allErrs = append(allErrs, errs.NewFieldInvalid("id", service.ID)) } if !util.IsDNSSubdomain(service.Namespace) { allErrs = append(allErrs, errs.NewFieldInvalid("namespace", service.Namespace)) } if !util.IsValidPortNum(service.Port) { allErrs = append(allErrs, errs.NewFieldInvalid("port", service.Port)) } if len(service.Protocol) == 0 { service.Protocol = "TCP" } else if !supportedPortProtocols.Has(strings.ToUpper(string(service.Protocol))) { allErrs = append(allErrs, errs.NewFieldNotSupported("protocol", service.Protocol)) } if labels.Set(service.Selector).AsSelector().Empty() { allErrs = append(allErrs, errs.NewFieldRequired("selector", service.Selector)) } return allErrs } // ValidateReplicationController tests if required fields in the replication controller are set. func ValidateReplicationController(controller *api.ReplicationController) errs.ErrorList { allErrs := errs.ErrorList{} if len(controller.ID) == 0 { allErrs = append(allErrs, errs.NewFieldRequired("id", controller.ID)) } if !util.IsDNSSubdomain(controller.Namespace) { allErrs = append(allErrs, errs.NewFieldInvalid("namespace", controller.Namespace)) } allErrs = append(allErrs, ValidateReplicationControllerState(&controller.DesiredState).Prefix("desiredState")...) return allErrs } // ValidateReplicationControllerState tests if required fields in the replication controller state are set. 
func ValidateReplicationControllerState(state *api.ReplicationControllerState) errs.ErrorList { allErrs := errs.ErrorList{} if labels.Set(state.ReplicaSelector).AsSelector().Empty() { allErrs = append(allErrs, errs.NewFieldRequired("replicaSelector", state.ReplicaSelector)) } selector := labels.Set(state.ReplicaSelector).AsSelector() labels := labels.Set(state.PodTemplate.Labels) if !selector.Matches(labels) { allErrs = append(allErrs, errs.NewFieldInvalid("podTemplate.labels", state.PodTemplate)) } if state.Replicas < 0 { allErrs = append(allErrs, errs.NewFieldInvalid("replicas", state.Replicas)) } allErrs = append(allErrs, ValidateManifest(&state.PodTemplate.DesiredState.Manifest).Prefix("podTemplate.desiredState.manifest")...) allErrs = append(allErrs, ValidateReadOnlyPersistentDisks(state.PodTemplate.DesiredState.Manifest.Volumes).Prefix("podTemplate.desiredState.manifest")...) return allErrs } func ValidateReadOnlyPersistentDisks(volumes []api.Volume) errs.ErrorList { allErrs := errs.ErrorList{} for _, vol := range volumes { if vol.Source.GCEPersistentDisk != nil { if vol.Source.GCEPersistentDisk.ReadOnly == false { allErrs = append(allErrs, errs.NewFieldInvalid("GCEPersistentDisk.ReadOnly", false)) } } } return allErrs } // ValidateBoundPod tests if required fields on a bound pod are set. func ValidateBoundPod(pod *api.BoundPod) (errors []error) { if !util.IsDNSSubdomain(pod.ID) { errors = append(errors, errs.NewFieldInvalid("id", pod.ID)) } if !util.IsDNSSubdomain(pod.Namespace) { errors = append(errors, errs.NewFieldInvalid("namespace", pod.Namespace)) } containerManifest := &api.ContainerManifest{ Version: "v1beta2", ID: pod.ID, UUID: pod.UID, Containers: pod.Spec.Containers, Volumes: pod.Spec.Volumes, RestartPolicy: pod.Spec.RestartPolicy, } if errs := ValidateManifest(containerManifest); len(errs) != 0 { errors = append(errors, errs...) } return errors }<|fim▁end|>
pod.Labels = oldPod.Labels
<|file_name|>application_credit_test.py<|end_file_name|><|fim▁begin|>import shopify import json from test.test_helper import TestCase class ApplicationCreditTest(TestCase): def test_get_application_credit(self): self.fake("application_credits/445365009", method="GET", body=self.load_fixture("application_credit"), code=200) application_credit = shopify.ApplicationCredit.find(445365009) self.assertEqual("5.00", application_credit.amount) def test_get_all_application_credits(self): self.fake("application_credits", method="GET", body=self.load_fixture("application_credits"), code=200) application_credits = shopify.ApplicationCredit.find()<|fim▁hole|> self.assertEqual(445365009, application_credits[0].id) def test_create_application_credit(self): self.fake( "application_credits", method="POST", body=self.load_fixture("application_credit"), headers={"Content-type": "application/json"}, code=201, ) application_credit = shopify.ApplicationCredit.create( {"description": "application credit for refund", "amount": 5.0} ) expected_body = {"application_credit": {"description": "application credit for refund", "amount": 5.0}} self.assertEqual(expected_body, json.loads(self.http.request.data.decode("utf-8")))<|fim▁end|>
self.assertEqual(1, len(application_credits))
<|file_name|>description-list-fixtures.module.ts<|end_file_name|><|fim▁begin|>import { CommonModule } from '@angular/common'; import { NgModule } from '@angular/core'; import { SkyThemeService } from '@skyux/theme'; import { SkyDescriptionListModule } from '../description-list.module'; import { SkyDescriptionListTestComponent } from './description-list.component.fixture'; @NgModule({ declarations: [SkyDescriptionListTestComponent], imports: [CommonModule, SkyDescriptionListModule], exports: [SkyDescriptionListTestComponent], providers: [SkyThemeService], })<|fim▁hole|><|fim▁end|>
export class SkyDescriptionListFixturesModule {}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from mindfeed.mindfeed import main <|fim▁hole|><|fim▁end|>
if __name__ == "__main__": main()
<|file_name|>cost.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016, the GPyOpt Authors # Licensed under the BSD 3-clause license (see LICENSE.txt) from ...models import GPModel import numpy as np class CostModel(object): """ Class to handle the cost of evaluating the function. param cost_withGradients: function that returns the cost of evaluating the function and its gradient. By default no cost is used. Options are: - cost_withGradients is some pre-defined cost function. Should return numpy array as outputs. - cost_withGradients = 'evaluation_time'. .. Note:: if cost_withGradients = 'evaluation time' the evaluation time of the function is used to model a GP whose mean is used as cost. """ def __init__(self, cost_withGradients): super(CostModel, self).__init__() self.cost_type = cost_withGradients # --- Set-up evaluation cost if self.cost_type is None: self.cost_withGradients = constant_cost_withGradients self.cost_type = 'Constant cost' elif self.cost_type == 'evaluation_time': self.cost_model = GPModel() self.cost_withGradients = self._cost_gp_withGradients self.num_updates = 0 else: self.cost_withGradients = cost_withGradients self.cost_type = 'User defined cost' def _cost_gp(self,x): """ Predicts the time cost of evaluating the function at x. """ m, _, _, _ = self.cost_model.predict_withGradients(x) return np.exp(m) def _cost_gp_withGradients(self,x): """ Predicts the time cost and its gradient of evaluating the function at x. """ m, _, dmdx, _= self.cost_model.predict_withGradients(x) return np.exp(m), np.exp(m)*dmdx def update_cost_model(self, x, cost_x): """ Updates the GP used to handle the cost. param x: input of the GP for the cost model. param x_cost: values of the time cost at the input locations. 
""" if self.cost_type == 'evaluation_time': cost_evals = np.log(np.atleast_2d(np.asarray(cost_x)).T) if self.num_updates == 0: X_all = x costs_all = cost_evals<|fim▁hole|> X_all = np.vstack((self.cost_model.model.X,x)) costs_all = np.vstack((self.cost_model.model.Y,cost_evals)) self.num_updates += 1 self.cost_model.updateModel(X_all, costs_all, None, None) def constant_cost_withGradients(x): """ Constant cost function used by default: cost = 1, d_cost = 0. """ return np.ones(x.shape[0])[:,None], np.zeros(x.shape)<|fim▁end|>
else:
<|file_name|>header.go<|end_file_name|><|fim▁begin|>package jasm const header = ` function jasm(stdlib, foreign, heap) { "use asm"; var pc = 0; // pseudo program counter var sp = 0; // stack pointer var ret = 0; // return address, for jal var r0 = 0, r1 = 0, r2 = 0, r3 = 0; // general purpose 32-bit registers var f0 = 0.0, f1 = 0.0, f2 = 0.0, f3 = 0.0; // temp floating point registers var err = 0; var memI32 = new stdlib.Int32Array(heap); var memU32 = new stdlib.Uint32Array(heap); var memI8 = new stdlib.Int8Array(heap); var memU8 = new stdlib.Uint8Array(heap); var memF64 = new stdlib.Float64Array(heap); function setpc(newpc) { newpc = newpc|0; pc = newpc|0; } function setsp(newsp) { newsp = newsp|0; sp = newsp|0; } function seterr(newerr) { newerr = newerr|0; err = newerr|0; } function setret(newret) { newret = newret|0; ret = newret|0; } function getpc() { return pc|0; } function getsp() { return sp|0; } function getret() { return ret|0; } function geterr() { return err|0; } function getr1() { return r1|0; } function getr2() { return r2|0; } function getr3() { return r3|0; } function getf0() { return +f0; } function getf1() { return +f1; } function getf2() { return +f2; } function getf3() { return +f3; } function clearRegs() { pc = 0|0; sp = 0|0; ret = 0|0; err = 0|0; r0 = 0|0; r1 = 0|0; r2 = 0|0; r3 = 0|0; f0 = 0.0; f1 = 0.0; f2 = 0.0; f3 = 0.0; } function step() { var pc = 0; pc_ = pc|0; pc = (pc + 4) | 0; switch (pc_|0) { ` const footer = ` default: err = 1|0; } } function run(ncycle) { ncycle = ncycle|0; while (ncycle|0 > 0) { step(); r0 = 0|0; ncycle = ((ncycle|0) + -1)|0; if ((err|0) != (0|0)) { break;<|fim▁hole|> return { setpc: setpc, setsp: setsp, seterr: seterr, setret: setret, getpc: getpc, getsp: getsp, geterr: geterr, getret: getret, getr1: getr1, getr2: getr2, getr3: getr3, getf0: getf0, getf1: getf1, getf2: getf2, getf3: getf3, clearRegs: clearRegs, run: run, }; } `<|fim▁end|>
} } }
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// `encode` and `decode` are used instead of `ser` and `de` to avoid confusion with the serder // Serializer and Deserializer traits which occupy a similar namespace. mod cow_iter; pub mod decode; pub mod encode; mod into_cow; pub use crate::decode::Deserializer; pub use crate::encode::Serializer; pub use crate::into_cow::IntoCow; /// Use serde Deserialize to build `T` from a `redis::Value` pub fn from_redis_value<'a, 'de, T, RV>(rv: RV) -> decode::Result<T> where T: serde::de::Deserialize<'de>, RV: IntoCow<'a>, { let value = rv.into_cow(); serde::de::Deserialize::deserialize(Deserializer::new(value)) } <|fim▁hole|>{ fn deserialize(&'de self) -> decode::Result<T>; } impl<'de, T> RedisDeserialize<'de, T> for redis::Value where T: serde::de::Deserialize<'de>, { fn deserialize(&'de self) -> decode::Result<T> { serde::de::Deserialize::deserialize(Deserializer::new(self)) } } #[cfg(test)] mod tests { use super::*; use redis::Value; #[test] fn chain_deserialize_works() { let v = Value::Bulk(vec![Value::Int(5), Value::Data(b"hello".to_vec())]); let actual: (u8, String) = v.deserialize().unwrap(); let expected = (5, "hello".into()); assert_eq!(expected, actual); } #[test] fn from_redis_value_works_with_owned() { let v = Value::Bulk(vec![Value::Int(5), Value::Data(b"hello".to_vec())]); let actual: (u8, String) = from_redis_value(v).unwrap(); let expected = (5, "hello".into()); assert_eq!(expected, actual); } #[test] fn from_redis_value_works_with_borrow() { let v = Value::Bulk(vec![Value::Int(5), Value::Data(b"hello".to_vec())]); let actual: (u8, String) = from_redis_value(&v).unwrap(); let expected = (5, "hello".into()); assert_eq!(expected, actual); } }<|fim▁end|>
pub trait RedisDeserialize<'de, T> where T: serde::de::Deserialize<'de>,
<|file_name|>rate.js<|end_file_name|><|fim▁begin|>'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); var __chunk_1 = require('./chunk-14c82365.js'); require('./helpers.js'); var __chunk_2 = require('./chunk-185921d7.js'); var __chunk_4 = require('./chunk-925c5339.js'); var __chunk_5 = require('./chunk-13e039f5.js'); var script = { name: 'BRate', components: __chunk_1._defineProperty({}, __chunk_4.Icon.name, __chunk_4.Icon), props: { value: { type: Number, default: 0 }, max: { type: Number, default: 5 }, icon: { type: String, default: 'star' }, iconPack: String, size: String, spaced: Boolean, rtl: Boolean, disabled: Boolean, showScore: Boolean, showText: Boolean, customText: String, texts: Array, locale: { type: [String, Array], default: function _default() { return __chunk_2.config.defaultLocale; } } }, data: function data() { return { newValue: this.value, hoverValue: 0 }; }, computed: { halfStyle: function halfStyle() { return "width:".concat(this.valueDecimal, "%"); }, showMe: function showMe() { var result = ''; if (this.showScore) { result = this.disabled ? this.value : this.newValue; if (result === 0) { result = ''; } else { result = new Intl.NumberFormat(this.locale).format(this.value); } } else if (this.showText) {<|fim▁hole|> }, valueDecimal: function valueDecimal() { return this.value * 100 - Math.floor(this.value) * 100; } }, watch: { // When v-model is changed set the new value. 
value: function value(_value) { this.newValue = _value; } }, methods: { resetNewValue: function resetNewValue() { if (this.disabled) return; this.hoverValue = 0; }, previewRate: function previewRate(index, event) { if (this.disabled) return; this.hoverValue = index; event.stopPropagation(); }, confirmValue: function confirmValue(index) { if (this.disabled) return; this.newValue = index; this.$emit('change', this.newValue); this.$emit('input', this.newValue); }, checkHalf: function checkHalf(index) { var showWhenDisabled = this.disabled && this.valueDecimal > 0 && index - 1 < this.value && index > this.value; return showWhenDisabled; }, rateClass: function rateClass(index) { var output = ''; var currentValue = this.hoverValue !== 0 ? this.hoverValue : this.newValue; if (index <= currentValue) { output = 'set-on'; } else if (this.disabled && Math.ceil(this.value) === index) { output = 'set-half'; } return output; } } }; /* script */ const __vue_script__ = script; /* template */ var __vue_render__ = function () {var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;return _c('div',{staticClass:"rate",class:{ 'is-disabled': _vm.disabled, 'is-spaced': _vm.spaced, 'is-rtl': _vm.rtl }},[_vm._l((_vm.max),function(item,index){return _c('div',{key:index,staticClass:"rate-item",class:_vm.rateClass(item),on:{"mousemove":function($event){return _vm.previewRate(item, $event)},"mouseleave":_vm.resetNewValue,"click":function($event){$event.preventDefault();return _vm.confirmValue(item)}}},[_c('b-icon',{attrs:{"pack":_vm.iconPack,"icon":_vm.icon,"size":_vm.size}}),(_vm.checkHalf(item))?_c('b-icon',{staticClass:"is-half",style:(_vm.halfStyle),attrs:{"pack":_vm.iconPack,"icon":_vm.icon,"size":_vm.size}}):_vm._e()],1)}),(_vm.showText || _vm.showScore || _vm.customText)?_c('div',{staticClass:"rate-text",class:_vm.size},[_c('span',[_vm._v(_vm._s(_vm.showMe))]),(_vm.customText && !_vm.showText)?_c('span',[_vm._v(_vm._s(_vm.customText))]):_vm._e()]):_vm._e()],2)}; var 
__vue_staticRenderFns__ = []; /* style */ const __vue_inject_styles__ = undefined; /* scoped */ const __vue_scope_id__ = undefined; /* module identifier */ const __vue_module_identifier__ = undefined; /* functional template */ const __vue_is_functional_template__ = false; /* style inject */ /* style inject SSR */ var Rate = __chunk_5.__vue_normalize__( { render: __vue_render__, staticRenderFns: __vue_staticRenderFns__ }, __vue_inject_styles__, __vue_script__, __vue_scope_id__, __vue_is_functional_template__, __vue_module_identifier__, undefined, undefined ); var Plugin = { install: function install(Vue) { __chunk_5.registerComponent(Vue, Rate); } }; __chunk_5.use(Plugin); exports.BRate = Rate; exports.default = Plugin;<|fim▁end|>
result = this.texts[Math.ceil(this.newValue) - 1]; } return result;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::io;<|fim▁hole|> pub use self::posix::PosixInputHandler; pub use self::default::DefaultInputHandler; mod posix; mod default; const CMD_PROMPT: &'static str = ">> "; #[derive(Debug)] enum Key { Esc, Enter, Tab, Up, Down, Left, Right, Home, End, Insert, PgUp, PgDown, Backspace, Delete, Char(char), F(u32), Unknown, } pub enum InputCmd { None, Quit, Equation(String), } pub trait InputHandler { fn start(&mut self) -> io::Result<()>; fn stop(&mut self) -> io::Result<()>; fn handle_input(&mut self) -> InputCmd; fn print_prompt(&self); }<|fim▁end|>
<|file_name|>seodiv.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # ------------------------------------------------------------ # Canal (seodiv) por Hernan_Ar_c # ------------------------------------------------------------ import urlparse,urllib2,urllib,re import os, sys from core import logger from core import config from core import scrapertools <|fim▁hole|>from core import servertools from core import httptools host='http://www.seodiv.com' def mainlist(item): logger.info() itemlist = [] itemlist.append( Item(channel=item.channel, title="Todos", action="todas", url=host,thumbnail='https://s32.postimg.org/544rx8n51/series.png', fanart='https://s32.postimg.org/544rx8n51/series.png')) return itemlist def todas(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data patron ='<\/div><img src="([^"]+)".*?\/>.*?' patron+='<div class="title-topic">([^<]+)<\/div>.*?' patron +='<div class="sh-topic">([^<]+)<\/div><\/a>.*?' patron +='<div class="read-more-top"><a href="([^"]+)" style=' matches = re.compile(patron,re.DOTALL).findall(data) for scrapedthumbnail, scrapedtitle, scrapedplot, scrapedurl in matches: url = host+scrapedurl title = scrapedtitle.decode('utf-8') thumbnail = scrapedthumbnail fanart = 'https://s32.postimg.org/gh8lhbkb9/seodiv.png' plot = scrapedplot itemlist.append( Item(channel=item.channel, action="temporadas" ,title=title , url=url, thumbnail=thumbnail, fanart=fanart, plot= plot, contentSerieName=title, extra='')) return itemlist def temporadas(item): logger.info() itemlist = [] templist = [] data = httptools.downloadpage(item.url).data url_base= item.url patron = '<a class="collapsed" data-toggle="collapse" data-parent="#accordion" href=.*? 
aria-expanded="false" aria-controls=.*?>([^<]+)<\/a>' matches = re.compile(patron,re.DOTALL).findall(data) temp=1 if 'Temporada'in str(matches): for scrapedtitle in matches: url = url_base tempo = re.findall(r'\d+',scrapedtitle) if tempo: title ='Temporada'+' '+ tempo[0] else: title = scrapedtitle.lower() thumbnail = item.thumbnail plot = item.plot fanart = scrapertools.find_single_match(data,'<img src="([^"]+)"/>.*?</a>') itemlist.append( Item(channel=item.channel, action="episodiosxtemp" , title=title , fulltitle=item.title, url=url, thumbnail=thumbnail, plot=plot, fanart = fanart, temp=str(temp),contentSerieName=item.contentSerieName)) temp = temp+1 if config.get_library_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Añadir esta serie a la biblioteca[/COLOR]', url=item.url, action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName, extra1 = item.extra1, temp=str(temp))) return itemlist else: itemlist=episodiosxtemp(item) if config.get_library_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Añadir esta serie a la biblioteca[/COLOR]', url=item.url, action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName, extra1 = item.extra1, temp=str(temp))) return itemlist def episodios(item): logger.debug('pelisalacarta.channels.seodiv episodios') itemlist = [] templist = temporadas(item) for tempitem in templist: logger.debug(tempitem) itemlist += episodiosxtemp(tempitem) return itemlist def episodiosxtemp(item): logger.debug("pelisalacarta.channels.seodiv episodiosxtemp") itemlist = [] data = httptools.downloadpage(item.url).data tempo = item.title if 'Temporada'in item.title: item.title = item.title.replace('Temporada', 'temporada') item.title = item.title.strip() item.title = item.title.replace(' ','-') patron ='<li><a href="([^"]+)">.*?(Capitulo|Pelicula).*?([\d]+)' matches = 
re.compile(patron,re.DOTALL).findall(data) idioma = scrapertools.find_single_match(data,' <p><span class="ah-lead-tit">Idioma:</span>&nbsp;<span id="l-vipusk">([^<]+)</span></p>') for scrapedurl, scrapedtipo, scrapedtitle in matches: url = host+scrapedurl title ='' thumbnail = item.thumbnail plot = item.plot fanart='' if 'temporada' in item.title and item.title in scrapedurl and scrapedtipo =='Capitulo' and item.temp !='': title = item.contentSerieName+' '+item.temp+'x'+scrapedtitle+' ('+idioma+')' itemlist.append( Item(channel=item.channel, action="findvideos" , title=title, fulltitle=item.fulltitle, url=url, thumbnail=item.thumbnail, plot=plot)) if 'temporada' not in item.title and item.title not in scrapedurl and scrapedtipo =='Capitulo' and item.temp =='': if item.temp == '': temp = '1' title = item.contentSerieName+' '+temp+'x'+scrapedtitle+' ('+idioma+')' if '#' not in scrapedurl: itemlist.append( Item(channel=item.channel, action="findvideos" , title=title, fulltitle=item.fulltitle, url=url, thumbnail=item.thumbnail, plot=plot)) if 'temporada' not in item.title and item.title not in scrapedurl and scrapedtipo =='Pelicula': title = scrapedtipo +' '+scrapedtitle itemlist.append( Item(channel=item.channel, action="findvideos" , title=title, fulltitle=item.fulltitle, url=url, thumbnail=item.thumbnail, plot=plot)) return itemlist<|fim▁end|>
from core.item import Item
<|file_name|>form-password.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:4f8b1998d2048d6a6cabacdfb3689eba7c9cb669d6f81dbbd18156bdb0dbe18f size 1880
<|file_name|>scene.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Matthew Collins // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package scene provides methods to manage and load multiple ui scenes. package scene import "github.com/thinkofdeath/steven/ui" // Type stores a scene that can be removed and shown at any time. type Type struct { visible bool drawables []ui.Drawable hidding bool } // New creates a new scene. func New(visible bool) *Type { return &Type{ visible: visible, } } // Show shows all the drawables in the scene func (t *Type) Show() { if t.visible { return } t.visible = true<|fim▁hole|> ui.AddDrawable(d) } } // Hide hides all the drawables in the scene func (t *Type) Hide() { if !t.visible { return } t.visible = false t.hidding = true for _, d := range t.drawables { ui.Remove(d) } t.hidding = false } // AddDrawable adds the drawable to the draw list. func (t *Type) AddDrawable(d ui.Drawable) { t.drawables = append(t.drawables, d) if t.visible { ui.AddDrawable(d) } d.SetRemoveHook(t.removeHook) } func (t *Type) removeHook(d ui.Drawable) { if t.hidding { return } for i, dd := range t.drawables { if dd == d { t.drawables = append(t.drawables[:i], t.drawables[i+1:]...) return } } } // IsVisible returns whether the scene is currently visible. func (t *Type) IsVisible() bool { return t.visible }<|fim▁end|>
for _, d := range t.drawables {
<|file_name|>robot.js<|end_file_name|><|fim▁begin|>define("dojo/robot", ["dojo", "doh/robot", "dojo/window"], function(dojo) { dojo.experimental("dojo.robot"); (function(){ // users who use doh+dojo get the added convenience of dojo.mouseMoveAt, // instead of computing the absolute coordinates of their elements themselves dojo.mixin(doh.robot,{ _resolveNode: function(/*String||DOMNode||Function*/ n){ if(typeof n == "function"){ // if the user passed a function returning a node, evaluate it n = n(); } return n? dojo.byId(n) : null; }, _scrollIntoView: function(/*Node*/ n){ // scrolls the passed node into view, scrolling all ancester frames/windows as well. // Assumes parent iframes can be made fully visible given the current browser window size var d = dojo, dr = doh.robot, p = null; d.forEach(dr._getWindowChain(n), function(w){ d.withGlobal(w, function(){ // get the position of the node wrt its parent window // if it is a parent frame, its padding and border extents will get added in var p2 = d.position(n, false), b = d._getPadBorderExtents(n), oldp = null; // if p2 is the position of the original passed node, store the position away as p // otherwise, node is actually an iframe. in this case, add the iframe's position wrt its parent window and also the iframe's padding and border extents if(!p){ p = p2; }else{ oldp = p; p = {x: p.x+p2.x+b.l, y: p.y+p2.y+b.t, w: p.w, h: p.h}; } // scroll the parent window so that the node translated into the parent window's coordinate space is in view dojo.window.scrollIntoView(n,p); // adjust position for the new scroll offsets p2 = d.position(n, false); if(!oldp){ p = p2; }else{ p = {x: oldp.x+p2.x+b.l, y: oldp.y+p2.y+b.t, w: p.w, h: p.h}; } // get the parent iframe so it can be scrolled too n = w.frameElement; }); }); }, _position: function(/*Node*/ n){ // Returns the dojo.position of the passed node wrt the passed window's viewport, // following any parent iframes containing the node and clipping the node to each iframe. 
// precondition: _scrollIntoView already called var d = dojo, p = null, M = Math.max, m = Math.min; // p: the returned position of the node d.forEach(doh.robot._getWindowChain(n), function(w){ d.withGlobal(w, function(){ // get the position of the node wrt its parent window // if it is a parent frame, its padding and border extents will get added in var p2 = d.position(n, false), b = d._getPadBorderExtents(n); // if p2 is the position of the original passed node, store the position away as p // otherwise, node is actually an iframe. in this case, add the iframe's position wrt its parent window and also the iframe's padding and border extents if(!p){ p = p2; }else{ var view; d.withGlobal(n.contentWindow,function(){ view=dojo.window.getBox(); }); p2.r = p2.x+view.w; p2.b = p2.y+view.h; p = {x: M(p.x+p2.x,p2.x)+b.l, // clip left edge of node wrt the iframe y: M(p.y+p2.y,p2.y)+b.t, // top edge r: m(p.x+p2.x+p.w,p2.r)+b.l, // right edge (to compute width) b: m(p.y+p2.y+p.h,p2.b)+b.t}; // bottom edge (to compute height) // save a few bytes by computing width and height from r and b p.w = p.r-p.x; p.h = p.b-p.y; } // the new node is now the old node's parent iframe n=w.frameElement; }); }); return p; }, _getWindowChain : function(/*Node*/ n){ // Returns an array of windows starting from the passed node's parent window and ending at dojo's window var cW = dojo.window.get(n.ownerDocument); var arr=[cW]; var f = cW.frameElement; return (cW == dojo.global || f == null)? arr : arr.concat(doh.robot._getWindowChain(f)); }, scrollIntoView : function(/*String||DOMNode||Function*/ node, /*Number, optional*/ delay){ // summary: // Scroll the passed node into view, if it is not. // // node: // The id of the node, or the node itself, to move the mouse to. // If you pass an id or a function that returns a node, the node will not be evaluated until the movement executes. // This is useful if you need to move the mouse to an node that is not yet present. 
// // delay: // Delay, in milliseconds, to wait before firing. // The delay is a delta with respect to the previous automation call. // doh.robot.sequence(function(){ doh.robot._scrollIntoView(doh.robot._resolveNode(node));<|fim▁hole|> }, mouseMoveAt : function(/*String||DOMNode||Function*/ node, /*Integer, optional*/ delay, /*Integer, optional*/ duration, /*Number, optional*/ offsetX, /*Number, optional*/ offsetY){ // summary: // Moves the mouse over the specified node at the specified relative x,y offset. // // description: // Moves the mouse over the specified node at the specified relative x,y offset. // If you do not specify an offset, mouseMove will default to move to the middle of the node. // Example: to move the mouse over a ComboBox's down arrow node, call doh.mouseMoveAt(dijit.byId('setvaluetest').downArrowNode); // // node: // The id of the node, or the node itself, to move the mouse to. // If you pass an id or a function that returns a node, the node will not be evaluated until the movement executes. // This is useful if you need to move the mouse to an node that is not yet present. // // delay: // Delay, in milliseconds, to wait before firing. // The delay is a delta with respect to the previous automation call. // For example, the following code ends after 600ms: // doh.robot.mouseClick({left:true}, 100) // first call; wait 100ms // doh.robot.typeKeys("dij", 500) // 500ms AFTER previous call; 600ms in all // // duration: // Approximate time Robot will spend moving the mouse // The default is 100ms. // // offsetX: // x offset relative to the node, in pixels, to move the mouse. The default is half the node's width. // // offsetY: // y offset relative to the node, in pixels, to move the mouse. The default is half the node's height. 
// doh.robot._assertRobot(); duration = duration||100; this.sequence(function(){ node=doh.robot._resolveNode(node); doh.robot._scrollIntoView(node); var pos = doh.robot._position(node); if(offsetY === undefined){ offsetX=pos.w/2; offsetY=pos.h/2; } var x = pos.x+offsetX; var y = pos.y+offsetY; doh.robot._mouseMove(x, y, false, duration); }, delay, duration); } }); })(); return doh.robot; });<|fim▁end|>
}, delay);
<|file_name|>add_apt_repository.py<|end_file_name|><|fim▁begin|>"""Resource manager for using the add-apt-repository command (part of the python-software-properties package). """ # Common stdlib imports import sys import os import os.path import re import glob # fix path if necessary (if running from source or running as test) try: import engage.utils except: sys.exc_clear() dir_to_add_to_python_path = os.path.abspath((os.path.join(os.path.dirname(__file__), "../../.."))) sys.path.append(dir_to_add_to_python_path) import engage.drivers.resource_manager as resource_manager import engage.drivers.utils # Drivers compose *actions* to implement their methods. from engage.drivers.action import * from engage.drivers.password_repo_mixin import PasswordRepoMixin from engage.drivers.genforma.aptget import update import engage_utils.process as procutils # setup errors from engage.utils.user_error import UserError, EngageErrInf import gettext _ = gettext.gettext <|fim▁hole|>errors = { } def define_error(error_code, msg): global errors error_info = EngageErrInf(__name__, error_code, msg) errors[error_info.error_code] = error_info # error codes # FILL IN ERR_TBD = 0 define_error(ERR_TBD, _("Replace this with your error codes")) # setup logging from engage.utils.log_setup import setup_engage_logger logger = setup_engage_logger(__name__) # this is used by the package manager to locate the packages.json # file associated with the driver def get_packages_filename(): return engage.drivers.utils.get_packages_filename(__file__) def make_context(resource_json, sudo_password_fn, dry_run=False): """Create a Context object (defined in engage.utils.action). This contains the resource's metadata in ctx.props, references to the logger and sudo password function, and various helper functions. The context object is used by individual actions. If your resource does not need the sudo password, you can just pass in None for sudo_password_fn. 
""" ctx = Context(resource_json, logger, __file__, sudo_password_fn=sudo_password_fn, dry_run=dry_run) ctx.check_port('input_ports.host', sudo_password=unicode) ctx.check_port('input_ports.add_rep_exe_info', add_apt_repository_exe=unicode) ctx.check_port('output_ports.repository', repo_name=unicode) if hasattr(ctx.props.output_ports.repository, 'repo_url'): ctx.add('repo_url', ctx.props.output_ports.repository.repo_url) else: ctx.add('repo_url', None) # add any extra computed properties here using the ctx.add() method. return ctx ADD_APT_REPO_COMMAND="/usr/bin/add-apt-repository" @make_action def run_add_apt_repository(self, repository_name): procutils.run_sudo_program([ADD_APT_REPO_COMMAND, '-y', repository_name], self.ctx._get_sudo_password(self), self.ctx.logger) def search_for_repository(repo_url): """Look in the all the repository files for the specified repository url. If it is found, then we have already added the repository. """ r = re.compile(re.escape('deb %s ' % repo_url) + r'\w+\ \w+') def find_url_in_file(fname): if not os.path.exists(fname): return False with open(fname) as f: for line in f: line = line.rstrip() if r.match(line)!=None: return True return False filelist = glob.glob('/etc/apt/sources.list.d/*.list') filelist.append('/etc/apt/sources.list') for fpath in filelist: if find_url_in_file(fpath): return True # found it return False # didn't find repo in any of the files # # Now, define the main resource manager class for the driver. # If this driver is a service, inherit from service_manager.Manager. # If the driver is just a resource, it should inherit from # resource_manager.Manager. If you need the sudo password, add # PasswordRepoMixin to the inheritance list. 
# class Manager(resource_manager.Manager, PasswordRepoMixin): REQUIRES_ROOT_ACCESS = True def __init__(self, metadata, dry_run=False): package_name = "%s %s" % (metadata.key["name"], metadata.key["version"]) resource_manager.Manager.__init__(self, metadata, package_name) self.ctx = make_context(metadata.to_json(), self._get_sudo_password, dry_run=dry_run) self._is_installed = False # fallback on this flag if repo_url isn't specified def validate_pre_install(self): pass def is_installed(self): p = self.ctx.props if p.repo_url and (not self._is_installed): self._is_installed = search_for_repository(p.repo_url) if self._is_installed: logger.info("Repository %s already installed" % p.output_ports.repository.repo_name) return self._is_installed def install(self, package): p = self.ctx.props r = self.ctx.r r(check_file_exists, ADD_APT_REPO_COMMAND) r(run_add_apt_repository, p.output_ports.repository.repo_name) r(update) self._is_installed = True def validate_post_install(self): pass<|fim▁end|>
<|file_name|>RepManager.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ ORCA Open Remote Control Application Copyright (C) 2013-2020 Carsten Thielepape Please contact me by : http://www.orca-remote.org/ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from __future__ import annotations import os from typing import Union from typing import List from typing import Dict from xml.etree.ElementTree import Element from xml.etree.ElementTree import SubElement from kivy.logger import Logger from ORCA.ui.ShowErrorPopUp import ShowErrorPopUp from ORCA.utils.TypeConvert import ToUnicode from ORCA.utils.TypeConvert import EscapeUnicode from ORCA.utils.Filesystem import AdjustPathToOs from ORCA.utils.FileName import cFileName from ORCA.utils.LogError import LogError from ORCA.vars.Replace import ReplaceVars from ORCA.vars.Access import SetVar from ORCA.vars.Actions import Var_DelArray from ORCA.utils.XML import XMLPrettify from ORCA.utils.Path import cPath from ORCA.download.RepManagerEntry import cRepManagerEntry import ORCA.Globals as Globals oRepositoryManager:Union[cRepositoryManager,None] = None def RepositoryManager(oPathRepSource:cPath) -> None: """ starts RepositoryManager, we make it global to avoid wrong garbage collection """ global oRepositoryManager oRepositoryManager=cRepositoryManager(oPathRepSource) oRepositoryManager.CollectAndUpload() def CreateRepVarArray(uBaseLocalDir:str) -> None: global 
oRepositoryManager if oRepositoryManager: oRepositoryManager.CreateRepVarArray(uBaseLocalDir) class cRepositoryManager: """ The Main repository manager class, which uploads all reps to the cloud """ def __init__(self,oPathRepSource) -> None: super(cRepositoryManager, self).__init__() self.aFiles:List[str] = [] self.aRepManagerEntries:List[cRepManagerEntry] = [] self.aZipFiles:List[Dict] = [] self.oPathRepSource:cPath = oPathRepSource def CollectAndUpload(self) -> None: """ Collects all Reps and uploads them """ try: oPath:cPath = Globals.oPathTmp + "RepManager" oPath.Delete() self.GetOthers() self.GetCodesets() self.GetDefinitions() self.GetSkins() self.GetInterfaces() self.GetLanguages() self.GetSounds() self.GetScripts() self.GetWizardTemplates() self.GetFonts() self.CreateRepository() except Exception as e: uMsg=LogError(uMsg='Critical failure on Repository Manager ...' ,oException=e) ShowErrorPopUp(uMessage=uMsg) def GetOthers(self) -> None: """ Gets all others reps """ del self.aFiles[:] del self.aRepManagerEntries[:] self.aFiles=(self.oPathRepSource + 'repositories/orca-remote/repositories/others').GetFileList(bSubDirs = False, bFullPath = True) for uFn in self.aFiles: oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=uFn) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Resource not ready for Repository Manager, skipped: '+uFn) self.SaveRepositoryXML('others','Various ORCA resources') def GetFonts(self) -> None: """ Gets all others reps """ del self.aFiles[:] del self.aRepManagerEntries[:] aFontsFolders:List[str] = Globals.oPathFonts.GetFolderList(bFullPath=True) for uFontFolder in aFontsFolders: <|fim▁hole|> self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Font not ready for Repository Manager, skipped: '+oFnFontDefinition) self.SaveRepositoryXML('fonts','Font Resources') def GetCodesets(self) -> None: """ Gets all codeset 
reps """ del self.aFiles[:] del self.aRepManagerEntries[:] self.aFiles=Globals.oPathCodesets.GetFileList(bSubDirs = True, bFullPath = True) for uFn in self.aFiles: if uFn.lower().endswith('.xml'): oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=uFn) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Codeset not ready for Repository Manager, skipped: '+uFn) self.SaveRepositoryXML('codesets','Orca Genuine Codesets') def GetSounds(self) -> None: """ Gets all sounds reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uSound in Globals.oSound.aSoundsList: oFnSound:cFileName = cFileName(Globals.oPathSoundsRoot + uSound) +"sounds.xml" oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=oFnSound) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Soundset not ready for Repository Manager, skipped: '+oFnSound) self.SaveRepositoryXML('sounds','Orca Genuine Sounds') def GetDefinitions(self) -> None: """ Gets all definition reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uDefinitionName in Globals.aDefinitionList: oFnFile:cFileName=cFileName().ImportFullPath(uFnFullName='%s/definitions/%s/definition.xml' % (Globals.oPathRoot.string, uDefinitionName)) oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFnFile) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Definition not ready for Repository Manager, skipped: '+oFnFile) self.SaveRepositoryXML('definitions','Orca Genuine Definitions') def GetLanguages(self) -> None: """ Gets all Language reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uLanguage in Globals.aLanguageList: oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/languages/%s/strings.xml' % 
(Globals.oPathRoot.string, uLanguage)) oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Language not ready for Repository Manager, skipped: '+oFn) self.SaveRepositoryXML('languages','Orca Genuine Language Files') def GetInterfaces(self) -> None: """ Gets all interface reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uInterFaceName in Globals.oInterFaces.aObjectNameList: oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/interfaces/%s/interface.py' % (Globals.oPathRoot.string, uInterFaceName)) oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn) if oRepManagerEntry.ParseFromSourceFile(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Interface not ready for Repository Manager, skipped: '+oFn) self.SaveRepositoryXML('interfaces','Orca Genuine Interfaces') def GetScripts(self) -> None: """ Gets all scripts reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uScriptName in Globals.oScripts.dScriptPathList: oFn:cFileName=cFileName(Globals.oScripts.dScriptPathList[uScriptName])+'script.py' oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn) if oRepManagerEntry.ParseFromSourceFile(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Script not ready for Repository Manager, skipped: '+oFn) self.SaveRepositoryXML('scripts','Orca Genuine Scripts') def GetSkins(self) -> None: """ Gets all skins reps """ del self.aFiles[:] del self.aRepManagerEntries[:] for uSkinName in Globals.aSkinList: oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/skins/%s/skin.xml' % (Globals.oPathRoot.string, uSkinName)) oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn) if oRepManagerEntry.ParseFromXML(): if not 
oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Skin not ready for Repository Manager, skipped: '+oFn) self.SaveRepositoryXML('skins','Orca Genuine Skins') def GetWizardTemplates(self) -> None: """ Gets all wizard reps """ del self.aFiles[:] del self.aRepManagerEntries[:] aDirs:List[str]=(Globals.oPathRoot + u'wizard templates').GetFolderList() for uDirName in aDirs: aDirsSub:List[str]=(Globals.oPathRoot + (u'wizard templates/' + uDirName)).GetFolderList() for uDirsSub in aDirsSub: oFn:cFileName=cFileName(Globals.oPathRoot + (u'wizard templates/' + uDirName + "/" + uDirsSub)) + (uDirsSub + ".xml") oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip: self.aRepManagerEntries.append(oRepManagerEntry) else: Logger.warning('Wizard Template not ready for Repository Manager, skipped: '+oFn) self.SaveRepositoryXML('wizard templates','Wizard Templates') def SaveRepositoryXML(self,uType:str,uDescription:str) -> None: """ Saves the main repository directory xml """ oVal:Element uContent:str uRoot:str oPath:cPath= Globals.oPathTmp + "RepManager" oPath.Create() oPath=oPath+"repositories" oPath.Create() oPath=oPath+uType oPath.Create() oFnXml:cFileName=cFileName(oPath) +'repository.xml' oXMLRoot:Element = Element('repository') oVal = SubElement(oXMLRoot,'version') oVal.text = '1.00' oVal = SubElement(oXMLRoot,'type') oVal.text = uType oVal = SubElement(oXMLRoot,'description') oVal.text = uDescription oXMLEntries:Element = SubElement(oXMLRoot,'entries') for oEntry in self.aRepManagerEntries: Logger.debug ('Saving Repository-Entry [%s]' % oEntry.oFnEntry.string) oEntry.oRepEntry.WriteToXMLNode(oXMLNode=oXMLEntries) for oSource in oEntry.oRepEntry.aSources: bZipParentDir:bool = cPath.CheckIsDir(uCheckName=oSource.uLocal) # Create according Zip if bZipParentDir: uUpper:str = os.path.basename(oSource.uSourceFile) uFinalPath:str = uType 
oDest:cFileName = cFileName().ImportFullPath(uFnFullName='%s/RepManager/repositories/%s/%s' % (Globals.oPathTmp.string, uFinalPath, uUpper)) uUpper1:str = os.path.split(os.path.abspath(oSource.uLocal))[0] uRoot = AdjustPathToOs(uPath=ReplaceVars(uUpper1)+'/') self.aZipFiles.append({'filename':oSource.uLocal,'dstfilename':oDest.string, 'removepath':uRoot, 'skipfiles':ToUnicode(oEntry.oRepEntry.aSkipFileNames)}) else: uDest:str = AdjustPathToOs(uPath='%s/RepManager/repositories/%s/%s.zip' % (Globals.oPathTmp.string, uType, os.path.splitext(os.path.basename(oSource.uLocal))[0])) uRoot = AdjustPathToOs(uPath=Globals.oPathRoot.string + "/" + oSource.uTargetPath) self.aZipFiles.append({'filename':oSource.uLocal,'dstfilename':uDest, 'removepath':uRoot}) oFSFile = open(oFnXml.string, 'w') uContent = XMLPrettify(oElem=oXMLRoot) uContent = ReplaceVars(uContent) oFSFile.write(EscapeUnicode(uContent)) oFSFile.close() def CreateRepository(self) -> None: self.CreateZipVarArray() SetVar(uVarName="REPMAN_BASELOCALDIR", oVarValue=(Globals.oPathTmp + "RepManager").string) Globals.oTheScreen.AddActionToQueue(aActions=[{'string': 'call Create Repository'}]) return def CreateZipVarArray(self) -> None: SetVar(uVarName="REPMAN_ZIPCNTFILES", oVarValue= str(len(self.aZipFiles))) Var_DelArray("REPMAN_ZIPSOUREFILENAMES[]") Var_DelArray("REPMAN_ZIPDESTFILENAMES[]") Var_DelArray("REPMAN_ZIPREMOVEPATH[]") Var_DelArray("REPMAN_ZIPSKIPFILES[]") Var_DelArray("REPMAN_ZIPTYPE[]") i:int=0 for dZipFile in self.aZipFiles: uIndex:str = str(i) + "]" SetVar(uVarName="REPMAN_ZIPSOURCEFILENAMES[" + uIndex ,oVarValue=dZipFile['filename']) SetVar(uVarName="REPMAN_ZIPDESTFILENAMES[" + uIndex ,oVarValue=dZipFile['dstfilename']) SetVar(uVarName="REPMAN_ZIPREMOVEPATH[" + uIndex ,oVarValue=dZipFile['removepath']) uSkipFiles:str = dZipFile.get('skipfiles',None) if uSkipFiles is not None: SetVar(uVarName="REPMAN_ZIPSKIPFILES[" + uIndex, oVarValue=dZipFile['skipfiles']) SetVar(uVarName="REPMAN_ZIPTYPE[" + 
uIndex,oVarValue= "folder") else: SetVar(uVarName="REPMAN_ZIPTYPE[" + uIndex,oVarValue= "file") i += 1 # noinspection PyMethodMayBeStatic def CreateRepVarArray(self,uBaseLocalDir:str) -> None: aLocalFiles:List[str] = cPath(uBaseLocalDir).GetFileList(bSubDirs=True, bFullPath=True) SetVar(uVarName="REPMAN_LOCALBASENAME", oVarValue=uBaseLocalDir) SetVar(uVarName="REPMAN_CNTFILES", oVarValue= str(len(aLocalFiles))) Var_DelArray("REPMAN_LOCALFILENAMES[]") i:int=0 for uLocalFile in aLocalFiles: uIndex:str = str(i) + "]" SetVar(uVarName="REPMAN_LOCALFILENAMES[" + uIndex ,oVarValue=uLocalFile) i += 1<|fim▁end|>
oFnFontDefinition:cFileName = cFileName(cPath(uFontFolder)) + "fonts.xml" oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=oFnFontDefinition) if oRepManagerEntry.ParseFromXML(): if not oRepManagerEntry.oRepEntry.bSkip:
<|file_name|>info_gain.py<|end_file_name|><|fim▁begin|>import os import sys import numpy as np import math def findBinIndexFor(aFloatValue, binsList): #print "findBinIndexFor: %s" % aFloatValue returnIndex = -1 for i in range(len(binsList)): thisBin = binsList[i] if (aFloatValue >= thisBin[0]) and (aFloatValue < thisBin[1]): returnIndex = i break return returnIndex def compute_joint_prob(joint_list, vals1, vals2, bins1=None, bins2=None, asFreq=False): returnDict = {} for rec in joint_list: val1 = rec[0] val2 = rec[1] #Find name by which first val should appear dictName1 = val1 if bins1 is not None: dictName1 = findBinIndexFor(val1, bins1) #Find name by which second val should appear dictName2 = val2 if bins2 is not None: dictName2 = findBinIndexFor(val2, bins2) #If first name is not present in dict, #then initialize it if dictName1 not in returnDict: returnDict[dictName1] = {} for val in vals2: #Determine name under which #y-values should appear (i.e. as bin names #or as given names) asDictName = val if bins2 is not None: asDictName = findBinIndexFor(val, bins2) returnDict[dictName1][asDictName] = 0 returnDict[dictName1][dictName2]+=1 if not asFreq: #Normalize values for key in returnDict: for secondKey in returnDict[key]: returnDict[key][secondKey] = float(returnDict[key][secondKey]) / len(joint_list) return returnDict def getXForFixedY(joint_prob_dist, yVal): returnList = [] for key in joint_prob_dist: returnList.append( joint_prob_dist[key][yVal]) return returnList def compute_h(floatsList): returnFloat = None acc = 0 for f in floatsList: if f != 0: acc = acc - f * math.log(f, 2)<|fim▁hole|> returnFloat = acc return returnFloat # Computes Kullback-Leibler divergence between # P(X,Y) and P(X) def conditional_entropy(joint_prob_dist, xVals, yVals): returnFloat = None h_acc = 0 marginal_y_dist = getYMarginalDist(joint_prob_dist) for x in xVals: for y in yVals: joint_xy = 0 marginal_y = 0 if not x in joint_prob_dist or y not in joint_prob_dist[x]: joint_xy = 0 else: 
joint_xy = joint_prob_dist[x][y] if not y in marginal_y_dist: marginal_y = 0 else: marginal_y = marginal_y_dist[y] if joint_xy!=0 and marginal_y!=0: h_acc-=joint_xy*math.log(joint_xy/marginal_y, 2) # for yVal in yVals: # new_xDist = getXForFixedY(joint_prob_dist, yVal) # h_yVal = compute_h(new_xDist) # p_yVal = reduce(lambda x, y: x+y, new_xDist) # h_acc+=p_yVal * h_yVal returnFloat = h_acc return returnFloat def getYMarginalDist(joint_prob_dist): returnDict = {} for xKey in joint_prob_dist: for yKey in joint_prob_dist[xKey]: if not yKey in returnDict: returnDict[yKey] = 0 returnDict[yKey]+=joint_prob_dist[xKey][yKey] return returnDict def getXMarginalDist(joint_prob_dist): returnDict = {} for key in joint_prob_dist: yVals = joint_prob_dist[key] marginalVal = reduce(lambda x,y: x+y, [yVals[e] for e in yVals]) returnDict[key] = marginalVal return returnDict def entropy_loss(joint_prob_dist, xVals, yVals): returnFloat = None priorsDict = getXMarginalDist(joint_prob_dist) priors = priorsDict.values() h_prior = compute_h(priors) h_conditional = conditional_entropy(joint_prob_dist, xVals, yVals) returnFloat = h_prior - h_conditional return returnFloat<|fim▁end|>
<|file_name|>navigation-bar.ts<|end_file_name|><|fim▁begin|>// (C) Copyright 2015 Martin Dougiamas // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import { Component, EventEmitter, Input, Output } from '@angular/core'; import { CoreTextUtilsProvider } from '@providers/utils/text'; /** * Component to show a "bar" with arrows to navigate forward/backward and a "info" icon to display more data. * * This directive will show two arrows at the left and right of the screen to navigate to previous/next item when clicked. * If no previous/next item is defined, that arrow won't be shown. It will also show a button to show more info. * * Example usage: * <core-navigation-bar [previous]="prevItem" [next]="nextItem" (action)="goTo($event)"></core-navigation-bar> */ @Component({ selector: 'core-navigation-bar', templateUrl: 'core-navigation-bar.html', }) export class CoreNavigationBarComponent { @Input() previous?: any; // Previous item. If not defined, the previous arrow won't be shown. @Input() next?: any; // Next item. If not defined, the next arrow won't be shown. @Input() info?: string; // Info to show when clicking the info button. If not defined, the info button won't be shown. @Input() title?: string; // Title to show when seeing the info (new page). @Input() component?: string; // Component the bar belongs to. @Input() componentId?: number; // Component ID. @Output() action?: EventEmitter<any>; // Function to call when an arrow is clicked. 
Will receive as a param the item to load. <|fim▁hole|> this.action = new EventEmitter<any>(); } showInfo(): void { this.textUtils.expandText(this.title, this.info, this.component, this.componentId); } }<|fim▁end|>
constructor(private textUtils: CoreTextUtilsProvider) {
<|file_name|>scrollSvc.js<|end_file_name|><|fim▁begin|>diamondApp.service('scrollSvc', function () { this.scrollTo = function (eID) { // This scrolling function // is from http://www.itnewb.com/tutorial/Creating-the-Smooth-Scroll-Effect-with-JavaScript var startY = currentYPosition(); var stopY = elmYPosition(eID); var distance = stopY > startY ? stopY - startY : startY - stopY; if (distance < 100) { scrollTo(0, stopY); return; } var speed = Math.round(distance / 10); if (speed >= 20) speed = 20; var step = Math.round(distance / 25); var leapY = stopY > startY ? startY + step : startY - step; var timer = 0; if (stopY > startY) { for (var i = startY; i < stopY; i += step) { setTimeout("window.scrollTo(0, " + leapY + ")", timer * speed); leapY += step; if (leapY > stopY) leapY = stopY; timer++; } return; } for (var i = startY; i > stopY; i -= step) { setTimeout("window.scrollTo(0, " + leapY + ")", timer * speed); leapY -= step; if (leapY < stopY) leapY = stopY; timer++; } function currentYPosition() { // Firefox, Chrome, Opera, Safari if (self.pageYOffset) return self.pageYOffset; // Internet Explorer 6 - standards mode if (document.documentElement && document.documentElement.scrollTop) return document.documentElement.scrollTop;<|fim▁hole|> function elmYPosition(eID) { var elm = document.getElementById(eID); var y = elm.offsetTop; var node = elm; while (node.offsetParent && node.offsetParent != document.body) { node = node.offsetParent; y += node.offsetTop; } return y; } }; });<|fim▁end|>
// Internet Explorer 6, 7 and 8 if (document.body.scrollTop) return document.body.scrollTop; return 0; }
<|file_name|>internetvideoarchive.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from .common import InfoExtractor from ..compat import ( compat_parse_qs, compat_urlparse, ) from ..utils import ( determine_ext, int_or_none, xpath_text, ) class InternetVideoArchiveIE(InfoExtractor): _VALID_URL = r'https?://video\.internetvideoarchive\.net/(?:player|flash/players)/.*?\?.*?publishedid.*?' _TEST = { 'url': 'http://video.internetvideoarchive.net/player/6/configuration.ashx?customerid=69249&publishedid=194487&reporttag=vdbetatitle&playerid=641&autolist=0&domain=www.videodetective.com&maxrate=high&minrate=low&socialplayer=false', 'info_dict': { 'id': '194487', 'ext': 'mp4', 'title': 'KICK-ASS 2', 'description': 'md5:c189d5b7280400630a1d3dd17eaa8d8a', }, 'params': { # m3u8 download 'skip_download': True, }, } @staticmethod def _build_json_url(query): return 'http://video.internetvideoarchive.net/player/6/configuration.ashx?' + query @staticmethod def _build_xml_url(query): return 'http://video.internetvideoarchive.net/flash/players/flashconfiguration.aspx?' 
+ query def _real_extract(self, url): query = compat_urlparse.urlparse(url).query query_dic = compat_parse_qs(query) video_id = query_dic['publishedid'][0] if '/player/' in url: configuration = self._download_json(url, video_id) <|fim▁hole|> # There are multiple videos in the playlist whlie only the first one # matches the video played in browsers video_info = configuration['playlist'][0] title = video_info['title'] formats = [] for source in video_info['sources']: file_url = source['file'] if determine_ext(file_url) == 'm3u8': m3u8_formats = self._extract_m3u8_formats( file_url, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False) if m3u8_formats: formats.extend(m3u8_formats) file_url = m3u8_formats[0]['url'] formats.extend(self._extract_f4m_formats( file_url.replace('.m3u8', '.f4m'), video_id, f4m_id='hds', fatal=False)) formats.extend(self._extract_mpd_formats( file_url.replace('.m3u8', '.mpd'), video_id, mpd_id='dash', fatal=False)) else: a_format = { 'url': file_url, } if source.get('label') and source['label'][-4:] == ' kbs': tbr = int_or_none(source['label'][:-4]) a_format.update({ 'tbr': tbr, 'format_id': 'http-%d' % tbr, }) formats.append(a_format) self._sort_formats(formats) description = video_info.get('description') thumbnail = video_info.get('image') else: configuration = self._download_xml(url, video_id) formats = [{ 'url': xpath_text(configuration, './file', 'file URL', fatal=True), }] thumbnail = xpath_text(configuration, './image', 'thumbnail') title = 'InternetVideoArchive video %s' % video_id description = None return { 'id': video_id, 'title': title, 'formats': formats, 'thumbnail': thumbnail, 'description': description, }<|fim▁end|>
<|file_name|>format.go<|end_file_name|><|fim▁begin|>package apachelog import ( "strings" ) // Format supported by the Apache mod_log_config module. // For more information, see:<|fim▁hole|>// Supported formats. // // TODO(gilliek): move complex format, such as COOKIE, at the bottom of the list // in order to treat them separately. const ( format_beg Format = iota REMOTE_IP_ADDRESS // %a LOCAL_IP_ADDRESS // %A RESPONSE_SIZE // %B RESPONSE_SIZE_CLF // %b COOKIE // %{Foobar}C ELAPSED_TIME // %D ENV_VAR // %{FOOBAR}e HEADER // %{Foobar}i FILENAME // %f REMOTE_HOST // %h REQUEST_PROTO // %H REMOTE_LOGNAME // %l REQUEST_METHOD // %m PORT // %p PROCESS_ID // %P QUERY_STRING // %q REQUEST_FIRST_LINE // %r STATUS // %s TIME // %t REMOTE_USER // %u URL_PATH // %U CANONICAL_SERVER_NAME // %v SERVER_NAME // %V BYTES_RECEIVED // %I BYTES_SENT // %O ELAPSED_TIME_IN_SEC // %T format_end UNKNOWN // for errors ) var formats = [...]string{ REMOTE_IP_ADDRESS: "%a", LOCAL_IP_ADDRESS: "%A", RESPONSE_SIZE: "%B", RESPONSE_SIZE_CLF: "%b", COOKIE: "%{...}C", ELAPSED_TIME: "%D", ENV_VAR: "%{...}e", HEADER: "%{...}i", FILENAME: "%f", REMOTE_HOST: "%h", REQUEST_PROTO: "%H", REMOTE_LOGNAME: "%l", REQUEST_METHOD: "%m", PORT: "%p", PROCESS_ID: "%P", QUERY_STRING: "%q", REQUEST_FIRST_LINE: "%r", STATUS: "%s", TIME: "%t", REMOTE_USER: "%u", URL_PATH: "%U", CANONICAL_SERVER_NAME: "%v", SERVER_NAME: "%V", BYTES_RECEIVED: "%I", BYTES_SENT: "%O", ELAPSED_TIME_IN_SEC: "%T", UNKNOWN: "UNKNOWN", } func (f Format) String() string { if f > format_beg && f < format_end { return formats[f+1] } return formats[UNKNOWN] } var formatsMapping map[string]Format func init() { formatsMapping = make(map[string]Format) for i := format_beg + 1; i < format_end; i++ { formatsMapping[formats[i]] = i } } // LookupFormat retrieves the format corresponding to the given format string. 
func LookupFormat(format string) Format { if strings.HasPrefix(format, "%{") { if idx := strings.Index(format, "}"); idx != -1 { format = "%{..." + format[idx:] } } if f, found := formatsMapping[format]; found { return f } return UNKNOWN }<|fim▁end|>
// https://httpd.apache.org/docs/2.4/en/mod/mod_log_config.html#formats type Format int
<|file_name|>templatetags.py<|end_file_name|><|fim▁begin|># Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from django.conf import settings from django.template import Context # noqa from django.template import Template # noqa from django.utils.text import normalize_newlines # noqa from horizon.test import helpers as test from horizon.test.test_dashboards.cats.dashboard import Cats # noqa from horizon.test.test_dashboards.cats.kittens.panel import Kittens # noqa<|fim▁hole|>from horizon.test.test_dashboards.dogs.puppies.panel import Puppies # noqa def single_line(text): """Quick utility to make comparing template output easier.""" return re.sub(' +', ' ', normalize_newlines(text).replace('\n', '')).strip() class TemplateTagTests(test.TestCase): """Test Custom Template Tag.""" def render_template_tag(self, tag_name, tag_require=''): tag_call = "{%% %s %%}" % tag_name return self.render_template(tag_call, tag_require) def render_template(self, template_text, tag_require='', context={}): """Render a Custom Template to string.""" template = Template("{%% load %s %%} %s" % (tag_require, template_text)) return template.render(Context(context)) def test_site_branding_tag(self): """Test if site_branding tag renders the correct setting.""" rendered_str = self.render_template_tag("site_branding", 
"branding") self.assertEqual(settings.SITE_BRANDING, rendered_str.strip(), "tag site_branding renders %s" % rendered_str.strip()) def test_size_format_filters(self): size_str = ('5|diskgbformat', '10|diskgbformat', '5555|mb_float_format', '80|mb_float_format', '.5|mbformat', '0.005|mbformat', '0.0005|mbformat') expected = u' 5GB 10GB 5.4GB 80MB 512KB 5KB 524Bytes ' text = '' for size_filter in size_str: text += '{{' + size_filter + '}} ' rendered_str = self.render_template(tag_require='sizeformat', template_text=text) self.assertEqual(expected, rendered_str) def test_size_format_filters_with_string(self): size_str = ('"test"|diskgbformat', '"limit"|mb_float_format', '"no limit"|mbformat') expected = u' test limit no limit ' text = '' for size_filter in size_str: text += '{{' + size_filter + '}} ' rendered_str = self.render_template(tag_require='sizeformat', template_text=text) self.assertEqual(expected, rendered_str) def test_truncate_filter(self): ctx_string = {'val1': 'he', 'val2': 'hellotrunc', 'val3': 'four'} text = ('{{test.val1|truncate:1}}#{{test.val2|truncate:4}}#' '{{test.val3|truncate:10}}') expected = u' h#h...#four' rendered_str = self.render_template(tag_require='truncate_filter', template_text=text, context={'test': ctx_string}) self.assertEqual(expected, rendered_str) def test_quota_filter(self): ctx_string = {'val1': 100, 'val2': 1000, 'val3': float('inf')} text = ('{{test.val1|quota:"TB"}}#{{test.val2|quota}}#' '{{test.val3|quota}}') expected = u' 100 TB Available#1000 Available#No Limit' rendered_str = self.render_template(tag_require='horizon', template_text=text, context={'test': ctx_string}) self.assertEqual(expected, rendered_str) def test_horizon_main_nav(self): text = "{% horizon_main_nav %}" expected = """ <div class='clearfix'> <ul class=\"nav nav-tabs\"> <li> <a href=\"/cats/\" tabindex='1'>Cats</a> </li> <li> <a href=\"/dogs/\" tabindex='1'>Dogs</a> </li> </ul></div>""" rendered_str = self.render_template(tag_require='horizon', 
template_text=text, context={'request': self.request}) self.assertEqual(single_line(rendered_str), single_line(expected))<|fim▁end|>
from horizon.test.test_dashboards.dogs.dashboard import Dogs # noqa
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import socket import subprocess import sys import time import h11 import pytest import requests @pytest.fixture def turq_instance(): return TurqInstance() class TurqInstance: """Spins up and controls a live instance of Turq for testing.""" def __init__(self): self.host = 'localhost' # Test instance listens on port 13095 instead of the default 13085, # to make it easier to run tests while also testing Turq manually. # Of course, ideally it should be a random free port instead. self.mock_port = 13095 self.editor_port = 13096 self.password = '' self.extra_args = [] self.wait = True self._process = None self.console_output = None def __enter__(self): args = [sys.executable, '-m', 'turq.main', '--bind', self.host, '--mock-port', str(self.mock_port), '--editor-port', str(self.editor_port)] if self.password is not None:<|fim▁hole|> stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) if self.wait: self._wait_for_server() return self def __exit__(self, exc_type, exc_value, traceback): self._process.terminate() self._process.wait() self.console_output = self._process.stderr.read().decode() return False def _wait_for_server(self, timeout=3): # Wait until the mock server starts accepting connections, # but no more than `timeout` seconds. 
t0 = time.monotonic() while time.monotonic() - t0 < timeout: time.sleep(0.1) try: self.connect().close() self.connect_editor().close() return except OSError: pass raise RuntimeError('Turq failed to start') def connect(self): return socket.create_connection((self.host, self.mock_port), timeout=5) def connect_editor(self): return socket.create_connection((self.host, self.editor_port), timeout=5) def send(self, *events): hconn = h11.Connection(our_role=h11.CLIENT) with self.connect() as sock: for event in events: sock.sendall(hconn.send(event)) sock.shutdown(socket.SHUT_WR) while hconn.their_state is not h11.CLOSED: event = hconn.next_event() if event is h11.NEED_DATA: hconn.receive_data(sock.recv(4096)) elif not isinstance(event, h11.ConnectionClosed): yield event def request(self, method, url, **kwargs): full_url = 'http://%s:%d%s' % (self.host, self.mock_port, url) return requests.request(method, full_url, **kwargs) def request_editor(self, method, url, **kwargs): full_url = 'http://%s:%d%s' % (self.host, self.editor_port, url) return requests.request(method, full_url, **kwargs)<|fim▁end|>
args += ['--editor-password', self.password] args += self.extra_args self._process = subprocess.Popen(args, stdin=subprocess.DEVNULL,
<|file_name|>fileUtilTests.py<|end_file_name|><|fim▁begin|># # Copyright 2012 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Refer to the README and COPYING files for full details of the license # import tempfile import os import storage.fileUtils as fileUtils import testValidation from testrunner import VdsmTestCase as TestCaseBase class DirectFileTests(TestCaseBase): @classmethod def getConfigTemplate(cls): return {} def testRead(self): data = """Vestibulum. Libero leo nostra, pede nunc eu. Pellentesque platea lacus morbi nisl montes ve. Ac. A, consectetuer erat, justo eu. Elementum et, phasellus fames et rutrum donec magnis eu bibendum. Arcu, ante aliquam ipsum ut facilisis ad.""" srcFd, srcPath = tempfile.mkstemp() f = os.fdopen(srcFd, "wb") f.write(data) f.flush() f.close() with fileUtils.open_ex(srcPath, "dr") as f: self.assertEquals(f.read(), data) os.unlink(srcPath) def testSeekRead(self): data = """ Habitasse ipsum at fusce litora metus, placerat dui purus aenean ante, ve. Pede hymenaeos ut primis cum, rhoncus, lectus, nunc. Vestibulum curabitur vitae etiam magna auctor velit, mi tempus vivamus orci eros. Pellentesque curabitur risus fermentum eget. Elementum curae, donec nisl egestas ve, ut odio eu nunc elit felis primis id. Ridiculus metus morbi nulla erat, amet nisi. 
Amet ligula nisi, id penatibus risus in. Purus velit duis. Aenean eget, pellentesque eu rhoncus arcu et consectetuer laoreet, augue nisi dictum lacinia urna. Fermentum torquent. Ut interdum vivamus duis. Felis consequat nec pede. Orci sollicitudin parturient orci felis. Enim, diam velit sapien condimentum fames semper nibh. Integer at, egestas pede consectetuer ac augue pharetra dolor non placerat quisque id cursus ultricies. Ligula mi senectus sit. Habitasse. Integer sollicitudin dapibus cum quam. """ self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() f = os.fdopen(srcFd, "wb") f.write(data) f.flush() f.close() with fileUtils.open_ex(srcPath, "dr") as f: f.seek(512) self.assertEquals(f.read(), data[512:]) os.unlink(srcPath) def testWrite(self): data = """In ut non platea egestas, quisque magnis nunc nostra ac etiam suscipit nec integer sociosqu. Fermentum. Ante orci luctus, ipsum ullamcorper enim arcu class neque inceptos class. Ut, sagittis torquent, commodo facilisi.""" srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "dw") as f: f.write(data) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) def testSmallWrites(self): data = """ Aliquet habitasse tellus. Fringilla faucibus tortor parturient consectetuer sodales, venenatis platea habitant. Hendrerit nostra nunc odio. Primis porttitor consequat enim ridiculus. Taciti nascetur, nibh, convallis sit, cum dis mi. Nonummy justo odio cursus, ac hac curabitur nibh. Tellus. Montes, ut taciti orci ridiculus facilisis nunc. Donec. Risus adipiscing habitant donec vehicula non vitae class, porta vitae senectus. Nascetur felis laoreet integer, tortor ligula. Pellentesque vestibulum cras nostra. Ut sollicitudin posuere, per accumsan curabitur id, nisi fermentum vel, eget netus tristique per,<|fim▁hole|> pharetra, ac, condimentum orci, consequat mollis. Cras lacus augue ultrices proin fermentum nibh sed urna. 
Ve ipsum ultrices curae, feugiat faucibus proin et elementum vivamus, lectus. Torquent. Tempus facilisi. Cras suspendisse euismod consectetuer ornare nostra. Fusce amet cum amet diam. """ self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "dw") as f: f.write(data[:512]) f.write(data[512:]) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) def testUpdateRead(self): data = """ Aliquet. Aliquam eni ac nullam iaculis cras ante, adipiscing. Enim eget egestas pretium. Ultricies. Urna cubilia in, hac. Curabitur. Nibh. Purus ridiculus natoque sed id. Feugiat lacus quam, arcu maecenas nec egestas. Hendrerit duis nunc eget dis lacus porttitor per sodales class diam condimentum quisque condimentum nisi ligula. Dapibus blandit arcu nam non ac feugiat diam, dictumst. Ante eget fames eu penatibus in, porta semper accumsan adipiscing tellus in sagittis. Est parturient parturient mi fermentum commodo, per fermentum. Quis duis velit at quam risus mi. Facilisi id fames. Turpis, conubia rhoncus. Id. Elit eni tellus gravida, ut, erat morbi. Euismod, enim a ante vestibulum nibh. Curae curae primis vulputate adipiscing arcu ipsum suspendisse quam hymenaeos primis accumsan vestibulum. 
""" self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "wd") as f: f.write(data[:512]) with fileUtils.open_ex(srcPath, "r+d") as f: f.seek(512) f.write(data[512:]) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) class ChownTests(TestCaseBase): @testValidation.ValidateRunningAsRoot def test(self): targetId = 666 srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) fileUtils.chown(srcPath, targetId, targetId) stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == targetId) os.unlink(srcPath) @testValidation.ValidateRunningAsRoot def testNames(self): # I convert to some id because I have no # idea what users are defined and what # there IDs are apart from root tmpId = 666 srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) fileUtils.chown(srcPath, tmpId, tmpId) stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == tmpId) fileUtils.chown(srcPath, "root", "root") stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == 0) class CopyUserModeToGroupTests(TestCaseBase): MODE_MASK = 0777 # format: initialMode, expectedMode modesList = [ (0770, 0770), (0700, 0770), (0750, 0770), (0650, 0660), ] def testCopyUserModeToGroup(self): fd, path = tempfile.mkstemp() try: os.close(fd) for initialMode, expectedMode in self.modesList: os.chmod(path, initialMode) fileUtils.copyUserModeToGroup(path) self.assertEquals(os.stat(path).st_mode & self.MODE_MASK, expectedMode) finally: os.unlink(path)<|fim▁end|>
donec, curabitur senectus ut fusce. A. Mauris fringilla senectus et eni facilisis magna inceptos eu, cursus habitant fringilla neque. Nibh. Elit facilisis sed, elit, nostra ve torquent dictumst, aenean sapien quam, habitasse in. Eu tempus aptent, diam, nisi risus
<|file_name|>create-github-repo.ts<|end_file_name|><|fim▁begin|>import * as denodeify from 'denodeify'; const Task = require('../ember-cli/lib/models/task'); const SilentError = require('silent-error'); import { exec } from 'child_process'; import * as https from 'https'; import { oneLine } from 'common-tags'; export default Task.extend({ run: function(commandOptions: any) { const ui = this.ui; let promise: Promise<any>; // declared here so that tests can stub exec const execPromise = denodeify(exec); if (/.+/.test(commandOptions.ghToken) && /\w+/.test(commandOptions.ghUsername)) { promise = Promise.resolve({ ghToken: commandOptions.ghToken, ghUsername: commandOptions.ghUsername }); } else { ui.writeLine(); ui.writeLine(oneLine` In order to deploy this project via GitHub Pages, we must first create a repository for it. `); ui.writeLine(oneLine` It\'s safer to use a token than to use a password so you will need to create one `); ui.writeLine('Go to the following page and click "Generate new token".'); ui.writeLine('https://github.com/settings/tokens\n'); ui.writeLine('Choose "public_repo" as scope and then click "Generate token".\n'); promise = ui.prompt([ { name: 'ghToken', type: 'input', message: oneLine` Please enter GitHub token you just created (used only once to create the repo): `, validate: function(token: string) { return /.+/.test(token); } }, { name: 'ghUsername',<|fim▁hole|> return /\w+/.test(userName); } }]); } return promise .then((answers) => { return new Promise(function(resolve, reject) { const postData = JSON.stringify({ 'name': commandOptions.projectName }); const req = https.request({ hostname: 'api.github.com', port: 443, path: '/user/repos', method: 'POST', headers: { 'Authorization': `token ${answers.ghToken}`, 'Content-Type': 'application/json', 'Content-Length': postData.length, 'User-Agent': 'angular-cli-github-pages' } }); req.on('response', function(response: any) { if (response.statusCode === 201) { resolve(execPromise(oneLine` git 
remote add origin git@github.com:${answers.ghUsername}/${commandOptions.projectName}.git `)); } else { reject(new SilentError(oneLine` Failed to create GitHub repo. Error: ${response.statusCode} ${response.statusMessage} `)); } }); req.write(postData); req.end(); }); }); } });<|fim▁end|>
type: 'input', message: 'and your GitHub user name:', validate: function(userName: string) {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>use std::ops::*; extern crate cgmath; use cgmath::num_traits::*; use cgmath::*; use std::num::FpCategory; #[derive(Copy, Clone, PartialEq, Debug)] pub struct F64Err { val: f64, err: f64, } impl F64Err { #[inline] pub fn new_errorfree(val: f64) -> F64Err { F64Err { val: val, err: 0. } } #[inline] pub fn new(val: f64) -> F64Err { F64Err { val: val, err: val } } #[inline] pub fn new_exact(val: f64, err: f64) -> F64Err { F64Err { val: val, err: err } } #[inline] pub fn val(&self) -> f64 { self.val } #[inline] pub fn err(&self) -> f64 { self.err } #[inline] pub fn err_times_eps(&self) -> f64 { self.err * ::std::f64::EPSILON } } impl Mul for F64Err { type Output = F64Err; #[inline] fn mul(self, rhs: Self) -> Self::Output { F64Err { val: self.val * rhs.val,<|fim▁hole|> } } } impl MulAssign for F64Err { #[inline] fn mul_assign(&mut self, rhs: Self) { *self = *self * rhs } } impl Div for F64Err { type Output = F64Err; #[inline] fn div(self, rhs: Self) -> Self::Output { F64Err { val: self.val / rhs.val, err: self.err / rhs.val.abs() + rhs.err * self.val / (rhs.val * rhs.val) } } } impl DivAssign for F64Err { #[inline] fn div_assign(&mut self, rhs: Self) { *self = *self / rhs } } impl Add for F64Err { type Output = Self; #[inline] fn add(self, rhs: Self) -> Self::Output { F64Err { val: self.val + rhs.val, err: self.val.abs().max(rhs.val.abs()) + self.err + rhs.err } } } impl AddAssign for F64Err { #[inline] fn add_assign(&mut self, rhs: Self) { *self = *self + rhs } } impl Sub for F64Err { type Output = Self; #[inline] fn sub(self, rhs: Self) -> Self::Output { F64Err { val: self.val - rhs.val, err: self.val.abs().max(rhs.val.abs()) + self.err + rhs.err } } } impl SubAssign for F64Err { #[inline] fn sub_assign(&mut self, rhs: Self) { *self = *self - rhs } } impl Rem for F64Err { type Output = Self; #[inline] fn rem(self, rhs: Self) -> Self::Output { unimplemented!(); } } impl RemAssign for F64Err { #[inline] fn rem_assign(&mut 
self, rhs: Self) { *self = *self % rhs } } impl PartialOrd for F64Err { #[inline] fn partial_min(self, other: Self) -> Self { if self.val < other.val { self } else { other } } #[inline] fn partial_max(self, other: Self) -> Self { if self.val > other.val { self } else { other } } } impl ::std::cmp::PartialOrd for F64Err { #[inline] fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { self.val.partial_cmp(&other.val) } } impl Zero for F64Err { #[inline] fn zero() -> Self { Self::new_errorfree(0.) } #[inline] fn is_zero(&self) -> bool { self.val.is_zero() } } impl One for F64Err { #[inline] fn one() -> Self { Self::new_errorfree(1.) } } impl ToPrimitive for F64Err { /// Converts the value of `self` to an `isize`. #[inline] fn to_isize(&self) -> Option<isize> { self.val.to_isize() } /// Converts the value of `self` to an `i8`. #[inline] fn to_i8(&self) -> Option<i8> { self.val.to_i8() } /// Converts the value of `self` to an `i16`. #[inline] fn to_i16(&self) -> Option<i16> { self.val.to_i16() } /// Converts the value of `self` to an `i32`. #[inline] fn to_i32(&self) -> Option<i32> { self.val.to_i32() } #[inline] fn to_i64(&self) -> Option<i64> { self.val.to_i64() } /// Converts the value of `self` to a `usize`. #[inline] fn to_usize(&self) -> Option<usize> { self.val.to_usize() } /// Converts the value of `self` to an `u8`. #[inline] fn to_u8(&self) -> Option<u8> { self.val.to_u8() } /// Converts the value of `self` to an `u16`. #[inline] fn to_u16(&self) -> Option<u16> { self.val.to_u16() } /// Converts the value of `self` to an `u32`. #[inline] fn to_u32(&self) -> Option<u32> { self.val.to_u32() } /// Converts the value of `self` to an `u32`. #[inline] fn to_u64(&self) -> Option<u64> { self.val.to_u64() } /// Converts the value of `self` to an `f32`. #[inline] fn to_f32(&self) -> Option<f32> { self.val.to_f32() } /// Converts the value of `self` to an `f64`. 
#[inline] fn to_f64(&self) -> Option<f64> { self.val.to_f64() } } impl NumCast for F64Err { #[inline] fn from<T>(t: T) -> Option<Self> { unimplemented!(); } } impl Num for F64Err { type FromStrRadixErr = ParseFloatError; #[inline] fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseFloatError> { unimplemented!(); } } impl BaseNum for F64Err {} impl ApproxEq for F64Err { type Epsilon = Self; #[inline] fn default_epsilon() -> Self::Epsilon { unimplemented!() } #[inline] fn default_max_relative() -> Self::Epsilon { unimplemented!() } #[inline] fn default_max_ulps() -> u32 { unimplemented!() } #[inline] fn relative_eq(&self, other: &Self, epsilon: Self::Epsilon, max_relative: Self::Epsilon) -> bool { unimplemented!() } #[inline] fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool { unimplemented!() } } impl Neg for F64Err { type Output = Self; #[inline] fn neg(self) -> Self::Output { unimplemented!() } } impl Float for F64Err { #[inline] fn nan() -> Self { unimplemented!() } #[inline] fn infinity() -> Self { unimplemented!() } #[inline] fn neg_infinity() -> Self { unimplemented!() } #[inline] fn neg_zero() -> Self { unimplemented!() } #[inline] fn min_value() -> Self { unimplemented!() } #[inline] fn min_positive_value() -> Self { unimplemented!() } #[inline] fn max_value() -> Self { unimplemented!() } #[inline] fn is_nan(self) -> bool { unimplemented!() } #[inline] fn is_infinite(self) -> bool { unimplemented!() } #[inline] fn is_finite(self) -> bool { unimplemented!() } #[inline] fn is_normal(self) -> bool { unimplemented!() } #[inline] fn classify(self) -> FpCategory { unimplemented!() } #[inline] fn floor(self) -> Self { unimplemented!() } #[inline] fn ceil(self) -> Self { unimplemented!() } #[inline] fn round(self) -> Self { unimplemented!() } #[inline] fn trunc(self) -> Self { unimplemented!() } #[inline] fn fract(self) -> Self { unimplemented!() } #[inline] fn abs(self) -> Self { unimplemented!() } #[inline] fn signum(self) 
-> Self { unimplemented!() } #[inline] fn is_sign_positive(self) -> bool { unimplemented!() } #[inline] fn is_sign_negative(self) -> bool { unimplemented!() } #[inline] fn mul_add(self, a: Self, b: Self) -> Self { unimplemented!() } #[inline] fn recip(self) -> Self { unimplemented!() } #[inline] fn powi(self, n: i32) -> Self { unimplemented!() } #[inline] fn powf(self, n: Self) -> Self { unimplemented!() } #[inline] fn sqrt(self) -> Self { unimplemented!() } #[inline] fn exp(self) -> Self { unimplemented!() } #[inline] fn exp2(self) -> Self { unimplemented!() } #[inline] fn ln(self) -> Self { unimplemented!() } #[inline] fn log(self, base: Self) -> Self { unimplemented!() } #[inline] fn log2(self) -> Self { unimplemented!() } #[inline] fn log10(self) -> Self { unimplemented!() } #[inline] fn max(self, other: Self) -> Self { unimplemented!() } #[inline] fn min(self, other: Self) -> Self { unimplemented!() } #[inline] fn abs_sub(self, other: Self) -> Self { unimplemented!() } #[inline] fn cbrt(self) -> Self { unimplemented!() } #[inline] fn hypot(self, other: Self) -> Self { unimplemented!() } #[inline] fn sin(self) -> Self { unimplemented!() } #[inline] fn cos(self) -> Self { unimplemented!() } #[inline] fn tan(self) -> Self { unimplemented!() } #[inline] fn asin(self) -> Self { unimplemented!() } #[inline] fn acos(self) -> Self { unimplemented!() } #[inline] fn atan(self) -> Self { unimplemented!() } #[inline] fn atan2(self, other: Self) -> Self { unimplemented!() } #[inline] fn sin_cos(self) -> (Self, Self) { unimplemented!() } #[inline] fn exp_m1(self) -> Self { unimplemented!() } #[inline] fn ln_1p(self) -> Self { unimplemented!() } #[inline] fn sinh(self) -> Self { unimplemented!() } #[inline] fn cosh(self) -> Self { unimplemented!() } #[inline] fn tanh(self) -> Self { unimplemented!() } #[inline] fn asinh(self) -> Self { unimplemented!() } #[inline] fn acosh(self) -> Self { unimplemented!() } #[inline] fn atanh(self) -> Self { unimplemented!() } #[inline] fn 
integer_decode(self) -> (u64, i16, i8) { unimplemented!() } } impl BaseFloat for F64Err {} #[cfg(test)] mod tests { use super::*; #[test] fn multiplication_error() { let left = F64Err::new_errorfree(2.); let right = F64Err::new_errorfree(2.); let res = left * right; assert_eq!(4., res.val()); assert_eq!(0., res.err()); } #[test] fn addition_error() { let left = F64Err::new_errorfree(2.); let right = F64Err::new_errorfree(3.); let res = left + right; assert_eq!(5., res.val()); assert_eq!(3., res.err()); } #[test] fn addition_then_multipl() { let left = F64Err::new_errorfree(2.); let right = F64Err::new_errorfree(3.); let res_add = left + right; let res_mul = res_add * left; assert_eq!(10., res_mul.val()); assert_eq!(3. * 2., res_mul.err()); let res_mul = res_mul * res_mul; assert_eq!(100., res_mul.val()); assert_eq!((3. * 2.) * 10. + (3. * 2.) * 10., res_mul.err()); } #[test] fn subtract_error() { let left = F64Err::new_errorfree(2.); let right = F64Err::new_errorfree(3.); let res = left - right; assert_eq!(-1., res.val()); assert_eq!(3., res.err()); let res = res - right; assert_eq!(-4., res.val()); assert_eq!(6., res.err()); } #[test] fn can_use_cgmath_stuff() { type Point2 = ::cgmath::Point2<F64Err>; let a = Point2::new(F64Err::new(1.), F64Err::new(0.)); let b = Point2::new(F64Err::new(1.), F64Err::new(0.)); let det = ::cgmath::Matrix2::new(a.x, a.y, b.x, b.y).determinant(); } }<|fim▁end|>
err: self.val.abs() * rhs.err + rhs.val.abs() * self.err
<|file_name|>randomColor.js<|end_file_name|><|fim▁begin|>// randomColor by David Merfield under the CC0 license // https://github.com/davidmerfield/randomColor/ ;(function(root, factory) { // Support AMD if (typeof define === 'function' && define.amd) { define([], factory); // Support CommonJS } else if (typeof exports === 'object') { var randomColor = factory(); // Support NodeJS & Component, which allow module.exports to be a function if (typeof module === 'object' && module && module.exports) { exports = module.exports = randomColor; } // Support CommonJS 1.1.1 spec exports.randomColor = randomColor; // Support vanilla script loading } else { root.randomColor = factory(); } }(this, function() { // Seed to get repeatable colors var seed = null; // Shared color dictionary var colorDictionary = {}; // Populate the color dictionary loadColorBounds(); var randomColor = function (options) { options = options || {}; // Check if there is a seed and ensure it's an // integer. Otherwise, reset the seed value. if (options.seed && options.seed === parseInt(options.seed, 10)) { seed = options.seed; // A string was passed as a seed } else if (typeof options.seed === 'string') { seed = stringToInteger(options.seed); // Something was passed as a seed but it wasn't an integer or string } else if (options.seed !== undefined && options.seed !== null) { throw new TypeError('The seed value must be an integer or string'); // No seed, reset the value outside. } else { seed = null; } var H,S,B; // Check if we need to generate multiple colors if (options.count !== null && options.count !== undefined) { var totalColors = options.count, colors = []; options.count = null; while (totalColors > colors.length) { // Since we're generating multiple colors, // incremement the seed. Otherwise we'd just // generate the same color each time... 
if (seed && options.seed) options.seed += 1; colors.push(randomColor(options)); } options.count = totalColors; return colors; } // First we pick a hue (H) H = pickHue(options); // Then use H to determine saturation (S) S = pickSaturation(H, options); // Then use S and H to determine brightness (B). B = pickBrightness(H, S, options); // Then we return the HSB color in the desired format return setFormat([H,S,B], options); }; function pickHue (options) { var hueRange = getHueRange(options.hue), hue = randomWithin(hueRange); // Instead of storing red as two seperate ranges, // we group them, using negative numbers if (hue < 0) {hue = 360 + hue;} return hue; } function pickSaturation (hue, options) { if (options.luminosity === 'random') { return randomWithin([0,100]); } if (options.hue === 'monochrome') { return 0; } var saturationRange = getSaturationRange(hue); var sMin = saturationRange[0], sMax = saturationRange[1]; switch (options.luminosity) { case 'bright': sMin = 55; break; case 'dark': sMin = sMax - 10; break; case 'light': sMax = 55; break; } return randomWithin([sMin, sMax]); } function pickBrightness (H, S, options) { var bMin = getMinimumBrightness(H, S), bMax = 100; switch (options.luminosity) { case 'dark': bMax = bMin + 20; break; case 'light': bMin = (bMax + bMin)/2; break; case 'random': bMin = 0; bMax = 100; break; } return randomWithin([bMin, bMax]); } function setFormat (hsv, options) { switch (options.format) { case 'hsvArray': return hsv; case 'hslArray': return HSVtoHSL(hsv); case 'hsl': var hsl = HSVtoHSL(hsv); return 'hsl('+hsl[0]+', '+hsl[1]+'%, '+hsl[2]+'%)'; case 'hsla': var hslColor = HSVtoHSL(hsv); return 'hsla('+hslColor[0]+', '+hslColor[1]+'%, '+hslColor[2]+'%, ' + Math.random() + ')'; case 'rgbArray': return HSVtoRGB(hsv); case 'rgb': var rgb = HSVtoRGB(hsv); return 'rgb(' + rgb.join(', ') + ')'; case 'rgba': var rgbColor = HSVtoRGB(hsv); return 'rgba(' + rgbColor.join(', ') + ', ' + Math.random() + ')'; default: return HSVtoHex(hsv); 
} } function getMinimumBrightness(H, S) { var lowerBounds = getColorInfo(H).lowerBounds; for (var i = 0; i < lowerBounds.length - 1; i++) { var s1 = lowerBounds[i][0], v1 = lowerBounds[i][1]; var s2 = lowerBounds[i+1][0], v2 = lowerBounds[i+1][1]; if (S >= s1 && S <= s2) { var m = (v2 - v1)/(s2 - s1), b = v1 - m*s1; return m*S + b; } } return 0; } function getHueRange (colorInput) { if (typeof parseInt(colorInput) === 'number') { var number = parseInt(colorInput); if (number < 360 && number > 0) { return [number, number]; } } if (typeof colorInput === 'string') { if (colorDictionary[colorInput]) { var color = colorDictionary[colorInput]; if (color.hueRange) {return color.hueRange;} } } return [0,360]; } function getSaturationRange (hue) { return getColorInfo(hue).saturationRange; } function getColorInfo (hue) { // Maps red colors to make picking hue easier if (hue >= 334 && hue <= 360) { hue-= 360; } for (var colorName in colorDictionary) { var color = colorDictionary[colorName]; if (color.hueRange && hue >= color.hueRange[0] && hue <= color.hueRange[1]) { return colorDictionary[colorName]; } } return 'Color not found'; } function randomWithin (range) { if (seed === null) { return Math.floor(range[0] + Math.random()*(range[1] + 1 - range[0])); } else { //Seeded random algorithm from http://indiegamr.com/generate-repeatable-random-numbers-in-js/ var max = range[1] || 1; var min = range[0] || 0; seed = (seed * 9301 + 49297) % 233280; var rnd = seed / 233280.0; return Math.floor(min + rnd * (max - min)); } } function HSVtoHex (hsv){ var rgb = HSVtoRGB(hsv); function componentToHex(c) { var hex = c.toString(16); return hex.length == 1 ? 
'0' + hex : hex; } var hex = '#' + componentToHex(rgb[0]) + componentToHex(rgb[1]) + componentToHex(rgb[2]); return hex; } function defineColor (name, hueRange, lowerBounds) { var sMin = lowerBounds[0][0], sMax = lowerBounds[lowerBounds.length - 1][0], bMin = lowerBounds[lowerBounds.length - 1][1], bMax = lowerBounds[0][1]; colorDictionary[name] = { hueRange: hueRange, lowerBounds: lowerBounds, saturationRange: [sMin, sMax], brightnessRange: [bMin, bMax] }; } function loadColorBounds () { defineColor( 'monochrome', null, [[0,0],[100,0]] ); defineColor( 'red', [-26,18], [[20,100],[30,92],[40,89],[50,85],[60,78],[70,70],[80,60],[90,55],[100,50]] ); defineColor( 'orange', [19,46], [[20,100],[30,93],[40,88],[50,86],[60,85],[70,70],[100,70]] ); defineColor( 'yellow', [47,62], [[25,100],[40,94],[50,89],[60,86],[70,84],[80,82],[90,80],[100,75]] ); defineColor( 'green', [63,178], [[30,100],[40,90],[50,85],[60,81],[70,74],[80,64],[90,50],[100,40]] ); defineColor( 'blue', [179, 257], [[20,100],[30,86],[40,80],[50,74],[60,60],[70,52],[80,44],[90,39],[100,35]] ); defineColor( 'purple', [258, 282], [[20,100],[30,87],[40,79],[50,70],[60,65],[70,59],[80,52],[90,45],[100,42]] ); defineColor( 'pink', [283, 334], [[20,100],[30,90],[40,86],[60,84],[80,80],[90,75],[100,73]] ); } function HSVtoRGB (hsv) { // this doesn't work for the values of 0 and 360 // here's the hacky fix var h = hsv[0]; if (h === 0) {h = 1;} if (h === 360) {h = 359;} // Rebase the h,s,v values h = h/360; var s = hsv[1]/100, v = hsv[2]/100; var h_i = Math.floor(h*6), f = h * 6 - h_i, p = v * (1 - s), q = v * (1 - f*s), t = v * (1 - (1 - f)*s), r = 256, g = 256, b = 256; switch(h_i) { case 0: r = v; g = t; b = p; break; case 1: r = q; g = v; b = p; break; case 2: r = p; g = v; b = t; break; case 3: r = p; g = q; b = v; break; case 4: r = t; g = p; b = v; break; case 5: r = v; g = p; b = q; break; } var result = [Math.floor(r*255), Math.floor(g*255), Math.floor(b*255)]; return result; } function HSVtoHSL (hsv) { var 
h = hsv[0], s = hsv[1]/100, v = hsv[2]/100, k = (2-s)*v; return [ h, Math.round(s*v / (k<1 ? k : 2-k) * 10000) / 100,<|fim▁hole|> ]; } function stringToInteger (string) { var total = 0 for (var i = 0; i !== string.length; i++) { if (total >= Number.MAX_SAFE_INTEGER) break; total += string.charCodeAt(i) } return total } return randomColor; }));<|fim▁end|>
k/2 * 100
<|file_name|>stats.py<|end_file_name|><|fim▁begin|># uncompyle6 version 2.9.10 # Python bytecode 2.7 (62211) # Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10) # [GCC 6.2.0 20161005] # Embedded file name: stats.py """Statistics analyzer for HotShot.""" import profile import pstats import hotshot.log from hotshot.log import ENTER, EXIT def load(filename): return StatsLoader(filename).load() class StatsLoader: def __init__(self, logfn): self._logfn = logfn self._code = {} self._stack = [] self.pop_frame = self._stack.pop def load(self): p = Profile()<|fim▁hole|> p.get_time = _brokentimer log = hotshot.log.LogReader(self._logfn) taccum = 0 for event in log: what, (filename, lineno, funcname), tdelta = event if tdelta > 0: taccum += tdelta if what == ENTER: frame = self.new_frame(filename, lineno, funcname) p.trace_dispatch_call(frame, taccum * 1e-06) taccum = 0 elif what == EXIT: frame = self.pop_frame() p.trace_dispatch_return(frame, taccum * 1e-06) taccum = 0 return pstats.Stats(p) def new_frame(self, *args): try: code = self._code[args] except KeyError: code = FakeCode(*args) self._code[args] = code if self._stack: back = self._stack[-1] else: back = None frame = FakeFrame(code, back) self._stack.append(frame) return frame class Profile(profile.Profile): def simulate_cmd_complete(self): pass class FakeCode: def __init__(self, filename, firstlineno, funcname): self.co_filename = filename self.co_firstlineno = firstlineno self.co_name = self.__name__ = funcname class FakeFrame: def __init__(self, code, back): self.f_back = back self.f_code = code def _brokentimer(): raise RuntimeError, 'this timer should not be called'<|fim▁end|>
<|file_name|>rancheros.go<|end_file_name|><|fim▁begin|>package provision import ( "bufio" "fmt" "net/http" "strings" "github.com/docker/machine/commands/mcndirs" "github.com/docker/machine/libmachine/auth" "github.com/docker/machine/libmachine/drivers" "github.com/docker/machine/libmachine/engine" "github.com/docker/machine/libmachine/log" "github.com/docker/machine/libmachine/mcnutils" "github.com/docker/machine/libmachine/provision/pkgaction" "github.com/docker/machine/libmachine/provision/serviceaction" "github.com/docker/machine/libmachine/state" "github.com/docker/machine/libmachine/swarm" ) const ( versionsUrl = "http://releases.rancher.com/os/versions.yml" isoUrl = "https://github.com/rancherio/os/releases/download/%s/machine-rancheros.iso" hostnameTmpl = `sudo mkdir -p /var/lib/rancher/conf/cloud-config.d/ sudo tee /var/lib/rancher/conf/cloud-config.d/machine-hostname.yml << EOF #cloud-config hostname: %s EOF ` ) func init() { Register("RancherOS", &RegisteredProvisioner{ New: NewRancherProvisioner, }) } func NewRancherProvisioner(d drivers.Driver) Provisioner { return &RancherProvisioner{ GenericProvisioner{ DockerOptionsDir: "/var/lib/rancher/conf", DaemonOptionsFile: "/var/lib/rancher/conf/docker", OsReleaseId: "rancheros", Driver: d, }, } } type RancherProvisioner struct { GenericProvisioner } func (provisioner *RancherProvisioner) Service(name string, action serviceaction.ServiceAction) error { command := fmt.Sprintf("sudo system-docker %s %s", action.String(), name) if _, err := provisioner.SSHCommand(command); err != nil { return err } return nil } func (provisioner *RancherProvisioner) Package(name string, action pkgaction.PackageAction) error { var packageAction string if name == "docker" && action == pkgaction.Upgrade { return provisioner.upgrade() } switch action { case pkgaction.Install: packageAction = "enabled" case pkgaction.Remove: packageAction = "disable" case pkgaction.Upgrade: // TODO: support upgrade packageAction = "upgrade" } command 
:= fmt.Sprintf("sudo rancherctl service %s %s", packageAction, name) if _, err := provisioner.SSHCommand(command); err != nil { return err } return nil } func (provisioner *RancherProvisioner) Provision(swarmOptions swarm.SwarmOptions, authOptions auth.AuthOptions, engineOptions engine.EngineOptions) error { provisioner.SwarmOptions = swarmOptions provisioner.AuthOptions = authOptions provisioner.EngineOptions = engineOptions if provisioner.EngineOptions.StorageDriver == "" { provisioner.EngineOptions.StorageDriver = "overlay" } else if provisioner.EngineOptions.StorageDriver != "overlay" { return fmt.Errorf("Unsupported storage driver: %s", provisioner.EngineOptions.StorageDriver) } log.Debugf("Setting hostname %s", provisioner.Driver.GetMachineName()) if err := provisioner.SetHostname(provisioner.Driver.GetMachineName()); err != nil { return err } for _, pkg := range provisioner.Packages { log.Debugf("Installing package %s", pkg) if err := provisioner.Package(pkg, pkgaction.Install); err != nil { return err } } log.Debugf("Preparing certificates") provisioner.AuthOptions = setRemoteAuthOptions(provisioner) log.Debugf("Setting up certificates") if err := ConfigureAuth(provisioner); err != nil { return err } log.Debugf("Configuring swarm") if err := configureSwarm(provisioner, swarmOptions, provisioner.AuthOptions); err != nil { return err } return nil } func (provisioner *RancherProvisioner) SetHostname(hostname string) error { // /etc/hosts is bind mounted from Docker, this is hack to that the generic provisioner doesn't try to mv /etc/hosts if _, err := provisioner.SSHCommand("sed /127.0.1.1/d /etc/hosts > /tmp/hosts && cat /tmp/hosts | sudo tee /etc/hosts"); err != nil { return err } if err := provisioner.GenericProvisioner.SetHostname(hostname); err != nil { return err } if _, err := provisioner.SSHCommand(fmt.Sprintf(hostnameTmpl, hostname)); err != nil { return err } return nil } func (provisioner *RancherProvisioner) upgrade() error { switch 
provisioner.Driver.DriverName() { case "virtualbox": return provisioner.upgradeIso() default: log.Infof("Running upgrade") if _, err := provisioner.SSHCommand("sudo rancherctl os upgrade -f --no-reboot"); err != nil { return err } log.Infof("Upgrade succeeded, rebooting") // ignore errors here because the SSH connection will close provisioner.SSHCommand("sudo reboot") return nil } } <|fim▁hole|>func (provisioner *RancherProvisioner) upgradeIso() error { // Largely copied from Boot2Docker provisioner, we should find a way to share this code log.Info("Stopping machine to do the upgrade...") if err := provisioner.Driver.Stop(); err != nil { return err } if err := mcnutils.WaitFor(drivers.MachineInState(provisioner.Driver, state.Stopped)); err != nil { return err } machineName := provisioner.GetDriver().GetMachineName() log.Infof("Upgrading machine %s...", machineName) // TODO: Ideally, we should not read from mcndirs directory at all. // The driver should be able to communicate how and where to place the // relevant files. 
b2dutils := mcnutils.NewB2dUtils(mcndirs.GetBaseDir()) url, err := provisioner.getLatestISOURL() if err != nil { return err } if err := b2dutils.DownloadISOFromURL(url); err != nil { return err } // Copy the latest version of boot2docker ISO to the machine's directory if err := b2dutils.CopyIsoToMachineDir("", machineName); err != nil { return err } log.Infof("Starting machine back up...") if err := provisioner.Driver.Start(); err != nil { return err } return mcnutils.WaitFor(drivers.MachineInState(provisioner.Driver, state.Running)) } func (provisioner *RancherProvisioner) getLatestISOURL() (string, error) { log.Debugf("Reading %s", versionsUrl) resp, err := http.Get(versionsUrl) if err != nil { return "", err } defer resp.Body.Close() // Don't want to pull in yaml parser, we'll do this manually scanner := bufio.NewScanner(resp.Body) for scanner.Scan() { line := scanner.Text() if strings.HasPrefix(line, "current: ") { log.Debugf("Found %s", line) return fmt.Sprintf(isoUrl, strings.Split(line, ":")[2]), err } } return "", fmt.Errorf("Failed to find current version") }<|fim▁end|>
<|file_name|>lodash.compat.js<|end_file_name|><|fim▁begin|>/** * @license * Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/> * Build: `lodash -o ./dist/lodash.compat.js` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ ;(function() { /** Used as a safe reference for `undefined` in pre ES5 environments */ var undefined; /** Used to pool arrays and objects used internally */ var arrayPool = [], objectPool = []; /** Used to generate unique IDs */ var idCounter = 0; /** Used internally to indicate various things */ var indicatorObject = {}; /** Used to prefix keys to avoid issues with `__proto__` and properties on `Object.prototype` */ var keyPrefix = +new Date + ''; /** Used as the size when optimizations are enabled for large arrays */ var largeArraySize = 75; /** Used as the max size of the `arrayPool` and `objectPool` */ var maxPoolSize = 40; /** Used to detect and test whitespace */ var whitespace = ( // whitespace ' \t\x0B\f\xA0\ufeff' + // line terminators '\n\r\u2028\u2029' + // unicode category "Zs" space separators '\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000' ); /** Used to match empty string literals in compiled template source */ var reEmptyStringLeading = /\b__p \+= '';/g, reEmptyStringMiddle = /\b(__p \+=) '' \+/g, reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; /** * Used to match ES6 template delimiters * http://people.mozilla.org/~jorendorff/es6-draft.html#sec-literals-string-literals */ var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; /** Used to match regexp flags from their coerced string values */ var reFlags = /\w*$/; /** Used to detected named functions */ var reFuncName = /^\s*function[ \n\r\t]+\w/; /** Used to match "interpolate" template 
delimiters */ var reInterpolate = /<%=([\s\S]+?)%>/g; /** Used to match leading whitespace and zeros to be removed */ var reLeadingSpacesAndZeros = RegExp('^[' + whitespace + ']*0+(?=.$)'); /** Used to ensure capturing order of template delimiters */ var reNoMatch = /($^)/; /** Used to detect functions containing a `this` reference */ var reThis = /\bthis\b/; /** Used to match unescaped characters in compiled string literals */ var reUnescapedString = /['\n\r\t\u2028\u2029\\]/g; /** Used to assign default `context` object properties */ var contextProps = [ 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Math', 'Number', 'Object', 'RegExp', 'String', '_', 'attachEvent', 'clearTimeout', 'isFinite', 'isNaN', 'parseInt', 'setTimeout' ]; /** Used to fix the JScript [[DontEnum]] bug */ var shadowedProps = [ 'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', 'valueOf' ]; /** Used to make template sourceURLs easier to identify */ var templateCounter = 0; /** `Object#toString` result shortcuts */ var argsClass = '[object Arguments]', arrayClass = '[object Array]', boolClass = '[object Boolean]', dateClass = '[object Date]', errorClass = '[object Error]', funcClass = '[object Function]', numberClass = '[object Number]', objectClass = '[object Object]', regexpClass = '[object RegExp]', stringClass = '[object String]'; /** Used to identify object classifications that `_.clone` supports */ var cloneableClasses = {}; cloneableClasses[funcClass] = false; cloneableClasses[argsClass] = cloneableClasses[arrayClass] = cloneableClasses[boolClass] = cloneableClasses[dateClass] = cloneableClasses[numberClass] = cloneableClasses[objectClass] = cloneableClasses[regexpClass] = cloneableClasses[stringClass] = true; /** Used as an internal `_.debounce` options object */ var debounceOptions = { 'leading': false, 'maxWait': 0, 'trailing': false }; /** Used as the property descriptor for `__bindData__` */ var descriptor = { 'configurable': 
false, 'enumerable': false, 'value': null, 'writable': false }; /** Used as the data object for `iteratorTemplate` */ var iteratorData = { 'args': '', 'array': null, 'bottom': '', 'firstArg': '', 'init': '', 'keys': null, 'loop': '', 'shadowedProps': null, 'support': null, 'top': '', 'useHas': false }; /** Used to determine if values are of the language type Object */ var objectTypes = { 'boolean': false, 'function': true, 'object': true, 'number': false, 'string': false, 'undefined': false }; /** Used to escape characters for inclusion in compiled string literals */ var stringEscapes = { '\\': '\\', "'": "'", '\n': 'n', '\r': 'r', '\t': 't', '\u2028': 'u2028', '\u2029': 'u2029' }; /** Used as a reference to the global object */ var root = (objectTypes[typeof window] && window) || this; /** Detect free variable `exports` */ var freeExports = objectTypes[typeof exports] && exports && !exports.nodeType && exports; /** Detect free variable `module` */ var freeModule = objectTypes[typeof module] && module && !module.nodeType && module; /** Detect the popular CommonJS extension `module.exports` */ var moduleExports = freeModule && freeModule.exports === freeExports && freeExports; /** Detect free variable `global` from Node.js or Browserified code and use it as `root` */ var freeGlobal = objectTypes[typeof global] && global; if (freeGlobal && (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal)) { root = freeGlobal; } /*--------------------------------------------------------------------------*/ /** * The base implementation of `_.indexOf` without support for binary searches * or `fromIndex` constraints. * * @private * @param {Array} array The array to search. * @param {*} value The value to search for. * @param {number} [fromIndex=0] The index to search from. * @returns {number} Returns the index of the matched value or `-1`. */ function baseIndexOf(array, value, fromIndex) { var index = (fromIndex || 0) - 1, length = array ? 
array.length : 0; while (++index < length) { if (array[index] === value) { return index; } } return -1; } /** * An implementation of `_.contains` for cache objects that mimics the return * signature of `_.indexOf` by returning `0` if the value is found, else `-1`. * * @private * @param {Object} cache The cache object to inspect. * @param {*} value The value to search for. * @returns {number} Returns `0` if `value` is found, else `-1`. */ function cacheIndexOf(cache, value) { var type = typeof value; cache = cache.cache; if (type == 'boolean' || value == null) { return cache[value] ? 0 : -1; } if (type != 'number' && type != 'string') { type = 'object'; } var key = type == 'number' ? value : keyPrefix + value; cache = (cache = cache[type]) && cache[key]; return type == 'object' ? (cache && baseIndexOf(cache, value) > -1 ? 0 : -1) : (cache ? 0 : -1); } /** * Adds a given value to the corresponding cache object. * * @private * @param {*} value The value to add to the cache. */ function cachePush(value) { var cache = this.cache, type = typeof value; if (type == 'boolean' || value == null) { cache[value] = true; } else { if (type != 'number' && type != 'string') { type = 'object'; } var key = type == 'number' ? value : keyPrefix + value, typeCache = cache[type] || (cache[type] = {}); if (type == 'object') { (typeCache[key] || (typeCache[key] = [])).push(value); } else { typeCache[key] = true; } } } /** * Used by `_.max` and `_.min` as the default callback when a given * collection is a string value. * * @private * @param {string} value The character to inspect. * @returns {number} Returns the code unit of given character. */ function charAtCallback(value) { return value.charCodeAt(0); } /** * Used by `sortBy` to compare transformed `collection` elements, stable sorting * them in ascending order. * * @private * @param {Object} a The object to compare to `b`. * @param {Object} b The object to compare to `a`. 
* @returns {number} Returns the sort order indicator of `1` or `-1`. */ function compareAscending(a, b) { var ac = a.criteria, bc = b.criteria, index = -1, length = ac.length; while (++index < length) { var value = ac[index], other = bc[index]; if (value !== other) { if (value > other || typeof value == 'undefined') { return 1; } if (value < other || typeof other == 'undefined') { return -1; } } } // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications // that causes it, under certain circumstances, to return the same value for // `a` and `b`. See https://github.com/jashkenas/underscore/pull/1247 // // This also ensures a stable sort in V8 and other engines. // See http://code.google.com/p/v8/issues/detail?id=90 return a.index - b.index; } /** * Creates a cache object to optimize linear searches of large arrays. * * @private * @param {Array} [array=[]] The array to search. * @returns {null|Object} Returns the cache object or `null` if caching should not be used. */ function createCache(array) { var index = -1, length = array.length, first = array[0], mid = array[(length / 2) | 0], last = array[length - 1]; if (first && typeof first == 'object' && mid && typeof mid == 'object' && last && typeof last == 'object') { return false; } var cache = getObject(); cache['false'] = cache['null'] = cache['true'] = cache['undefined'] = false; var result = getObject(); result.array = array; result.cache = cache; result.push = cachePush; while (++index < length) { result.push(array[index]); } return result; } /** * Used by `template` to escape characters for inclusion in compiled * string literals. * * @private * @param {string} match The matched character to escape. * @returns {string} Returns the escaped character. */ function escapeStringChar(match) { return '\\' + stringEscapes[match]; } /** * Gets an array from the array pool or creates a new one if the pool is empty. * * @private * @returns {Array} The array from the pool. 
*/ function getArray() { return arrayPool.pop() || []; } /** * Gets an object from the object pool or creates a new one if the pool is empty. * * @private * @returns {Object} The object from the pool. */ function getObject() { return objectPool.pop() || { 'array': null, 'cache': null, 'criteria': null, 'false': false, 'index': 0, 'null': false, 'number': null, 'object': null, 'push': null, 'string': null, 'true': false, 'undefined': false, 'value': null }; } /** * Checks if `value` is a DOM node in IE < 9. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a DOM node, else `false`. */ function isNode(value) { // IE < 9 presents DOM nodes as `Object` objects except they have `toString` // methods that are `typeof` "string" and still can coerce nodes to strings return typeof value.toString != 'function' && typeof (value + '') == 'string'; } /** * Releases the given array back to the array pool. * * @private * @param {Array} [array] The array to release. */ function releaseArray(array) { array.length = 0; if (arrayPool.length < maxPoolSize) { arrayPool.push(array); } } /** * Releases the given object back to the object pool. * * @private * @param {Object} [object] The object to release. */ function releaseObject(object) { var cache = object.cache; if (cache) { releaseObject(cache); } object.array = object.cache = object.criteria = object.object = object.number = object.string = object.value = null; if (objectPool.length < maxPoolSize) { objectPool.push(object); } } /** * Slices the `collection` from the `start` index up to, but not including, * the `end` index. * * Note: This function is used instead of `Array#slice` to support node lists * in IE < 9 and to ensure dense arrays are returned. * * @private * @param {Array|Object|string} collection The collection to slice. * @param {number} start The start index. * @param {number} end The end index. * @returns {Array} Returns the new array. 
*/ function slice(array, start, end) { start || (start = 0); if (typeof end == 'undefined') { end = array ? array.length : 0; } var index = -1, length = end - start || 0, result = Array(length < 0 ? 0 : length); while (++index < length) { result[index] = array[start + index]; } return result; } /*--------------------------------------------------------------------------*/ /** * Create a new `lodash` function using the given context object. * * @static * @memberOf _ * @category Utilities * @param {Object} [context=root] The context object. * @returns {Function} Returns the `lodash` function. */ function runInContext(context) { // Avoid issues with some ES3 environments that attempt to use values, named // after built-in constructors like `Object`, for the creation of literals. // ES5 clears this up by stating that literals must use built-in constructors. // See http://es5.github.io/#x11.1.5. context = context ? _.defaults(root.Object(), context, _.pick(root, contextProps)) : root; /** Native constructor references */ var Array = context.Array, Boolean = context.Boolean, Date = context.Date, Error = context.Error, Function = context.Function, Math = context.Math, Number = context.Number, Object = context.Object, RegExp = context.RegExp, String = context.String, TypeError = context.TypeError; /** * Used for `Array` method references. * * Normally `Array.prototype` would suffice, however, using an array literal * avoids issues in Narwhal. 
*/ var arrayRef = []; /** Used for native method references */ var errorProto = Error.prototype, objectProto = Object.prototype, stringProto = String.prototype; /** Used to restore the original `_` reference in `noConflict` */ var oldDash = context._; /** Used to resolve the internal [[Class]] of values */ var toString = objectProto.toString; /** Used to detect if a method is native */ var reNative = RegExp('^' + String(toString) .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') .replace(/toString| for [^\]]+/g, '.*?') + '$' ); /** Native method shortcuts */ var ceil = Math.ceil, clearTimeout = context.clearTimeout, floor = Math.floor, fnToString = Function.prototype.toString, getPrototypeOf = isNative(getPrototypeOf = Object.getPrototypeOf) && getPrototypeOf, hasOwnProperty = objectProto.hasOwnProperty, push = arrayRef.push, propertyIsEnumerable = objectProto.propertyIsEnumerable, setTimeout = context.setTimeout, splice = arrayRef.splice, unshift = arrayRef.unshift; /** Used to set meta data on functions */ var defineProperty = (function() { // IE 8 only accepts DOM elements try { var o = {}, func = isNative(func = Object.defineProperty) && func, result = func(o, o, o) && func; } catch(e) { } return result; }()); /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeCreate = isNative(nativeCreate = Object.create) && nativeCreate, nativeIsArray = isNative(nativeIsArray = Array.isArray) && nativeIsArray, nativeIsFinite = context.isFinite, nativeIsNaN = context.isNaN, nativeKeys = isNative(nativeKeys = Object.keys) && nativeKeys, nativeMax = Math.max, nativeMin = Math.min, nativeParseInt = context.parseInt, nativeRandom = Math.random; /** Used to lookup a built-in constructor by [[Class]] */ var ctorByClass = {}; ctorByClass[arrayClass] = Array; ctorByClass[boolClass] = Boolean; ctorByClass[dateClass] = Date; ctorByClass[funcClass] = Function; ctorByClass[objectClass] = Object; ctorByClass[numberClass] = Number; 
ctorByClass[regexpClass] = RegExp; ctorByClass[stringClass] = String; /** Used to avoid iterating non-enumerable properties in IE < 9 */ var nonEnumProps = {}; nonEnumProps[arrayClass] = nonEnumProps[dateClass] = nonEnumProps[numberClass] = { 'constructor': true, 'toLocaleString': true, 'toString': true, 'valueOf': true }; nonEnumProps[boolClass] = nonEnumProps[stringClass] = { 'constructor': true, 'toString': true, 'valueOf': true }; nonEnumProps[errorClass] = nonEnumProps[funcClass] = nonEnumProps[regexpClass] = { 'constructor': true, 'toString': true }; nonEnumProps[objectClass] = { 'constructor': true }; (function() { var length = shadowedProps.length; while (length--) { var key = shadowedProps[length]; for (var className in nonEnumProps) { if (hasOwnProperty.call(nonEnumProps, className) && !hasOwnProperty.call(nonEnumProps[className], key)) { nonEnumProps[className][key] = false; } } } }()); /*--------------------------------------------------------------------------*/ /** * Creates a `lodash` object which wraps the given value to enable intuitive * method chaining. * * In addition to Lo-Dash methods, wrappers also have the following `Array` methods: * `concat`, `join`, `pop`, `push`, `reverse`, `shift`, `slice`, `sort`, `splice`, * and `unshift` * * Chaining is supported in custom builds as long as the `value` method is * implicitly or explicitly included in the build. 
* * The chainable wrapper functions are: * `after`, `assign`, `bind`, `bindAll`, `bindKey`, `chain`, `compact`, * `compose`, `concat`, `countBy`, `create`, `createCallback`, `curry`, * `debounce`, `defaults`, `defer`, `delay`, `difference`, `filter`, `flatten`, * `forEach`, `forEachRight`, `forIn`, `forInRight`, `forOwn`, `forOwnRight`, * `functions`, `groupBy`, `indexBy`, `initial`, `intersection`, `invert`, * `invoke`, `keys`, `map`, `max`, `memoize`, `merge`, `min`, `object`, `omit`, * `once`, `pairs`, `partial`, `partialRight`, `pick`, `pluck`, `pull`, `push`, * `range`, `reject`, `remove`, `rest`, `reverse`, `shuffle`, `slice`, `sort`, * `sortBy`, `splice`, `tap`, `throttle`, `times`, `toArray`, `transform`, * `union`, `uniq`, `unshift`, `unzip`, `values`, `where`, `without`, `wrap`, * and `zip` * * The non-chainable wrapper functions are: * `clone`, `cloneDeep`, `contains`, `escape`, `every`, `find`, `findIndex`, * `findKey`, `findLast`, `findLastIndex`, `findLastKey`, `has`, `identity`, * `indexOf`, `isArguments`, `isArray`, `isBoolean`, `isDate`, `isElement`, * `isEmpty`, `isEqual`, `isFinite`, `isFunction`, `isNaN`, `isNull`, `isNumber`, * `isObject`, `isPlainObject`, `isRegExp`, `isString`, `isUndefined`, `join`, * `lastIndexOf`, `mixin`, `noConflict`, `parseInt`, `pop`, `random`, `reduce`, * `reduceRight`, `result`, `shift`, `size`, `some`, `sortedIndex`, `runInContext`, * `template`, `unescape`, `uniqueId`, and `value` * * The wrapper functions `first` and `last` return wrapped values when `n` is * provided, otherwise they return unwrapped values. * * Explicit chaining can be enabled by using the `_.chain` method. * * @name _ * @constructor * @category Chaining * @param {*} value The value to wrap in a `lodash` instance. * @returns {Object} Returns a `lodash` instance. 
* @example * * var wrapped = _([1, 2, 3]); * * // returns an unwrapped value * wrapped.reduce(function(sum, num) { * return sum + num; * }); * // => 6 * * // returns a wrapped value * var squares = wrapped.map(function(num) { * return num * num; * }); * * _.isArray(squares); * // => false * * _.isArray(squares.value()); * // => true */ function lodash(value) { // don't wrap if already wrapped, even if wrapped by a different `lodash` constructor return (value && typeof value == 'object' && !isArray(value) && hasOwnProperty.call(value, '__wrapped__')) ? value : new lodashWrapper(value); } /** * A fast path for creating `lodash` wrapper objects. * * @private * @param {*} value The value to wrap in a `lodash` instance. * @param {boolean} chainAll A flag to enable chaining for all methods * @returns {Object} Returns a `lodash` instance. */ function lodashWrapper(value, chainAll) { this.__chain__ = !!chainAll; this.__wrapped__ = value; } // ensure `new lodashWrapper` is an instance of `lodash` lodashWrapper.prototype = lodash.prototype; /** * An object used to flag environments features. * * @static * @memberOf _ * @type Object */ var support = lodash.support = {}; (function() { var ctor = function() { this.x = 1; }, object = { '0': 1, 'length': 1 }, props = []; ctor.prototype = { 'valueOf': 1, 'y': 1 }; for (var key in new ctor) { props.push(key); } for (key in arguments) { } /** * Detect if an `arguments` object's [[Class]] is resolvable (all but Firefox < 4, IE < 9). * * @memberOf _.support * @type boolean */ support.argsClass = toString.call(arguments) == argsClass; /** * Detect if `arguments` objects are `Object` objects (all but Narwhal and Opera < 10.5). * * @memberOf _.support * @type boolean */ support.argsObject = arguments.constructor == Object && !(arguments instanceof Array); /** * Detect if `name` or `message` properties of `Error.prototype` are * enumerable by default. 
(IE < 9, Safari < 5.1) * * @memberOf _.support * @type boolean */ support.enumErrorProps = propertyIsEnumerable.call(errorProto, 'message') || propertyIsEnumerable.call(errorProto, 'name'); /** * Detect if `prototype` properties are enumerable by default. * * Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1 * (if the prototype or a property on the prototype has been set) * incorrectly sets a function's `prototype` property [[Enumerable]] * value to `true`. * * @memberOf _.support * @type boolean */ support.enumPrototypes = propertyIsEnumerable.call(ctor, 'prototype'); /** * Detect if functions can be decompiled by `Function#toString` * (all but PS3 and older Opera mobile browsers & avoided in Windows 8 apps). * * @memberOf _.support * @type boolean */ support.funcDecomp = !isNative(context.WinRTError) && reThis.test(runInContext); /** * Detect if `Function#name` is supported (all but IE). * * @memberOf _.support * @type boolean */ support.funcNames = typeof Function.name == 'string'; /** * Detect if `arguments` object indexes are non-enumerable * (Firefox < 4, IE < 9, PhantomJS, Safari < 5.1). * * @memberOf _.support * @type boolean */ support.nonEnumArgs = key != 0; /** * Detect if properties shadowing those on `Object.prototype` are non-enumerable. * * In IE < 9 an objects own properties, shadowing non-enumerable ones, are * made non-enumerable as well (a.k.a the JScript [[DontEnum]] bug). * * @memberOf _.support * @type boolean */ support.nonEnumShadows = !/valueOf/.test(props); /** * Detect if own properties are iterated after inherited properties (all but IE < 9). * * @memberOf _.support * @type boolean */ support.ownLast = props[0] != 'x'; /** * Detect if `Array#shift` and `Array#splice` augment array-like objects correctly. 
* * Firefox < 10, IE compatibility mode, and IE < 9 have buggy Array `shift()` * and `splice()` functions that fail to remove the last element, `value[0]`, * of array-like objects even though the `length` property is set to `0`. * The `shift()` method is buggy in IE 8 compatibility mode, while `splice()` * is buggy regardless of mode in IE < 9 and buggy in compatibility mode in IE 9. * * @memberOf _.support * @type boolean */ support.spliceObjects = (arrayRef.splice.call(object, 0, 1), !object[0]); /** * Detect lack of support for accessing string characters by index. * * IE < 8 can't access characters by index and IE 8 can only access * characters by index on string literals. * * @memberOf _.support * @type boolean */ support.unindexedChars = ('x'[0] + Object('x')[0]) != 'xx'; /** * Detect if a DOM node's [[Class]] is resolvable (all but IE < 9) * and that the JS engine errors when attempting to coerce an object to * a string without a `toString` function. * * @memberOf _.support * @type boolean */ try { support.nodeClass = !(toString.call(document) == objectClass && !({ 'toString': 0 } + '')); } catch(e) { support.nodeClass = true; } }(1)); /** * By default, the template delimiters used by Lo-Dash are similar to those in * embedded Ruby (ERB). Change the following template settings to use alternative * delimiters. * * @static * @memberOf _ * @type Object */ lodash.templateSettings = { /** * Used to detect `data` property values to be HTML-escaped. * * @memberOf _.templateSettings * @type RegExp */ 'escape': /<%-([\s\S]+?)%>/g, /** * Used to detect code to be evaluated. * * @memberOf _.templateSettings * @type RegExp */ 'evaluate': /<%([\s\S]+?)%>/g, /** * Used to detect `data` property values to inject. * * @memberOf _.templateSettings * @type RegExp */ 'interpolate': reInterpolate, /** * Used to reference the data object in the template text. 
* * @memberOf _.templateSettings * @type string */ 'variable': '', /** * Used to import variables into the compiled template. * * @memberOf _.templateSettings * @type Object */ 'imports': { /** * A reference to the `lodash` function. * * @memberOf _.templateSettings.imports * @type Function */ '_': lodash } }; /*--------------------------------------------------------------------------*/ /** * The template used to create iterator functions. * * @private * @param {Object} data The data object used to populate the text. * @returns {string} Returns the interpolated text. */ var iteratorTemplate = function(obj) { var __p = 'var index, iterable = ' + (obj.firstArg) + ', result = ' + (obj.init) + ';\nif (!iterable) return result;\n' + (obj.top) + ';'; if (obj.array) { __p += '\nvar length = iterable.length; index = -1;\nif (' + (obj.array) + ') { '; if (support.unindexedChars) { __p += '\n if (isString(iterable)) {\n iterable = iterable.split(\'\')\n } '; } __p += '\n while (++index < length) {\n ' + (obj.loop) + ';\n }\n}\nelse { '; } else if (support.nonEnumArgs) { __p += '\n var length = iterable.length; index = -1;\n if (length && isArguments(iterable)) {\n while (++index < length) {\n index += \'\';\n ' + (obj.loop) + ';\n }\n } else { '; } if (support.enumPrototypes) { __p += '\n var skipProto = typeof iterable == \'function\';\n '; } if (support.enumErrorProps) { __p += '\n var skipErrorProps = iterable === errorProto || iterable instanceof Error;\n '; } var conditions = []; if (support.enumPrototypes) { conditions.push('!(skipProto && index == "prototype")'); } if (support.enumErrorProps) { conditions.push('!(skipErrorProps && (index == "message" || index == "name"))'); } if (obj.useHas && obj.keys) { __p += '\n var ownIndex = -1,\n ownProps = objectTypes[typeof iterable] && keys(iterable),\n length = ownProps ? 
ownProps.length : 0;\n\n while (++ownIndex < length) {\n index = ownProps[ownIndex];\n'; if (conditions.length) { __p += ' if (' + (conditions.join(' && ')) + ') {\n '; } __p += (obj.loop) + '; '; if (conditions.length) { __p += '\n }'; } __p += '\n } '; } else { __p += '\n for (index in iterable) {\n'; if (obj.useHas) { conditions.push("hasOwnProperty.call(iterable, index)"); } if (conditions.length) { __p += ' if (' + (conditions.join(' && ')) + ') {\n '; } __p += (obj.loop) + '; '; if (conditions.length) { __p += '\n }'; } __p += '\n } '; if (support.nonEnumShadows) { __p += '\n\n if (iterable !== objectProto) {\n var ctor = iterable.constructor,\n isProto = iterable === (ctor && ctor.prototype),\n className = iterable === stringProto ? stringClass : iterable === errorProto ? errorClass : toString.call(iterable),\n nonEnum = nonEnumProps[className];\n '; for (k = 0; k < 7; k++) { __p += '\n index = \'' + (obj.shadowedProps[k]) + '\';\n if ((!(isProto && nonEnum[index]) && hasOwnProperty.call(iterable, index))'; if (!obj.useHas) { __p += ' || (!nonEnum[index] && iterable[index] !== objectProto[index])'; } __p += ') {\n ' + (obj.loop) + ';\n } '; } __p += '\n } '; } } if (obj.array || support.nonEnumArgs) { __p += '\n}'; } __p += (obj.bottom) + ';\nreturn result'; return __p }; /*--------------------------------------------------------------------------*/ /** * The base implementation of `_.bind` that creates the bound function and * sets its meta data. * * @private * @param {Array} bindData The bind data array. * @returns {Function} Returns the new bound function. 
 */
function baseBind(bindData) {
  // bindData layout: [func, bitmask, partialArgs, partialRightArgs, thisArg, arity]
  var func = bindData[0],
      partialArgs = bindData[2],
      thisArg = bindData[4];

  function bound() {
    // `Function#bind` spec
    // http://es5.github.io/#x15.3.4.5
    if (partialArgs) {
      // avoid `arguments` object deoptimizations by using `slice` instead
      // of `Array.prototype.slice.call` and not assigning `arguments` to a
      // variable as a ternary expression
      var args = slice(partialArgs);
      push.apply(args, arguments);
    }
    // mimic the constructor's `return` behavior
    // http://es5.github.io/#x13.2.2
    if (this instanceof bound) {
      // ensure `new bound` is an instance of `func`
      var thisBinding = baseCreate(func.prototype),
          result = func.apply(thisBinding, args || arguments);
      // a constructor's explicit object return wins over the new instance
      return isObject(result) ? result : thisBinding;
    }
    return func.apply(thisArg, args || arguments);
  }
  // stamp the wrapper so later `_.bind`/`_.partial` calls can compose with it
  setBindData(bound, bindData);
  return bound;
}

/**
 * The base implementation of `_.clone` without argument juggling or support
 * for `thisArg` binding.
 *
 * @private
 * @param {*} value The value to clone.
 * @param {boolean} [isDeep=false] Specify a deep clone.
 * @param {Function} [callback] The function to customize cloning values.
 * @param {Array} [stackA=[]] Tracks traversed source objects.
 * @param {Array} [stackB=[]] Associates clones with source counterparts.
 * @returns {*} Returns the cloned value.
 */
function baseClone(value, isDeep, callback, stackA, stackB) {
  // a `callback` may fully handle cloning; any non-`undefined` result is used as-is
  if (callback) {
    var result = callback(value);
    if (typeof result != 'undefined') {
      return result;
    }
  }
  // inspect [[Class]]
  var isObj = isObject(value);
  if (isObj) {
    var className = toString.call(value);
    // uncloneable classes (and DOM nodes where [[Class]] is unreliable) are returned by reference
    if (!cloneableClasses[className] || (!support.nodeClass && isNode(value))) {
      return value;
    }
    var ctor = ctorByClass[className];
    switch (className) {
      case boolClass:
      case dateClass:
        return new ctor(+value);

      case numberClass:
      case stringClass:
        return new ctor(value);

      case regexpClass:
        result = ctor(value.source, reFlags.exec(value));
        result.lastIndex = value.lastIndex;
        return result;
    }
  } else {
    // primitives are immutable; return them unchanged
    return value;
  }
  var isArr = isArray(value);
  if (isDeep) {
    // check for circular references and return corresponding clone
    var initedStack = !stackA;
    stackA || (stackA = getArray());
    stackB || (stackB = getArray());

    var length = stackA.length;
    while (length--) {
      if (stackA[length] == value) {
        return stackB[length];
      }
    }
    result = isArr ? ctor(value.length) : {};
  }
  else {
    result = isArr ? slice(value) : assign({}, value);
  }
  // add array properties assigned by `RegExp#exec`
  if (isArr) {
    if (hasOwnProperty.call(value, 'index')) {
      result.index = value.index;
    }
    if (hasOwnProperty.call(value, 'input')) {
      result.input = value.input;
    }
  }
  // exit for shallow clone
  if (!isDeep) {
    return result;
  }
  // add the source value to the stack of traversed objects
  // and associate it with its clone
  stackA.push(value);
  stackB.push(result);

  // recursively populate clone (susceptible to call stack limits)
  (isArr ? baseEach : forOwn)(value, function(objValue, key) {
    result[key] = baseClone(objValue, isDeep, callback, stackA, stackB);
  });

  // release pooled stacks only if this call created them
  if (initedStack) {
    releaseArray(stackA);
    releaseArray(stackB);
  }
  return result;
}

/**
 * The base implementation of `_.create` without support for assigning
 * properties to the created object.
 *
 * @private
 * @param {Object} prototype The object to inherit from.
 * @returns {Object} Returns the new object.
 */
function baseCreate(prototype, properties) {
  // NOTE: `properties` is accepted for `Object.create` signature parity but is unused here
  return isObject(prototype) ? nativeCreate(prototype) : {};
}
// fallback for browsers without `Object.create`
if (!nativeCreate) {
  baseCreate = (function() {
    function Object() {}
    return function(prototype) {
      if (isObject(prototype)) {
        Object.prototype = prototype;
        var result = new Object;
        // reset so the shim constructor doesn't retain a reference to `prototype`
        Object.prototype = null;
      }
      return result || context.Object();
    };
  }());
}

/**
 * The base implementation of `_.createCallback` without support for creating
 * "_.pluck" or "_.where" style callbacks.
 *
 * @private
 * @param {*} [func=identity] The value to convert to a callback.
 * @param {*} [thisArg] The `this` binding of the created callback.
 * @param {number} [argCount] The number of arguments the callback accepts.
 * @returns {Function} Returns a callback function.
 */
function baseCreateCallback(func, thisArg, argCount) {
  if (typeof func != 'function') {
    return identity;
  }
  // exit early for no `thisArg` or already bound by `Function#bind`
  if (typeof thisArg == 'undefined' || !('prototype' in func)) {
    return func;
  }
  var bindData = func.__bindData__;
  if (typeof bindData == 'undefined') {
    if (support.funcNames) {
      bindData = !func.name;
    }
    bindData = bindData || !support.funcDecomp;
    if (!bindData) {
      var source = fnToString.call(func);
      if (!support.funcNames) {
        bindData = !reFuncName.test(source);
      }
      if (!bindData) {
        // checks if `func` references the `this` keyword and stores the result
        bindData = reThis.test(source);
        setBindData(func, bindData);
      }
    }
  }
  // exit early if there are no `this` references or `func` is bound
  if (bindData === false || (bindData !== true && bindData[1] & 1)) {
    return func;
  }
  // fast paths for common arities avoid the cost of `bind`
  switch (argCount) {
    case 1: return function(value) {
      return func.call(thisArg, value);
    };
    case 2: return function(a, b) {
      return func.call(thisArg, a, b);
    };
    case 3: return function(value, index, collection) {
      return func.call(thisArg, value, index, collection);
    };
    case 4: return function(accumulator, value, index, collection) {
      return func.call(thisArg, accumulator, value, index, collection);
    };
  }
  return bind(func, thisArg);
}

/**
 * The base implementation of `createWrapper` that creates the wrapper and
 * sets its meta data.
 *
 * @private
 * @param {Array} bindData The bind data array.
 * @returns {Function} Returns the new function.
 */
function baseCreateWrapper(bindData) {
  // bindData layout: [func, bitmask, partialArgs, partialRightArgs, thisArg, arity]
  var func = bindData[0],
      bitmask = bindData[1],
      partialArgs = bindData[2],
      partialRightArgs = bindData[3],
      thisArg = bindData[4],
      arity = bindData[5];

  var isBind = bitmask & 1,
      isBindKey = bitmask & 2,
      isCurry = bitmask & 4,
      isCurryBound = bitmask & 8,
      key = func;

  function bound() {
    var thisBinding = isBind ? thisArg : this;
    if (partialArgs) {
      var args = slice(partialArgs);
      push.apply(args, arguments);
    }
    if (partialRightArgs || isCurry) {
      args || (args = slice(arguments));
      if (partialRightArgs) {
        push.apply(args, partialRightArgs);
      }
      // not enough arguments yet: return a new curried wrapper capturing `args`
      if (isCurry && args.length < arity) {
        bitmask |= 16 & ~32;
        return baseCreateWrapper([func, (isCurryBound ? bitmask : bitmask & ~3), args, null, thisArg, arity]);
      }
    }
    args || (args = arguments);
    if (isBindKey) {
      // late-bound method lookup so `_.bindKey` sees redefined methods
      func = thisBinding[key];
    }
    if (this instanceof bound) {
      // mimic `new` semantics; see `baseBind`
      thisBinding = baseCreate(func.prototype);
      var result = func.apply(thisBinding, args);
      return isObject(result) ? result : thisBinding;
    }
    return func.apply(thisBinding, args);
  }
  setBindData(bound, bindData);
  return bound;
}

/**
 * The base implementation of `_.difference` that accepts a single array
 * of values to exclude.
 *
 * @private
 * @param {Array} array The array to process.
 * @param {Array} [values] The array of values to exclude.
 * @returns {Array} Returns a new array of filtered values.
 */
function baseDifference(array, values) {
  var index = -1,
      indexOf = getIndexOf(),
      length = array ? array.length : 0,
      isLarge = length >= largeArraySize && indexOf === baseIndexOf,
      result = [];

  if (isLarge) {
    // switch to a cache-backed membership test for large inputs
    var cache = createCache(values);
    if (cache) {
      indexOf = cacheIndexOf;
      values = cache;
    } else {
      isLarge = false;
    }
  }
  while (++index < length) {
    var value = array[index];
    if (indexOf(values, value) < 0) {
      result.push(value);
    }
  }
  if (isLarge) {
    releaseObject(values);
  }
  return result;
}

/**
 * The base implementation of `_.flatten` without support for callback
 * shorthands or `thisArg` binding.
 *
 * @private
 * @param {Array} array The array to flatten.
 * @param {boolean} [isShallow=false] A flag to restrict flattening to a single level.
 * @param {boolean} [isStrict=false] A flag to restrict flattening to arrays and `arguments` objects.
 * @param {number} [fromIndex=0] The index to start from.
 * @returns {Array} Returns a new flattened array.
 */
function baseFlatten(array, isShallow, isStrict, fromIndex) {
  var index = (fromIndex || 0) - 1,
      length = array ? array.length : 0,
      result = [];

  while (++index < length) {
    var value = array[index];

    if (value && typeof value == 'object' && typeof value.length == 'number' && (isArray(value) || isArguments(value))) {
      // recursively flatten arrays (susceptible to call stack limits)
      if (!isShallow) {
        value = baseFlatten(value, isShallow, isStrict);
      }
      var valIndex = -1,
          valLength = value.length,
          resIndex = result.length;

      // presize `result` then copy, avoiding repeated `push` calls
      result.length += valLength;
      while (++valIndex < valLength) {
        result[resIndex++] = value[valIndex];
      }
    } else if (!isStrict) {
      result.push(value);
    }
  }
  return result;
}

/**
 * The base implementation of `_.isEqual`, without support for `thisArg` binding,
 * that allows partial "_.where" style comparisons.
 *
 * @private
 * @param {*} a The value to compare.
 * @param {*} b The other value to compare.
 * @param {Function} [callback] The function to customize comparing values.
 * @param {Function} [isWhere=false] A flag to indicate performing partial comparisons.
 * @param {Array} [stackA=[]] Tracks traversed `a` objects.
 * @param {Array} [stackB=[]] Tracks traversed `b` objects.
 * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
 */
function baseIsEqual(a, b, callback, isWhere, stackA, stackB) {
  // `isWhere` is used to indicate that when comparing objects, `a` has at least the properties of `b`
  if (callback) {
    var result = callback(a, b);
    if (typeof result != 'undefined') {
      return !!result;
    }
  }
  // exit early for identical values
  if (a === b) {
    // treat `+0` vs. `-0` as not equal
    return a !== 0 || (1 / a == 1 / b);
  }
  var type = typeof a,
      otherType = typeof b;

  // exit early for unlike primitive values (`a === a` filters out `NaN`)
  if (a === a && !(a && objectTypes[type]) && !(b && objectTypes[otherType])) {
    return false;
  }
  // exit early for `null` and `undefined` avoiding ES3's Function#call behavior
  // http://es5.github.io/#x15.3.4.4
  if (a == null || b == null) {
    return a === b;
  }
  // compare [[Class]] names
  var className = toString.call(a),
      otherClass = toString.call(b);

  if (className == argsClass) {
    className = objectClass;
  }
  if (otherClass == argsClass) {
    otherClass = objectClass;
  }
  if (className != otherClass) {
    return false;
  }
  switch (className) {
    case boolClass:
    case dateClass:
      // coerce dates and booleans to numbers, dates to milliseconds and booleans
      // to `1` or `0` treating invalid dates coerced to `NaN` as not equal
      return +a == +b;

    case numberClass:
      // treat `NaN` vs. `NaN` as equal
      return (a != +a)
        ? b != +b
        // but treat `+0` vs. `-0` as not equal
        : (a == 0 ? (1 / a == 1 / b) : a == +b);

    case regexpClass:
    case stringClass:
      // coerce regexes to strings (http://es5.github.io/#x15.10.6.4)
      // treat string primitives and their corresponding object instances as equal
      return a == String(b);
  }
  var isArr = className == arrayClass;
  if (!isArr) {
    // unwrap any `lodash` wrapped values
    var aWrapped = hasOwnProperty.call(a, '__wrapped__'),
        bWrapped = hasOwnProperty.call(b, '__wrapped__');

    if (aWrapped || bWrapped) {
      return baseIsEqual(aWrapped ? a.__wrapped__ : a, bWrapped ? b.__wrapped__ : b, callback, isWhere, stackA, stackB);
    }
    // exit for functions and DOM nodes
    if (className != objectClass || (!support.nodeClass && (isNode(a) || isNode(b)))) {
      return false;
    }
    // in older versions of Opera, `arguments` objects have `Array` constructors
    var ctorA = !support.argsObject && isArguments(a) ? Object : a.constructor,
        ctorB = !support.argsObject && isArguments(b) ? Object : b.constructor;

    // non `Object` object instances with different constructors are not equal
    if (ctorA != ctorB &&
          !(isFunction(ctorA) && ctorA instanceof ctorA && isFunction(ctorB) && ctorB instanceof ctorB) &&
          ('constructor' in a && 'constructor' in b)
        ) {
      return false;
    }
  }
  // assume cyclic structures are equal
  // the algorithm for detecting cyclic structures is adapted from ES 5.1
  // section 15.12.3, abstract operation `JO` (http://es5.github.io/#x15.12.3)
  var initedStack = !stackA;
  stackA || (stackA = getArray());
  stackB || (stackB = getArray());

  var length = stackA.length;
  while (length--) {
    if (stackA[length] == a) {
      return stackB[length] == b;
    }
  }
  var size = 0;
  // NOTE: reuses the hoisted `result` var declared in the `callback` branch above
  result = true;

  // add `a` and `b` to the stack of traversed objects
  stackA.push(a);
  stackB.push(b);

  // recursively compare objects and arrays (susceptible to call stack limits)
  if (isArr) {
    // compare lengths to determine if a deep comparison is necessary
    length = a.length;
    size = b.length;
    result = size == length;

    if (result || isWhere) {
      // deep compare the contents, ignoring non-numeric properties
      while (size--) {
        var index = length,
            value = b[size];

        if (isWhere) {
          while (index--) {
            if ((result = baseIsEqual(a[index], value, callback, isWhere, stackA, stackB))) {
              break;
            }
          }
        } else if (!(result = baseIsEqual(a[size], value, callback, isWhere, stackA, stackB))) {
          break;
        }
      }
    }
  }
  else {
    // deep compare objects using `forIn`, instead of `forOwn`, to avoid `Object.keys`
    // which, in this case, is more costly
    forIn(b, function(value, key, b) {
      if (hasOwnProperty.call(b, key)) {
        // count the number of properties.
        size++;
        // deep compare each property value.
        return (result = hasOwnProperty.call(a, key) && baseIsEqual(a[key], value, callback, isWhere, stackA, stackB));
      }
    });

    if (result && !isWhere) {
      // ensure both objects have the same number of properties
      forIn(a, function(value, key, a) {
        if (hasOwnProperty.call(a, key)) {
          // `size` will be `-1` if `a` has more properties than `b`
          return (result = --size > -1);
        }
      });
    }
  }
  stackA.pop();
  stackB.pop();

  if (initedStack) {
    releaseArray(stackA);
    releaseArray(stackB);
  }
  return result;
}

/**
 * The base implementation of `_.merge` without argument juggling or support
 * for `thisArg` binding.
 *
 * @private
 * @param {Object} object The destination object.
 * @param {Object} source The source object.
 * @param {Function} [callback] The function to customize merging properties.
 * @param {Array} [stackA=[]] Tracks traversed source objects.
 * @param {Array} [stackB=[]] Associates values with source counterparts.
 */
function baseMerge(object, source, callback, stackA, stackB) {
  (isArray(source) ? forEach : forOwn)(source, function(source, key) {
    var found,
        isArr,
        result = source,
        value = object[key];

    if (source && ((isArr = isArray(source)) || isPlainObject(source))) {
      // avoid merging previously merged cyclic sources
      var stackLength = stackA.length;
      while (stackLength--) {
        if ((found = stackA[stackLength] == source)) {
          value = stackB[stackLength];
          break;
        }
      }
      if (!found) {
        var isShallow;
        if (callback) {
          result = callback(value, source);
          if ((isShallow = typeof result != 'undefined')) {
            value = result;
          }
        }
        if (!isShallow) {
          // ensure the destination value is a compatible container before recursing
          value = isArr
            ? (isArray(value) ? value : [])
            : (isPlainObject(value) ? value : {});
        }
        // add `source` and associated `value` to the stack of traversed objects
        stackA.push(source);
        stackB.push(value);

        // recursively merge objects and arrays (susceptible to call stack limits)
        if (!isShallow) {
          baseMerge(value, source, callback, stackA, stackB);
        }
      }
    }
    else {
      if (callback) {
        result = callback(value, source);
        if (typeof result == 'undefined') {
          result = source;
        }
      }
      if (typeof result != 'undefined') {
        value = result;
      }
    }
    object[key] = value;
  });
}

/**
 * The base implementation of `_.random` without argument juggling or support
 * for returning floating-point numbers.
 *
 * @private
 * @param {number} min The minimum possible value.
 * @param {number} max The maximum possible value.
 * @returns {number} Returns a random number.
 */
function baseRandom(min, max) {
  // inclusive of both bounds, hence `max - min + 1`
  return min + floor(nativeRandom() * (max - min + 1));
}

/**
 * The base implementation of `_.uniq` without support for callback shorthands
 * or `thisArg` binding.
 *
 * @private
 * @param {Array} array The array to process.
 * @param {boolean} [isSorted=false] A flag to indicate that `array` is sorted.
 * @param {Function} [callback] The function called per iteration.
 * @returns {Array} Returns a duplicate-value-free array.
 */
function baseUniq(array, isSorted, callback) {
  var index = -1,
      indexOf = getIndexOf(),
      length = array ? array.length : 0,
      result = [];

  var isLarge = !isSorted && length >= largeArraySize && indexOf === baseIndexOf,
      seen = (callback || isLarge) ? getArray() : result;

  if (isLarge) {
    // use a cache-backed membership test for large, unsorted inputs
    var cache = createCache(seen);
    indexOf = cacheIndexOf;
    seen = cache;
  }
  while (++index < length) {
    var value = array[index],
        computed = callback ? callback(value, index, array) : value;

    // sorted arrays only need to compare against the previous computed value
    if (isSorted
          ? !index || seen[seen.length - 1] !== computed
          : indexOf(seen, computed) < 0
        ) {
      if (callback || isLarge) {
        seen.push(computed);
      }
      result.push(value);
    }
  }
  if (isLarge) {
    releaseArray(seen.array);
    releaseObject(seen);
  } else if (callback) {
    releaseArray(seen);
  }
  return result;
}

/**
 * Creates a function that aggregates a collection, creating an object composed
 * of keys generated from the results of running each element of the collection
 * through a callback. The given `setter` function sets the keys and values
 * of the composed object.
 *
 * @private
 * @param {Function} setter The setter function.
 * @returns {Function} Returns the new aggregator function.
 */
function createAggregator(setter) {
  return function(collection, callback, thisArg) {
    var result = {};
    callback = lodash.createCallback(callback, thisArg, 3);

    if (isArray(collection)) {
      // fast path for arrays avoids the compiled `baseEach`
      var index = -1,
          length = collection.length;

      while (++index < length) {
        var value = collection[index];
        setter(result, value, callback(value, index, collection), collection);
      }
    } else {
      baseEach(collection, function(value, key, collection) {
        setter(result, value, callback(value, key, collection), collection);
      });
    }
    return result;
  };
}

/**
 * Creates a function that, when called, either curries or invokes `func`
 * with an optional `this` binding and partially applied arguments.
 *
 * @private
 * @param {Function|string} func The function or method name to reference.
 * @param {number} bitmask The bitmask of method flags to compose.
 *  The bitmask may be composed of the following flags:
 *  1 - `_.bind`
 *  2 - `_.bindKey`
 *  4 - `_.curry`
 *  8 - `_.curry` (bound)
 *  16 - `_.partial`
 *  32 - `_.partialRight`
 * @param {Array} [partialArgs] An array of arguments to prepend to those
 *  provided to the new function.
 * @param {Array} [partialRightArgs] An array of arguments to append to those
 *  provided to the new function.
 * @param {*} [thisArg] The `this` binding of `func`.
 * @param {number} [arity] The arity of `func`.
 * @returns {Function} Returns the new function.
 */
function createWrapper(func, bitmask, partialArgs, partialRightArgs, thisArg, arity) {
  var isBind = bitmask & 1,
      isBindKey = bitmask & 2,
      isCurry = bitmask & 4,
      isCurryBound = bitmask & 8,
      isPartial = bitmask & 16,
      isPartialRight = bitmask & 32;

  if (!isBindKey && !isFunction(func)) {
    throw new TypeError;
  }
  // drop the partial flags when there are no partial arguments to apply
  if (isPartial && !partialArgs.length) {
    bitmask &= ~16;
    isPartial = partialArgs = false;
  }
  if (isPartialRight && !partialRightArgs.length) {
    bitmask &= ~32;
    isPartialRight = partialRightArgs = false;
  }
  var bindData = func && func.__bindData__;
  if (bindData && bindData !== true) {
    // `func` is already a wrapper: merge the new flags/args into a copy of
    // its bind data and re-wrap, instead of stacking wrappers
    // clone `bindData`
    bindData = slice(bindData);
    if (bindData[2]) {
      bindData[2] = slice(bindData[2]);
    }
    if (bindData[3]) {
      bindData[3] = slice(bindData[3]);
    }
    // set `thisBinding` if not previously bound
    if (isBind && !(bindData[1] & 1)) {
      bindData[4] = thisArg;
    }
    // set if previously bound but not currently (subsequent curried functions)
    if (!isBind && bindData[1] & 1) {
      bitmask |= 8;
    }
    // set curried arity if not yet set
    if (isCurry && !(bindData[1] & 4)) {
      bindData[5] = arity;
    }
    // append partial left arguments
    if (isPartial) {
      push.apply(bindData[2] || (bindData[2] = []), partialArgs);
    }
    // append partial right arguments
    if (isPartialRight) {
      unshift.apply(bindData[3] || (bindData[3] = []), partialRightArgs);
    }
    // merge flags
    bindData[1] |= bitmask;
    return createWrapper.apply(null, bindData);
  }
  // fast path for `_.bind` (1 = bind, 17 = bind | partial)
  var creater = (bitmask == 1 || bitmask === 17) ? baseBind : baseCreateWrapper;
  return creater([func, bitmask, partialArgs, partialRightArgs, thisArg, arity]);
}

/**
 * Creates compiled iteration functions.
 *
 * @private
 * @param {...Object} [options] The compile options object(s).
 * @param {string} [options.array] Code to determine if the iterable is an array or array-like.
 * @param {boolean} [options.useHas] Specify using `hasOwnProperty` checks in the object loop.
 * @param {Function} [options.keys] A reference to `_.keys` for use in own property iteration.
 * @param {string} [options.args] A comma separated string of iteration function arguments.
 * @param {string} [options.top] Code to execute before the iteration branches.
 * @param {string} [options.loop] Code to execute in the object loop.
 * @param {string} [options.bottom] Code to execute after the iteration branches.
 * @returns {Function} Returns the compiled function.
 */
function createIterator() {
  // data properties
  iteratorData.shadowedProps = shadowedProps;

  // iterator options (reset defaults; `iteratorData` is a shared, reused object)
  iteratorData.array = iteratorData.bottom = iteratorData.loop = iteratorData.top = '';
  iteratorData.init = 'iterable';
  iteratorData.useHas = true;

  // merge options into a template data object
  for (var object, index = 0; object = arguments[index]; index++) {
    for (var key in object) {
      iteratorData[key] = object[key];
    }
  }
  var args = iteratorData.args;
  iteratorData.firstArg = /^[^,]+/.exec(args)[0];

  // create the function factory; `Function` compiles the source produced by
  // `iteratorTemplate`, and the listed names are injected as free variables
  var factory = Function(
      'baseCreateCallback, errorClass, errorProto, hasOwnProperty, ' +
      'indicatorObject, isArguments, isArray, isString, keys, objectProto, ' +
      'objectTypes, nonEnumProps, stringClass, stringProto, toString',
    'return function(' + args + ') {\n' + iteratorTemplate(iteratorData) + '\n}'
  );

  // return the compiled function
  return factory(
    baseCreateCallback, errorClass, errorProto, hasOwnProperty,
    indicatorObject, isArguments, isArray, isString, iteratorData.keys, objectProto,
    objectTypes, nonEnumProps, stringClass, stringProto, toString
  );
}

/**
 * Used by `escape` to convert characters to HTML entities.
 *
 * @private
 * @param {string} match The matched character to escape.
 * @returns {string} Returns the escaped character.
 */
function escapeHtmlChar(match) {
  return htmlEscapes[match];
}

/**
 * Gets the appropriate "indexOf" function.
If the `_.indexOf` method is
 * customized, this method returns the custom method, otherwise it returns
 * the `baseIndexOf` function.
 *
 * @private
 * @returns {Function} Returns the "indexOf" function.
 */
function getIndexOf() {
  // if a user replaced `_.indexOf`, honor the override; otherwise use the base implementation
  var result = (result = lodash.indexOf) === indexOf ? baseIndexOf : result;
  return result;
}

/**
 * Checks if `value` is a native function.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if the `value` is a native function, else `false`.
 */
function isNative(value) {
  return typeof value == 'function' && reNative.test(value);
}

/**
 * Sets `this` binding data on a given function.
 *
 * @private
 * @param {Function} func The function to set data on.
 * @param {Array} value The data array to set.
 */
var setBindData = !defineProperty ? noop : function(func, value) {
  // use `defineProperty` so `__bindData__` stays non-enumerable
  descriptor.value = value;
  defineProperty(func, '__bindData__', descriptor);
};

/**
 * A fallback implementation of `isPlainObject` which checks if a given value
 * is an object created by the `Object` constructor, assuming objects created
 * by the `Object` constructor have no inherited enumerable properties and that
 * there are no `Object.prototype` extensions.
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a plain object, else `false`.
 */
function shimIsPlainObject(value) {
  var ctor,
      result;

  // avoid non Object objects, `arguments` objects, and DOM elements
  if (!(value && toString.call(value) == objectClass) ||
      (ctor = value.constructor, isFunction(ctor) && !(ctor instanceof ctor)) ||
      (!support.argsClass && isArguments(value)) ||
      (!support.nodeClass && isNode(value))) {
    return false;
  }
  // IE < 9 iterates inherited properties before own properties. If the first
  // iterated property is an object's own property then there are no inherited
  // enumerable properties.
  if (support.ownLast) {
    forIn(value, function(value, key, object) {
      result = hasOwnProperty.call(object, key);
      return false;
    });
    return result !== false;
  }
  // In most environments an object's own properties are iterated before
  // its inherited properties. If the last iterated property is an object's
  // own property then there are no inherited enumerable properties.
  forIn(value, function(value, key) {
    result = key;
  });
  return typeof result == 'undefined' || hasOwnProperty.call(value, result);
}

/**
 * Used by `unescape` to convert HTML entities to characters.
 *
 * @private
 * @param {string} match The matched character to unescape.
 * @returns {string} Returns the unescaped character.
 */
function unescapeHtmlChar(match) {
  return htmlUnescapes[match];
}

/*--------------------------------------------------------------------------*/

/**
 * Checks if `value` is an `arguments` object.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if the `value` is an `arguments` object, else `false`.
 * @example
 *
 * (function() { return _.isArguments(arguments); })(1, 2, 3);
 * // => true
 *
 * _.isArguments([1, 2, 3]);
 * // => false
 */
function isArguments(value) {
  return value && typeof value == 'object' && typeof value.length == 'number' &&
    toString.call(value) == argsClass || false;
}
// fallback for browsers that can't detect `arguments` objects by [[Class]]
if (!support.argsClass) {
  isArguments = function(value) {
    // `arguments` objects have a non-enumerable own `callee` property
    return value && typeof value == 'object' && typeof value.length == 'number' &&
      hasOwnProperty.call(value, 'callee') && !propertyIsEnumerable.call(value, 'callee') || false;
  };
}

/**
 * Checks if `value` is an array.
 *
 * @static
 * @memberOf _
 * @type Function
 * @category Objects
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if the `value` is an array, else `false`.
 * @example
 *
 * (function() { return _.isArray(arguments); })();
 * // => false
 *
 * _.isArray([1, 2, 3]);
 * // => true
 */
var isArray = nativeIsArray || function(value) {
  return value && typeof value == 'object' && typeof value.length == 'number' &&
    toString.call(value) == arrayClass || false;
};

/**
 * A fallback implementation of `Object.keys` which produces an array of the
 * given object's own enumerable property names.
 *
 * @private
 * @type Function
 * @param {Object} object The object to inspect.
 * @returns {Array} Returns an array of property names.
 */
var shimKeys = createIterator({
  'args': 'object',
  'init': '[]',
  'top': 'if (!(objectTypes[typeof object])) return result',
  'loop': 'result.push(index)'
});

/**
 * Creates an array composed of the own enumerable property names of an object.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Object} object The object to inspect.
 * @returns {Array} Returns an array of property names.
 * @example
 *
 * _.keys({ 'one': 1, 'two': 2, 'three': 3 });
 * // => ['one', 'two', 'three'] (property order is not guaranteed across environments)
 */
var keys = !nativeKeys ? shimKeys : function(object) {
  if (!isObject(object)) {
    return [];
  }
  // fall back to the shim where native `Object.keys` misbehaves
  // (enumerable `prototype` on functions, non-enumerable `arguments` indexes)
  if ((support.enumPrototypes && typeof object == 'function') ||
      (support.nonEnumArgs && object.length && isArguments(object))) {
    return shimKeys(object);
  }
  return nativeKeys(object);
};

/** Reusable iterator options shared by `each`, `forIn`, and `forOwn` */
var eachIteratorOptions = {
  'args': 'collection, callback, thisArg',
  'top': "callback = callback && typeof thisArg == 'undefined' ? callback : baseCreateCallback(callback, thisArg, 3)",
  'array': "typeof length == 'number'",
  'keys': keys,
  'loop': 'if (callback(iterable[index], index, collection) === false) return result'
};

/** Reusable iterator options for `assign` and `defaults` */
var defaultsIteratorOptions = {
  'args': 'object, source, guard',
  'top':
    'var args = arguments,\n' +
    ' argsIndex = 0,\n' +
    " argsLength = typeof guard == 'number' ? 2 : args.length;\n" +
    'while (++argsIndex < argsLength) {\n' +
    ' iterable = args[argsIndex];\n' +
    ' if (iterable && objectTypes[typeof iterable]) {',
  'keys': keys,
  'loop': "if (typeof result[index] == 'undefined') result[index] = iterable[index]",
  'bottom': ' }\n}'
};

/** Reusable iterator options for `forIn` and `forOwn` */
var forOwnIteratorOptions = {
  'top': 'if (!objectTypes[typeof iterable]) return result;\n' + eachIteratorOptions.top,
  'array': false
};

/**
 * Used to convert characters to HTML entities:
 *
 * Though the `>` character is escaped for symmetry, characters like `>` and `/`
 * don't require escaping in HTML and have no special meaning unless they're part
 * of a tag or an unquoted attribute value.
 * http://mathiasbynens.be/notes/ambiguous-ampersands (under "semi-related fun fact")
 */
var htmlEscapes = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  '"': '&quot;',
  "'": '&#39;'
};

/** Used to convert HTML entities to characters */
var htmlUnescapes = invert(htmlEscapes);

/** Used to match HTML entities and HTML characters */
var reEscapedHtml = RegExp('(' + keys(htmlUnescapes).join('|') + ')', 'g'),
    reUnescapedHtml = RegExp('[' + keys(htmlEscapes).join('') + ']', 'g');

/**
 * A function compiled to iterate `arguments` objects, arrays, objects, and
 * strings consistently across environments, executing the callback for each
 * element in the collection. The callback is bound to `thisArg` and invoked
 * with three arguments; (value, index|key, collection). Callbacks may exit
 * iteration early by explicitly returning `false`.
 *
 * @private
 * @type Function
 * @param {Array|Object|string} collection The collection to iterate over.
 * @param {Function} [callback=identity] The function called per iteration.
 * @param {*} [thisArg] The `this` binding of `callback`.
 * @returns {Array|Object|string} Returns `collection`.
*/ var baseEach = createIterator(eachIteratorOptions); /*--------------------------------------------------------------------------*/ /** * Assigns own enumerable properties of source object(s) to the destination * object. Subsequent sources will overwrite property assignments of previous * sources. If a callback is provided it will be executed to produce the * assigned values. The callback is bound to `thisArg` and invoked with two * arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @type Function * @alias extend * @category Objects * @param {Object} object The destination object. * @param {...Object} [source] The source objects. * @param {Function} [callback] The function to customize assigning values. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the destination object. * @example * * _.assign({ 'name': 'fred' }, { 'employer': 'slate' }); * // => { 'name': 'fred', 'employer': 'slate' } * * var defaults = _.partialRight(_.assign, function(a, b) { * return typeof a == 'undefined' ? b : a; * }); * * var object = { 'name': 'barney' }; * defaults(object, { 'name': 'fred', 'employer': 'slate' }); * // => { 'name': 'barney', 'employer': 'slate' } */ var assign = createIterator(defaultsIteratorOptions, { 'top': defaultsIteratorOptions.top.replace(';', ';\n' + "if (argsLength > 3 && typeof args[argsLength - 2] == 'function') {\n" + ' var callback = baseCreateCallback(args[--argsLength - 1], args[argsLength--], 2);\n' + "} else if (argsLength > 2 && typeof args[argsLength - 1] == 'function') {\n" + ' callback = args[--argsLength];\n' + '}' ), 'loop': 'result[index] = callback ? callback(result[index], iterable[index]) : iterable[index]' }); /** * Creates a clone of `value`. If `isDeep` is `true` nested objects will also * be cloned, otherwise they will be assigned by reference. If a callback * is provided it will be executed to produce the cloned values. 
If the * callback returns `undefined` cloning will be handled by the method instead. * The callback is bound to `thisArg` and invoked with one argument; (value). * * @static * @memberOf _ * @category Objects * @param {*} value The value to clone. * @param {boolean} [isDeep=false] Specify a deep clone. * @param {Function} [callback] The function to customize cloning values. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the cloned value. * @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * var shallow = _.clone(characters); * shallow[0] === characters[0]; * // => true * * var deep = _.clone(characters, true); * deep[0] === characters[0]; * // => false * * _.mixin({ * 'clone': _.partialRight(_.clone, function(value) { * return _.isElement(value) ? value.cloneNode(false) : undefined; * }) * }); * * var clone = _.clone(document.body); * clone.childNodes.length; * // => 0 */ function clone(value, isDeep, callback, thisArg) { // allows working with "Collections" methods without using their `index` // and `collection` arguments for `isDeep` and `callback` if (typeof isDeep != 'boolean' && isDeep != null) { thisArg = callback; callback = isDeep; isDeep = false; } return baseClone(value, isDeep, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 1)); } /** * Creates a deep clone of `value`. If a callback is provided it will be * executed to produce the cloned values. If the callback returns `undefined` * cloning will be handled by the method instead. The callback is bound to * `thisArg` and invoked with one argument; (value). * * Note: This method is loosely based on the structured clone algorithm. Functions * and DOM nodes are **not** cloned. The enumerable properties of `arguments` objects and * objects created by constructors other than `Object` are cloned to plain `Object` objects. 
* See http://www.w3.org/TR/html5/infrastructure.html#internal-structured-cloning-algorithm. * * @static * @memberOf _ * @category Objects * @param {*} value The value to deep clone. * @param {Function} [callback] The function to customize cloning values. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the deep cloned value. * @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * var deep = _.cloneDeep(characters); * deep[0] === characters[0]; * // => false * * var view = { * 'label': 'docs', * 'node': element * }; * * var clone = _.cloneDeep(view, function(value) { * return _.isElement(value) ? value.cloneNode(true) : undefined; * }); * * clone.node == view.node; * // => false */ function cloneDeep(value, callback, thisArg) { return baseClone(value, true, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 1)); } /** * Creates an object that inherits from the given `prototype` object. If a * `properties` object is provided its own enumerable properties are assigned * to the created object. * * @static * @memberOf _ * @category Objects * @param {Object} prototype The object to inherit from. * @param {Object} [properties] The properties to assign to the object. * @returns {Object} Returns the new object. * @example * * function Shape() { * this.x = 0; * this.y = 0; * } * * function Circle() { * Shape.call(this); * } * * Circle.prototype = _.create(Shape.prototype, { 'constructor': Circle }); * * var circle = new Circle; * circle instanceof Circle; * // => true * * circle instanceof Shape; * // => true */ function create(prototype, properties) { var result = baseCreate(prototype); return properties ? assign(result, properties) : result; } /** * Assigns own enumerable properties of source object(s) to the destination * object for all destination properties that resolve to `undefined`. 
Once a * property is set, additional defaults of the same property will be ignored. * * @static * @memberOf _ * @type Function * @category Objects * @param {Object} object The destination object. * @param {...Object} [source] The source objects. * @param- {Object} [guard] Allows working with `_.reduce` without using its * `key` and `object` arguments as sources. * @returns {Object} Returns the destination object. * @example * * var object = { 'name': 'barney' }; * _.defaults(object, { 'name': 'fred', 'employer': 'slate' }); * // => { 'name': 'barney', 'employer': 'slate' } */ var defaults = createIterator(defaultsIteratorOptions); /** * This method is like `_.findIndex` except that it returns the key of the * first element that passes the callback check, instead of the element itself. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to search. * @param {Function|Object|string} [callback=identity] The function called per * iteration. If a property name or object is provided it will be used to * create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {string|undefined} Returns the key of the found element, else `undefined`. 
* @example * * var characters = { * 'barney': { 'age': 36, 'blocked': false }, * 'fred': { 'age': 40, 'blocked': true }, * 'pebbles': { 'age': 1, 'blocked': false } * }; * * _.findKey(characters, function(chr) { * return chr.age < 40; * }); * // => 'barney' (property order is not guaranteed across environments) * * // using "_.where" callback shorthand * _.findKey(characters, { 'age': 1 }); * // => 'pebbles' * * // using "_.pluck" callback shorthand * _.findKey(characters, 'blocked'); * // => 'fred' */ function findKey(object, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg, 3); forOwn(object, function(value, key, object) { if (callback(value, key, object)) { result = key; return false; } }); return result; } /** * This method is like `_.findKey` except that it iterates over elements * of a `collection` in the opposite order. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to search. * @param {Function|Object|string} [callback=identity] The function called per * iteration. If a property name or object is provided it will be used to * create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {string|undefined} Returns the key of the found element, else `undefined`. 
* @example * * var characters = { * 'barney': { 'age': 36, 'blocked': true }, * 'fred': { 'age': 40, 'blocked': false }, * 'pebbles': { 'age': 1, 'blocked': true } * }; * * _.findLastKey(characters, function(chr) { * return chr.age < 40; * }); * // => returns `pebbles`, assuming `_.findKey` returns `barney` * * // using "_.where" callback shorthand * _.findLastKey(characters, { 'age': 40 }); * // => 'fred' * * // using "_.pluck" callback shorthand * _.findLastKey(characters, 'blocked'); * // => 'pebbles' */ function findLastKey(object, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg, 3); forOwnRight(object, function(value, key, object) { if (callback(value, key, object)) { result = key; return false; } }); return result; } /** * Iterates over own and inherited enumerable properties of an object, * executing the callback for each property. The callback is bound to `thisArg` * and invoked with three arguments; (value, key, object). Callbacks may exit * iteration early by explicitly returning `false`. * * @static * @memberOf _ * @type Function * @category Objects * @param {Object} object The object to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns `object`. * @example * * function Shape() { * this.x = 0; * this.y = 0; * } * * Shape.prototype.move = function(x, y) { * this.x += x; * this.y += y; * }; * * _.forIn(new Shape, function(value, key) { * console.log(key); * }); * // => logs 'x', 'y', and 'move' (property order is not guaranteed across environments) */ var forIn = createIterator(eachIteratorOptions, forOwnIteratorOptions, { 'useHas': false }); /** * This method is like `_.forIn` except that it iterates over elements * of a `collection` in the opposite order. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to iterate over. 
 * @param {Function} [callback=identity] The function called per iteration.
 * @param {*} [thisArg] The `this` binding of `callback`.
 * @returns {Object} Returns `object`.
 * @example
 *
 * function Shape() {
 *   this.x = 0;
 *   this.y = 0;
 * }
 *
 * Shape.prototype.move = function(x, y) {
 *   this.x += x;
 *   this.y += y;
 * };
 *
 * _.forInRight(new Shape, function(value, key) {
 *   console.log(key);
 * });
 * // => logs 'move', 'y', and 'x' assuming `_.forIn` logs 'x', 'y', and 'move'
 */
function forInRight(object, callback, thisArg) {
  // `pairs` is a flat [key1, value1, key2, value2, ...] list of the own and
  // inherited enumerable properties, in forward iteration order
  var pairs = [];

  forIn(object, function(value, key) {
    pairs.push(key, value);
  });

  var length = pairs.length;
  callback = baseCreateCallback(callback, thisArg, 3);
  // walk the flat list backwards two slots at a time: after the loop
  // decrement, `pairs[length--]` yields the value and `pairs[length]` the key
  while (length--) {
    if (callback(pairs[length--], pairs[length], object) === false) {
      break;
    }
  }
  return object;
}

/**
 * Iterates over own enumerable properties of an object, executing the callback
 * for each property. The callback is bound to `thisArg` and invoked with three
 * arguments; (value, key, object). Callbacks may exit iteration early by
 * explicitly returning `false`.
 *
 * @static
 * @memberOf _
 * @type Function
 * @category Objects
 * @param {Object} object The object to iterate over.
 * @param {Function} [callback=identity] The function called per iteration.
 * @param {*} [thisArg] The `this` binding of `callback`.
 * @returns {Object} Returns `object`.
 * @example
 *
 * _.forOwn({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) {
 *   console.log(key);
 * });
 * // => logs '0', '1', and 'length' (property order is not guaranteed across environments)
 */
var forOwn = createIterator(eachIteratorOptions, forOwnIteratorOptions);

/**
 * This method is like `_.forOwn` except that it iterates over elements
 * of a `collection` in the opposite order.
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Object} object The object to iterate over.
 * @param {Function} [callback=identity] The function called per iteration.
 * @param {*} [thisArg] The `this` binding of `callback`.
* @returns {Object} Returns `object`. * @example * * _.forOwnRight({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) { * console.log(key); * }); * // => logs 'length', '1', and '0' assuming `_.forOwn` logs '0', '1', and 'length' */ function forOwnRight(object, callback, thisArg) { var props = keys(object), length = props.length; callback = baseCreateCallback(callback, thisArg, 3); while (length--) { var key = props[length]; if (callback(object[key], key, object) === false) { break; } } return object; } /** * Creates a sorted array of property names of all enumerable properties, * own and inherited, of `object` that have function values. * * @static * @memberOf _ * @alias methods * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property names that have function values. * @example * * _.functions(_); * // => ['all', 'any', 'bind', 'bindAll', 'clone', 'compact', 'compose', ...] */ function functions(object) { var result = []; forIn(object, function(value, key) { if (isFunction(value)) { result.push(key); } }); return result.sort(); } /** * Checks if the specified property name exists as a direct property of `object`, * instead of an inherited property. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @param {string} key The name of the property to check. * @returns {boolean} Returns `true` if key is a direct property, else `false`. * @example * * _.has({ 'a': 1, 'b': 2, 'c': 3 }, 'b'); * // => true */ function has(object, key) { return object ? hasOwnProperty.call(object, key) : false; } /** * Creates an object composed of the inverted keys and values of the given object. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to invert. * @returns {Object} Returns the created inverted object. 
* @example * * _.invert({ 'first': 'fred', 'second': 'barney' }); * // => { 'fred': 'first', 'barney': 'second' } */ function invert(object) { var index = -1, props = keys(object), length = props.length, result = {}; while (++index < length) { var key = props[index]; result[object[key]] = key; } return result; } /** * Checks if `value` is a boolean value. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a boolean value, else `false`. * @example * * _.isBoolean(null); * // => false */ function isBoolean(value) { return value === true || value === false || value && typeof value == 'object' && toString.call(value) == boolClass || false; } /** * Checks if `value` is a date. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a date, else `false`. * @example * * _.isDate(new Date); * // => true */ function isDate(value) { return value && typeof value == 'object' && toString.call(value) == dateClass || false; } /** * Checks if `value` is a DOM element. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a DOM element, else `false`. * @example * * _.isElement(document.body); * // => true */ function isElement(value) { return value && value.nodeType === 1 || false; } /** * Checks if `value` is empty. Arrays, strings, or `arguments` objects with a * length of `0` and objects with no own enumerable properties are considered * "empty". * * @static * @memberOf _ * @category Objects * @param {Array|Object|string} value The value to inspect. * @returns {boolean} Returns `true` if the `value` is empty, else `false`. 
 * @example
 *
 * _.isEmpty([1, 2, 3]);
 * // => false
 *
 * _.isEmpty({});
 * // => true
 *
 * _.isEmpty('');
 * // => true
 */
function isEmpty(value) {
  var result = true;
  // all falsey values (null, undefined, 0, '', false, NaN) are empty
  if (!value) {
    return result;
  }
  var className = toString.call(value),
      length = value.length;

  // array-like values — arrays, strings, `arguments` objects, and jQuery-like
  // plain objects exposing a numeric `length` plus a `splice` method — are
  // empty exactly when their length is 0
  if ((className == arrayClass || className == stringClass ||
      (support.argsClass ? className == argsClass : isArguments(value))) ||
      (className == objectClass && typeof length == 'number' && isFunction(value.splice))) {
    return !length;
  }
  // other objects are empty only if they have no own enumerable properties;
  // returning `false` from the callback exits after the first one found
  forOwn(value, function() {
    return (result = false);
  });
  return result;
}

/**
 * Performs a deep comparison between two values to determine if they are
 * equivalent to each other. If a callback is provided it will be executed
 * to compare values. If the callback returns `undefined` comparisons will
 * be handled by the method instead. The callback is bound to `thisArg` and
 * invoked with two arguments; (a, b).
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {*} a The value to compare.
 * @param {*} b The other value to compare.
 * @param {Function} [callback] The function to customize comparing values.
 * @param {*} [thisArg] The `this` binding of `callback`.
 * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
 * @example
 *
 * var object = { 'name': 'fred' };
 * var copy = { 'name': 'fred' };
 *
 * object == copy;
 * // => false
 *
 * _.isEqual(object, copy);
 * // => true
 *
 * var words = ['hello', 'goodbye'];
 * var otherWords = ['hi', 'goodbye'];
 *
 * _.isEqual(words, otherWords, function(a, b) {
 *   var reGreet = /^(?:hello|hi)$/i,
 *       aGreet = _.isString(a) && reGreet.test(a),
 *       bGreet = _.isString(b) && reGreet.test(b);
 *
 *   return (aGreet || bGreet) ? (aGreet == bGreet) : undefined;
 * });
 * // => true
 */
function isEqual(a, b, callback, thisArg) {
  // delegate to the internal comparer; passing `false` (when `callback`
  // is not a function) disables customization
  return baseIsEqual(a, b, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 2));
}

/**
 * Checks if `value` is, or can be coerced to, a finite number.
* * Note: This is not the same as native `isFinite` which will return true for * booleans and empty strings. See http://es5.github.io/#x15.1.2.5. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is finite, else `false`. * @example * * _.isFinite(-101); * // => true * * _.isFinite('10'); * // => true * * _.isFinite(true); * // => false * * _.isFinite(''); * // => false * * _.isFinite(Infinity); * // => false */ function isFinite(value) { return nativeIsFinite(value) && !nativeIsNaN(parseFloat(value)); } /** * Checks if `value` is a function. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true */ function isFunction(value) { return typeof value == 'function'; } // fallback for older versions of Chrome and Safari if (isFunction(/x/)) { isFunction = function(value) { return typeof value == 'function' && toString.call(value) == funcClass; }; } /** * Checks if `value` is the language type of Object. * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(1); * // => false */ function isObject(value) { // check if the value is the ECMAScript language type of Object // http://es5.github.io/#x8 // and avoid a V8 bug // http://code.google.com/p/v8/issues/detail?id=2291 return !!(value && objectTypes[typeof value]); } /** * Checks if `value` is `NaN`. * * Note: This is not the same as native `isNaN` which will return `true` for * `undefined` and other non-numeric values. See http://es5.github.io/#x15.1.2.4. 
* * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is `NaN`, else `false`. * @example * * _.isNaN(NaN); * // => true * * _.isNaN(new Number(NaN)); * // => true * * isNaN(undefined); * // => true * * _.isNaN(undefined); * // => false */ function isNaN(value) { // `NaN` as a primitive is the only value that is not equal to itself // (perform the [[Class]] check first to avoid errors with some host objects in IE) return isNumber(value) && value != +value; } /** * Checks if `value` is `null`. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is `null`, else `false`. * @example * * _.isNull(null); * // => true * * _.isNull(undefined); * // => false */ function isNull(value) { return value === null; } /** * Checks if `value` is a number. * * Note: `NaN` is considered a number. See http://es5.github.io/#x8.5. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a number, else `false`. * @example * * _.isNumber(8.4 * 5); * // => true */ function isNumber(value) { return typeof value == 'number' || value && typeof value == 'object' && toString.call(value) == numberClass || false; } /** * Checks if `value` is an object created by the `Object` constructor. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. * @example * * function Shape() { * this.x = 0; * this.y = 0; * } * * _.isPlainObject(new Shape); * // => false * * _.isPlainObject([1, 2, 3]); * // => false * * _.isPlainObject({ 'x': 0, 'y': 0 }); * // => true */ var isPlainObject = !getPrototypeOf ? 
shimIsPlainObject : function(value) { if (!(value && toString.call(value) == objectClass) || (!support.argsClass && isArguments(value))) { return false; } var valueOf = value.valueOf, objProto = isNative(valueOf) && (objProto = getPrototypeOf(valueOf)) && getPrototypeOf(objProto); return objProto ? (value == objProto || getPrototypeOf(value) == objProto) : shimIsPlainObject(value); }; /** * Checks if `value` is a regular expression. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a regular expression, else `false`. * @example * * _.isRegExp(/fred/); * // => true */ function isRegExp(value) { return value && objectTypes[typeof value] && toString.call(value) == regexpClass || false; } /** * Checks if `value` is a string. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a string, else `false`. * @example * * _.isString('fred'); * // => true */ function isString(value) { return typeof value == 'string' || value && typeof value == 'object' && toString.call(value) == stringClass || false; } /** * Checks if `value` is `undefined`. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is `undefined`, else `false`. * @example * * _.isUndefined(void 0); * // => true */ function isUndefined(value) { return typeof value == 'undefined'; } /** * Creates an object with the same keys as `object` and values generated by * running each own enumerable property of `object` through the callback. * The callback is bound to `thisArg` and invoked with three arguments; * (value, key, object). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. 
* * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new object with values of the results of each `callback` execution. * @example * * _.mapValues({ 'a': 1, 'b': 2, 'c': 3} , function(num) { return num * 3; }); * // => { 'a': 3, 'b': 6, 'c': 9 } * * var characters = { * 'fred': { 'name': 'fred', 'age': 40 }, * 'pebbles': { 'name': 'pebbles', 'age': 1 } * }; * * // using "_.pluck" callback shorthand * _.mapValues(characters, 'age'); * // => { 'fred': 40, 'pebbles': 1 } */ function mapValues(object, callback, thisArg) { var result = {}; callback = lodash.createCallback(callback, thisArg, 3); forOwn(object, function(value, key, object) { result[key] = callback(value, key, object); }); return result; } /** * Recursively merges own enumerable properties of the source object(s), that * don't resolve to `undefined` into the destination object. Subsequent sources * will overwrite property assignments of previous sources. If a callback is * provided it will be executed to produce the merged values of the destination * and source properties. If the callback returns `undefined` merging will * be handled by the method instead. The callback is bound to `thisArg` and * invoked with two arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @category Objects * @param {Object} object The destination object. * @param {...Object} [source] The source objects. * @param {Function} [callback] The function to customize merging properties. 
 * @param {*} [thisArg] The `this` binding of `callback`.
 * @returns {Object} Returns the destination object.
 * @example
 *
 * var names = {
 *   'characters': [
 *     { 'name': 'barney' },
 *     { 'name': 'fred' }
 *   ]
 * };
 *
 * var ages = {
 *   'characters': [
 *     { 'age': 36 },
 *     { 'age': 40 }
 *   ]
 * };
 *
 * _.merge(names, ages);
 * // => { 'characters': [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }] }
 *
 * var food = {
 *   'fruits': ['apple'],
 *   'vegetables': ['beet']
 * };
 *
 * var otherFood = {
 *   'fruits': ['banana'],
 *   'vegetables': ['carrot']
 * };
 *
 * _.merge(food, otherFood, function(a, b) {
 *   return _.isArray(a) ? a.concat(b) : undefined;
 * });
 * // => { 'fruits': ['apple', 'banana'], 'vegetables': ['beet', 'carrot'] }
 */
function merge(object) {
  var args = arguments,
      length = 2;

  if (!isObject(object)) {
    return object;
  }
  // allows working with `_.reduce` and `_.reduceRight` without using
  // their `index` and `collection` arguments
  if (typeof args[2] != 'number') {
    length = args.length;
  }
  // detect a trailing `callback` (and optional `thisArg`) argument and
  // shrink `length` so they are not treated as source objects
  if (length > 3 && typeof args[length - 2] == 'function') {
    var callback = baseCreateCallback(args[--length - 1], args[length--], 2);
  } else if (length > 2 && typeof args[length - 1] == 'function') {
    callback = args[--length];
  }
  var sources = slice(arguments, 1, length),
      index = -1,
      stackA = getArray(),
      stackB = getArray();

  // NOTE(review): this loop visits `length` indexes while `sources` holds
  // `length - 1` entries, so the final iteration passes `undefined` to
  // `baseMerge` — presumably a no-op there; confirm against `baseMerge`.
  while (++index < length) {
    baseMerge(object, sources[index], callback, stackA, stackB);
  }
  // return the pooled stacks used for cycle detection
  releaseArray(stackA);
  releaseArray(stackB);
  return object;
}

/**
 * Creates a shallow clone of `object` excluding the specified properties.
 * Property names may be specified as individual arguments or as arrays of
 * property names. If a callback is provided it will be executed for each
 * property of `object` omitting the properties the callback returns truey
 * for. The callback is bound to `thisArg` and invoked with three arguments;
 * (value, key, object).
 *
 * @static
 * @memberOf _
 * @category Objects
 * @param {Object} object The source object.
* @param {Function|...string|string[]} [callback] The properties to omit or the * function called per iteration. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns an object without the omitted properties. * @example * * _.omit({ 'name': 'fred', 'age': 40 }, 'age'); * // => { 'name': 'fred' } * * _.omit({ 'name': 'fred', 'age': 40 }, function(value) { * return typeof value == 'number'; * }); * // => { 'name': 'fred' } */ function omit(object, callback, thisArg) { var result = {}; if (typeof callback != 'function') { var props = []; forIn(object, function(value, key) { props.push(key); }); props = baseDifference(props, baseFlatten(arguments, true, false, 1)); var index = -1, length = props.length; while (++index < length) { var key = props[index]; result[key] = object[key]; } } else { callback = lodash.createCallback(callback, thisArg, 3); forIn(object, function(value, key, object) { if (!callback(value, key, object)) { result[key] = value; } }); } return result; } /** * Creates a two dimensional array of an object's key-value pairs, * i.e. `[[key1, value1], [key2, value2]]`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns new array of key-value pairs. * @example * * _.pairs({ 'barney': 36, 'fred': 40 }); * // => [['barney', 36], ['fred', 40]] (property order is not guaranteed across environments) */ function pairs(object) { var index = -1, props = keys(object), length = props.length, result = Array(length); while (++index < length) { var key = props[index]; result[index] = [key, object[key]]; } return result; } /** * Creates a shallow clone of `object` composed of the specified properties. * Property names may be specified as individual arguments or as arrays of * property names. If a callback is provided it will be executed for each * property of `object` picking the properties the callback returns truey * for. 
The callback is bound to `thisArg` and invoked with three arguments; * (value, key, object). * * @static * @memberOf _ * @category Objects * @param {Object} object The source object. * @param {Function|...string|string[]} [callback] The function called per * iteration or property names to pick, specified as individual property * names or arrays of property names. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns an object composed of the picked properties. * @example * * _.pick({ 'name': 'fred', '_userid': 'fred1' }, 'name'); * // => { 'name': 'fred' } * * _.pick({ 'name': 'fred', '_userid': 'fred1' }, function(value, key) { * return key.charAt(0) != '_'; * }); * // => { 'name': 'fred' } */ function pick(object, callback, thisArg) { var result = {}; if (typeof callback != 'function') { var index = -1, props = baseFlatten(arguments, true, false, 1), length = isObject(object) ? props.length : 0; while (++index < length) { var key = props[index]; if (key in object) { result[key] = object[key]; } } } else { callback = lodash.createCallback(callback, thisArg, 3); forIn(object, function(value, key, object) { if (callback(value, key, object)) { result[key] = value; } }); } return result; } /** * An alternative to `_.reduce` this method transforms `object` to a new * `accumulator` object which is the result of running each of its own * enumerable properties through a callback, with each callback execution * potentially mutating the `accumulator` object. The callback is bound to * `thisArg` and invoked with four arguments; (accumulator, value, key, object). * Callbacks may exit iteration early by explicitly returning `false`. * * @static * @memberOf _ * @category Objects * @param {Array|Object} object The object to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [accumulator] The custom accumulator value. * @param {*} [thisArg] The `this` binding of `callback`. 
* @returns {*} Returns the accumulated value. * @example * * var squares = _.transform([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], function(result, num) { * num *= num; * if (num % 2) { * return result.push(num) < 3; * } * }); * // => [1, 9, 25] * * var mapped = _.transform({ 'a': 1, 'b': 2, 'c': 3 }, function(result, num, key) { * result[key] = num * 3; * }); * // => { 'a': 3, 'b': 6, 'c': 9 } */ function transform(object, callback, accumulator, thisArg) { var isArr = isArray(object); if (accumulator == null) { if (isArr) { accumulator = []; } else { var ctor = object && object.constructor, proto = ctor && ctor.prototype; accumulator = baseCreate(proto); } } if (callback) { callback = lodash.createCallback(callback, thisArg, 4); (isArr ? baseEach : forOwn)(object, function(value, index, object) { return callback(accumulator, value, index, object); }); } return accumulator; } /** * Creates an array composed of the own enumerable property values of `object`. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property values. * @example * * _.values({ 'one': 1, 'two': 2, 'three': 3 }); * // => [1, 2, 3] (property order is not guaranteed across environments) */ function values(object) { var index = -1, props = keys(object), length = props.length, result = Array(length); while (++index < length) { result[index] = object[props[index]]; } return result; } /*--------------------------------------------------------------------------*/ /** * Creates an array of elements from the specified indexes, or keys, of the * `collection`. Indexes may be specified as individual arguments or as arrays * of indexes. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {...(number|number[]|string|string[])} [index] The indexes of `collection` * to retrieve, specified as individual indexes or arrays of indexes. 
* @returns {Array} Returns a new array of elements corresponding to the * provided indexes. * @example * * _.at(['a', 'b', 'c', 'd', 'e'], [0, 2, 4]); * // => ['a', 'c', 'e'] * * _.at(['fred', 'barney', 'pebbles'], 0, 2); * // => ['fred', 'pebbles'] */ function at(collection) { var args = arguments, index = -1, props = baseFlatten(args, true, false, 1), length = (args[2] && args[2][args[1]] === collection) ? 1 : props.length, result = Array(length); if (support.unindexedChars && isString(collection)) { collection = collection.split(''); } while(++index < length) { result[index] = collection[props[index]]; } return result; } /** * Checks if a given value is present in a collection using strict equality * for comparisons, i.e. `===`. If `fromIndex` is negative, it is used as the * offset from the end of the collection. * * @static * @memberOf _ * @alias include * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {*} target The value to check for. * @param {number} [fromIndex=0] The index to search from. * @returns {boolean} Returns `true` if the `target` element is found, else `false`. * @example * * _.contains([1, 2, 3], 1); * // => true * * _.contains([1, 2, 3], 1, 2); * // => false * * _.contains({ 'name': 'fred', 'age': 40 }, 'fred'); * // => true * * _.contains('pebbles', 'eb'); * // => true */ function contains(collection, target, fromIndex) { var index = -1, indexOf = getIndexOf(), length = collection ? collection.length : 0, result = false; fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex) || 0; if (isArray(collection)) { result = indexOf(collection, target, fromIndex) > -1; } else if (typeof length == 'number') { result = (isString(collection) ? 
collection.indexOf(target, fromIndex) : indexOf(collection, target, fromIndex)) > -1; } else { baseEach(collection, function(value) { if (++index >= fromIndex) { return !(result = value === target); } }); } return result; } /** * Creates an object composed of keys generated from the results of running * each element of `collection` through the callback. The corresponding value * of each key is the number of times the key was returned by the callback. * The callback is bound to `thisArg` and invoked with three arguments; * (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the composed aggregate object. * @example * * _.countBy([4.3, 6.1, 6.4], function(num) { return Math.floor(num); }); * // => { '4': 1, '6': 2 } * * _.countBy([4.3, 6.1, 6.4], function(num) { return this.floor(num); }, Math); * // => { '4': 1, '6': 2 } * * _.countBy(['one', 'two', 'three'], 'length'); * // => { '3': 2, '5': 1 } */ var countBy = createAggregator(function(result, value, key) { (hasOwnProperty.call(result, key) ? result[key]++ : result[key] = 1); }); /** * Checks if the given callback returns truey value for **all** elements of * a collection. The callback is bound to `thisArg` and invoked with three * arguments; (value, index|key, collection). 
* * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias all * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {boolean} Returns `true` if all elements passed the callback check, * else `false`. * @example * * _.every([true, 1, null, 'yes']); * // => false * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * // using "_.pluck" callback shorthand * _.every(characters, 'age'); * // => true * * // using "_.where" callback shorthand * _.every(characters, { 'age': 36 }); * // => false */ function every(collection, callback, thisArg) { var result = true; callback = lodash.createCallback(callback, thisArg, 3); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if (!(result = !!callback(collection[index], index, collection))) { break; } } } else { baseEach(collection, function(value, index, collection) { return (result = !!callback(value, index, collection)); }); } return result; } /** * Iterates over elements of a collection, returning an array of all elements * the callback returns truey for. The callback is bound to `thisArg` and * invoked with three arguments; (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. 
* * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias select * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of elements that passed the callback check. * @example * * var evens = _.filter([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); * // => [2, 4, 6] * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': false }, * { 'name': 'fred', 'age': 40, 'blocked': true } * ]; * * // using "_.pluck" callback shorthand * _.filter(characters, 'blocked'); * // => [{ 'name': 'fred', 'age': 40, 'blocked': true }] * * // using "_.where" callback shorthand * _.filter(characters, { 'age': 36 }); * // => [{ 'name': 'barney', 'age': 36, 'blocked': false }] */ function filter(collection, callback, thisArg) { var result = []; callback = lodash.createCallback(callback, thisArg, 3); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (callback(value, index, collection)) { result.push(value); } } } else { baseEach(collection, function(value, index, collection) { if (callback(value, index, collection)) { result.push(value); } }); } return result; } /** * Iterates over elements of a collection, returning the first element that * the callback returns truey for. The callback is bound to `thisArg` and * invoked with three arguments; (value, index|key, collection). 
* * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias detect, findWhere * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the found element, else `undefined`. * @example * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': false }, * { 'name': 'fred', 'age': 40, 'blocked': true }, * { 'name': 'pebbles', 'age': 1, 'blocked': false } * ]; * * _.find(characters, function(chr) { * return chr.age < 40; * }); * // => { 'name': 'barney', 'age': 36, 'blocked': false } * * // using "_.where" callback shorthand * _.find(characters, { 'age': 1 }); * // => { 'name': 'pebbles', 'age': 1, 'blocked': false } * * // using "_.pluck" callback shorthand * _.find(characters, 'blocked'); * // => { 'name': 'fred', 'age': 40, 'blocked': true } */ function find(collection, callback, thisArg) { callback = lodash.createCallback(callback, thisArg, 3); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (callback(value, index, collection)) { return value; } } } else { var result; baseEach(collection, function(value, index, collection) { if (callback(value, index, collection)) { result = value; return false; } }); return result; } } /** * This method is like `_.find` except that it iterates over elements * of a `collection` from right to left. 
* * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the found element, else `undefined`. * @example * * _.findLast([1, 2, 3, 4], function(num) { * return num % 2 == 1; * }); * // => 3 */ function findLast(collection, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg, 3); forEachRight(collection, function(value, index, collection) { if (callback(value, index, collection)) { result = value; return false; } }); return result; } /** * Iterates over elements of a collection, executing the callback for each * element. The callback is bound to `thisArg` and invoked with three arguments; * (value, index|key, collection). Callbacks may exit iteration early by * explicitly returning `false`. * * Note: As with other "Collections" methods, objects with a `length` property * are iterated like arrays. To avoid this behavior `_.forIn` or `_.forOwn` * may be used for object iteration. * * @static * @memberOf _ * @alias each * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array|Object|string} Returns `collection`. 
* @example * * _([1, 2, 3]).forEach(function(num) { console.log(num); }).join(','); * // => logs each number and returns '1,2,3' * * _.forEach({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { console.log(num); }); * // => logs each number and returns the object (property order is not guaranteed across environments) */ function forEach(collection, callback, thisArg) { if (callback && typeof thisArg == 'undefined' && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if (callback(collection[index], index, collection) === false) { break; } } } else { baseEach(collection, callback, thisArg); } return collection; } /** * This method is like `_.forEach` except that it iterates over elements * of a `collection` from right to left. * * @static * @memberOf _ * @alias eachRight * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array|Object|string} Returns `collection`. * @example * * _([1, 2, 3]).forEachRight(function(num) { console.log(num); }).join(','); * // => logs each number from right to left and returns '3,2,1' */ function forEachRight(collection, callback, thisArg) { var iterable = collection, length = collection ? collection.length : 0; callback = callback && typeof thisArg == 'undefined' ? callback : baseCreateCallback(callback, thisArg, 3); if (isArray(collection)) { while (length--) { if (callback(collection[length], length, collection) === false) { break; } } } else { if (typeof length != 'number') { var props = keys(collection); length = props.length; } else if (support.unindexedChars && isString(collection)) { iterable = collection.split(''); } baseEach(collection, function(value, key, collection) { key = props ? 
props[--length] : --length; return callback(iterable[key], key, collection); }); } return collection; } /** * Creates an object composed of keys generated from the results of running * each element of a collection through the callback. The corresponding value * of each key is an array of the elements responsible for generating the key. * The callback is bound to `thisArg` and invoked with three arguments; * (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false` * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the composed aggregate object. * @example * * _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num); }); * // => { '4': [4.2], '6': [6.1, 6.4] } * * _.groupBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math); * // => { '4': [4.2], '6': [6.1, 6.4] } * * // using "_.pluck" callback shorthand * _.groupBy(['one', 'two', 'three'], 'length'); * // => { '3': ['one', 'two'], '5': ['three'] } */ var groupBy = createAggregator(function(result, value, key) { (hasOwnProperty.call(result, key) ? result[key] : result[key] = []).push(value); }); /** * Creates an object composed of keys generated from the results of running * each element of the collection through the given callback. The corresponding * value of each key is the last element responsible for generating the key. 
* The callback is bound to `thisArg` and invoked with three arguments; * (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the composed aggregate object. * @example * * var keys = [ * { 'dir': 'left', 'code': 97 }, * { 'dir': 'right', 'code': 100 } * ]; * * _.indexBy(keys, 'dir'); * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } * * _.indexBy(keys, function(key) { return String.fromCharCode(key.code); }); * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } * * _.indexBy(characters, function(key) { this.fromCharCode(key.code); }, String); * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } */ var indexBy = createAggregator(function(result, value, key) { result[key] = value; }); /** * Invokes the method named by `methodName` on each element in the `collection` * returning an array of the results of each invoked method. Additional arguments * will be provided to each invoked method. If `methodName` is a function it * will be invoked for, and `this` bound to, each element in the `collection`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. 
* @param {Function|string} methodName The name of the method to invoke or * the function invoked per iteration. * @param {...*} [arg] Arguments to invoke the method with. * @returns {Array} Returns a new array of the results of each invoked method. * @example * * _.invoke([[5, 1, 7], [3, 2, 1]], 'sort'); * // => [[1, 5, 7], [1, 2, 3]] * * _.invoke([123, 456], String.prototype.split, ''); * // => [['1', '2', '3'], ['4', '5', '6']] */ function invoke(collection, methodName) { var args = slice(arguments, 2), index = -1, isFunc = typeof methodName == 'function', length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); forEach(collection, function(value) { result[++index] = (isFunc ? methodName : value[methodName]).apply(value, args); }); return result; } /** * Creates an array of values by running each element in the collection * through the callback. The callback is bound to `thisArg` and invoked with * three arguments; (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias collect * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of the results of each `callback` execution. 
* @example * * _.map([1, 2, 3], function(num) { return num * 3; }); * // => [3, 6, 9] * * _.map({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { return num * 3; }); * // => [3, 6, 9] (property order is not guaranteed across environments) * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * // using "_.pluck" callback shorthand * _.map(characters, 'name'); * // => ['barney', 'fred'] */ function map(collection, callback, thisArg) { var index = -1, length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); callback = lodash.createCallback(callback, thisArg, 3); if (isArray(collection)) { while (++index < length) { result[index] = callback(collection[index], index, collection); } } else { baseEach(collection, function(value, key, collection) { result[++index] = callback(value, key, collection); }); } return result; } /** * Retrieves the maximum value of a collection. If the collection is empty or * falsey `-Infinity` is returned. If a callback is provided it will be executed * for each value in the collection to generate the criterion by which the value * is ranked. The callback is bound to `thisArg` and invoked with three * arguments; (value, index, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. 
* @returns {*} Returns the maximum value. * @example * * _.max([4, 2, 8, 6]); * // => 8 * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * _.max(characters, function(chr) { return chr.age; }); * // => { 'name': 'fred', 'age': 40 }; * * // using "_.pluck" callback shorthand * _.max(characters, 'age'); * // => { 'name': 'fred', 'age': 40 }; */ function max(collection, callback, thisArg) { var computed = -Infinity, result = computed; // allows working with functions like `_.map` without using // their `index` argument as a callback if (typeof callback != 'function' && thisArg && thisArg[callback] === collection) { callback = null; } if (callback == null && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (value > result) { result = value; } } } else { callback = (callback == null && isString(collection)) ? charAtCallback : lodash.createCallback(callback, thisArg, 3); baseEach(collection, function(value, index, collection) { var current = callback(value, index, collection); if (current > computed) { computed = current; result = value; } }); } return result; } /** * Retrieves the minimum value of a collection. If the collection is empty or * falsey `Infinity` is returned. If a callback is provided it will be executed * for each value in the collection to generate the criterion by which the value * is ranked. The callback is bound to `thisArg` and invoked with three * arguments; (value, index, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. 
* @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the minimum value. * @example * * _.min([4, 2, 8, 6]); * // => 2 * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * _.min(characters, function(chr) { return chr.age; }); * // => { 'name': 'barney', 'age': 36 }; * * // using "_.pluck" callback shorthand * _.min(characters, 'age'); * // => { 'name': 'barney', 'age': 36 }; */ function min(collection, callback, thisArg) { var computed = Infinity, result = computed; // allows working with functions like `_.map` without using // their `index` argument as a callback if (typeof callback != 'function' && thisArg && thisArg[callback] === collection) { callback = null; } if (callback == null && isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { var value = collection[index]; if (value < result) { result = value; } } } else { callback = (callback == null && isString(collection)) ? charAtCallback : lodash.createCallback(callback, thisArg, 3); baseEach(collection, function(value, index, collection) { var current = callback(value, index, collection); if (current < computed) { computed = current; result = value; } }); } return result; } /** * Retrieves the value of a specified property from all elements in the collection. * * @static * @memberOf _ * @type Function * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {string} property The name of the property to pluck. * @returns {Array} Returns a new array of property values. 
* @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * _.pluck(characters, 'name'); * // => ['barney', 'fred'] */ var pluck = map; /** * Reduces a collection to a value which is the accumulated result of running * each element in the collection through the callback, where each successive * callback execution consumes the return value of the previous execution. If * `accumulator` is not provided the first element of the collection will be * used as the initial `accumulator` value. The callback is bound to `thisArg` * and invoked with four arguments; (accumulator, value, index|key, collection). * * @static * @memberOf _ * @alias foldl, inject * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [accumulator] Initial value of the accumulator. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the accumulated value. * @example * * var sum = _.reduce([1, 2, 3], function(sum, num) { * return sum + num; * }); * // => 6 * * var mapped = _.reduce({ 'a': 1, 'b': 2, 'c': 3 }, function(result, num, key) { * result[key] = num * 3; * return result; * }, {}); * // => { 'a': 3, 'b': 6, 'c': 9 } */ function reduce(collection, callback, accumulator, thisArg) { var noaccum = arguments.length < 3; callback = lodash.createCallback(callback, thisArg, 4); if (isArray(collection)) { var index = -1, length = collection.length; if (noaccum) { accumulator = collection[++index]; } while (++index < length) { accumulator = callback(accumulator, collection[index], index, collection); } } else { baseEach(collection, function(value, index, collection) { accumulator = noaccum ? 
(noaccum = false, value) : callback(accumulator, value, index, collection) }); } return accumulator; } /** * This method is like `_.reduce` except that it iterates over elements * of a `collection` from right to left. * * @static * @memberOf _ * @alias foldr * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function} [callback=identity] The function called per iteration. * @param {*} [accumulator] Initial value of the accumulator. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the accumulated value. * @example * * var list = [[0, 1], [2, 3], [4, 5]]; * var flat = _.reduceRight(list, function(a, b) { return a.concat(b); }, []); * // => [4, 5, 2, 3, 0, 1] */ function reduceRight(collection, callback, accumulator, thisArg) { var noaccum = arguments.length < 3; callback = lodash.createCallback(callback, thisArg, 4); forEachRight(collection, function(value, index, collection) { accumulator = noaccum ? (noaccum = false, value) : callback(accumulator, value, index, collection); }); return accumulator; } /** * The opposite of `_.filter` this method returns the elements of a * collection that the callback does **not** return truey for. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. 
* @returns {Array} Returns a new array of elements that failed the callback check. * @example * * var odds = _.reject([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); * // => [1, 3, 5] * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': false }, * { 'name': 'fred', 'age': 40, 'blocked': true } * ]; * * // using "_.pluck" callback shorthand * _.reject(characters, 'blocked'); * // => [{ 'name': 'barney', 'age': 36, 'blocked': false }] * * // using "_.where" callback shorthand * _.reject(characters, { 'age': 36 }); * // => [{ 'name': 'fred', 'age': 40, 'blocked': true }] */ function reject(collection, callback, thisArg) { callback = lodash.createCallback(callback, thisArg, 3); return filter(collection, function(value, index, collection) { return !callback(value, index, collection); }); } /** * Retrieves a random element or `n` random elements from a collection. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to sample. * @param {number} [n] The number of elements to sample. * @param- {Object} [guard] Allows working with functions like `_.map` * without using their `index` arguments as `n`. * @returns {Array} Returns the random sample(s) of `collection`. * @example * * _.sample([1, 2, 3, 4]); * // => 2 * * _.sample([1, 2, 3, 4], 2); * // => [3, 1] */ function sample(collection, n, guard) { if (collection && typeof collection.length != 'number') { collection = values(collection); } else if (support.unindexedChars && isString(collection)) { collection = collection.split(''); } if (n == null || guard) { return collection ? collection[baseRandom(0, collection.length - 1)] : undefined; } var result = shuffle(collection); result.length = nativeMin(nativeMax(0, n), result.length); return result; } /** * Creates an array of shuffled values, using a version of the Fisher-Yates * shuffle. See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle. 
* * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to shuffle. * @returns {Array} Returns a new shuffled collection. * @example * * _.shuffle([1, 2, 3, 4, 5, 6]); * // => [4, 1, 6, 3, 5, 2] */ function shuffle(collection) { var index = -1, length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); forEach(collection, function(value) { var rand = baseRandom(0, ++index); result[index] = result[rand]; result[rand] = value; }); return result; } /** * Gets the size of the `collection` by returning `collection.length` for arrays * and array-like objects or the number of own enumerable properties for objects. * * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to inspect. * @returns {number} Returns `collection.length` or number of own enumerable properties. * @example * * _.size([1, 2]); * // => 2 * * _.size({ 'one': 1, 'two': 2, 'three': 3 }); * // => 3 * * _.size('pebbles'); * // => 7 */ function size(collection) { var length = collection ? collection.length : 0; return typeof length == 'number' ? length : keys(collection).length; } /** * Checks if the callback returns a truey value for **any** element of a * collection. The function returns as soon as it finds a passing value and * does not iterate over the entire collection. The callback is bound to * `thisArg` and invoked with three arguments; (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias any * @category Collections * @param {Array|Object|string} collection The collection to iterate over. 
* @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {boolean} Returns `true` if any element passed the callback check, * else `false`. * @example * * _.some([null, 0, 'yes', false], Boolean); * // => true * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': false }, * { 'name': 'fred', 'age': 40, 'blocked': true } * ]; * * // using "_.pluck" callback shorthand * _.some(characters, 'blocked'); * // => true * * // using "_.where" callback shorthand * _.some(characters, { 'age': 1 }); * // => false */ function some(collection, callback, thisArg) { var result; callback = lodash.createCallback(callback, thisArg, 3); if (isArray(collection)) { var index = -1, length = collection.length; while (++index < length) { if ((result = callback(collection[index], index, collection))) { break; } } } else { baseEach(collection, function(value, index, collection) { return !(result = callback(value, index, collection)); }); } return !!result; } /** * Creates an array of elements, sorted in ascending order by the results of * running each element in a collection through the callback. This method * performs a stable sort, that is, it will preserve the original sort order * of equal elements. The callback is bound to `thisArg` and invoked with * three arguments; (value, index|key, collection). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an array of property names is provided for `callback` the collection * will be sorted by each property value. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. 
* * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Array|Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of sorted elements. * @example * * _.sortBy([1, 2, 3], function(num) { return Math.sin(num); }); * // => [3, 1, 2] * * _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math); * // => [3, 1, 2] * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 }, * { 'name': 'barney', 'age': 26 }, * { 'name': 'fred', 'age': 30 } * ]; * * // using "_.pluck" callback shorthand * _.map(_.sortBy(characters, 'age'), _.values); * // => [['barney', 26], ['fred', 30], ['barney', 36], ['fred', 40]] * * // sorting by multiple properties * _.map(_.sortBy(characters, ['name', 'age']), _.values); * // = > [['barney', 26], ['barney', 36], ['fred', 30], ['fred', 40]] */ function sortBy(collection, callback, thisArg) { var index = -1, isArr = isArray(callback), length = collection ? collection.length : 0, result = Array(typeof length == 'number' ? length : 0); if (!isArr) { callback = lodash.createCallback(callback, thisArg, 3); } forEach(collection, function(value, key, collection) { var object = result[++index] = getObject(); if (isArr) { object.criteria = map(callback, function(key) { return value[key]; }); } else { (object.criteria = getArray())[0] = callback(value, key, collection); } object.index = index; object.value = value; }); length = result.length; result.sort(compareAscending); while (length--) { var object = result[length]; result[length] = object.value; if (!isArr) { releaseArray(object.criteria); } releaseObject(object); } return result; } /** * Converts the `collection` to an array. 
* * @static * @memberOf _ * @category Collections * @param {Array|Object|string} collection The collection to convert. * @returns {Array} Returns the new converted array. * @example * * (function() { return _.toArray(arguments).slice(1); })(1, 2, 3, 4); * // => [2, 3, 4] */ function toArray(collection) { if (collection && typeof collection.length == 'number') { return (support.unindexedChars && isString(collection)) ? collection.split('') : slice(collection); } return values(collection); } /** * Performs a deep comparison of each element in a `collection` to the given * `properties` object, returning an array of all elements that have equivalent * property values. * * @static * @memberOf _ * @type Function * @category Collections * @param {Array|Object|string} collection The collection to iterate over. * @param {Object} props The object of property values to filter by. * @returns {Array} Returns a new array of elements that have the given properties. * @example * * var characters = [ * { 'name': 'barney', 'age': 36, 'pets': ['hoppy'] }, * { 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] } * ]; * * _.where(characters, { 'age': 36 }); * // => [{ 'name': 'barney', 'age': 36, 'pets': ['hoppy'] }] * * _.where(characters, { 'pets': ['dino'] }); * // => [{ 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] }] */ var where = filter; /*--------------------------------------------------------------------------*/ /** * Creates an array with all falsey values removed. The values `false`, `null`, * `0`, `""`, `undefined`, and `NaN` are all falsey. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to compact. * @returns {Array} Returns a new array of filtered values. * @example * * _.compact([0, 1, false, 2, '', 3]); * // => [1, 2, 3] */ function compact(array) { var index = -1, length = array ? 
array.length : 0, result = []; while (++index < length) { var value = array[index]; if (value) { result.push(value); } } return result; } /** * Creates an array excluding all values of the provided arrays using strict * equality for comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to process. * @param {...Array} [values] The arrays of values to exclude. * @returns {Array} Returns a new array of filtered values. * @example * * _.difference([1, 2, 3, 4, 5], [5, 2, 10]); * // => [1, 3, 4] */ function difference(array) { return baseDifference(array, baseFlatten(arguments, true, true, 1)); } /** * This method is like `_.find` except that it returns the index of the first * element that passes the callback check, instead of the element itself. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to search. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {number} Returns the index of the found element, else `-1`. 
* @example * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': false }, * { 'name': 'fred', 'age': 40, 'blocked': true }, * { 'name': 'pebbles', 'age': 1, 'blocked': false } * ]; * * _.findIndex(characters, function(chr) { * return chr.age < 20; * }); * // => 2 * * // using "_.where" callback shorthand * _.findIndex(characters, { 'age': 36 }); * // => 0 * * // using "_.pluck" callback shorthand * _.findIndex(characters, 'blocked'); * // => 1 */ function findIndex(array, callback, thisArg) { var index = -1, length = array ? array.length : 0; callback = lodash.createCallback(callback, thisArg, 3); while (++index < length) { if (callback(array[index], index, array)) { return index; } } return -1; } /** * This method is like `_.findIndex` except that it iterates over elements * of a `collection` from right to left. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to search. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {number} Returns the index of the found element, else `-1`. 
* @example * * var characters = [ * { 'name': 'barney', 'age': 36, 'blocked': true }, * { 'name': 'fred', 'age': 40, 'blocked': false }, * { 'name': 'pebbles', 'age': 1, 'blocked': true } * ]; * * _.findLastIndex(characters, function(chr) { * return chr.age > 30; * }); * // => 1 * * // using "_.where" callback shorthand * _.findLastIndex(characters, { 'age': 36 }); * // => 0 * * // using "_.pluck" callback shorthand * _.findLastIndex(characters, 'blocked'); * // => 2 */ function findLastIndex(array, callback, thisArg) { var length = array ? array.length : 0; callback = lodash.createCallback(callback, thisArg, 3); while (length--) { if (callback(array[length], length, array)) { return length; } } return -1; } /** * Gets the first element or first `n` elements of an array. If a callback * is provided elements at the beginning of the array are returned as long * as the callback returns truey. The callback is bound to `thisArg` and * invoked with three arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias head, take * @category Arrays * @param {Array} array The array to query. * @param {Function|Object|number|string} [callback] The function called * per element or the number of elements to return. If a property name or * object is provided it will be used to create a "_.pluck" or "_.where" * style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the first element(s) of `array`. 
* @example * * _.first([1, 2, 3]); * // => 1 * * _.first([1, 2, 3], 2); * // => [1, 2] * * _.first([1, 2, 3], function(num) { * return num < 3; * }); * // => [1, 2] * * var characters = [ * { 'name': 'barney', 'blocked': true, 'employer': 'slate' }, * { 'name': 'fred', 'blocked': false, 'employer': 'slate' }, * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } * ]; * * // using "_.pluck" callback shorthand * _.first(characters, 'blocked'); * // => [{ 'name': 'barney', 'blocked': true, 'employer': 'slate' }] * * // using "_.where" callback shorthand * _.pluck(_.first(characters, { 'employer': 'slate' }), 'name'); * // => ['barney', 'fred'] */ function first(array, callback, thisArg) { var n = 0, length = array ? array.length : 0; if (typeof callback != 'number' && callback != null) { var index = -1; callback = lodash.createCallback(callback, thisArg, 3); while (++index < length && callback(array[index], index, array)) { n++; } } else { n = callback; if (n == null || thisArg) { return array ? array[0] : undefined; } } return slice(array, 0, nativeMin(nativeMax(0, n), length)); } /** * Flattens a nested array (the nesting can be to any depth). If `isShallow` * is truey, the array will only be flattened a single level. If a callback * is provided each element of the array is passed through the callback before * flattening. The callback is bound to `thisArg` and invoked with three * arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to flatten. * @param {boolean} [isShallow=false] A flag to restrict flattening to a single level. 
* @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new flattened array. * @example * * _.flatten([1, [2], [3, [[4]]]]); * // => [1, 2, 3, 4]; * * _.flatten([1, [2], [3, [[4]]]], true); * // => [1, 2, 3, [[4]]]; * * var characters = [ * { 'name': 'barney', 'age': 30, 'pets': ['hoppy'] }, * { 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] } * ]; * * // using "_.pluck" callback shorthand * _.flatten(characters, 'pets'); * // => ['hoppy', 'baby puss', 'dino'] */ function flatten(array, isShallow, callback, thisArg) { // juggle arguments if (typeof isShallow != 'boolean' && isShallow != null) { thisArg = callback; callback = (typeof isShallow != 'function' && thisArg && thisArg[isShallow] === array) ? null : isShallow; isShallow = false; } if (callback != null) { array = map(array, callback, thisArg); } return baseFlatten(array, isShallow); } /** * Gets the index at which the first occurrence of `value` is found using * strict equality for comparisons, i.e. `===`. If the array is already sorted * providing `true` for `fromIndex` will run a faster binary search. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to search. * @param {*} value The value to search for. * @param {boolean|number} [fromIndex=0] The index to search from or `true` * to perform a binary search on a sorted array. * @returns {number} Returns the index of the matched value or `-1`. * @example * * _.indexOf([1, 2, 3, 1, 2, 3], 2); * // => 1 * * _.indexOf([1, 2, 3, 1, 2, 3], 2, 3); * // => 4 * * _.indexOf([1, 1, 2, 2, 3, 3], 2, true); * // => 2 */ function indexOf(array, value, fromIndex) { if (typeof fromIndex == 'number') { var length = array ? array.length : 0; fromIndex = (fromIndex < 0 ? 
nativeMax(0, length + fromIndex) : fromIndex || 0); } else if (fromIndex) { var index = sortedIndex(array, value); return array[index] === value ? index : -1; } return baseIndexOf(array, value, fromIndex); } /** * Gets all but the last element or last `n` elements of an array. If a * callback is provided elements at the end of the array are excluded from * the result as long as the callback returns truey. The callback is bound * to `thisArg` and invoked with three arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to query. * @param {Function|Object|number|string} [callback=1] The function called * per element or the number of elements to exclude. If a property name or * object is provided it will be used to create a "_.pluck" or "_.where" * style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a slice of `array`. 
* @example * * _.initial([1, 2, 3]); * // => [1, 2] * * _.initial([1, 2, 3], 2); * // => [1] * * _.initial([1, 2, 3], function(num) { * return num > 1; * }); * // => [1] * * var characters = [ * { 'name': 'barney', 'blocked': false, 'employer': 'slate' }, * { 'name': 'fred', 'blocked': true, 'employer': 'slate' }, * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } * ]; * * // using "_.pluck" callback shorthand * _.initial(characters, 'blocked'); * // => [{ 'name': 'barney', 'blocked': false, 'employer': 'slate' }] * * // using "_.where" callback shorthand * _.pluck(_.initial(characters, { 'employer': 'na' }), 'name'); * // => ['barney', 'fred'] */ function initial(array, callback, thisArg) { var n = 0, length = array ? array.length : 0; if (typeof callback != 'number' && callback != null) { var index = length; callback = lodash.createCallback(callback, thisArg, 3); while (index-- && callback(array[index], index, array)) { n++; } } else { n = (callback == null || thisArg) ? 1 : callback || n; } return slice(array, 0, nativeMin(nativeMax(0, length - n), length)); } /** * Creates an array of unique values present in all provided arrays using * strict equality for comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {...Array} [array] The arrays to inspect. * @returns {Array} Returns an array of shared values. * @example * * _.intersection([1, 2, 3], [5, 2, 1, 4], [2, 1]); * // => [1, 2] */ function intersection() { var args = [], argsIndex = -1, argsLength = arguments.length, caches = getArray(), indexOf = getIndexOf(), trustIndexOf = indexOf === baseIndexOf, seen = getArray(); while (++argsIndex < argsLength) { var value = arguments[argsIndex]; if (isArray(value) || isArguments(value)) { args.push(value); caches.push(trustIndexOf && value.length >= largeArraySize && createCache(argsIndex ? args[argsIndex] : seen)); } } var array = args[0], index = -1, length = array ? 
array.length : 0, result = []; outer: while (++index < length) { var cache = caches[0]; value = array[index]; if ((cache ? cacheIndexOf(cache, value) : indexOf(seen, value)) < 0) { argsIndex = argsLength; (cache || seen).push(value); while (--argsIndex) { cache = caches[argsIndex]; if ((cache ? cacheIndexOf(cache, value) : indexOf(args[argsIndex], value)) < 0) { continue outer; } } result.push(value); } } while (argsLength--) { cache = caches[argsLength]; if (cache) { releaseObject(cache); } } releaseArray(caches); releaseArray(seen); return result; } /** * Gets the last element or last `n` elements of an array. If a callback is * provided elements at the end of the array are returned as long as the * callback returns truey. The callback is bound to `thisArg` and invoked * with three arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to query. * @param {Function|Object|number|string} [callback] The function called * per element or the number of elements to return. If a property name or * object is provided it will be used to create a "_.pluck" or "_.where" * style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {*} Returns the last element(s) of `array`. 
* @example * * _.last([1, 2, 3]); * // => 3 * * _.last([1, 2, 3], 2); * // => [2, 3] * * _.last([1, 2, 3], function(num) { * return num > 1; * }); * // => [2, 3] * * var characters = [ * { 'name': 'barney', 'blocked': false, 'employer': 'slate' }, * { 'name': 'fred', 'blocked': true, 'employer': 'slate' }, * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } * ]; * * // using "_.pluck" callback shorthand * _.pluck(_.last(characters, 'blocked'), 'name'); * // => ['fred', 'pebbles'] * * // using "_.where" callback shorthand * _.last(characters, { 'employer': 'na' }); * // => [{ 'name': 'pebbles', 'blocked': true, 'employer': 'na' }] */ function last(array, callback, thisArg) { var n = 0, length = array ? array.length : 0; if (typeof callback != 'number' && callback != null) { var index = length; callback = lodash.createCallback(callback, thisArg, 3); while (index-- && callback(array[index], index, array)) { n++; } } else { n = callback; if (n == null || thisArg) { return array ? array[length - 1] : undefined; } } return slice(array, nativeMax(0, length - n)); } /** * Gets the index at which the last occurrence of `value` is found using strict * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used * as the offset from the end of the collection. * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to search. * @param {*} value The value to search for. * @param {number} [fromIndex=array.length-1] The index to search from. * @returns {number} Returns the index of the matched value or `-1`. 
* @example * * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2); * // => 4 * * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2, 3); * // => 1 */ function lastIndexOf(array, value, fromIndex) { var index = array ? array.length : 0; if (typeof fromIndex == 'number') { index = (fromIndex < 0 ? nativeMax(0, index + fromIndex) : nativeMin(fromIndex, index - 1)) + 1; } while (index--) { if (array[index] === value) { return index; } } return -1; } /** * Removes all provided values from the given array using strict equality for * comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to modify. * @param {...*} [value] The values to remove. * @returns {Array} Returns `array`. * @example * * var array = [1, 2, 3, 1, 2, 3]; * _.pull(array, 2, 3); * console.log(array); * // => [1, 1] */ function pull(array) { var args = arguments, argsIndex = 0, argsLength = args.length, length = array ? array.length : 0; while (++argsIndex < argsLength) { var index = -1, value = args[argsIndex]; while (++index < length) { if (array[index] === value) { splice.call(array, index--, 1); length--; } } } return array; } /** * Creates an array of numbers (positive and/or negative) progressing from * `start` up to but not including `end`. If `start` is less than `stop` a * zero-length range is created unless a negative `step` is specified. * * @static * @memberOf _ * @category Arrays * @param {number} [start=0] The start of the range. * @param {number} end The end of the range. * @param {number} [step=1] The value to increment or decrement by. * @returns {Array} Returns a new range array. * @example * * _.range(4); * // => [0, 1, 2, 3] * * _.range(1, 5); * // => [1, 2, 3, 4] * * _.range(0, 20, 5); * // => [0, 5, 10, 15] * * _.range(0, -4, -1); * // => [0, -1, -2, -3] * * _.range(1, 4, 0); * // => [1, 1, 1] * * _.range(0); * // => [] */ function range(start, end, step) { start = +start || 0; step = typeof step == 'number' ? 
step : (+step || 1); if (end == null) { end = start; start = 0; } // use `Array(length)` so engines like Chakra and V8 avoid slower modes // http://youtu.be/XAqIpGU8ZZk#t=17m25s var index = -1, length = nativeMax(0, ceil((end - start) / (step || 1))), result = Array(length); while (++index < length) { result[index] = start; start += step; } return result; } /** * Removes all elements from an array that the callback returns truey for * and returns an array of removed elements. The callback is bound to `thisArg` * and invoked with three arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to modify. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a new array of removed elements. * @example * * var array = [1, 2, 3, 4, 5, 6]; * var evens = _.remove(array, function(num) { return num % 2 == 0; }); * * console.log(array); * // => [1, 3, 5] * * console.log(evens); * // => [2, 4, 6] */ function remove(array, callback, thisArg) { var index = -1, length = array ? array.length : 0, result = []; callback = lodash.createCallback(callback, thisArg, 3); while (++index < length) { var value = array[index]; if (callback(value, index, array)) { result.push(value); splice.call(array, index--, 1); length--; } } return result; } /** * The opposite of `_.initial` this method gets all but the first element or * first `n` elements of an array. 
If a callback function is provided elements * at the beginning of the array are excluded from the result as long as the * callback returns truey. The callback is bound to `thisArg` and invoked * with three arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. *<|fim▁hole|> * @category Arrays * @param {Array} array The array to query. * @param {Function|Object|number|string} [callback=1] The function called * per element or the number of elements to exclude. If a property name or * object is provided it will be used to create a "_.pluck" or "_.where" * style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a slice of `array`. * @example * * _.rest([1, 2, 3]); * // => [2, 3] * * _.rest([1, 2, 3], 2); * // => [3] * * _.rest([1, 2, 3], function(num) { * return num < 3; * }); * // => [3] * * var characters = [ * { 'name': 'barney', 'blocked': true, 'employer': 'slate' }, * { 'name': 'fred', 'blocked': false, 'employer': 'slate' }, * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } * ]; * * // using "_.pluck" callback shorthand * _.pluck(_.rest(characters, 'blocked'), 'name'); * // => ['fred', 'pebbles'] * * // using "_.where" callback shorthand * _.rest(characters, { 'employer': 'slate' }); * // => [{ 'name': 'pebbles', 'blocked': true, 'employer': 'na' }] */ function rest(array, callback, thisArg) { if (typeof callback != 'number' && callback != null) { var n = 0, index = -1, length = array ? array.length : 0; callback = lodash.createCallback(callback, thisArg, 3); while (++index < length && callback(array[index], index, array)) { n++; } } else { n = (callback == null || thisArg) ? 
1 : nativeMax(0, callback); } return slice(array, n); } /** * Uses a binary search to determine the smallest index at which a value * should be inserted into a given sorted array in order to maintain the sort * order of the array. If a callback is provided it will be executed for * `value` and each element of `array` to compute their sort ranking. The * callback is bound to `thisArg` and invoked with one argument; (value). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to inspect. * @param {*} value The value to evaluate. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {number} Returns the index at which `value` should be inserted * into `array`. * @example * * _.sortedIndex([20, 30, 50], 40); * // => 2 * * // using "_.pluck" callback shorthand * _.sortedIndex([{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }], { 'x': 40 }, 'x'); * // => 2 * * var dict = { * 'wordToNumber': { 'twenty': 20, 'thirty': 30, 'fourty': 40, 'fifty': 50 } * }; * * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { * return dict.wordToNumber[word]; * }); * // => 2 * * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { * return this.wordToNumber[word]; * }, dict); * // => 2 */ function sortedIndex(array, value, callback, thisArg) { var low = 0, high = array ? array.length : low; // explicitly reference `identity` for better inlining in Firefox callback = callback ? 
lodash.createCallback(callback, thisArg, 1) : identity; value = callback(value); while (low < high) { var mid = (low + high) >>> 1; (callback(array[mid]) < value) ? low = mid + 1 : high = mid; } return low; } /** * Creates an array of unique values, in order, of the provided arrays using * strict equality for comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {...Array} [array] The arrays to inspect. * @returns {Array} Returns an array of combined values. * @example * * _.union([1, 2, 3], [5, 2, 1, 4], [2, 1]); * // => [1, 2, 3, 5, 4] */ function union() { return baseUniq(baseFlatten(arguments, true, true)); } /** * Creates a duplicate-value-free version of an array using strict equality * for comparisons, i.e. `===`. If the array is sorted, providing * `true` for `isSorted` will use a faster algorithm. If a callback is provided * each element of `array` is passed through the callback before uniqueness * is computed. The callback is bound to `thisArg` and invoked with three * arguments; (value, index, array). * * If a property name is provided for `callback` the created "_.pluck" style * callback will return the property value of the given element. * * If an object is provided for `callback` the created "_.where" style callback * will return `true` for elements that have the properties of the given object, * else `false`. * * @static * @memberOf _ * @alias unique * @category Arrays * @param {Array} array The array to process. * @param {boolean} [isSorted=false] A flag to indicate that `array` is sorted. * @param {Function|Object|string} [callback=identity] The function called * per iteration. If a property name or object is provided it will be used * to create a "_.pluck" or "_.where" style callback, respectively. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns a duplicate-value-free array. 
* @example * * _.uniq([1, 2, 1, 3, 1]); * // => [1, 2, 3] * * _.uniq([1, 1, 2, 2, 3], true); * // => [1, 2, 3] * * _.uniq(['A', 'b', 'C', 'a', 'B', 'c'], function(letter) { return letter.toLowerCase(); }); * // => ['A', 'b', 'C'] * * _.uniq([1, 2.5, 3, 1.5, 2, 3.5], function(num) { return this.floor(num); }, Math); * // => [1, 2.5, 3] * * // using "_.pluck" callback shorthand * _.uniq([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x'); * // => [{ 'x': 1 }, { 'x': 2 }] */ function uniq(array, isSorted, callback, thisArg) { // juggle arguments if (typeof isSorted != 'boolean' && isSorted != null) { thisArg = callback; callback = (typeof isSorted != 'function' && thisArg && thisArg[isSorted] === array) ? null : isSorted; isSorted = false; } if (callback != null) { callback = lodash.createCallback(callback, thisArg, 3); } return baseUniq(array, isSorted, callback); } /** * Creates an array excluding all provided values using strict equality for * comparisons, i.e. `===`. * * @static * @memberOf _ * @category Arrays * @param {Array} array The array to filter. * @param {...*} [value] The values to exclude. * @returns {Array} Returns a new array of filtered values. * @example * * _.without([1, 2, 1, 0, 3, 1, 4], 0, 1); * // => [2, 3, 4] */ function without(array) { return baseDifference(array, slice(arguments, 1)); } /** * Creates an array that is the symmetric difference of the provided arrays. * See http://en.wikipedia.org/wiki/Symmetric_difference. * * @static * @memberOf _ * @category Arrays * @param {...Array} [array] The arrays to inspect. * @returns {Array} Returns an array of values. * @example * * _.xor([1, 2, 3], [5, 2, 1, 4]); * // => [3, 5, 4] * * _.xor([1, 2, 5], [2, 3, 5], [3, 4, 5]); * // => [1, 4, 5] */ function xor() { var index = -1, length = arguments.length; while (++index < length) { var array = arguments[index]; if (isArray(array) || isArguments(array)) { var result = result ? 
baseUniq(baseDifference(result, array).concat(baseDifference(array, result))) : array; } } return result || []; } /** * Creates an array of grouped elements, the first of which contains the first * elements of the given arrays, the second of which contains the second * elements of the given arrays, and so on. * * @static * @memberOf _ * @alias unzip * @category Arrays * @param {...Array} [array] Arrays to process. * @returns {Array} Returns a new array of grouped elements. * @example * * _.zip(['fred', 'barney'], [30, 40], [true, false]); * // => [['fred', 30, true], ['barney', 40, false]] */ function zip() { var array = arguments.length > 1 ? arguments : arguments[0], index = -1, length = array ? max(pluck(array, 'length')) : 0, result = Array(length < 0 ? 0 : length); while (++index < length) { result[index] = pluck(array, index); } return result; } /** * Creates an object composed from arrays of `keys` and `values`. Provide * either a single two dimensional array, i.e. `[[key1, value1], [key2, value2]]` * or two arrays, one of `keys` and one of corresponding `values`. * * @static * @memberOf _ * @alias object * @category Arrays * @param {Array} keys The array of keys. * @param {Array} [values=[]] The array of values. * @returns {Object} Returns an object composed of the given keys and * corresponding values. * @example * * _.zipObject(['fred', 'barney'], [30, 40]); * // => { 'fred': 30, 'barney': 40 } */ function zipObject(keys, values) { var index = -1, length = keys ? keys.length : 0, result = {}; if (!values && length && !isArray(keys[0])) { values = []; } while (++index < length) { var key = keys[index]; if (values) { result[key] = values[index]; } else if (key) { result[key[0]] = key[1]; } } return result; } /*--------------------------------------------------------------------------*/ /** * Creates a function that executes `func`, with the `this` binding and * arguments of the created function, only after being called `n` times. 
* * @static * @memberOf _ * @category Functions * @param {number} n The number of times the function must be called before * `func` is executed. * @param {Function} func The function to restrict. * @returns {Function} Returns the new restricted function. * @example * * var saves = ['profile', 'settings']; * * var done = _.after(saves.length, function() { * console.log('Done saving!'); * }); * * _.forEach(saves, function(type) { * asyncSave({ 'type': type, 'complete': done }); * }); * // => logs 'Done saving!', after all saves have completed */ function after(n, func) { if (!isFunction(func)) { throw new TypeError; } return function() { if (--n < 1) { return func.apply(this, arguments); } }; } /** * Creates a function that, when called, invokes `func` with the `this` * binding of `thisArg` and prepends any additional `bind` arguments to those * provided to the bound function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to bind. * @param {*} [thisArg] The `this` binding of `func`. * @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new bound function. * @example * * var func = function(greeting) { * return greeting + ' ' + this.name; * }; * * func = _.bind(func, { 'name': 'fred' }, 'hi'); * func(); * // => 'hi fred' */ function bind(func, thisArg) { return arguments.length > 2 ? createWrapper(func, 17, slice(arguments, 2), null, thisArg) : createWrapper(func, 1, null, null, thisArg); } /** * Binds methods of an object to the object itself, overwriting the existing * method. Method names may be specified as individual arguments or as arrays * of method names. If no method names are provided all the function properties * of `object` will be bound. * * @static * @memberOf _ * @category Functions * @param {Object} object The object to bind and assign the bound methods to. 
* @param {...string} [methodName] The object method names to * bind, specified as individual method names or arrays of method names. * @returns {Object} Returns `object`. * @example * * var view = { * 'label': 'docs', * 'onClick': function() { console.log('clicked ' + this.label); } * }; * * _.bindAll(view); * jQuery('#docs').on('click', view.onClick); * // => logs 'clicked docs', when the button is clicked */ function bindAll(object) { var funcs = arguments.length > 1 ? baseFlatten(arguments, true, false, 1) : functions(object), index = -1, length = funcs.length; while (++index < length) { var key = funcs[index]; object[key] = createWrapper(object[key], 1, null, null, object); } return object; } /** * Creates a function that, when called, invokes the method at `object[key]` * and prepends any additional `bindKey` arguments to those provided to the bound * function. This method differs from `_.bind` by allowing bound functions to * reference methods that will be redefined or don't yet exist. * See http://michaux.ca/articles/lazy-function-definition-pattern. * * @static * @memberOf _ * @category Functions * @param {Object} object The object the method belongs to. * @param {string} key The key of the method. * @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new bound function. * @example * * var object = { * 'name': 'fred', * 'greet': function(greeting) { * return greeting + ' ' + this.name; * } * }; * * var func = _.bindKey(object, 'greet', 'hi'); * func(); * // => 'hi fred' * * object.greet = function(greeting) { * return greeting + 'ya ' + this.name + '!'; * }; * * func(); * // => 'hiya fred!' */ function bindKey(object, key) { return arguments.length > 2 ? createWrapper(key, 19, slice(arguments, 2), null, object) : createWrapper(key, 3, null, null, object); } /** * Creates a function that is the composition of the provided functions, * where each function consumes the return value of the function that follows. 
* For example, composing the functions `f()`, `g()`, and `h()` produces `f(g(h()))`. * Each function is executed with the `this` binding of the composed function. * * @static * @memberOf _ * @category Functions * @param {...Function} [func] Functions to compose. * @returns {Function} Returns the new composed function. * @example * * var realNameMap = { * 'pebbles': 'penelope' * }; * * var format = function(name) { * name = realNameMap[name.toLowerCase()] || name; * return name.charAt(0).toUpperCase() + name.slice(1).toLowerCase(); * }; * * var greet = function(formatted) { * return 'Hiya ' + formatted + '!'; * }; * * var welcome = _.compose(greet, format); * welcome('pebbles'); * // => 'Hiya Penelope!' */ function compose() { var funcs = arguments, length = funcs.length; while (length--) { if (!isFunction(funcs[length])) { throw new TypeError; } } return function() { var args = arguments, length = funcs.length; while (length--) { args = [funcs[length].apply(this, args)]; } return args[0]; }; } /** * Creates a function which accepts one or more arguments of `func` that when * invoked either executes `func` returning its result, if all `func` arguments * have been provided, or returns a function that accepts one or more of the * remaining `func` arguments, and so on. The arity of `func` can be specified * if `func.length` is not sufficient. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to curry. * @param {number} [arity=func.length] The arity of `func`. * @returns {Function} Returns the new curried function. * @example * * var curried = _.curry(function(a, b, c) { * console.log(a + b + c); * }); * * curried(1)(2)(3); * // => 6 * * curried(1, 2)(3); * // => 6 * * curried(1, 2, 3); * // => 6 */ function curry(func, arity) { arity = typeof arity == 'number' ? 
arity : (+arity || func.length); return createWrapper(func, 4, null, null, null, arity); } /** * Creates a function that will delay the execution of `func` until after * `wait` milliseconds have elapsed since the last time it was invoked. * Provide an options object to indicate that `func` should be invoked on * the leading and/or trailing edge of the `wait` timeout. Subsequent calls * to the debounced function will return the result of the last `func` call. * * Note: If `leading` and `trailing` options are `true` `func` will be called * on the trailing edge of the timeout only if the the debounced function is * invoked more than once during the `wait` timeout. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to debounce. * @param {number} wait The number of milliseconds to delay. * @param {Object} [options] The options object. * @param {boolean} [options.leading=false] Specify execution on the leading edge of the timeout. * @param {number} [options.maxWait] The maximum time `func` is allowed to be delayed before it's called. * @param {boolean} [options.trailing=true] Specify execution on the trailing edge of the timeout. * @returns {Function} Returns the new debounced function. 
 * @example
 *
 * // avoid costly calculations while the window size is in flux
 * var lazyLayout = _.debounce(calculateLayout, 150);
 * jQuery(window).on('resize', lazyLayout);
 *
 * // execute `sendMail` when the click event is fired, debouncing subsequent calls
 * jQuery('#postbox').on('click', _.debounce(sendMail, 300, {
 *   'leading': true,
 *   'trailing': false
 * }));
 *
 * // ensure `batchLog` is executed once after 1 second of debounced calls
 * var source = new EventSource('/stream');
 * source.addEventListener('message', _.debounce(batchLog, 250, {
 *   'maxWait': 1000
 * }), false);
 */
function debounce(func, wait, options) {
  var args,
      maxTimeoutId,
      result,
      stamp,
      thisArg,
      timeoutId,
      trailingCall,
      lastCalled = 0,
      maxWait = false,
      trailing = true;

  if (!isFunction(func)) {
    throw new TypeError;
  }
  wait = nativeMax(0, wait) || 0;
  if (options === true) {
    var leading = true;
    trailing = false;
  } else if (isObject(options)) {
    leading = options.leading;
    maxWait = 'maxWait' in options && (nativeMax(wait, options.maxWait) || 0);
    trailing = 'trailing' in options ?
options.trailing : trailing; } var delayed = function() { var remaining = wait - (now() - stamp); if (remaining <= 0) { if (maxTimeoutId) { clearTimeout(maxTimeoutId); } var isCalled = trailingCall; maxTimeoutId = timeoutId = trailingCall = undefined; if (isCalled) { lastCalled = now(); result = func.apply(thisArg, args); if (!timeoutId && !maxTimeoutId) { args = thisArg = null; } } } else { timeoutId = setTimeout(delayed, remaining); } }; var maxDelayed = function() { if (timeoutId) { clearTimeout(timeoutId); } maxTimeoutId = timeoutId = trailingCall = undefined; if (trailing || (maxWait !== wait)) { lastCalled = now(); result = func.apply(thisArg, args); if (!timeoutId && !maxTimeoutId) { args = thisArg = null; } } }; return function() { args = arguments; stamp = now(); thisArg = this; trailingCall = trailing && (timeoutId || !leading); if (maxWait === false) { var leadingCall = leading && !timeoutId; } else { if (!maxTimeoutId && !leading) { lastCalled = stamp; } var remaining = maxWait - (stamp - lastCalled), isCalled = remaining <= 0; if (isCalled) { if (maxTimeoutId) { maxTimeoutId = clearTimeout(maxTimeoutId); } lastCalled = stamp; result = func.apply(thisArg, args); } else if (!maxTimeoutId) { maxTimeoutId = setTimeout(maxDelayed, remaining); } } if (isCalled && timeoutId) { timeoutId = clearTimeout(timeoutId); } else if (!timeoutId && wait !== maxWait) { timeoutId = setTimeout(delayed, wait); } if (leadingCall) { isCalled = true; result = func.apply(thisArg, args); } if (isCalled && !timeoutId && !maxTimeoutId) { args = thisArg = null; } return result; }; } /** * Defers executing the `func` function until the current call stack has cleared. * Additional arguments will be provided to `func` when it is invoked. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to defer. * @param {...*} [arg] Arguments to invoke the function with. * @returns {number} Returns the timer id. 
* @example * * _.defer(function(text) { console.log(text); }, 'deferred'); * // logs 'deferred' after one or more milliseconds */ function defer(func) { if (!isFunction(func)) { throw new TypeError; } var args = slice(arguments, 1); return setTimeout(function() { func.apply(undefined, args); }, 1); } /** * Executes the `func` function after `wait` milliseconds. Additional arguments * will be provided to `func` when it is invoked. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to delay. * @param {number} wait The number of milliseconds to delay execution. * @param {...*} [arg] Arguments to invoke the function with. * @returns {number} Returns the timer id. * @example * * _.delay(function(text) { console.log(text); }, 1000, 'later'); * // => logs 'later' after one second */ function delay(func, wait) { if (!isFunction(func)) { throw new TypeError; } var args = slice(arguments, 2); return setTimeout(function() { func.apply(undefined, args); }, wait); } /** * Creates a function that memoizes the result of `func`. If `resolver` is * provided it will be used to determine the cache key for storing the result * based on the arguments provided to the memoized function. By default, the * first argument provided to the memoized function is used as the cache key. * The `func` is executed with the `this` binding of the memoized function. * The result cache is exposed as the `cache` property on the memoized function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to have its output memoized. * @param {Function} [resolver] A function used to resolve the cache key. * @returns {Function} Returns the new memoizing function. * @example * * var fibonacci = _.memoize(function(n) { * return n < 2 ? 
n : fibonacci(n - 1) + fibonacci(n - 2); * }); * * fibonacci(9) * // => 34 * * var data = { * 'fred': { 'name': 'fred', 'age': 40 }, * 'pebbles': { 'name': 'pebbles', 'age': 1 } * }; * * // modifying the result cache * var get = _.memoize(function(name) { return data[name]; }, _.identity); * get('pebbles'); * // => { 'name': 'pebbles', 'age': 1 } * * get.cache.pebbles.name = 'penelope'; * get('pebbles'); * // => { 'name': 'penelope', 'age': 1 } */ function memoize(func, resolver) { if (!isFunction(func)) { throw new TypeError; } var memoized = function() { var cache = memoized.cache, key = resolver ? resolver.apply(this, arguments) : keyPrefix + arguments[0]; return hasOwnProperty.call(cache, key) ? cache[key] : (cache[key] = func.apply(this, arguments)); } memoized.cache = {}; return memoized; } /** * Creates a function that is restricted to execute `func` once. Repeat calls to * the function will return the value of the first call. The `func` is executed * with the `this` binding of the created function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to restrict. * @returns {Function} Returns the new restricted function. * @example * * var initialize = _.once(createApplication); * initialize(); * initialize(); * // `initialize` executes `createApplication` once */ function once(func) { var ran, result; if (!isFunction(func)) { throw new TypeError; } return function() { if (ran) { return result; } ran = true; result = func.apply(this, arguments); // clear the `func` variable so the function may be garbage collected func = null; return result; }; } /** * Creates a function that, when called, invokes `func` with any additional * `partial` arguments prepended to those provided to the new function. This * method is similar to `_.bind` except it does **not** alter the `this` binding. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to partially apply arguments to. 
* @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new partially applied function. * @example * * var greet = function(greeting, name) { return greeting + ' ' + name; }; * var hi = _.partial(greet, 'hi'); * hi('fred'); * // => 'hi fred' */ function partial(func) { return createWrapper(func, 16, slice(arguments, 1)); } /** * This method is like `_.partial` except that `partial` arguments are * appended to those provided to the new function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to partially apply arguments to. * @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new partially applied function. * @example * * var defaultsDeep = _.partialRight(_.merge, _.defaults); * * var options = { * 'variable': 'data', * 'imports': { 'jq': $ } * }; * * defaultsDeep(options, _.templateSettings); * * options.variable * // => 'data' * * options.imports * // => { '_': _, 'jq': $ } */ function partialRight(func) { return createWrapper(func, 32, null, slice(arguments, 1)); } /** * Creates a function that, when executed, will only call the `func` function * at most once per every `wait` milliseconds. Provide an options object to * indicate that `func` should be invoked on the leading and/or trailing edge * of the `wait` timeout. Subsequent calls to the throttled function will * return the result of the last `func` call. * * Note: If `leading` and `trailing` options are `true` `func` will be called * on the trailing edge of the timeout only if the the throttled function is * invoked more than once during the `wait` timeout. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to throttle. * @param {number} wait The number of milliseconds to throttle executions to. * @param {Object} [options] The options object. * @param {boolean} [options.leading=true] Specify execution on the leading edge of the timeout. 
* @param {boolean} [options.trailing=true] Specify execution on the trailing edge of the timeout. * @returns {Function} Returns the new throttled function. * @example * * // avoid excessively updating the position while scrolling * var throttled = _.throttle(updatePosition, 100); * jQuery(window).on('scroll', throttled); * * // execute `renewToken` when the click event is fired, but not more than once every 5 minutes * jQuery('.interactive').on('click', _.throttle(renewToken, 300000, { * 'trailing': false * })); */ function throttle(func, wait, options) { var leading = true, trailing = true; if (!isFunction(func)) { throw new TypeError; } if (options === false) { leading = false; } else if (isObject(options)) { leading = 'leading' in options ? options.leading : leading; trailing = 'trailing' in options ? options.trailing : trailing; } debounceOptions.leading = leading; debounceOptions.maxWait = wait; debounceOptions.trailing = trailing; return debounce(func, wait, debounceOptions); } /** * Creates a function that provides `value` to the wrapper function as its * first argument. Additional arguments provided to the function are appended * to those provided to the wrapper function. The wrapper is executed with * the `this` binding of the created function. * * @static * @memberOf _ * @category Functions * @param {*} value The value to wrap. * @param {Function} wrapper The wrapper function. * @returns {Function} Returns the new function. * @example * * var p = _.wrap(_.escape, function(func, text) { * return '<p>' + func(text) + '</p>'; * }); * * p('Fred, Wilma, & Pebbles'); * // => '<p>Fred, Wilma, &amp; Pebbles</p>' */ function wrap(value, wrapper) { return createWrapper(wrapper, 16, [value]); } /*--------------------------------------------------------------------------*/ /** * Creates a function that returns `value`. * * @static * @memberOf _ * @category Utilities * @param {*} value The value to return from the new function. 
* @returns {Function} Returns the new function. * @example * * var object = { 'name': 'fred' }; * var getter = _.constant(object); * getter() === object; * // => true */ function constant(value) { return function() { return value; }; } /** * Produces a callback bound to an optional `thisArg`. If `func` is a property * name the created callback will return the property value for a given element. * If `func` is an object the created callback will return `true` for elements * that contain the equivalent object properties, otherwise it will return `false`. * * @static * @memberOf _ * @category Utilities * @param {*} [func=identity] The value to convert to a callback. * @param {*} [thisArg] The `this` binding of the created callback. * @param {number} [argCount] The number of arguments the callback accepts. * @returns {Function} Returns a callback function. * @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * // wrap to create custom callback shorthands * _.createCallback = _.wrap(_.createCallback, function(func, callback, thisArg) { * var match = /^(.+?)__([gl]t)(.+)$/.exec(callback); * return !match ? func(callback, thisArg) : function(object) { * return match[2] == 'gt' ? 
object[match[1]] > match[3] : object[match[1]] < match[3]; * }; * }); * * _.filter(characters, 'age__gt38'); * // => [{ 'name': 'fred', 'age': 40 }] */ function createCallback(func, thisArg, argCount) { var type = typeof func; if (func == null || type == 'function') { return baseCreateCallback(func, thisArg, argCount); } // handle "_.pluck" style callback shorthands if (type != 'object') { return property(func); } var props = keys(func), key = props[0], a = func[key]; // handle "_.where" style callback shorthands if (props.length == 1 && a === a && !isObject(a)) { // fast path the common case of providing an object with a single // property containing a primitive value return function(object) { var b = object[key]; return a === b && (a !== 0 || (1 / a == 1 / b)); }; } return function(object) { var length = props.length, result = false; while (length--) { if (!(result = baseIsEqual(object[props[length]], func[props[length]], null, true))) { break; } } return result; }; } /** * Converts the characters `&`, `<`, `>`, `"`, and `'` in `string` to their * corresponding HTML entities. * * @static * @memberOf _ * @category Utilities * @param {string} string The string to escape. * @returns {string} Returns the escaped string. * @example * * _.escape('Fred, Wilma, & Pebbles'); * // => 'Fred, Wilma, &amp; Pebbles' */ function escape(string) { return string == null ? '' : String(string).replace(reUnescapedHtml, escapeHtmlChar); } /** * This method returns the first argument provided to it. * * @static * @memberOf _ * @category Utilities * @param {*} value Any value. * @returns {*} Returns `value`. * @example * * var object = { 'name': 'fred' }; * _.identity(object) === object; * // => true */ function identity(value) { return value; } /** * Adds function properties of a source object to the destination object. * If `object` is a function methods will be added to its prototype as well. 
* * @static * @memberOf _ * @category Utilities * @param {Function|Object} [object=lodash] object The destination object. * @param {Object} source The object of functions to add. * @param {Object} [options] The options object. * @param {boolean} [options.chain=true] Specify whether the functions added are chainable. * @example * * function capitalize(string) { * return string.charAt(0).toUpperCase() + string.slice(1).toLowerCase(); * } * * _.mixin({ 'capitalize': capitalize }); * _.capitalize('fred'); * // => 'Fred' * * _('fred').capitalize().value(); * // => 'Fred' * * _.mixin({ 'capitalize': capitalize }, { 'chain': false }); * _('fred').capitalize(); * // => 'Fred' */ function mixin(object, source, options) { var chain = true, methodNames = source && functions(source); if (!source || (!options && !methodNames.length)) { if (options == null) { options = source; } ctor = lodashWrapper; source = object; object = lodash; methodNames = functions(source); } if (options === false) { chain = false; } else if (isObject(options) && 'chain' in options) { chain = options.chain; } var ctor = object, isFunc = isFunction(ctor); forEach(methodNames, function(methodName) { var func = object[methodName] = source[methodName]; if (isFunc) { ctor.prototype[methodName] = function() { var chainAll = this.__chain__, value = this.__wrapped__, args = [value]; push.apply(args, arguments); var result = func.apply(object, args); if (chain || chainAll) { if (value === result && isObject(result)) { return this; } result = new ctor(result); result.__chain__ = chainAll; } return result; }; } }); } /** * Reverts the '_' variable to its previous value and returns a reference to * the `lodash` function. * * @static * @memberOf _ * @category Utilities * @returns {Function} Returns the `lodash` function. * @example * * var lodash = _.noConflict(); */ function noConflict() { context._ = oldDash; return this; } /** * A no-operation function. 
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @example
 *
 * var object = { 'name': 'fred' };
 * _.noop(object) === undefined;
 * // => true
 */
function noop() {
  // no operation performed
}

/**
 * Gets the number of milliseconds that have elapsed since the Unix epoch
 * (1 January 1970 00:00:00 UTC).
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @example
 *
 * var stamp = _.now();
 * _.defer(function() { console.log(_.now() - stamp); });
 * // => logs the number of milliseconds it took for the deferred function to be called
 */
var now = isNative(now = Date.now) && now || function() {
  return new Date().getTime();
};

/**
 * Converts the given value into an integer of the specified radix.
 * If `radix` is `undefined` or `0` a `radix` of `10` is used unless the
 * `value` is a hexadecimal, in which case a `radix` of `16` is used.
 *
 * Note: This method avoids differences in native ES3 and ES5 `parseInt`
 * implementations. See http://es5.github.io/#x15.1.2.2 for more details.
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @param {string} value The value to parse.
 * @param {number} [radix] The radix used to interpret the value to parse.
 * @returns {number} Returns the new integer value.
 * @example
 *
 * _.parseInt('08');
 * // => 8
 */
var parseInt = nativeParseInt(whitespace + '08') == 8 ? nativeParseInt : function(value, radix) {
  // Firefox < 21 and Opera < 15 follow the ES3 specified implementation of `parseInt`
  return nativeParseInt(isString(value) ? value.replace(reLeadingSpacesAndZeros, '') : value, radix || 0);
};

/**
 * Creates a "_.pluck" style function, which returns the `key` value of a
 * given object.
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @param {string} key The name of the property to retrieve.
 * @returns {Function} Returns the new function.
 * @example
 *
 * var characters = [
 *   { 'name': 'fred',   'age': 40 },
 *   { 'name': 'barney', 'age': 36 }
 * ];
 *
 * var getName = _.property('name');
 *
 * _.map(characters, getName);
 * // => ['fred', 'barney']
 *
 * _.sortBy(characters, getName);
 * // => [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }]
 */
function property(key) {
  return function(object) {
    return object[key];
  };
}

/**
 * Produces a random number between `min` and `max` (inclusive). If only one
 * argument is provided a number between `0` and the given number will be
 * returned. If `floating` is truey or either `min` or `max` are floats a
 * floating-point number will be returned instead of an integer.
 *
 * @static
 * @memberOf _
 * @category Utilities
 * @param {number} [min=0] The minimum possible value.
 * @param {number} [max=1] The maximum possible value.
 * @param {boolean} [floating=false] Specify returning a floating-point number.
 * @returns {number} Returns a random number.
 * @example
 *
 * _.random(0, 5);
 * // => an integer between 0 and 5
 *
 * _.random(5);
 * // => also an integer between 0 and 5
 *
 * _.random(5, true);
 * // => a floating-point number between 0 and 5
 *
 * _.random(1.2, 5.2);
 * // => a floating-point number between 1.2 and 5.2
 */
function random(min, max, floating) {
  var noMin = min == null,
      noMax = max == null;

  if (floating == null) {
    if (typeof min == 'boolean' && noMax) {
      floating = min;
      min = 1;
    }
    else if (!noMax && typeof max == 'boolean') {
      floating = max;
      noMax = true;
    }
  }
  if (noMin && noMax) {
    max = 1;
  }
  min = +min || 0;
  if (noMax) {
    max = min;
    min = 0;
  } else {
    max = +max || 0;
  }
  if (floating || min % 1 || max % 1) {
    var rand = nativeRandom();
    return nativeMin(min + (rand * (max - min + parseFloat('1e-' + ((rand +'').length - 1)))), max);
  }
  return baseRandom(min, max);
}

/**
 * Resolves the value of property `key` on `object`. If `key` is a function
 * it will be invoked with the `this` binding of `object` and its result returned,
 * else the property value is returned.
If `object` is falsey then `undefined` * is returned. * * @static * @memberOf _ * @category Utilities * @param {Object} object The object to inspect. * @param {string} key The name of the property to resolve. * @returns {*} Returns the resolved value. * @example * * var object = { * 'cheese': 'crumpets', * 'stuff': function() { * return 'nonsense'; * } * }; * * _.result(object, 'cheese'); * // => 'crumpets' * * _.result(object, 'stuff'); * // => 'nonsense' */ function result(object, key) { if (object) { var value = object[key]; return isFunction(value) ? object[key]() : value; } } /** * A micro-templating method that handles arbitrary delimiters, preserves * whitespace, and correctly escapes quotes within interpolated code. * * Note: In the development build, `_.template` utilizes sourceURLs for easier * debugging. See http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl * * For more information on precompiling templates see: * http://lodash.com/custom-builds * * For more information on Chrome extension sandboxes see: * http://developer.chrome.com/stable/extensions/sandboxingEval.html * * @static * @memberOf _ * @category Utilities * @param {string} text The template text. * @param {Object} data The data object used to populate the text. * @param {Object} [options] The options object. * @param {RegExp} [options.escape] The "escape" delimiter. * @param {RegExp} [options.evaluate] The "evaluate" delimiter. * @param {Object} [options.imports] An object to import into the template as local variables. * @param {RegExp} [options.interpolate] The "interpolate" delimiter. * @param {string} [sourceURL] The sourceURL of the template's compiled source. * @param {string} [variable] The data object variable name. * @returns {Function|string} Returns a compiled function when no `data` object * is given, else it returns the interpolated text. 
* @example * * // using the "interpolate" delimiter to create a compiled template * var compiled = _.template('hello <%= name %>'); * compiled({ 'name': 'fred' }); * // => 'hello fred' * * // using the "escape" delimiter to escape HTML in data property values * _.template('<b><%- value %></b>', { 'value': '<script>' }); * // => '<b>&lt;script&gt;</b>' * * // using the "evaluate" delimiter to generate HTML * var list = '<% _.forEach(people, function(name) { %><li><%- name %></li><% }); %>'; * _.template(list, { 'people': ['fred', 'barney'] }); * // => '<li>fred</li><li>barney</li>' * * // using the ES6 delimiter as an alternative to the default "interpolate" delimiter * _.template('hello ${ name }', { 'name': 'pebbles' }); * // => 'hello pebbles' * * // using the internal `print` function in "evaluate" delimiters * _.template('<% print("hello " + name); %>!', { 'name': 'barney' }); * // => 'hello barney!' * * // using a custom template delimiters * _.templateSettings = { * 'interpolate': /{{([\s\S]+?)}}/g * }; * * _.template('hello {{ name }}!', { 'name': 'mustache' }); * // => 'hello mustache!' * * // using the `imports` option to import jQuery * var list = '<% jq.each(people, function(name) { %><li><%- name %></li><% }); %>'; * _.template(list, { 'people': ['fred', 'barney'] }, { 'imports': { 'jq': jQuery } }); * // => '<li>fred</li><li>barney</li>' * * // using the `sourceURL` option to specify a custom sourceURL for the template * var compiled = _.template('hello <%= name %>', null, { 'sourceURL': '/basic/greeting.jst' }); * compiled(data); * // => find the source of "greeting.jst" under the Sources tab or Resources panel of the web inspector * * // using the `variable` option to ensure a with-statement isn't used in the compiled template * var compiled = _.template('hi <%= data.name %>!', null, { 'variable': 'data' }); * compiled.source; * // => function(data) { * var __t, __p = '', __e = _.escape; * __p += 'hi ' + ((__t = ( data.name )) == null ? 
'' : __t) + '!'; * return __p; * } * * // using the `source` property to inline compiled templates for meaningful * // line numbers in error messages and a stack trace * fs.writeFileSync(path.join(cwd, 'jst.js'), '\ * var JST = {\ * "main": ' + _.template(mainText).source + '\ * };\ * '); */ function template(text, data, options) { // based on Mark Resig's `tmpl` implementation // http://eMark.org/blog/javascript-micro-templating/ // and Laura Doktorova's doT.js // https://github.com/olado/doT var settings = lodash.templateSettings; text = String(text || ''); // avoid missing dependencies when `iteratorTemplate` is not defined options = defaults({}, options, settings); var imports = defaults({}, options.imports, settings.imports), importsKeys = keys(imports), importsValues = values(imports); var isEvaluating, index = 0, interpolate = options.interpolate || reNoMatch, source = "__p += '"; // compile the regexp to match each delimiter var reDelimiters = RegExp( (options.escape || reNoMatch).source + '|' + interpolate.source + '|' + (interpolate === reInterpolate ? reEsTemplate : reNoMatch).source + '|' + (options.evaluate || reNoMatch).source + '|$' , 'g'); text.replace(reDelimiters, function(match, escapeValue, interpolateValue, esTemplateValue, evaluateValue, offset) { interpolateValue || (interpolateValue = esTemplateValue); // escape characters that cannot be included in string literals source += text.slice(index, offset).replace(reUnescapedString, escapeStringChar); // replace delimiters with snippets if (escapeValue) { source += "' +\n__e(" + escapeValue + ") +\n'"; } if (evaluateValue) { isEvaluating = true; source += "';\n" + evaluateValue + ";\n__p += '"; } if (interpolateValue) { source += "' +\n((__t = (" + interpolateValue + ")) == null ? 
'' : __t) +\n'"; } index = offset + match.length; // the JS engine embedded in Adobe products requires returning the `match` // string in order to produce the correct `offset` value return match; }); source += "';\n"; // if `variable` is not specified, wrap a with-statement around the generated // code to add the data object to the top of the scope chain var variable = options.variable, hasVariable = variable; if (!hasVariable) { variable = 'obj'; source = 'with (' + variable + ') {\n' + source + '\n}\n'; } // cleanup code by stripping empty strings source = (isEvaluating ? source.replace(reEmptyStringLeading, '') : source) .replace(reEmptyStringMiddle, '$1') .replace(reEmptyStringTrailing, '$1;'); // frame code as the function body source = 'function(' + variable + ') {\n' + (hasVariable ? '' : variable + ' || (' + variable + ' = {});\n') + "var __t, __p = '', __e = _.escape" + (isEvaluating ? ', __j = Array.prototype.join;\n' + "function print() { __p += __j.call(arguments, '') }\n" : ';\n' ) + source + 'return __p\n}'; // Use a sourceURL for easier debugging. // http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl var sourceURL = '\n/*\n//# sourceURL=' + (options.sourceURL || '/lodash/template/source[' + (templateCounter++) + ']') + '\n*/'; try { var result = Function(importsKeys, 'return ' + source + sourceURL).apply(undefined, importsValues); } catch(e) { e.source = source; throw e; } if (data) { return result(data); } // provide the compiled function's source by its `toString` method, in // supported environments, or the `source` property as a convenience for // inlining compiled templates during the build process result.source = source; return result; } /** * Executes the callback `n` times, returning an array of the results * of each callback execution. The callback is bound to `thisArg` and invoked * with one argument; (index). 
* * @static * @memberOf _ * @category Utilities * @param {number} n The number of times to execute the callback. * @param {Function} callback The function called per iteration. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Array} Returns an array of the results of each `callback` execution. * @example * * var diceRolls = _.times(3, _.partial(_.random, 1, 6)); * // => [3, 6, 4] * * _.times(3, function(n) { mage.castSpell(n); }); * // => calls `mage.castSpell(n)` three times, passing `n` of `0`, `1`, and `2` respectively * * _.times(3, function(n) { this.cast(n); }, mage); * // => also calls `mage.castSpell(n)` three times */ function times(n, callback, thisArg) { n = (n = +n) > -1 ? n : 0; var index = -1, result = Array(n); callback = baseCreateCallback(callback, thisArg, 1); while (++index < n) { result[index] = callback(index); } return result; } /** * The inverse of `_.escape` this method converts the HTML entities * `&amp;`, `&lt;`, `&gt;`, `&quot;`, and `&#39;` in `string` to their * corresponding characters. * * @static * @memberOf _ * @category Utilities * @param {string} string The string to unescape. * @returns {string} Returns the unescaped string. * @example * * _.unescape('Fred, Barney &amp; Pebbles'); * // => 'Fred, Barney & Pebbles' */ function unescape(string) { return string == null ? '' : String(string).replace(reEscapedHtml, unescapeHtmlChar); } /** * Generates a unique ID. If `prefix` is provided the ID will be appended to it. * * @static * @memberOf _ * @category Utilities * @param {string} [prefix] The value to prefix the ID with. * @returns {string} Returns the unique ID. * @example * * _.uniqueId('contact_'); * // => 'contact_104' * * _.uniqueId(); * // => '105' */ function uniqueId(prefix) { var id = ++idCounter; return String(prefix == null ? 
'' : prefix) + id; } /*--------------------------------------------------------------------------*/ /** * Creates a `lodash` object that wraps the given value with explicit * method chaining enabled. * * @static * @memberOf _ * @category Chaining * @param {*} value The value to wrap. * @returns {Object} Returns the wrapper object. * @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 }, * { 'name': 'pebbles', 'age': 1 } * ]; * * var youngest = _.chain(characters) * .sortBy('age') * .map(function(chr) { return chr.name + ' is ' + chr.age; }) * .first() * .value(); * // => 'pebbles is 1' */ function chain(value) { value = new lodashWrapper(value); value.__chain__ = true; return value; } /** * Invokes `interceptor` with the `value` as the first argument and then * returns `value`. The purpose of this method is to "tap into" a method * chain in order to perform operations on intermediate results within * the chain. * * @static * @memberOf _ * @category Chaining * @param {*} value The value to provide to `interceptor`. * @param {Function} interceptor The function to invoke. * @returns {*} Returns `value`. * @example * * _([1, 2, 3, 4]) * .tap(function(array) { array.pop(); }) * .reverse() * .value(); * // => [3, 2, 1] */ function tap(value, interceptor) { interceptor(value); return value; } /** * Enables explicit method chaining on the wrapper object. * * @name chain * @memberOf _ * @category Chaining * @returns {*} Returns the wrapper object. * @example * * var characters = [ * { 'name': 'barney', 'age': 36 }, * { 'name': 'fred', 'age': 40 } * ]; * * // without explicit chaining * _(characters).first(); * // => { 'name': 'barney', 'age': 36 } * * // with explicit chaining * _(characters).chain() * .first() * .pick('age') * .value(); * // => { 'age': 36 } */ function wrapperChain() { this.__chain__ = true; return this; } /** * Produces the `toString` result of the wrapped value. 
* * @name toString * @memberOf _ * @category Chaining * @returns {string} Returns the string result. * @example * * _([1, 2, 3]).toString(); * // => '1,2,3' */ function wrapperToString() { return String(this.__wrapped__); } /** * Extracts the wrapped value. * * @name valueOf * @memberOf _ * @alias value * @category Chaining * @returns {*} Returns the wrapped value. * @example * * _([1, 2, 3]).valueOf(); * // => [1, 2, 3] */ function wrapperValueOf() { return this.__wrapped__; } /*--------------------------------------------------------------------------*/ // add functions that return wrapped values when chaining lodash.after = after; lodash.assign = assign; lodash.at = at; lodash.bind = bind; lodash.bindAll = bindAll; lodash.bindKey = bindKey; lodash.chain = chain; lodash.compact = compact; lodash.compose = compose; lodash.constant = constant; lodash.countBy = countBy; lodash.create = create; lodash.createCallback = createCallback; lodash.curry = curry; lodash.debounce = debounce; lodash.defaults = defaults; lodash.defer = defer; lodash.delay = delay; lodash.difference = difference; lodash.filter = filter; lodash.flatten = flatten; lodash.forEach = forEach; lodash.forEachRight = forEachRight; lodash.forIn = forIn; lodash.forInRight = forInRight; lodash.forOwn = forOwn; lodash.forOwnRight = forOwnRight; lodash.functions = functions; lodash.groupBy = groupBy; lodash.indexBy = indexBy; lodash.initial = initial; lodash.intersection = intersection; lodash.invert = invert; lodash.invoke = invoke; lodash.keys = keys; lodash.map = map; lodash.mapValues = mapValues; lodash.max = max; lodash.memoize = memoize; lodash.merge = merge; lodash.min = min; lodash.omit = omit; lodash.once = once; lodash.pairs = pairs; lodash.partial = partial; lodash.partialRight = partialRight; lodash.pick = pick; lodash.pluck = pluck; lodash.property = property; lodash.pull = pull; lodash.range = range; lodash.reject = reject; lodash.remove = remove; lodash.rest = rest; lodash.shuffle = shuffle; 
lodash.sortBy = sortBy; lodash.tap = tap; lodash.throttle = throttle; lodash.times = times; lodash.toArray = toArray; lodash.transform = transform; lodash.union = union; lodash.uniq = uniq; lodash.values = values; lodash.where = where; lodash.without = without; lodash.wrap = wrap; lodash.xor = xor; lodash.zip = zip; lodash.zipObject = zipObject; // add aliases lodash.collect = map; lodash.drop = rest; lodash.each = forEach; lodash.eachRight = forEachRight; lodash.extend = assign; lodash.methods = functions; lodash.object = zipObject; lodash.select = filter; lodash.tail = rest; lodash.unique = uniq; lodash.unzip = zip; // add functions to `lodash.prototype` mixin(lodash); /*--------------------------------------------------------------------------*/ // add functions that return unwrapped values when chaining lodash.clone = clone; lodash.cloneDeep = cloneDeep; lodash.contains = contains; lodash.escape = escape; lodash.every = every; lodash.find = find; lodash.findIndex = findIndex; lodash.findKey = findKey; lodash.findLast = findLast; lodash.findLastIndex = findLastIndex; lodash.findLastKey = findLastKey; lodash.has = has; lodash.identity = identity; lodash.indexOf = indexOf; lodash.isArguments = isArguments; lodash.isArray = isArray; lodash.isBoolean = isBoolean; lodash.isDate = isDate; lodash.isElement = isElement; lodash.isEmpty = isEmpty; lodash.isEqual = isEqual; lodash.isFinite = isFinite; lodash.isFunction = isFunction; lodash.isNaN = isNaN; lodash.isNull = isNull; lodash.isNumber = isNumber; lodash.isObject = isObject; lodash.isPlainObject = isPlainObject; lodash.isRegExp = isRegExp; lodash.isString = isString; lodash.isUndefined = isUndefined; lodash.lastIndexOf = lastIndexOf; lodash.mixin = mixin; lodash.noConflict = noConflict; lodash.noop = noop; lodash.now = now; lodash.parseInt = parseInt; lodash.random = random; lodash.reduce = reduce; lodash.reduceRight = reduceRight; lodash.result = result; lodash.runInContext = runInContext; lodash.size = size; 
lodash.some = some; lodash.sortedIndex = sortedIndex; lodash.template = template; lodash.unescape = unescape; lodash.uniqueId = uniqueId; // add aliases lodash.all = every; lodash.any = some; lodash.detect = find; lodash.findWhere = find; lodash.foldl = reduce; lodash.foldr = reduceRight; lodash.include = contains; lodash.inject = reduce; mixin(function() { var source = {} forOwn(lodash, function(func, methodName) { if (!lodash.prototype[methodName]) { source[methodName] = func; } }); return source; }(), false); /*--------------------------------------------------------------------------*/ // add functions capable of returning wrapped and unwrapped values when chaining lodash.first = first; lodash.last = last; lodash.sample = sample; // add aliases lodash.take = first; lodash.head = first; forOwn(lodash, function(func, methodName) { var callbackable = methodName !== 'sample'; if (!lodash.prototype[methodName]) { lodash.prototype[methodName]= function(n, guard) { var chainAll = this.__chain__, result = func(this.__wrapped__, n, guard); return !chainAll && (n == null || (guard && !(callbackable && typeof n == 'function'))) ? result : new lodashWrapper(result, chainAll); }; } }); /*--------------------------------------------------------------------------*/ /** * The semantic version number. * * @static * @memberOf _ * @type string */ lodash.VERSION = '2.4.1'; // add "Chaining" functions to the wrapper lodash.prototype.chain = wrapperChain; lodash.prototype.toString = wrapperToString; lodash.prototype.value = wrapperValueOf; lodash.prototype.valueOf = wrapperValueOf; // add `Array` functions that return unwrapped values baseEach(['join', 'pop', 'shift'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { var chainAll = this.__chain__, result = func.apply(this.__wrapped__, arguments); return chainAll ? 
new lodashWrapper(result, chainAll) : result; }; }); // add `Array` functions that return the existing wrapped value baseEach(['push', 'reverse', 'sort', 'unshift'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { func.apply(this.__wrapped__, arguments); return this; }; }); // add `Array` functions that return new wrapped values baseEach(['concat', 'slice', 'splice'], function(methodName) { var func = arrayRef[methodName]; lodash.prototype[methodName] = function() { return new lodashWrapper(func.apply(this.__wrapped__, arguments), this.__chain__); }; }); // avoid array-like object bugs with `Array#shift` and `Array#splice` // in IE < 9, Firefox < 10, Narwhal, and RingoJS if (!support.spliceObjects) { baseEach(['pop', 'shift', 'splice'], function(methodName) { var func = arrayRef[methodName], isSplice = methodName == 'splice'; lodash.prototype[methodName] = function() { var chainAll = this.__chain__, value = this.__wrapped__, result = func.apply(value, arguments); if (value.length === 0) { delete value[0]; } return (chainAll || isSplice) ? new lodashWrapper(result, chainAll) : result; }; }); } return lodash; } /*--------------------------------------------------------------------------*/ // expose Lo-Dash var _ = runInContext(); // some AMD build optimizers like r.js check for condition patterns like the following: if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { // Expose Lo-Dash to the global object even when an AMD loader is present in // case Lo-Dash is loaded with a RequireJS shim config. 
// See http://requirejs.org/docs/api.html#config-shim root._ = _; // define as an anonymous module so, through path mapping, it can be // referenced as the "underscore" module define(function() { return _; }); } // check for `exports` after `define` in case a build optimizer adds an `exports` object else if (freeExports && freeModule) { // in Node.js or RingoJS if (moduleExports) { (freeModule.exports = _)._ = _; } // in Narwhal or Rhino -require else { freeExports._ = _; } } else { // in a browser or Rhino root._ = _; } }.call(this));<|fim▁end|>
* @static * @memberOf _ * @alias drop, tail
<|file_name|>Class_175.java<|end_file_name|><|fim▁begin|>package fr.javatronic.blog.massive.annotation1; <|fim▁hole|>public class Class_175 { }<|fim▁end|>
import fr.javatronic.blog.processor.Annotation_001; @Annotation_001
<|file_name|>server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 import _thread import RPi.GPIO as GPIO import socket import time from time import sleep from sys import exit import datetime #import MySQLdb # Start task command # sleep 30 && python /home/pi/Scripts/Sprinkler/Sprinkler.py > /home/pi/Scripts/Sprinkler/log.txt 2>&1 # Set GPIO output points Zones = [5, 6, 13, 19] StatusLED = 16 # Set GPIO input points CancelButton = 18 WaterSensor = 10 # Water Sensor Enabled? Sensor = False #Is it currently raining isRaining = False defaultWaitDuration = 0 def setup(): global serversocket,t # Setup GPIO GPIO.setmode(GPIO.BCM) GPIO.setwarnings(True) # Input Cancel Button GPIO.setup(CancelButton, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Input Rain Sensor if Sensor: GPIO.setup(WaterSensor, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Setup 4 zones on GPIO # Turn all Zones "OFF" for i in Zones: GPIO.setup(i, GPIO.OUT) GPIO.output(i, GPIO.HIGH) # Setup status LED GPIO.setup(StatusLED, GPIO.OUT) # Setup Sockets serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) host = socket.gethostname() port = 9999 serversocket.bind((host, port)) serversocket.listen(5) addLog("System", "Setup complete") def mainRun(): global isRaining addLog("System", "Main Thread started") # Always check the switch _thread.start_new_thread(checkSwitch, ((),)) while True: global serversocket clientsocket,addr = serversocket.accept() fromClient = clientsocket.recv(1024) clientsocket.close() strFromClient = str(fromClient.decode("ascii")) addLog("Recived", strFromClient) # Split incoming message requestType = strFromClient.split(":") <|fim▁hole|> # Do something with that message # What was the command? 
if(requestType[0] == "WATER"): # Is it raining if(isRaining == False): # Turn off LED if it was raining statusLED("off") # Start watering _thread.start_new_thread(water, (requestType[1], requestType[2], ) ) elif(requestType[0] == "ZONE"): if(requestType[1] == "ON"): zone(int(requestType[2]), "ON") else: zone(int(requestType[2]), "OFF") elif(requestType[0] == "RainStatus"): # Some day we will send something back print("nothing") elif(requestType[0] == "QUIT"): destroy() # Check switch def checkSwitch(self): global isRaining while True: state = GPIO.input(CancelButton) if(state): if(state != isRaining): addLog("System", "Switch TRUE") statusLED("solid") isRaining = True else: if(state != isRaining): addLog("System", "Switch FALSE") statusLED("off") isRaining = False sleep(2) # Water the lawn def water(zoneNum, duration): # Turn on zone zone(int(zoneNum), "ON") statusLED("on") # Sleep for that amount sleep(int(duration) * 60) # Turn off zone zone(int(zoneNum), "OFF") statusLED("off") # Zone Control Setup def zone(zoneSelect, onoff): if(onoff == "ON"): GPIO.output(Zones[zoneSelect], 0) addLog('Zone ' + str(zoneSelect), 'ON') else: GPIO.output(Zones[zoneSelect], 1) addLog('Zone ' + str(zoneSelect), 'OFF') def rain(): global isRaining # Check if it's raining if Sensor: if GPIO.input(WaterSensor): isRaining = True else: isRaining = False def statusLED(status): if status == "blink": GPIO.output(StatusLED, GPIO.HIGH) sleep(0.5) GPIO.output(StatusLED, GPIO.LOW) sleep(0.5) elif status == "solid": GPIO.output(StatusLED, GPIO.HIGH) elif status == "off": GPIO.output(StatusLED, GPIO.LOW) def addLog(currentZone, addedText): now = datetime.datetime.now() print ("{0}: {1}: {2}".format(now, currentZone, addedText)) def destroy(): global serversocket serversocket.shutdown(socket.SHUT_RDWR) for i in Zones: GPIO.output(i, GPIO.LOW) GPIO.output(StatusLED, GPIO.LOW) addLog('System', 'Sprinkler Script OFF') exit() if __name__ == '__main__': setup() try: mainRun() except KeyboardInterrupt: 
destroy() finally: GPIO.cleanup() exit() else: destroy()<|fim▁end|>
<|file_name|>m_ao_eval_libnao.py<|end_file_name|><|fim▁begin|># Copyright 2014-2018 The PySCF Developers. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import numpy as np from ctypes import POINTER, c_double, c_int64 from pyscf.nao.m_libnao import libnao libnao.ao_eval.argtypes = (<|fim▁hole|> POINTER(c_double), # dr_jt POINTER(c_int64), # mu2j POINTER(c_int64), # mu2s POINTER(c_double), # mu2rcut POINTER(c_double), # rvec_atom_center POINTER(c_int64), # ncoords POINTER(c_double), # coords POINTER(c_int64), # norbs POINTER(c_double), # res[orb, icoord] POINTER(c_int64)) # ldres leading dimension of res (ncoords) # # # def ao_eval_libnao_(ao, rat, isp, crds, res): """ Compute the values of atomic orbitals on given grid points Args: ao : instance of ao_log_c class rat : vector where the atomic orbitals from "ao" are centered isp : specie index for which we compute crds: coordinates on which we compute Returns: res[norbs,ncoord] : array of atomic orbital values """ #print(res_copy.flags) rat_copy = np.require(rat, dtype=c_double, requirements='C') crd_copy = np.require(crds, dtype=c_double, requirements='C') res_copy = np.require(res, dtype=c_double, requirements='CW') mu2j = np.require(ao.sp_mu2j[isp], dtype=c_int64, requirements='C') mu2s = np.require(ao.sp_mu2s[isp], dtype=c_int64, requirements='C') mu2rcut = np.require(ao.sp_mu2rcut[isp], dtype=c_double, requirements='C') ff = np.require(ao.psi_log_rl[isp], 
dtype=c_double, requirements='C') libnao.ao_eval( c_int64(ao.sp2nmult[isp]), ff.ctypes.data_as(POINTER(c_double)), c_int64(ao.nr), c_double(ao.interp_rr.gammin_jt), c_double(ao.interp_rr.dg_jt), mu2j.ctypes.data_as(POINTER(c_int64)), mu2s.ctypes.data_as(POINTER(c_int64)), mu2rcut.ctypes.data_as(POINTER(c_double)), rat_copy.ctypes.data_as(POINTER(c_double)), c_int64(crd_copy.shape[0]), crd_copy.ctypes.data_as(POINTER(c_double)), c_int64(ao.sp2norbs[isp]), res_copy.ctypes.data_as(POINTER(c_double)), c_int64(res.shape[1]) ) res = res_copy return 0 # # See above # def ao_eval_libnao(ao, ra, isp, coords): res = np.zeros((ao.sp2norbs[isp],coords.shape[0]), dtype='float64') ao_eval_libnao_(ao, ra, isp, coords, res) return res if __name__ == '__main__': from pyscf.nao.m_system_vars import system_vars_c from pyscf.nao.m_ao_eval import ao_eval from pyscf.nao.m_ao_eval_libnao import ao_eval_libnao sv = system_vars_c() ra = np.array([0.3, -0.5, 0.77], dtype='float64') #coords = np.array([[0.07716887, 2.82933578, 3.73214881]]) coords = np.random.rand(35580,3)*5.0 print('ao_val2 (reference)') ao_val1 = ao_eval(sv.ao_log, ra, 0, coords) print('ao_val2_libnao') ao_val2 = ao_eval_libnao(sv.ao_log, ra, 0, coords) print(np.allclose(ao_val1,ao_val2)) for iorb,[oo1,oo2] in enumerate(zip(ao_val1,ao_val2)): print(iorb, abs(oo1-oo2).argmax(), abs(oo1-oo2).max(), coords[abs(oo1-oo2).argmax(),:])<|fim▁end|>
POINTER(c_int64), # nmult POINTER(c_double), # psi_log_rl POINTER(c_int64), # nr POINTER(c_double), # rhomin_jt
<|file_name|>test_bnet_calculation.py<|end_file_name|><|fim▁begin|># RABDAM # Copyright (C) 2020 Garman Group, University of Oxford # This file is part of RABDAM. # RABDAM is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # RABDAM is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General # Public License along with this program. If not, see # <http://www.gnu.org/licenses/>. # An outer layer to the pipeline scripts. Depending upon the flags specified # in the command line input, this script will run either the complete / a # subsection of the pipeline. # python -m unittest tests/test_bnet_calculation.py import os import unittest from rabdam.Subroutines.CalculateBDamage import rabdam class TestClass(unittest.TestCase): def test_bnet_values(self): """ Checks that RABDAM calculates expected Bnet values for a selection of PDB entries """ import os import requests import shutil import pandas as pd exp_bnet_dict = {'2O2X': 3.300580966, '4EZF': 3.193514624, '4MWU': 3.185476349, '4MOV': 3.144130191, '3NBM': 3.141821366, '1GW1': 3.105626889, '4EWE': 3.08241654, '3F1P': 3.060628186, '3IV0': 3.054440912, '4ZWV': 3.017330004, '1T2I': 3.004830448, '3LX3': 2.962424378, '5P4N': 2.916582486, '5MAA': 2.91219352, '1E73': 2.850203561, '1YKI': 2.797739814, '4WA4': 2.720540993, '3V2J': 2.669599635, '3CUI': 2.666605946, '4XLA': 2.624366813, '4DUK': 2.854175949, '3V38': 2.500984382, '1VJF': 2.496374854, '5IO2': 2.467587911, '5CM7': 2.44869046, '2EHU': 2.448290431, '5JOW': 2.439619791, '2C54': 2.379224017, '4GZK': 2.349526276, '2NUM': 2.326904729, '5FYO': 2.319618192, 
'4ODK': 2.304354685, '6EV4': 2.302433369, '5P5U': 2.288966997, '3VHV': 2.285877338, '4JCK': 2.27150332, '5EKM': 2.258574341, '3H4O': 2.231817033, '5JIG': 2.247664542, '2H5S': 2.206850226, '4M5I': 2.169405117, '1Y59': 2.138787261, '4C45': 2.131256276, '5F90': 2.11287042, '4NI3': 2.088735516, '4Z6N': 2.083743584, '5M2G': 2.06566475, '5ER6': 2.05707889, '4R0X': 2.006996308, '5LLG': 1.981501196, '1FCX': 1.976990791, '5M90': 1.96542442, '3NJK': 1.955577757, '5CWG': 1.949818624, '2P7O': 1.921138477, '5SZC': 1.962633169, '2I0K': 1.901555841, '4RDK': 1.886900766, '5MA0': 1.877853781, '4C1E': 1.877575448, '5EJ3': 1.875439995, '2WUG': 1.87334953, '4MPY': 1.842338963, '4OTZ': 1.835716553, '4IOO': 1.828349113, '4Z6O': 1.800528596, '4ZOT': 1.799163077, '5PHB': 1.783879628, '3UJC': 1.747894856, '4FR8': 1.738876799, '5PH8': 1.736825591, '5UPM': 1.736663507, '3MWX': 1.733132746, '4KDX': 1.729650659, '3WH5': 1.717975404, '4P04': 1.714107945, '5Y90': 1.695283923, '4H31': 1.674014779, '5HJE': 1.662869176, '4YKK': 1.653894709, '1Q0F': 1.646880018, '5JP6': 1.629246723, '1X7Y': 1.618817315, '4ZC8': 1.60606196, '5EPE': 1.604407869, '4ZS9': 1.582398487, '5VNX': 1.543824945, '5IHV': 1.542271159, '5J90': 1.526469901, '4K6W': 1.520316883, '3PBC': 1.512738972, '5CMB': 1.504620762, '4PSC': 1.491796934,<|fim▁hole|> '3A54': 1.319587779} if not os.path.isdir('tests/temp_files/'): os.mkdir('tests/temp_files/') for code, exp_bnet in exp_bnet_dict.items(): # Checks cif file cif_text = requests.get('https://files.rcsb.org/view/%s.cif' % code) with open('tests/temp_files/%s.cif' % code, 'w') as f: f.write(cif_text.text) rabdam_run = rabdam( pathToInput='%s/tests/temp_files/%s.cif' % (os.getcwd(), code), outputDir='%s/tests/temp_files/' % os.getcwd(), batchRun=True, overwrite=True, PDT=7, windowSize=0.02, protOrNA='protein', HETATM=False, removeAtoms=[], addAtoms=[], highlightAtoms=[], createOrigpdb=False, createAUpdb=False, createUCpdb=False, createAUCpdb=False, createTApdb=False ) 
rabdam_run.rabdam_dataframe(test=True) rabdam_run.rabdam_analysis( output_options=['csv', 'pdb', 'cif', 'kde', 'bnet', 'summary'] ) bnet_df = pd.read_pickle('tests/temp_files/Logfiles/Bnet_protein.pkl') act_bnet_cif = bnet_df['Bnet'].tolist()[-1] self.assertEqual(round(exp_bnet, 7), round(act_bnet_cif, 7)) os.remove('tests/temp_files/%s.cif' % code) os.remove('tests/temp_files/Logfiles/Bnet_protein.pkl') # Checks PDB file pdb_text = requests.get('https://files.rcsb.org/view/%s.pdb' % code) with open('tests/temp_files/%s.pdb' % code, 'w') as f: f.write(pdb_text.text) rabdam_run = rabdam( pathToInput='%s/tests/temp_files/%s.pdb' % (os.getcwd(), code), outputDir='%s/tests/temp_files/' % os.getcwd(), batchRun=True, overwrite=True, PDT=7, windowSize=0.02, protOrNA='protein', HETATM=False, removeAtoms=[], addAtoms=[], highlightAtoms=[], createOrigpdb=False, createAUpdb=False, createUCpdb=False, createAUCpdb=False, createTApdb=False ) rabdam_run.rabdam_dataframe(test=True) rabdam_run.rabdam_analysis( output_options=['csv', 'pdb', 'cif', 'kde', 'bnet', 'summary'] ) bnet_df = pd.read_pickle( '%s/tests/temp_files/Logfiles/Bnet_protein.pkl' % os.getcwd() ) act_bnet_pdb = bnet_df['Bnet'].tolist()[-1] self.assertEqual(round(exp_bnet, 7), round(act_bnet_pdb, 7)) os.remove('tests/temp_files/%s.pdb' % code) os.remove('tests/temp_files/Logfiles/Bnet_protein.pkl') shutil.rmtree('tests/temp_files/')<|fim▁end|>
'5UPN': 1.477252783, '4XLZ': 1.473298738, '4XGY': 1.465885549, '5M4G': 1.400219288,
<|file_name|>exporter.py<|end_file_name|><|fim▁begin|>""" Base class for exporters """ # Standard library modules. import os # Third party modules. # Local modules. from pyhmsa.util.monitorable import _Monitorable, _MonitorableThread # Globals and constants variables. class _ExporterThread(_MonitorableThread): def __init__(self, datafile, dirpath, *args, **kwargs): args = (datafile, dirpath,) + args super().__init__(args=args, kwargs=kwargs) def _run(self, datafile, dirpath, *args, **kwargs): raise NotImplementedError class _Exporter(_Monitorable): def _create_thread(self, datafile, dirpath, *args, **kwargs): args = (datafile, dirpath,) + args super()._create_thread(*args, **kwargs) def validate(self, datafile): pass def can_export(self, datafile): try: self.validate(datafile) except: return False else: return True def export(self, datafile, dirpath): self.validate(datafile) if not os.path.exists(dirpath): raise ValueError('Path does not exist: %s' % dirpath) if not os.path.isdir(dirpath):<|fim▁hole|><|fim▁end|>
raise ValueError('Path is not a directory: %s' % dirpath) self._start(datafile, dirpath)
<|file_name|>test_psrfits_to_sdfits.py<|end_file_name|><|fim▁begin|>"""Unit tests for psrfits_to_sdfits.py.""" import unittest import sys import scipy as sp import numpy.random as rand import psrfits_to_sdfits as p2s import kiyopy.custom_exceptions as ce class TestFormatData(unittest.TestCase) : def setUp(self) : self.ntime = 5 self.npol = 4 self.nfreq = 10 self.good_data = sp.empty((self.ntime, self.npol, self.nfreq), dtype=int) self.good_data[:,:,:] = sp.reshape(sp.arange(self.ntime*self.nfreq), (self.ntime, 1, self.nfreq)) self.good_data[:,0,:] += 100 self.good_data[:,1:,:] -= self.ntime*self.nfreq//2 self.raw_data = sp.empty((self.ntime, self.npol, self.nfreq), dtype=sp.uint8) self.raw_data[:,0,:] = self.good_data[:,0,:] self.raw_data.dtype = sp.int8 self.raw_data[:,1:,:] = self.good_data[:,1:,:] self.raw_data.dtype = sp.uint8 self.raw_data = self.raw_data.flatten() def test_runs(self) : p2s.format_data(self.raw_data, self.ntime, self.npol, self.nfreq) def test_requires_uint(self) : self.assertRaises(TypeError, p2s.format_data, self.good_data, self.ntime, self.npol, self.nfreq) def test_right_answer(self): reformated = p2s.format_data(self.raw_data, self.ntime, self.npol, self.nfreq) self.assertTrue(sp.allclose(reformated, self.good_data)) class TestFoldOnCal(unittest.TestCase) : def setUp(self): self.ntime = 2048 self.nfreq = 10 self.data = sp.zeros((self.ntime, 4, self.nfreq)) self.n_bins_cal = 64 # Set channel dependant gain. self.level = 0.1*(self.nfreq + sp.arange(self.nfreq)) # Add noise. self.data[:,:,:] += (0.1 * self.level * rand.randn(self.ntime, 4, self.nfreq)) # Add DC level. self.dc = 10 * self.level self.data += self.dc # First can transition. self.first_trans = rand.randint(0, self.n_bins_cal // 2) # The following randomly assigns self.neg to -1 or 1. 
self.neg = 0 while not self.neg: self.neg = rand.randint(-1, 2) # First upward edge: if self.neg == 1: self.offset = self.first_trans else: self.offset = self.first_trans + self.n_bins_cal // 2 self.data[:,0,:] += self.level for ii in range(self.ntime//self.n_bins_cal) : s = slice(self.first_trans + ii*self.n_bins_cal, self.first_trans + (2*ii+1)*self.n_bins_cal//2) self.data[s, 0, :] += self.neg * self.level # Transition values and locations. self.t_slice = slice(self.first_trans, sys.maxint, self.n_bins_cal//2) self.t_vals = 0.5 + 0.1 * rand.randn(2*self.ntime//self.n_bins_cal, self.nfreq) self.t_vals *= - self.level def test_runs(self) : p2s.get_cal_mask(self.data, self.n_bins_cal) def test_right_answer_basic(self) : first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal) self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal) self.assertEqual(n_blank, 2) def test_right_answer_partial(self) : self.data[self.t_slice, 0, :] += self.t_vals first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal) self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal) self.assertEqual(n_blank, 1) def test_checks_cal_per(self) : self.assertRaises(ValueError, p2s.get_cal_mask, self.data, self.n_bins_cal + 1) def test_fails_to_many_transitions(self) : self.data[self.t_slice, 0, :] += self.t_vals self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal*2) self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal//2) def test_fails_any_nan(self) : self.data[self.t_slice,0,:] = float('nan') self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal) def test_fails_offs_in_ons(self) : self.data[self.t_slice, 0, :] += self.t_vals s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint, self.n_bins_cal) self.data[s, :, :] = self.dc self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal) <|fim▁hole|> self.data[self.t_slice, 0, :] = self.dc s = slice(self.offset+1, 
sys.maxint, self.n_bins_cal) self.data[s, :, :] = self.dc self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal) def test_fails_to_many_semi_bins(self) : self.data[self.t_slice, 0, :] += self.t_vals s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint, self.n_bins_cal) self.data[s, :, :] = self.dc + self.level * 0.7 self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data, self.n_bins_cal) def test_fast_flagger(self): for ii in range(self.ntime * self.nfreq * 4 // self.n_bins_cal // 10): #for ii in range(3): i_f = rand.randint(0, self.nfreq) i_t = rand.randint(0, self.ntime) i_p = rand.randint(0, 4) self.data[i_t,i_p,i_f] += self.level[i_f] * 5 data, weights = p2s.separate_cal(self.data, self.n_bins_cal, flag=10) right_answer = sp.empty((4, 2, self.nfreq)) right_answer[...] = self.dc right_answer[0,0,:] += self.level self.assertTrue(sp.allclose(data, right_answer, atol=self.level / 10)) self.assertTrue(sp.all(weights <= 1.)) kept_fraction = 1. - 4./self.n_bins_cal - (4./self.n_bins_cal/10) self.assertTrue(sp.allclose(sp.mean(weights), kept_fraction, rtol=1e-3)) class TestSeparateCal(unittest.TestCase) : """Unlike the tests for get_cal_mask, these tests are tightly controled with no noise so we can detect deviations from expected.""" def setUp(self) : self.ntime = 2048 self.nfreq = 10 self.data = sp.zeros((self.ntime, 4, self.nfreq)) self.n_bins_cal = 64 self.offset = 10 def post_setup(self) : if self.offset > self.n_bins_cal//2 : last_on_start = (self.offset + self.n_bins_cal//2)% self.n_bins_cal self.data[:last_on_start, :, :] = 1 for ii in range(self.ntime//self.n_bins_cal) : s = slice(self.offset + ii*self.n_bins_cal, self.offset + (2*ii+1)*self.n_bins_cal//2) self.data[s, :, :] = 1 self.t_slice_on = slice(self.offset, sys.maxint, self.n_bins_cal) self.t_slice_off = slice((self.offset + self.n_bins_cal//2)%self.n_bins_cal, sys.maxint, self.n_bins_cal) def check_answer(self) : data = self.data.copy() outdata, weights = 
p2s.separate_cal(data, self.n_bins_cal, flag=-1) self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0)) self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0)) data = self.data.copy() outdata, weights = p2s.separate_cal(data, self.n_bins_cal, flag=10) self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0)) self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0)) def test_works_no_transition(self) : self.post_setup() self.check_answer() def test_works_transition(self) : self.post_setup() self.data[self.t_slice_off, :, :] = 0.3 self.data[self.t_slice_on, :, :] = 0.5 self.check_answer() # Move the offset to the the second half and make sure it works. def test_works_no_transition_late(self) : self.offset = 57 self.post_setup() self.check_answer() def test_works_transition_late(self) : self.offset = 57 self.post_setup() self.data[self.t_slice_off, :, :] = 0.3 self.data[self.t_slice_on, :, :] = 0.5 self.check_answer() # Test offset = 63 def test_works_no_transition__1(self) : self.offset = 63 self.post_setup() self.check_answer() def test_works_transition__1(self) : self.offset = 63 self.post_setup() self.data[self.t_slice_off, :, :] = 0.3 self.data[self.t_slice_on, :, :] = 0.5 self.check_answer() # Test offset = 32 def test_works_no_transition_32(self) : self.offset = 32 self.post_setup() self.check_answer() def test_works_transition_32(self) : self.offset = 32 self.post_setup() self.data[self.t_slice_off, :, :] = 0.3 self.data[self.t_slice_on, :, :] = 0.5 self.check_answer() # Test offset = 0 def test_works_no_transition_0(self) : self.offset = 0 self.post_setup() self.check_answer() def test_works_transition_0(self) : self.offset = 0 self.post_setup() self.data[self.t_slice_off, :, :] = 0.3 self.data[self.t_slice_on, :, :] = 0.5 self.check_answer() if __name__ == '__main__' : unittest.main()<|fim▁end|>
def test_fails_late_on(self) :
<|file_name|>validators.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import re # from flask_restful import inputs # objectid = inputs.regex('^[0-9a-z]{24}$')<|fim▁hole|> message = 'ciao' if not value: return None pattern = re.compile('^[0-9a-z]{24}$') if not pattern.match(value): raise ValueError(message) return value<|fim▁end|>
def objectid(value):
<|file_name|>ols_table.py<|end_file_name|><|fim▁begin|>"""Example: statsmodels.OLS """ from statsmodels.datasets.longley import load import statsmodels.api as sm from statsmodels.iolib.table import SimpleTable, default_txt_fmt<|fim▁hole|> data_orig = (data.endog.copy(), data.exog.copy()) #.. Note: In this example using zscored/standardized variables has no effect on #.. regression estimates. Are there no numerical problems? rescale = 0 #0: no rescaling, 1:demean, 2:standardize, 3:standardize and transform back rescale_ratio = data.endog.std() / data.exog.std(0) if rescale > 0: # rescaling data.endog -= data.endog.mean() data.exog -= data.exog.mean(0) if rescale > 1: data.endog /= data.endog.std() data.exog /= data.exog.std(0) #skip because mean has been removed, but dimension is hardcoded in table data.exog = sm.tools.add_constant(data.exog, prepend=False) ols_model = sm.OLS(data.endog, data.exog) ols_results = ols_model.fit() # the Longley dataset is well known to have high multicollinearity # one way to find the condition number is as follows #Find OLS parameters for model with one explanatory variable dropped resparams = np.nan * np.ones((7, 7)) res = sm.OLS(data.endog, data.exog).fit() resparams[:, 0] = res.params indall = range(7) for i in range(6): ind = indall[:] del ind[i] res = sm.OLS(data.endog, data.exog[:, ind]).fit() resparams[ind, i + 1] = res.params if rescale == 1: pass if rescale == 3: resparams[:-1, :] *= rescale_ratio[:, None] txt_fmt1 = default_txt_fmt numformat = '%10.4f' txt_fmt1 = dict(data_fmts=[numformat]) rowstubs = data.names[1:] + ['const'] headers = ['all'] + ['drop %s' % name for name in data.names[1:]] tabl = SimpleTable(resparams, headers, rowstubs, txt_fmt=txt_fmt1) nanstring = numformat % np.nan nn = len(nanstring) nanrep = ' ' * (nn - 1) nanrep = nanrep[:nn // 2] + '-' + nanrep[nn // 2:] print('Longley data - sensitivity to dropping an explanatory variable') print(str(tabl).replace(nanstring, nanrep))<|fim▁end|>
import numpy as np data = load()
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ mod subscription; mod watchman_file; pub use subscription::{ WatchmanFileSourceResult, WatchmanFileSourceSubscription, WatchmanFileSourceSubscriptionNextChange, }; pub use watchman_file::WatchmanFile;<|fim▁end|>
<|file_name|>returnOnTop.js<|end_file_name|><|fim▁begin|>$(document).ready( function () { // Add return on top button $('body').append('<div id="returnOnTop" title="Retour en haut">&nbsp;</div>'); // On button click, let's scroll up to top $('#returnOnTop').click( function() { $('html,body').animate({scrollTop: 0}, 'slow'); }); });<|fim▁hole|> $('#returnOnTop').fadeOut(); else $('#returnOnTop').fadeIn(); });<|fim▁end|>
$(window).scroll(function() { // If on top fade the bouton out, else fade it in if ( $(window).scrollTop() == 0 )
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>pub type cc_t = ::uchar_t; pub type speed_t = ::uint_t; pub type tcflag_t = ::uint_t; #[repr(C)] #[derive(Copy)] pub struct termios { pub c_iflag: tcflag_t, pub c_oflag: tcflag_t, pub c_cflag: tcflag_t, pub c_lflag: tcflag_t, pub c_line: cc_t,<|fim▁hole|> pub c_ispeed: speed_t, pub c_ospeed: speed_t, } new!(termios); pub const NCCS: usize = 32; pub const VEOF: usize = 4; pub const VEOL: usize = 11; pub const VERASE: usize = 2; pub const VINTR: usize = 0; pub const VKILL: usize = 3; pub const VMIN: usize = 6; pub const VQUIT: usize = 1; pub const VSTART: usize = 8; pub const VSTOP: usize = 9; pub const VSUSP: usize = 10; pub const VTIME: usize = 5; pub const BRKINT: tcflag_t = 0o000002; pub const ICRNL: tcflag_t = 0o000400; pub const IGNBRK: tcflag_t = 0o000001; pub const IGNCR: tcflag_t = 0o000200; pub const IGNPAR: tcflag_t = 0o000004; pub const INLCR: tcflag_t = 0o000100; pub const INPCK: tcflag_t = 0o000020; pub const ISTRIP: tcflag_t = 0o000040; pub const IXANY: tcflag_t = 0o004000; pub const IXOFF: tcflag_t = 0o010000; pub const IXON: tcflag_t = 0o002000; pub const PARMRK: tcflag_t = 0o000010; pub const OPOST: tcflag_t = 0o000001; pub const ONLCR: tcflag_t = 0o000004; pub const OCRNL: tcflag_t = 0o000010; pub const ONOCR: tcflag_t = 0o000020; pub const ONLRET: tcflag_t = 0o000040; pub const OFDEL: tcflag_t = 0o000200; pub const OFILL: tcflag_t = 0o000100; pub const NLDLY: tcflag_t = 0o000400; pub const NL0: tcflag_t = 0o000000; pub const NL1: tcflag_t = 0o000400; pub const CRDLY: tcflag_t = 0o003000; pub const CR0: tcflag_t = 0o000000; pub const CR1: tcflag_t = 0o001000; pub const CR2: tcflag_t = 0o002000; pub const CR3: tcflag_t = 0o003000; pub const TABDLY: tcflag_t = 0o014000; pub const TAB0: tcflag_t = 0o000000; pub const TAB1: tcflag_t = 0o004000; pub const TAB2: tcflag_t = 0o010000; pub const TAB3: tcflag_t = 0o014000; pub const BSDLY: tcflag_t = 0o020000; pub const BS0: tcflag_t = 0o000000; pub const BS1: 
tcflag_t = 0o020000; pub const VTDLY: tcflag_t = 0o040000; pub const VT0: tcflag_t = 0o000000; pub const VT1: tcflag_t = 0o040000; pub const FFDLY: tcflag_t = 0o100000; pub const FF0: tcflag_t = 0o000000; pub const FF1: tcflag_t = 0o100000; pub const B0: tcflag_t = 0o000000; pub const B50: tcflag_t = 0o000001; pub const B75: tcflag_t = 0o000002; pub const B110: tcflag_t = 0o000003; pub const B134: tcflag_t = 0o000004; pub const B150: tcflag_t = 0o000005; pub const B200: tcflag_t = 0o000006; pub const B300: tcflag_t = 0o000007; pub const B600: tcflag_t = 0o000010; pub const B1200: tcflag_t = 0o000011; pub const B1800: tcflag_t = 0o000012; pub const B2400: tcflag_t = 0o000013; pub const B4800: tcflag_t = 0o000014; pub const B9600: tcflag_t = 0o000015; pub const B19200: tcflag_t = 0o000016; pub const B38400: tcflag_t = 0o000017; pub const CSIZE: tcflag_t = 0o000060; pub const CS5: tcflag_t = 0o000000; pub const CS6: tcflag_t = 0o000020; pub const CS7: tcflag_t = 0o000040; pub const CS8: tcflag_t = 0o000060; pub const CSTOPB: tcflag_t = 0o000100; pub const CREAD: tcflag_t = 0o000200; pub const PARENB: tcflag_t = 0o000400; pub const PARODD: tcflag_t = 0o001000; pub const HUPCL: tcflag_t = 0o002000; pub const CLOCAL: tcflag_t = 0o004000; pub const ECHO: tcflag_t = 0o000010; pub const ECHOE: tcflag_t = 0o000020; pub const ECHOK: tcflag_t = 0o000040; pub const ECHONL: tcflag_t = 0o000100; pub const ICANON: tcflag_t = 0o000002; pub const IEXTEN: tcflag_t = 0o100000; pub const ISIG: tcflag_t = 0o000001; pub const NOFLSH: tcflag_t = 0o000200; pub const TOSTOP: tcflag_t = 0o000400; pub const TCSANOW: ::int_t = 0; pub const TCSADRAIN: ::int_t = 1; pub const TCSAFLUSH: ::int_t = 2; pub const TCIFLUSH: ::int_t = 0; pub const TCIOFLUSH: ::int_t = 2; pub const TCOFLUSH: ::int_t = 1; pub const TCIOFF: ::int_t = 2; pub const TCION: ::int_t = 3; pub const TCOOFF: ::int_t = 0; pub const TCOON: ::int_t = 1;<|fim▁end|>
pub c_cc: [cc_t; 32],
<|file_name|>generator.go<|end_file_name|><|fim▁begin|>package soa import ( "encoding/json" "fmt" "strconv" "time" "github.com/Shark/powerdns-consul/backend/store" ) type soaEntry struct { NameServer string EmailAddr string Sn uint32 Refresh int32 Retry int32 Expiry int32 Nx int32 } type soaRevision struct { SnModifyIndex uint64 SnDate int SnVersion uint32 } type GeneratorConfig struct { SoaNameServer string SoaEmailAddr string SoaRefresh int32 SoaRetry int32 SoaExpiry int32 SoaNx int32 DefaultTTL uint32 } type Generator struct { cfg *GeneratorConfig currentTime time.Time } func NewGenerator(cfg *GeneratorConfig, currentTime time.Time) *Generator { return &Generator{cfg, currentTime} } func (g *Generator) RetrieveOrCreateSOAEntry(kv store.Store, zone string) (entry *store.Entry, err error) { tries := 3 for tries > 0 { entry, err = g.tryToRetrieveOrCreateSOAEntry(kv, zone) if err != nil { return nil, err } if entry != nil { return entry, err } tries-- } return nil, nil } <|fim▁hole|> if err != nil { return nil, err } var lastModifyIndex uint64 for _, pair := range pairs { if lastModifyIndex == 0 || pair.LastIndex() > lastModifyIndex { lastModifyIndex = pair.LastIndex() } } key := fmt.Sprintf("soa/%s", zone) revEntryPair, err := kv.Get(key) if err != nil && err != store.ErrKeyNotFound { return nil, err } rev := soaRevision{} if revEntryPair != nil { // use existing revision err = json.Unmarshal(revEntryPair.Value(), &rev) if err != nil { return nil, err } if rev.SnModifyIndex != lastModifyIndex { // update the modify index rev.SnModifyIndex = lastModifyIndex curSnDate := getDateFormatted(g.currentTime) if rev.SnDate != curSnDate { rev.SnDate = curSnDate rev.SnVersion = 0 } else { // TODO: what about SnVersion > 99? 
rev.SnVersion += 1 } } // else nothing to do } else { // create a new revision rev.SnDate = getDateFormatted(g.currentTime) rev.SnVersion = 0 rev.SnModifyIndex = lastModifyIndex } json, err := json.Marshal(rev) if err != nil { return nil, err } ok, _, err := kv.AtomicPut(key, json, revEntryPair, nil) if err != nil || !ok { return nil, err } soa := &soaEntry{NameServer: g.cfg.SoaNameServer, EmailAddr: g.cfg.SoaEmailAddr, Sn: formatSoaSn(rev.SnDate, rev.SnVersion), Refresh: g.cfg.SoaRefresh, Retry: g.cfg.SoaRetry, Expiry: g.cfg.SoaExpiry, Nx: g.cfg.SoaNx} soaAsEntry := formatSoaEntry(soa, g.cfg.DefaultTTL) return soaAsEntry, nil } func formatSoaSn(snDate int, snVersion uint32) (sn uint32) { soaSnString := fmt.Sprintf("%d%02d", snDate, snVersion) soaSnInt, err := strconv.Atoi(soaSnString) if err != nil { panic("error generating SoaSn") } return uint32(soaSnInt) } func formatSoaEntry(sEntry *soaEntry, ttl uint32) *store.Entry { value := fmt.Sprintf("%s %s %d %d %d %d %d", sEntry.NameServer, sEntry.EmailAddr, sEntry.Sn, sEntry.Refresh, sEntry.Retry, sEntry.Expiry, sEntry.Nx) return &store.Entry{"SOA", ttl, value} } func getDateFormatted(time time.Time) int { formattedMonthString := fmt.Sprintf("%02d", time.Month()) formattedDayString := fmt.Sprintf("%02d", time.Day()) dateString := fmt.Sprintf("%d%s%s", time.Year(), formattedMonthString, formattedDayString) date, err := strconv.Atoi(dateString) if err != nil { return 0 } return date }<|fim▁end|>
func (g *Generator) tryToRetrieveOrCreateSOAEntry(kv store.Store, zone string) (entry *store.Entry, err error) { prefix := fmt.Sprintf("zones/%s", zone) pairs, err := kv.List(prefix)
<|file_name|>CampaignSharedSetServiceInterfaceget.java<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.jaxws.v201809.cm; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * * Returns a list of CampaignSharedSets based on the given selector. * @param selector the selector specifying the query * @return a list of CampaignSharedSet entities that meet the criterion specified * by the selector * @throws ApiException * * * <p>Java class for get element declaration. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;element name="get"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="selector" type="{https://adwords.google.com/api/adwords/cm/v201809}Selector" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * </pre> * <|fim▁hole|> * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "selector" }) @XmlRootElement(name = "get") public class CampaignSharedSetServiceInterfaceget { protected Selector selector; /** * Gets the value of the selector property. 
* * @return * possible object is * {@link Selector } * */ public Selector getSelector() { return selector; } /** * Sets the value of the selector property. * * @param value * allowed object is * {@link Selector } * */ public void setSelector(Selector value) { this.selector = value; } }<|fim▁end|>
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>""" WSGI config for PythonAnywhere test project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os from django.core.wsgi import get_wsgi_application<|fim▁hole|> # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. application = get_wsgi_application() # Use Whitenoise to serve static files # See: https://whitenoise.readthedocs.org/ application = DjangoWhiteNoise(application) # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)<|fim▁end|>
from whitenoise.django import DjangoWhiteNoise
<|file_name|>resource_exchange_tests.cc<|end_file_name|><|fim▁begin|>#include <set> #include <string> #include <math.h> #include <gtest/gtest.h> #include "agent.h" #include "bid.h" #include "bid_portfolio.h" #include "composition.h" #include "equality_helpers.h" #include "exchange_context.h" #include "facility.h" #include "material.h" #include "request.h" #include "request_portfolio.h" #include "resource_exchange.h" #include "resource_helpers.h" #include "test_context.h" #include "test_agents/test_facility.h" using cyclus::Bid; using cyclus::BidPortfolio; using cyclus::CommodMap; using cyclus::Composition; using cyclus::Context; using cyclus::ExchangeContext; using cyclus::Facility; using cyclus::Material; using cyclus::Agent; using cyclus::PrefMap; using cyclus::Request; using cyclus::RequestPortfolio; using cyclus::ResourceExchange; using cyclus::TestContext; using std::set; using std::string; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - class Requester: public TestFacility { public: Requester(Context* ctx, int i = 1) : TestFacility(ctx), i_(i), req_ctr_(0), pref_ctr_(0) {} virtual cyclus::Agent* Clone() { Requester* m = new Requester(context()); m->InitFrom(this); m->i_ = i_; m->port_ = port_; return m; } set<RequestPortfolio<Material>::Ptr> GetMatlRequests() { set<RequestPortfolio<Material>::Ptr> rps; RequestPortfolio<Material>::Ptr rp(new RequestPortfolio<Material>()); rps.insert(port_); req_ctr_++; return rps; } // increments counter and squares all preferences virtual void AdjustMatlPrefs(PrefMap<Material>::type& prefs) { std::map<Request<Material>*, std::map<Bid<Material>*, double> >::iterator p_it; for (p_it = prefs.begin(); p_it != prefs.end(); ++p_it) { std::map<Bid<Material>*, double>& map = p_it->second; std::map<Bid<Material>*, double>::iterator m_it; for (m_it = map.begin(); m_it != map.end(); ++m_it) { m_it->second = std::pow(m_it->second, 2); } } pref_ctr_++; } RequestPortfolio<Material>::Ptr port_; int i_; int 
pref_ctr_; int req_ctr_; }; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - class Bidder: public TestFacility { public: Bidder(Context* ctx, std::string commod) : TestFacility(ctx), commod_(commod), bid_ctr_(0) {} virtual cyclus::Agent* Clone() { Bidder* m = new Bidder(context(), commod_); m->InitFrom(this); m->port_ = port_; return m; } set<BidPortfolio<Material>::Ptr> GetMatlBids( CommodMap<Material>::type& commod_requests) { set<BidPortfolio<Material>::Ptr> bps; bps.insert(port_); bid_ctr_++; return bps; }<|fim▁hole|> int bid_ctr_; }; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - class ResourceExchangeTests: public ::testing::Test { protected: TestContext tc; Requester* reqr; Bidder* bidr; ResourceExchange<Material>* exchng; string commod; double pref; Material::Ptr mat; Request<Material>* req; Bid<Material>* bid; virtual void SetUp() { commod = "name"; pref = 2.4; cyclus::CompMap cm; cm[92235] = 1.0; Composition::Ptr comp = Composition::CreateFromMass(cm); double qty = 1.0; mat = Material::CreateUntracked(qty, comp); reqr = new Requester(tc.get()); exchng = new ResourceExchange<Material>(tc.get()); } virtual void TearDown() { delete exchng; } }; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TEST_F(ResourceExchangeTests, Requests) { RequestPortfolio<Material>::Ptr rp(new RequestPortfolio<Material>()); req = rp->AddRequest(mat, reqr, commod, pref); reqr->port_ = rp; Facility* clone = dynamic_cast<Facility*>(reqr->Clone()); clone->Build(NULL); Requester* rcast = dynamic_cast<Requester*>(clone); EXPECT_EQ(0, rcast->req_ctr_); exchng->AddAllRequests(); EXPECT_EQ(1, rcast->req_ctr_); EXPECT_EQ(1, exchng->ex_ctx().requesters.size()); ExchangeContext<Material>& ctx = exchng->ex_ctx(); const std::vector<RequestPortfolio<Material>::Ptr>& obsvp = ctx.requests; EXPECT_EQ(1, obsvp.size()); EXPECT_TRUE(RPEq(*rp.get(), *obsvp[0].get())); const 
std::vector<Request<Material>*>& obsvr = ctx.commod_requests[commod]; EXPECT_EQ(1, obsvr.size()); std::vector<Request<Material>*> vr; vr.push_back(req); EXPECT_EQ(vr, obsvr); clone->Decommission(); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TEST_F(ResourceExchangeTests, Bids) { ExchangeContext<Material>& ctx = exchng->ex_ctx(); RequestPortfolio<Material>::Ptr rp(new RequestPortfolio<Material>()); req = rp->AddRequest(mat, reqr, commod, pref); Request<Material>* req1 = rp->AddRequest(mat, reqr, commod, pref); ctx.AddRequestPortfolio(rp); const std::vector<Request<Material>*>& reqs = ctx.commod_requests[commod]; EXPECT_EQ(2, reqs.size()); Bidder* bidr = new Bidder(tc.get(), commod); BidPortfolio<Material>::Ptr bp(new BidPortfolio<Material>()); bid = bp->AddBid(req, mat, bidr); Bid<Material>* bid1 = bp->AddBid(req1, mat, bidr); std::vector<Bid<Material>*> bids; bids.push_back(bid); bids.push_back(bid1); bidr->port_ = bp; Facility* clone = dynamic_cast<Facility*>(bidr->Clone()); clone->Build(NULL); Bidder* bcast = dynamic_cast<Bidder*>(clone); EXPECT_EQ(0, bcast->bid_ctr_); exchng->AddAllBids(); EXPECT_EQ(1, bcast->bid_ctr_); EXPECT_EQ(1, exchng->ex_ctx().bidders.size()); const std::vector<BidPortfolio<Material>::Ptr>& obsvp = ctx.bids; EXPECT_EQ(1, obsvp.size()); EXPECT_TRUE(BPEq(*bp.get(), *obsvp[0].get())); const cyclus::BidPortfolio<Material>& lhs = *bp; const cyclus::BidPortfolio<Material>& rhs = *obsvp[0]; EXPECT_TRUE(BPEq(*bp, *obsvp[0])); const std::vector<Bid<Material>*>& obsvb = ctx.bids_by_request[req]; EXPECT_EQ(1, obsvb.size()); std::vector<Bid<Material>*> vb; vb.push_back(bid); EXPECT_EQ(vb, obsvb); const std::vector<Bid<Material>*>& obsvb1 = ctx.bids_by_request[req1]; EXPECT_EQ(1, obsvb1.size()); vb.clear(); vb.push_back(bid1); EXPECT_EQ(vb, obsvb1); clone->Decommission(); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TEST_F(ResourceExchangeTests, PrefCalls) { Facility* parent = 
dynamic_cast<Facility*>(reqr->Clone()); Facility* child = dynamic_cast<Facility*>(reqr->Clone()); parent->Build(NULL); child->Build(parent); Requester* pcast = dynamic_cast<Requester*>(parent); Requester* ccast = dynamic_cast<Requester*>(child); ASSERT_TRUE(pcast != NULL); ASSERT_TRUE(ccast != NULL); ASSERT_TRUE(pcast->parent() == NULL); ASSERT_TRUE(ccast->parent() == dynamic_cast<Agent*>(pcast)); ASSERT_TRUE(pcast->manager() == dynamic_cast<Agent*>(pcast)); ASSERT_TRUE(ccast->manager() == dynamic_cast<Agent*>(ccast)); // doin a little magic to simulate each requester making their own request RequestPortfolio<Material>::Ptr rp1(new RequestPortfolio<Material>()); Request<Material>* preq = rp1->AddRequest(mat, pcast, commod, pref); pcast->port_ = rp1; RequestPortfolio<Material>::Ptr rp2(new RequestPortfolio<Material>()); Request<Material>* creq = rp2->AddRequest(mat, ccast, commod, pref); ccast->port_ = rp2; EXPECT_EQ(0, pcast->req_ctr_); EXPECT_EQ(0, ccast->req_ctr_); exchng->AddAllRequests(); EXPECT_EQ(2, exchng->ex_ctx().requesters.size()); EXPECT_EQ(1, pcast->req_ctr_); EXPECT_EQ(1, ccast->req_ctr_); EXPECT_EQ(0, pcast->pref_ctr_); EXPECT_EQ(0, ccast->pref_ctr_); EXPECT_NO_THROW(exchng->AdjustAll()); // child gets to adjust once - its own request // parent gets called twice - its request and adjusting its child's request EXPECT_EQ(2, pcast->pref_ctr_); EXPECT_EQ(1, ccast->pref_ctr_); child->Decommission(); parent->Decommission(); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TEST_F(ResourceExchangeTests, PrefValues) { Facility* parent = dynamic_cast<Facility*>(reqr->Clone()); Facility* child = dynamic_cast<Facility*>(reqr->Clone()); parent->Build(NULL); child->Build(parent); Requester* pcast = dynamic_cast<Requester*>(parent); Requester* ccast = dynamic_cast<Requester*>(child); // doin a little magic to simulate each requester making their own request RequestPortfolio<Material>::Ptr rp1(new RequestPortfolio<Material>()); 
Request<Material>* preq = rp1->AddRequest(mat, pcast, commod, pref); pcast->port_ = rp1; RequestPortfolio<Material>::Ptr rp2(new RequestPortfolio<Material>()); Request<Material>* creq = rp2->AddRequest(mat, ccast, commod, pref); ccast->port_ = rp2; Bidder* bidr = new Bidder(tc.get(), commod); BidPortfolio<Material>::Ptr bp(new BidPortfolio<Material>()); Bid<Material>* pbid = bp->AddBid(preq, mat, bidr); Bid<Material>* cbid = bp->AddBid(creq, mat, bidr); std::vector<Bid<Material>*> bids; bids.push_back(pbid); bids.push_back(cbid); bidr->port_ = bp; Facility* bclone = dynamic_cast<Facility*>(bidr->Clone()); bclone->Build(NULL); EXPECT_NO_THROW(exchng->AddAllRequests()); EXPECT_NO_THROW(exchng->AddAllBids()); PrefMap<Material>::type pobs; pobs[preq].insert(std::make_pair(pbid, preq->preference())); PrefMap<Material>::type cobs; cobs[creq].insert(std::make_pair(cbid, creq->preference())); ExchangeContext<Material>& context = exchng->ex_ctx(); EXPECT_EQ(context.trader_prefs[parent], pobs); EXPECT_EQ(context.trader_prefs[child], cobs); EXPECT_NO_THROW(exchng->AdjustAll()); pobs[preq].begin()->second = std::pow(preq->preference(), 2); cobs[creq].begin()->second = std::pow(std::pow(creq->preference(), 2), 2); EXPECT_EQ(context.trader_prefs[parent], pobs); EXPECT_EQ(context.trader_prefs[child], cobs); child->Decommission(); parent->Decommission(); }<|fim▁end|>
BidPortfolio<Material>::Ptr port_; std::string commod_;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
default_app_config = "nnmware.apps.booking.apps.BookingAppConfig"
<|file_name|>nwayBlender.cpp<|end_file_name|><|fim▁begin|>/** * @file NWayBlender.cpp * @brief N-Way shape blend deformer plugin for Maya * @section LICENSE The MIT License * @section requirements: Eigen 3: http://eigen.tuxfamily.org/ * @section Autodesk Maya: http://www.autodesk.com/products/autodesk-maya/overview * @section (included) AffineLib: https://github.com/shizuo-kaji/AffineLib * @version 0.20 * @date 18/Jul/2015 * @author Shizuo KAJI */ #include "StdAfx.h" #include "nwayBlender.h" #include <set> #include <queue> #include <ctime> using namespace Eigen; using namespace AffineLib; using namespace Tetrise; MTypeId nwayDeformerNode::id( 0x00000300 ); MString nwayDeformerNode::nodeName( "nwayBlender" ); MObject nwayDeformerNode::aBlendMode; MObject nwayDeformerNode::aTetMode; MObject nwayDeformerNode::aBlendMesh; MObject nwayDeformerNode::aWeight; MObject nwayDeformerNode::aIteration; MObject nwayDeformerNode::aRotationConsistency; MObject nwayDeformerNode::aVisualiseEnergy; MObject nwayDeformerNode::aVisualisationMultiplier; MObject nwayDeformerNode::aEnergy; MObject nwayDeformerNode::aInitRotation; MObject nwayDeformerNode::aAreaWeighted; MObject nwayDeformerNode::aARAP; // blend matrices template<typename T> void blendMatList(const std::vector< std::vector<T> >& A, const std::vector<double>& weight, std::vector<T>& X){ int numMesh= (int)A.size(); if(numMesh == 0) return; int numTet= (int)A[0].size(); for(int i=0;i<numTet;i++){ X[i].setZero(); for(int j=0;j<numMesh;j++){ X[i] += weight[j]*A[j][i]; } } } template<typename T> void blendMatLinList(const std::vector< std::vector<T> >& A, const std::vector<double>& weight, std::vector<T>& X){ int numMesh= (int)A.size(); if(numMesh == 0) return; int numTet= (int)A[0].size(); for(int i=0;i<numTet;i++){ double sum = 0.0; X[i].setZero(); for(int j=0;j<numMesh;j++){ X[i] += weight[j] * A[j][i]; sum += weight[j]; } X[i] += (1.0-sum) * T::Identity(); } } // blend quaternion linearly void blendQuatList(const std::vector< 
std::vector<Vector4d> >& A, const std::vector<double>& weight, std::vector<Vector4d>& X){ int numMesh= (int)A.size(); if(numMesh == 0) return; int numTet= (int)A[0].size(); Vector4d I(0,0,0,1); for(int i=0;i<numTet;i++){ double sum = 0.0; X[i].setZero(); for(int j=0;j<numMesh;j++){ X[i] += weight[j] * A[j][i]; sum += weight[j]; } X[i] += (1.0-sum) * I; X[i].normalized(); } } // void* nwayDeformerNode::creator() { return new nwayDeformerNode; } MStatus nwayDeformerNode::deform( MDataBlock& data, MItGeometry& itGeo, const MMatrix &localToWorldMatrix, unsigned int mIndex ) { // clock_t clock_start=clock(); MObject thisNode = thisMObject(); MStatus status; MThreadUtils::syncNumOpenMPThreads(); // for OpenMP MArrayDataHandle hBlendMesh = data.inputArrayValue(aBlendMesh); short numIter = data.inputValue( aIteration ).asShort(); short nblendMode = data.inputValue( aBlendMode ).asShort(); short tetMode = data.inputValue( aTetMode ).asShort(); double visualisationMultiplier = data.inputValue(aVisualisationMultiplier).asDouble(); bool visualiseEnergy = data.inputValue( aVisualiseEnergy ).asBool(); bool nrotationCosistency = data.inputValue( aRotationConsistency ).asBool(); MPointArray Mpts; itGeo.allPositions(Mpts); int nnumMesh = hBlendMesh.elementCount(); int numPts = Mpts.length(); // initialisation if(!data.isClean(aARAP)){ // clock_t clock_start=clock(); numMesh = 0; // point list pts.resize(numPts); for(int i=0;i<numPts;i++){ pts[i] << Mpts[i].x, Mpts[i].y, Mpts[i].z; } // make tetrahedral structure getMeshData(data, input, inputGeom, mIndex, tetMode, pts, mesh.tetList, faceList, edgeList, vertexList, mesh.tetMatrix, mesh.tetWeight); mesh.dim = removeDegenerate(tetMode, numPts, mesh.tetList, faceList, edgeList, vertexList, mesh.tetMatrix); makeTetMatrix(tetMode, pts, mesh.tetList, faceList, edgeList, vertexList, mesh.tetMatrix, mesh.tetWeight); makeAdjacencyList(tetMode, mesh.tetList, edgeList, vertexList, adjacencyList); mesh.numTet = (int)mesh.tetList.size()/4; 
mesh.computeTetMatrixInverse(); // prepare ARAP solver if(!data.inputValue( aAreaWeighted ).asBool()){ mesh.tetWeight.clear(); mesh.tetWeight.resize(mesh.numTet,1.0); } mesh.constraintWeight.resize(1); mesh.constraintWeight[0] = std::make_pair(0,1.0); mesh.constraintVal.resize(1,3); mesh.constraintVal(0,0) = pts[0][0]; mesh.constraintVal(0,1) = pts[0][1]; mesh.constraintVal(0,2) = pts[0][2]; isError = mesh.ARAPprecompute(); status = data.setClean(aARAP); // MString es="Init timing: "; // double timing=(double)(clock()- clock_start)/CLOCKS_PER_SEC; // es += timing; // MGlobal::displayInfo(es); } if(isError>0) return MS::kFailure; // if blend mesh is added, compute log for each tet logR.resize(nnumMesh); logS.resize(nnumMesh); R.resize(nnumMesh); S.resize(nnumMesh); GL.resize(nnumMesh); logGL.resize(nnumMesh); quat.resize(nnumMesh); L.resize(nnumMesh); // for recomputation of parametrisation if(numMesh>nnumMesh || nblendMode != blendMode || nrotationCosistency != rotationCosistency){ numMesh =0; blendMode = nblendMode; rotationCosistency = nrotationCosistency; } for(int j=numMesh; j<nnumMesh; j++){ hBlendMesh.jumpToElement(j); MFnMesh blendMesh(hBlendMesh.inputValue().asMesh()); MPointArray Mbpts; blendMesh.getPoints( Mbpts ); if(numPts != Mbpts.length()){ MGlobal::displayInfo("incompatible mesh"); return MS::kFailure; } std::vector<Vector3d> bpts(numPts); for(int i=0;i<numPts;i++){ bpts[i] << Mbpts[i].x, Mbpts[i].y, Mbpts[i].z; } makeTetMatrix(tetMode, bpts, mesh.tetList, faceList, edgeList, vertexList, Q, dummy_weight); logR[j].resize(mesh.numTet); logS[j].resize(mesh.numTet); R[j].resize(mesh.numTet); S[j].resize(mesh.numTet); GL[j].resize(mesh.numTet); L[j].resize(mesh.numTet); for(int i=0;i<mesh.numTet;i++) { Matrix4d aff=mesh.tetMatrixInverse[i]*Q[i]; GL[j][i]=aff.block(0,0,3,3); L[j][i]=transPart(aff); parametriseGL(GL[j][i], logS[j][i] ,R[j][i]); } if( blendMode == BM_LOG3){ logGL[j].resize(mesh.numTet); for(int i=0;i<mesh.numTet;i++) 
logGL[j][i]=GL[j][i].log(); }else if( blendMode == BM_SQL){ quat[j].resize(mesh.numTet); for(int i=0;i<mesh.numTet;i++){ S[j][i]=expSym(logS[j][i]); Quaternion<double> q(R[j][i].transpose()); quat[j][i] << q.x(), q.y(), q.z(), q.w(); } }else if( blendMode == BM_SlRL){ for(int i=0;i<mesh.numTet;i++){ S[j][i]=expSym(logS[j][i]); } } // traverse tetrahedra to compute continuous log of rotation if(rotationCosistency){ std::set<int> remain; std::queue<int> later; // load initial rotation from the attr Matrix3d initR; double angle = data.inputValue(aInitRotation).asDouble(); initR << 0,M_PI * angle/180.0,0, -M_PI * angle/180.0,0,0, 0,0,0; std::vector<Matrix3d> prevSO(mesh.numTet, initR); // create the adjacency graph to traverse for(int i=0;i<mesh.numTet;i++){ remain.insert(remain.end(),i); } while(!remain.empty()){ int next; if( !later.empty()){ next = later.front(); later.pop(); remain.erase(next); }else{ next = *remain.begin(); remain.erase(remain.begin()); } logR[j][next]=logSOc(R[j][next],prevSO[next]); for(int k=0;k<adjacencyList[next].size();k++){ int f=adjacencyList[next][k]; if(remain.erase(f)>0){ prevSO[f]=logR[j][next]; later.push(f); } } } }else{ for(int i=0;i<mesh.numTet;i++) logR[j][i] = logSO(R[j][i]); } } numMesh=nnumMesh; if(numMesh == 0) return MS::kSuccess; // load weights std::vector<double> weight(numMesh); MArrayDataHandle hWeight = data.inputArrayValue(aWeight); if(hWeight.elementCount() != numMesh) { return MS::kSuccess; } for(int i=0;i<numMesh;i++){ hWeight.jumpToArrayElement(i); weight[i]=hWeight.inputValue().asDouble(); } // compute ideal affine std::vector<Vector3d> new_pts(numPts); std::vector<Matrix4d> A(mesh.numTet); std::vector<Matrix3d> AR(mesh.numTet),AS(mesh.numTet); std::vector<Vector3d> AL(mesh.numTet); blendMatList(L, weight, AL); if(blendMode==BM_SRL){ blendMatList(logR, weight, AR); blendMatList(logS, weight, AS); #pragma omp parallel for for(int i=0;i<mesh.numTet;i++){ AR[i] = expSO(AR[i]); AS[i] = expSym(AS[i]); } }else 
if(blendMode == BM_LOG3){ // log blendMatList(logGL, weight, AR); #pragma omp parallel for for(int i=0;i<mesh.numTet;i++){ AR[i] = AR[i].exp(); AS[i] = Matrix3d::Identity(); } }else if(blendMode == BM_SQL){ // quaternion std::vector<Vector4d> Aq(mesh.numTet); blendMatLinList(S, weight, AS); blendQuatList(quat, weight, Aq); #pragma omp parallel for for(int i=0;i<mesh.numTet;i++){ Quaternion<double> Q(Aq[i]); AR[i] = Q.matrix().transpose(); } }else if(blendMode == BM_SlRL){ // expSO+linear Sym blendMatList(logR, weight, AR); blendMatLinList(S, weight, AS); #pragma omp parallel for for(int i=0;i<mesh.numTet;i++){ AR[i] = expSO(AR[i]); } }else if(blendMode == BM_AFF){ // linear <|fim▁hole|> blendMatLinList(GL, weight, AR); for(int i=0;i<mesh.numTet;i++){ AS[i] = Matrix3d::Identity(); } }else{ return MS::kFailure; } std::vector<double> tetEnergy(mesh.numTet); // iterate to determine vertices position for(int k=0;k<numIter;k++){ for(int i=0;i<mesh.numTet;i++){ A[i]=pad(AS[i]*AR[i],AL[i]); } // solve ARAP mesh.ARAPSolve(A); // set new vertices position for(int i=0;i<numPts;i++){ new_pts[i][0]=mesh.Sol(i,0); new_pts[i][1]=mesh.Sol(i,1); new_pts[i][2]=mesh.Sol(i,2); } // if iteration continues if(k+1<numIter || visualiseEnergy){ makeTetMatrix(tetMode, new_pts, mesh.tetList, faceList, edgeList, vertexList, Q, dummy_weight); Matrix3d S,R; #pragma omp parallel for for(int i=0;i<mesh.numTet;i++) { polarHigham((mesh.tetMatrixInverse[i]*Q[i]).block(0,0,3,3), S, AR[i]); tetEnergy[i] = (S-AS[i]).squaredNorm(); } } } // set new vertex position for(int i=0;i<numPts;i++){ Mpts[i].x=mesh.Sol(i,0); Mpts[i].y=mesh.Sol(i,1); Mpts[i].z=mesh.Sol(i,2); } itGeo.setAllPositions(Mpts); // set vertex color according to ARAP energy if(visualiseEnergy){ std::vector<double> ptsEnergy; makePtsWeightList(tetMode, numPts, mesh.tetList, faceList, edgeList, vertexList, tetEnergy, ptsEnergy); //double max_energy = *std::max_element(ptsEnergy.begin(), ptsEnergy.end()); outputAttr(data, aEnergy, 
ptsEnergy); for(int i=0;i<numPts;i++){ ptsEnergy[i] *= visualisationMultiplier; // or /= max_energy } visualise(data, outputGeom, ptsEnergy); } // MString es="Runtime timing: "; // double timing=(double)(clock()- clock_start)/CLOCKS_PER_SEC; // es += timing; // MGlobal::displayInfo(es); return MS::kSuccess; } // plugin (un)initialiser MStatus nwayDeformerNode::initialize() { MFnTypedAttribute tAttr; MFnNumericAttribute nAttr; MFnEnumAttribute eAttr; MFnMatrixAttribute mAttr; // this attr will be dirtied when ARAP recomputation is needed aARAP = nAttr.create( "arap", "arap", MFnNumericData::kBoolean, true ); nAttr.setStorable(false); nAttr.setKeyable(false); nAttr.setHidden(true); addAttribute( aARAP ); aBlendMesh = tAttr.create("blendMesh", "mesh", MFnData::kMesh); tAttr.setArray(true); tAttr.setUsesArrayDataBuilder(true); addAttribute(aBlendMesh); attributeAffects( aBlendMesh, outputGeom ); aWeight = nAttr.create("blendWeight", "bw", MFnNumericData::kDouble, 0.0); nAttr.setArray(true); nAttr.setKeyable(true); nAttr.setStorable(true); nAttr.setUsesArrayDataBuilder(true); addAttribute(aWeight); attributeAffects( aWeight, outputGeom ); aRotationConsistency = nAttr.create( "rotationConsistency", "rc", MFnNumericData::kBoolean, false ); nAttr.setStorable(true); addAttribute( aRotationConsistency ); attributeAffects( aRotationConsistency, outputGeom ); aInitRotation = nAttr.create("initRotation", "ir", MFnNumericData::kDouble); addAttribute(aInitRotation); attributeAffects( aInitRotation, outputGeom ); aVisualiseEnergy = nAttr.create( "visualiseEnergy", "ve", MFnNumericData::kBoolean, false ); nAttr.setStorable(true); addAttribute( aVisualiseEnergy ); attributeAffects( aVisualiseEnergy, outputGeom ); aAreaWeighted = nAttr.create( "areaWeighted", "aw", MFnNumericData::kBoolean, false ); nAttr.setStorable(true); addAttribute( aAreaWeighted ); attributeAffects( aAreaWeighted, outputGeom ); attributeAffects( aAreaWeighted, aARAP ); aVisualisationMultiplier = 
nAttr.create("visualisationMultiplier", "vmp", MFnNumericData::kDouble, 1.0); nAttr.setStorable(true); addAttribute( aVisualisationMultiplier ); attributeAffects( aVisualisationMultiplier, outputGeom ); aBlendMode = eAttr.create( "blendMode", "bm", BM_SRL ); eAttr.addField( "expSO+expSym", BM_SRL ); eAttr.addField( "logmatrix3", BM_LOG3 ); eAttr.addField( "quat+linear", BM_SQL ); eAttr.addField( "expSO+linear", BM_SlRL ); eAttr.addField( "linear", BM_AFF ); eAttr.addField( "off", BM_OFF ); addAttribute( aBlendMode ); attributeAffects( aBlendMode, outputGeom ); aTetMode = eAttr.create( "tetMode", "tm", TM_FACE ); eAttr.addField( "face", TM_FACE ); eAttr.addField( "edge", TM_EDGE ); eAttr.addField( "vertex", TM_VERTEX ); eAttr.addField( "vface", TM_VFACE ); addAttribute( aTetMode ); attributeAffects( aTetMode, outputGeom ); attributeAffects( aTetMode, aARAP ); aIteration = nAttr.create("iteration", "it", MFnNumericData::kShort, 1); addAttribute(aIteration); attributeAffects(aIteration, outputGeom); // this shouldn't affect outputGeom aEnergy = nAttr.create("energy", "energy", MFnNumericData::kDouble, 0.0); nAttr.setArray(true); nAttr.setKeyable(true); nAttr.setStorable(true); nAttr.setUsesArrayDataBuilder(true); addAttribute(aEnergy); // Make the deformer weights paintable MGlobal::executeCommand( "makePaintable -attrType multiFloat -sm deformer nwayBlender weights;" ); return MS::kSuccess; } // this deformer also changes colours void nwayDeformerNode::postConstructor(){ setDeformationDetails(kDeformsColors); } MStatus initializePlugin( MObject obj ) { MStatus status; MFnPlugin plugin( obj, "Shizuo KAJI", "0.1", "Any"); status = plugin.registerNode( nwayDeformerNode::nodeName, nwayDeformerNode::id, nwayDeformerNode::creator, nwayDeformerNode::initialize, MPxNode::kDeformerNode ); CHECK_MSTATUS_AND_RETURN_IT( status ); return status; } MStatus uninitializePlugin( MObject obj ) { MStatus status; MFnPlugin plugin( obj ); status = plugin.deregisterNode( 
nwayDeformerNode::id ); CHECK_MSTATUS_AND_RETURN_IT( status ); return status; }<|fim▁end|>
<|file_name|>Grammar.ts<|end_file_name|><|fim▁begin|>import Rule from "./rule/Rule"; class Grammar { readonly rules: Rule[] = [];<|fim▁hole|> constructor(public readonly newline: number) {} add(rule: Rule) { this.rules.push(rule); this.ruleOpenLookup[rule.open] = this.ruleOpenLookup[rule.open] || []; this.ruleOpenLookup[rule.open].push(rule); return this; } } export default Grammar;<|fim▁end|>
readonly ruleOpenLookup: { [key: number]: Rule[] } = {};
<|file_name|>gallery.js<|end_file_name|><|fim▁begin|>/* * @Author: justinwebb * @Date: 2015-09-24 21:08:23 * @Last Modified by: justinwebb * @Last Modified time: 2015-09-24 22:19:45 */ (function (window) { 'use strict'; window.JWLB = window.JWLB || {}; window.JWLB.View = window.JWLB.View || {}; //-------------------------------------------------------------------- // Event handling //-------------------------------------------------------------------- var wallOnClick = function (event) { if (event.target.tagName.toLowerCase() === 'img') { var id = event.target.parentNode.dataset.id; var selectedPhoto = this.photos.filter(function (photo) { if (photo.id === id) { photo.portrait.id = id; return photo; } })[0]; this.sendEvent('gallery', selectedPhoto.portrait); } }; //-------------------------------------------------------------------- // View overrides //-------------------------------------------------------------------- var addUIListeners = function () { this.ui.wall.addEventListener('click', wallOnClick.bind(this)); }; var initUI = function () { var isUIValid = false; var comp = document.querySelector(this.selector); this.ui.wall = comp; if (this.ui.wall) { this.reset(); isUIValid = true; } return isUIValid; }; //-------------------------------------------------------------------- // Constructor //-------------------------------------------------------------------- var Gallery = function (domId) { // Overriden View class methods this.initUI = initUI; this.addUIListeners = addUIListeners; this.name = 'Gallery'; // Instance properties this.photos = []; // Initialize View JWLB.View.call(this, domId); }; //-------------------------------------------------------------------- // Inheritance //-------------------------------------------------------------------- Gallery.prototype = Object.create(JWLB.View.prototype); Gallery.prototype.constructor = Gallery; //-------------------------------------------------------------------- // Instance methods 
//-------------------------------------------------------------------- Gallery.prototype.addThumb = function (data, id) { // Store image data for future reference var photo = { id: id, thumb: null, portrait: data.size[0] }; data.size.forEach(function (elem) { if (elem.label === 'Square') { photo.thumb = elem; } if (elem.height > photo.portrait.height) { photo.portrait = elem; } }); this.photos.push(photo); // Build thumbnail UI var node = document.createElement('div'); node.setAttribute('data-id', id); node.setAttribute('class', 'thumb'); var img = document.createElement('img'); img.setAttribute('src', photo.thumb.source); img.setAttribute('title', 'id: '+ id); node.appendChild(img); this.ui.wall.querySelector('article[name=foobar]').appendChild(node); }; Gallery.prototype.reset = function () { if (this.ui.wall.children.length > 0) { var article = this.ui.wall.children.item(0) article.parentElement.removeChild(article); } var article = document.createElement('article'); article.setAttribute('name', 'foobar'); this.ui.wall.appendChild(article); }; window.JWLB.View.Gallery = Gallery;<|fim▁hole|><|fim▁end|>
})(window);
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)] use std::env; use std::fs::{self, File}; use std::io::{Read, Write}; #[cfg(unix)] use std::os::unix::fs::symlink as symlink_file; #[cfg(windows)] use std::os::windows::fs::symlink_file; use std::path::Path; use std::process::{Command, Stdio}; use std::str::from_utf8; #[macro_export] macro_rules! assert_empty_stderr(<|fim▁hole|> } ); ); pub struct CmdResult { pub success: bool, pub stdout: String, pub stderr: String, } pub fn run(cmd: &mut Command) -> CmdResult { let prog = cmd.output().unwrap(); CmdResult { success: prog.status.success(), stdout: from_utf8(&prog.stdout).unwrap().to_string(), stderr: from_utf8(&prog.stderr).unwrap().to_string(), } } pub fn run_piped_stdin<T: AsRef<[u8]>>(cmd: &mut Command, input: T)-> CmdResult { let mut command = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .unwrap(); command.stdin .take() .unwrap_or_else(|| panic!("Could not take child process stdin")) .write_all(input.as_ref()) .unwrap_or_else(|e| panic!("{}", e)); let prog = command.wait_with_output().unwrap(); CmdResult { success: prog.status.success(), stdout: from_utf8(&prog.stdout).unwrap().to_string(), stderr: from_utf8(&prog.stderr).unwrap().to_string(), } } pub fn get_file_contents(name: &str) -> String { let mut f = File::open(Path::new(name)).unwrap(); let mut contents = String::new(); let _ = f.read_to_string(&mut contents); contents } pub fn mkdir(dir: &str) { fs::create_dir(Path::new(dir)).unwrap(); } pub fn make_file(name: &str) -> File { match File::create(Path::new(name)) { Ok(f) => f, Err(e) => panic!("{}", e) } } pub fn touch(file: &str) { File::create(Path::new(file)).unwrap(); } pub fn symlink(src: &str, dst: &str) { symlink_file(src, dst).unwrap(); } pub fn is_symlink(path: &str) -> bool { match fs::symlink_metadata(path) { Ok(m) => m.file_type().is_symlink(), Err(_) => false } } pub fn resolve_link(path: &str) -> String { match fs::read_link(path) { 
Ok(p) => p.to_str().unwrap().to_owned(), Err(_) => "".to_string() } } pub fn metadata(path: &str) -> fs::Metadata { match fs::metadata(path) { Ok(m) => m, Err(e) => panic!("{}", e) } } pub fn file_exists(path: &str) -> bool { match fs::metadata(path) { Ok(m) => m.is_file(), Err(_) => false } } pub fn dir_exists(path: &str) -> bool { match fs::metadata(path) { Ok(m) => m.is_dir(), Err(_) => false } } pub fn cleanup(path: &'static str) { let p = Path::new(path); match fs::metadata(p) { Ok(m) => if m.is_file() { fs::remove_file(&p).unwrap(); } else { fs::remove_dir(&p).unwrap(); }, Err(_) => {} } } pub fn current_directory() -> String { env::current_dir().unwrap().into_os_string().into_string().unwrap() } pub fn repeat_str(s: &str, n: u32) -> String { let mut repeated = String::new(); for _ in 0 .. n { repeated.push_str(s); } repeated }<|fim▁end|>
($cond:expr) => ( if $cond.stderr.len() > 0 { panic!(format!("stderr: {}", $cond.stderr))
<|file_name|>feng_destroyer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import os, sys, signal, random from datetime import datetime, timedelta VLC_POOL_SIZE = 10 #number of vlc client to keep open SPAWN_TIMER = 5 #number of seconds between vlc spawn MAX_VLC_LIFE_TIME = 60 #max life time in seconds of a vlc client VLC_COMMAND = '/usr/bin/vlc' class Vlc(object): def __init__(self, uri): super(Vlc, self).__init__() self.pid = None self.uri = uri self.spawn_time = None def _close_all_open_fd(self): for fd in xrange(0, os.sysconf('SC_OPEN_MAX')): try: os.close(fd) except OSError: pass def run(self): if self.pid: return False pid = os.fork() if pid: self.pid = pid self.spawn_time = datetime.now() return True else: self._close_all_open_fd() os.execvp(VLC_COMMAND, ['vlc', self.uri]) return None def stop(self): if not self.pid: return False try: os.kill(self.pid, signal.SIGTERM) os.waitpid(self.pid, 0) except Exception, e: print 'Vlc wasn\'t here anymore', e pass return True def main(url): random.seed() last_spawn = datetime.now() - timedelta(0, SPAWN_TIMER) vlc_pool = [] while True: to_remove = [] now = datetime.now() if (now - last_spawn >= timedelta(0, SPAWN_TIMER)) and (len(vlc_pool) < VLC_POOL_SIZE): last_spawn = now vlc = Vlc(url) print 'Running a new vlc' state = vlc.run() if state: vlc_pool.append(vlc) elif state == None: print 'Vlc Client exited by itself?' return else: print 'Failed to start Vlc' for vlc in vlc_pool: if now - vlc.spawn_time >= timedelta(0, MAX_VLC_LIFE_TIME): if random.random() >= 0.5: print 'Stopping an old vlc started at', vlc.spawn_time vlc.stop() to_remove.append(vlc) if len(to_remove) and random.random() > 0.95: for vlc in vlc_pool: if not vlc in to_remove: print 'Stopping multiple vlcs', vlc.spawn_time vlc.stop() to_remove.append(vlc) <|fim▁hole|>if __name__ == '__main__': if len(sys.argv) != 2: print '%s requires an rtsp url to request' % sys.argv[0] else: main(sys.argv[1])<|fim▁end|>
for vlc in to_remove: vlc_pool.remove(vlc)
<|file_name|>device.py<|end_file_name|><|fim▁begin|># Partname: ATmega644A # generated automatically, do not edit MCUREGS = { 'ADCSRB': '&123', 'ADCSRB_ACME': '$40', 'ACSR': '&80', 'ACSR_ACD': '$80', 'ACSR_ACBG': '$40', 'ACSR_ACO': '$20', 'ACSR_ACI': '$10', 'ACSR_ACIE': '$08', 'ACSR_ACIC': '$04', 'ACSR_ACIS': '$03', 'DIDR1': '&127', 'DIDR1_AIN1D': '$02', 'DIDR1_AIN0D': '$01', 'UDR0': '&198', 'UCSR0A': '&192', 'UCSR0A_RXC0': '$80', 'UCSR0A_TXC0': '$40', 'UCSR0A_UDRE0': '$20', 'UCSR0A_FE0': '$10', 'UCSR0A_DOR0': '$08', 'UCSR0A_UPE0': '$04', 'UCSR0A_U2X0': '$02', 'UCSR0A_MPCM0': '$01', 'UCSR0B': '&193', 'UCSR0B_RXCIE0': '$80', 'UCSR0B_TXCIE0': '$40', 'UCSR0B_UDRIE0': '$20', 'UCSR0B_RXEN0': '$10', 'UCSR0B_TXEN0': '$08', 'UCSR0B_UCSZ02': '$04', 'UCSR0B_RXB80': '$02', 'UCSR0B_TXB80': '$01', 'UCSR0C': '&194', 'UCSR0C_UMSEL0': '$C0', 'UCSR0C_UPM0': '$30', 'UCSR0C_USBS0': '$08', 'UCSR0C_UCSZ0': '$06', 'UCSR0C_UCPOL0': '$01', 'UBRR0': '&196', 'PORTA': '&34', 'DDRA': '&33', 'PINA': '&32', 'PORTB': '&37',<|fim▁hole|> 'PINB': '&35', 'PORTC': '&40', 'DDRC': '&39', 'PINC': '&38', 'PORTD': '&43', 'DDRD': '&42', 'PIND': '&41', 'OCR0B': '&72', 'OCR0A': '&71', 'TCNT0': '&70', 'TCCR0B': '&69', 'TCCR0B_FOC0A': '$80', 'TCCR0B_FOC0B': '$40', 'TCCR0B_WGM02': '$08', 'TCCR0B_CS0': '$07', 'TCCR0A': '&68', 'TCCR0A_COM0A': '$C0', 'TCCR0A_COM0B': '$30', 'TCCR0A_WGM0': '$03', 'TIMSK0': '&110', 'TIMSK0_OCIE0B': '$04', 'TIMSK0_OCIE0A': '$02', 'TIMSK0_TOIE0': '$01', 'TIFR0': '&53', 'TIFR0_OCF0B': '$04', 'TIFR0_OCF0A': '$02', 'TIFR0_TOV0': '$01', 'GTCCR': '&67', 'GTCCR_TSM': '$80', 'GTCCR_PSRSYNC': '$01', 'TIMSK2': '&112', 'TIMSK2_OCIE2B': '$04', 'TIMSK2_OCIE2A': '$02', 'TIMSK2_TOIE2': '$01', 'TIFR2': '&55', 'TIFR2_OCF2B': '$04', 'TIFR2_OCF2A': '$02', 'TIFR2_TOV2': '$01', 'TCCR2A': '&176', 'TCCR2A_COM2A': '$C0', 'TCCR2A_COM2B': '$30', 'TCCR2A_WGM2': '$03', 'TCCR2B': '&177', 'TCCR2B_FOC2A': '$80', 'TCCR2B_FOC2B': '$40', 'TCCR2B_WGM22': '$08', 'TCCR2B_CS2': '$07', 'TCNT2': '&178', 'OCR2B': '&180', 
'OCR2A': '&179', 'ASSR': '&182', 'ASSR_EXCLK': '$40', 'ASSR_AS2': '$20', 'ASSR_TCN2UB': '$10', 'ASSR_OCR2AUB': '$08', 'ASSR_OCR2BUB': '$04', 'ASSR_TCR2AUB': '$02', 'ASSR_TCR2BUB': '$01', 'WDTCSR': '&96', 'WDTCSR_WDIF': '$80', 'WDTCSR_WDIE': '$40', 'WDTCSR_WDP': '$27', 'WDTCSR_WDCE': '$10', 'WDTCSR_WDE': '$08', 'OCDR': '&81', 'MCUCR': '&85', 'MCUCR_JTD': '$80', 'MCUSR': '&84', 'MCUSR_JTRF': '$10', 'SPMCSR': '&87', 'SPMCSR_SPMIE': '$80', 'SPMCSR_RWWSB': '$40', 'SPMCSR_SIGRD': '$20', 'SPMCSR_RWWSRE': '$10', 'SPMCSR_BLBSET': '$08', 'SPMCSR_PGWRT': '$04', 'SPMCSR_PGERS': '$02', 'SPMCSR_SPMEN': '$01', 'EICRA': '&105', 'EICRA_ISC2': '$30', 'EICRA_ISC1': '$0C', 'EICRA_ISC0': '$03', 'EIMSK': '&61', 'EIMSK_INT': '$07', 'EIFR': '&60', 'EIFR_INTF': '$07', 'PCMSK3': '&115', 'PCMSK3_PCINT': '$FF', 'PCMSK2': '&109', 'PCMSK2_PCINT': '$FF', 'PCMSK1': '&108', 'PCMSK1_PCINT': '$FF', 'PCMSK0': '&107', 'PCMSK0_PCINT': '$FF', 'PCIFR': '&59', 'PCIFR_PCIF': '$0F', 'PCICR': '&104', 'PCICR_PCIE': '$0F', 'ADMUX': '&124', 'ADMUX_REFS': '$C0', 'ADMUX_ADLAR': '$20', 'ADMUX_MUX': '$1F', 'ADC': '&120', 'ADCSRA': '&122', 'ADCSRA_ADEN': '$80', 'ADCSRA_ADSC': '$40', 'ADCSRA_ADATE': '$20', 'ADCSRA_ADIF': '$10', 'ADCSRA_ADIE': '$08', 'ADCSRA_ADPS': '$07', 'DIDR0': '&126', 'DIDR0_ADC7D': '$80', 'DIDR0_ADC6D': '$40', 'DIDR0_ADC5D': '$20', 'DIDR0_ADC4D': '$10', 'DIDR0_ADC3D': '$08', 'DIDR0_ADC2D': '$04', 'DIDR0_ADC1D': '$02', 'DIDR0_ADC0D': '$01', 'TIMSK1': '&111', 'TIMSK1_ICIE1': '$20', 'TIMSK1_OCIE1B': '$04', 'TIMSK1_OCIE1A': '$02', 'TIMSK1_TOIE1': '$01', 'TIFR1': '&54', 'TIFR1_ICF1': '$20', 'TIFR1_OCF1B': '$04', 'TIFR1_OCF1A': '$02', 'TIFR1_TOV1': '$01', 'TCCR1A': '&128', 'TCCR1A_COM1A': '$C0', 'TCCR1A_COM1B': '$30', 'TCCR1A_WGM1': '$03', 'TCCR1B': '&129', 'TCCR1B_ICNC1': '$80', 'TCCR1B_ICES1': '$40', 'TCCR1B_WGM1': '$18', 'TCCR1B_CS1': '$07', 'TCCR1C': '&130', 'TCCR1C_FOC1A': '$80', 'TCCR1C_FOC1B': '$40', 'TCNT1': '&132', 'OCR1A': '&136', 'OCR1B': '&138', 'ICR1': '&134', 'EEAR': '&65', 'EEDR': '&64', 
'EECR': '&63', 'EECR_EEPM': '$30', 'EECR_EERIE': '$08', 'EECR_EEMPE': '$04', 'EECR_EEPE': '$02', 'EECR_EERE': '$01', 'TWAMR': '&189', 'TWAMR_TWAM': '$FE', 'TWBR': '&184', 'TWCR': '&188', 'TWCR_TWINT': '$80', 'TWCR_TWEA': '$40', 'TWCR_TWSTA': '$20', 'TWCR_TWSTO': '$10', 'TWCR_TWWC': '$08', 'TWCR_TWEN': '$04', 'TWCR_TWIE': '$01', 'TWSR': '&185', 'TWSR_TWS': '$F8', 'TWSR_TWPS': '$03', 'TWDR': '&187', 'TWAR': '&186', 'TWAR_TWA': '$FE', 'TWAR_TWGCE': '$01', 'UDR1': '&206', 'UCSR1A': '&200', 'UCSR1A_RXC1': '$80', 'UCSR1A_TXC1': '$40', 'UCSR1A_UDRE1': '$20', 'UCSR1A_FE1': '$10', 'UCSR1A_DOR1': '$08', 'UCSR1A_UPE1': '$04', 'UCSR1A_U2X1': '$02', 'UCSR1A_MPCM1': '$01', 'UCSR1B': '&201', 'UCSR1B_RXCIE1': '$80', 'UCSR1B_TXCIE1': '$40', 'UCSR1B_UDRIE1': '$20', 'UCSR1B_RXEN1': '$10', 'UCSR1B_TXEN1': '$08', 'UCSR1B_UCSZ12': '$04', 'UCSR1B_RXB81': '$02', 'UCSR1B_TXB81': '$01', 'UCSR1C': '&202', 'UCSR1C_UMSEL1': '$C0', 'UCSR1C_UPM1': '$30', 'UCSR1C_USBS1': '$08', 'UCSR1C_UCSZ1': '$06', 'UCSR1C_UCPOL1': '$01', 'UBRR1': '&204', 'SPDR': '&78', 'SPSR': '&77', 'SPSR_SPIF': '$80', 'SPSR_WCOL': '$40', 'SPSR_SPI2X': '$01', 'SPCR': '&76', 'SPCR_SPIE': '$80', 'SPCR_SPE': '$40', 'SPCR_DORD': '$20', 'SPCR_MSTR': '$10', 'SPCR_CPOL': '$08', 'SPCR_CPHA': '$04', 'SPCR_SPR': '$03', 'SREG': '&95', 'SREG_I': '$80', 'SREG_T': '$40', 'SREG_H': '$20', 'SREG_S': '$10', 'SREG_V': '$08', 'SREG_N': '$04', 'SREG_Z': '$02', 'SREG_C': '$01', 'SP': '&93', 'OSCCAL': '&102', 'CLKPR': '&97', 'CLKPR_CLKPCE': '$80', 'CLKPR_CLKPS': '$0F', 'SMCR': '&83', 'SMCR_SM': '$0E', 'SMCR_SE': '$01', 'GPIOR2': '&75', 'GPIOR2_GPIOR': '$FF', 'GPIOR1': '&74', 'GPIOR1_GPIOR': '$FF', 'GPIOR0': '&62', 'GPIOR0_GPIOR07': '$80', 'GPIOR0_GPIOR06': '$40', 'GPIOR0_GPIOR05': '$20', 'GPIOR0_GPIOR04': '$10', 'GPIOR0_GPIOR03': '$08', 'GPIOR0_GPIOR02': '$04', 'GPIOR0_GPIOR01': '$02', 'GPIOR0_GPIOR00': '$01', 'PRR0': '&100', 'PRR0_PRTWI': '$80', 'PRR0_PRTIM2': '$40', 'PRR0_PRTIM0': '$20', 'PRR0_PRUSART': '$12', 'PRR0_PRTIM1': '$08', 'PRR0_PRSPI': 
'$04', 'PRR0_PRADC': '$01', 'INT0Addr': '2', 'INT1Addr': '4', 'INT2Addr': '6', 'PCINT0Addr': '8', 'PCINT1Addr': '10', 'PCINT2Addr': '12', 'PCINT3Addr': '14', 'WDTAddr': '16', 'TIMER2_COMPAAddr': '18', 'TIMER2_COMPBAddr': '20', 'TIMER2_OVFAddr': '22', 'TIMER1_CAPTAddr': '24', 'TIMER1_COMPAAddr': '26', 'TIMER1_COMPBAddr': '28', 'TIMER1_OVFAddr': '30', 'TIMER0_COMPAAddr': '32', 'TIMER0_COMPBAddr': '34', 'TIMER0_OVFAddr': '36', 'SPI__STCAddr': '38', 'USART0__RXAddr': '40', 'USART0__UDREAddr': '42', 'USART0__TXAddr': '44', 'ANALOG_COMPAddr': '46', 'ADCAddr': '48', 'EE_READYAddr': '50', 'TWIAddr': '52', 'SPM_READYAddr': '54', 'USART1_RXAddr': '56', 'USART1_UDREAddr': '58', 'USART1_TXAddr': '60' }<|fim▁end|>
'DDRB': '&36',
<|file_name|>print.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import sys def get_color(color): if 'default'==color: return '\x1b[39;01m' elif 'black'==color: return '\x1b[30;01m' elif 'red'==color: return '\x1b[31;01m' elif 'green'==color: return '\x1b[32;01m' elif 'yellow'==color: return '\x1b[33;01m' elif 'blue'==color: return '\x1b[34;01m' elif 'magenta'==color: return '\x1b[35;01m' elif 'cyan'==color: return '\x1b[36;01m' return '\x1b[34;01m' def main(): if 4==len(sys.argv): color,cmd,action=get_color(sys.argv[1]),sys.argv[2],sys.argv[3] if action=='stop': action='exit' template='\x1b[1m%s[ ΔOS : %s : make : %s ]\x1b[0m' else: action='init' template='\x1b[1m%s[ ΔOS : %s : make : %s ]\x1b[0m'<|fim▁hole|>if __name__=="__main__": main()<|fim▁end|>
print(template%(color,action,cmd))
<|file_name|>argument_double_underscored.rs<|end_file_name|><|fim▁begin|>use std::pin::Pin; use juniper::graphql_subscription; type Stream<'a, I> = Pin<Box<dyn futures::Stream<Item = I> + Send + 'a>>; struct Obj; #[graphql_subscription] impl Obj { async fn id(&self, __num: i32) -> Stream<'static, &'static str> { Box::pin(stream::once(future::ready("funA"))) } } <|fim▁hole|><|fim▁end|>
fn main() {}
<|file_name|>zed_wrapper_nodelet.cpp<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2017, STEREOLABS. // // All rights reserved. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // /////////////////////////////////////////////////////////////////////////// /**************************************************************************************************** ** This sample is a wrapper for the ZED library in order to use the ZED Camera with ROS. ** ** A set of parameters can be specified in the launch file. 
** ****************************************************************************************************/ #include <csignal> #include <cstdio> #include <math.h> #include <limits> #include <thread> #include <chrono> #include <memory> #include <sys/stat.h> #include <ros/ros.h> #include <nodelet/nodelet.h> #include <sensor_msgs/Image.h> #include <sensor_msgs/CameraInfo.h> #include <sensor_msgs/distortion_models.h> #include <sensor_msgs/image_encodings.h> #include <image_transport/image_transport.h> #include <dynamic_reconfigure/server.h> #include <autobot/AutobotConfig.h> #include <nav_msgs/Odometry.h> #include <tf2/LinearMath/Quaternion.h> #include <tf2_ros/transform_broadcaster.h> #include <geometry_msgs/TransformStamped.h> #include <autobot/compound_img.h> #include <opencv2/core/core.hpp> #include <opencv2/highgui/highgui.hpp> #include <opencv2/imgproc/imgproc.hpp> #include <opencv2/calib3d/calib3d.hpp> #include <boost/make_shared.hpp> #include <boost/thread.hpp> //#include <sensor_msgs/PointCloud2.h> //#include <pcl_conversions/pcl_conversions.h> //#include <pcl/point_cloud.h> //#include <pcl/point_types.h> #include <sl/Camera.hpp> using namespace std; namespace autobot { class ZEDWrapperNodelet : public nodelet::Nodelet { ros::NodeHandle nh; ros::NodeHandle nh_ns; boost::shared_ptr<boost::thread> device_poll_thread; image_transport::Publisher pub_rgb; image_transport::Publisher pub_raw_rgb; image_transport::Publisher pub_left; image_transport::Publisher pub_raw_left; image_transport::Publisher pub_right; image_transport::Publisher pub_raw_right; image_transport::Publisher pub_depth; ros::Publisher pub_compound_img; ros::Publisher pub_cloud; ros::Publisher pub_rgb_cam_info; ros::Publisher pub_left_cam_info; ros::Publisher pub_right_cam_info; ros::Publisher pub_depth_cam_info; ros::Publisher pub_odom; // tf tf2_ros::TransformBroadcaster transform_odom_broadcaster; std::string left_frame_id; std::string right_frame_id; std::string rgb_frame_id; std::string 
depth_frame_id; std::string cloud_frame_id; std::string odometry_frame_id; std::string odometry_transform_frame_id; // Launch file parameters int resolution; int quality; int sensing_mode; int rate; int gpu_id; int zed_id; std::string odometry_DB; std::string svo_filepath; //Tracking variables sl::Pose pose; // zed object sl::InitParameters param; std::unique_ptr<sl::Camera> zed; // flags int confidence; bool computeDepth; bool grabbing = false; int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html // Point cloud variables //sl::Mat cloud; //string point_cloud_frame_id = ""; //ros::Time point_cloud_time; /* \brief Convert an sl:Mat to a cv::Mat * \param mat : the sl::Mat to convert */ cv::Mat toCVMat(sl::Mat &mat) { if (mat.getMemoryType() == sl::MEM_GPU) mat.updateCPUfromGPU(); int cvType; switch (mat.getDataType()) { case sl::MAT_TYPE_32F_C1: cvType = CV_32FC1; break; case sl::MAT_TYPE_32F_C2: cvType = CV_32FC2; break; case sl::MAT_TYPE_32F_C3: cvType = CV_32FC3; break; case sl::MAT_TYPE_32F_C4: cvType = CV_32FC4; break; case sl::MAT_TYPE_8U_C1: cvType = CV_8UC1; break; case sl::MAT_TYPE_8U_C2: cvType = CV_8UC2; break; case sl::MAT_TYPE_8U_C3: cvType = CV_8UC3; break; case sl::MAT_TYPE_8U_C4: cvType = CV_8UC4; break; } return cv::Mat((int) mat.getHeight(), (int) mat.getWidth(), cvType, mat.getPtr<sl::uchar1>(sl::MEM_CPU), mat.getStepBytes(sl::MEM_CPU)); } /* \brief Test if a file exist * \param name : the path to the file */ bool file_exist(const std::string& name) { struct stat buffer; return (stat(name.c_str(), &buffer) == 0); } /* \brief Image to ros message conversion * \param img : the image to publish * \param encodingType : the sensor_msgs::image_encodings encoding type * \param frameId : the id of the reference frame of the image * \param t : the ros::Time to stamp the image */ sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t) { 
sensor_msgs::ImagePtr ptr = boost::make_shared<sensor_msgs::Image>(); sensor_msgs::Image& imgMessage = *ptr; imgMessage.header.stamp = t; imgMessage.header.frame_id = frameId; imgMessage.height = img.rows; imgMessage.width = img.cols; imgMessage.encoding = encodingType; int num = 1; //for endianness detection imgMessage.is_bigendian = !(*(char *) &num == 1); imgMessage.step = img.cols * img.elemSize(); size_t size = imgMessage.step * img.rows; imgMessage.data.resize(size); if (img.isContinuous()) memcpy((char*) (&imgMessage.data[0]), img.data, size); else { uchar* opencvData = img.data; uchar* rosData = (uchar*) (&imgMessage.data[0]); for (unsigned int i = 0; i < img.rows; i++) { memcpy(rosData, opencvData, imgMessage.step); rosData += imgMessage.step; opencvData += img.step; } } return ptr; } /* \brief Publish the pose of the camera with a ros Publisher * \param pose : the 4x4 matrix representing the camera pose * \param pub_odom : the publisher object to use * \param odom_frame_id : the id of the reference frame of the pose * \param t : the ros::Time to stamp the image */ //void publishOdom(sl::Pose pose, ros::Publisher &pub_odom, string odom_frame_id, ros::Time t) { //nav_msgs::Odometry odom; //odom.header.stamp = t; //odom.header.frame_id = odom_frame_id; ////odom.child_frame_id = "zed_optical_frame"; //sl::Translation translation = pose.getTranslation(); //odom.pose.pose.position.x = translation(2); //odom.pose.pose.position.y = -translation(0); //odom.pose.pose.position.z = -translation(1); //sl::Orientation quat = pose.getOrientation(); //odom.pose.pose.orientation.x = quat(2); //odom.pose.pose.orientation.y = -quat(0); //odom.pose.pose.orientation.z = -quat(1); //odom.pose.pose.orientation.w = quat(3); //pub_odom.publish(odom); //} /* \brief Publish the pose of the camera as a transformation * \param pose : the 4x4 matrix representing the camera pose * \param trans_br : the TransformBroadcaster object to use * \param odometry_transform_frame_id : the id of 
the transformation * \param t : the ros::Time to stamp the image */ //void publishTrackedFrame(sl::Pose pose, tf2_ros::TransformBroadcaster &trans_br, string odometry_transform_frame_id, ros::Time t) { //geometry_msgs::TransformStamped transformStamped; //transformStamped.header.stamp = ros::Time::now(); //transformStamped.header.frame_id = "zed_initial_frame"; //transformStamped.child_frame_id = odometry_transform_frame_id; //sl::Translation translation = pose.getTranslation(); //transformStamped.transform.translation.x = translation(2); //transformStamped.transform.translation.y = -translation(0); //transformStamped.transform.translation.z = -translation(1); //sl::Orientation quat = pose.getOrientation(); //transformStamped.transform.rotation.x = quat(2); //transformStamped.transform.rotation.y = -quat(0); //transformStamped.transform.rotation.z = -quat(1); //transformStamped.transform.rotation.w = quat(3); //trans_br.sendTransform(transformStamped); //} /* \brief Publish a cv::Mat image with a ros Publisher * \param img : the image to publish * \param pub_img : the publisher object to use * \param img_frame_id : the id of the reference frame of the image * \param t : the ros::Time to stamp the image */ void publishImage(cv::Mat img, image_transport::Publisher &pub_img, string img_frame_id, ros::Time t) { pub_img.publish(imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, img_frame_id, t)); } /* \brief Publish a cv::Mat depth image with a ros Publisher * \param depth : the depth image to publish * \param pub_depth : the publisher object to use * \param depth_frame_id : the id of the reference frame of the depth image * \param t : the ros::Time to stamp the depth image */ void publishDepth(cv::Mat depth, image_transport::Publisher &pub_depth, string depth_frame_id, ros::Time t) { string encoding; if (openniDepthMode) { depth *= 1000.0f; depth.convertTo(depth, CV_16UC1); // in mm, rounded encoding = sensor_msgs::image_encodings::TYPE_16UC1; } else { encoding = 
sensor_msgs::image_encodings::TYPE_32FC1; } pub_depth.publish(imageToROSmsg(depth, encoding, depth_frame_id, t)); } void publishDepthPlusImage(cv::Mat img, cv::Mat depth, ros::Publisher &pub_compound_img, string img_frame_id, string depth_frame_id, ros::Time t) { string encoding; if (openniDepthMode) { depth *= 1000.0f; depth.convertTo(depth, CV_16UC1); // in mm, rounded encoding = sensor_msgs::image_encodings::TYPE_16UC1; } else { encoding = sensor_msgs::image_encodings::TYPE_32FC1; } sensor_msgs::ImagePtr img_msg = imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, img_frame_id, t); sensor_msgs::ImagePtr depth_msg = imageToROSmsg(depth, encoding, depth_frame_id, t); boost::shared_ptr<autobot::compound_img> comp_img = boost::make_shared<autobot::compound_img>();; comp_img->img = *img_msg.get(); comp_img->depthImg = *depth_msg.get(); pub_compound_img.publish<autobot::compound_img>(comp_img); } /* \brief Publish a pointCloud with a ros Publisher * \param width : the width of the point cloud * \param height : the height of the point cloud * \param pub_cloud : the publisher object to use void publishPointCloud(int width, int height, ros::Publisher &pub_cloud) { pcl::PointCloud<pcl::PointXYZRGB> point_cloud; point_cloud.width = width; point_cloud.height = height; int size = width*height; point_cloud.points.resize(size); sl::Vector4<float>* cpu_cloud = cloud.getPtr<sl::float4>(); for (int i = 0; i < size; i++) { point_cloud.points[i].x = cpu_cloud[i][2]; point_cloud.points[i].y = -cpu_cloud[i][0]; point_cloud.points[i].z = -cpu_cloud[i][1]; point_cloud.points[i].rgb = cpu_cloud[i][3]; } sensor_msgs::PointCloud2 output; pcl::toROSMsg(point_cloud, output); // Convert the point cloud to a ROS message output.header.frame_id = point_cloud_frame_id; // Set the header values of the ROS message output.header.stamp = point_cloud_time; output.height = height; output.width = width; output.is_bigendian = false; output.is_dense = false; pub_cloud.publish(output); } */ /* \brief 
Publish the informations of a camera with a ros Publisher * \param cam_info_msg : the information message to publish * \param pub_cam_info : the publisher object to use * \param t : the ros::Time to stamp the message */ void publishCamInfo(sensor_msgs::CameraInfoPtr cam_info_msg, ros::Publisher pub_cam_info, ros::Time t) { static int seq = 0; cam_info_msg->header.stamp = t; cam_info_msg->header.seq = seq; pub_cam_info.publish(cam_info_msg); seq++; } /* \brief Get the information of the ZED cameras and store them in an information message * \param zed : the sl::zed::Camera* pointer to an instance * \param left_cam_info_msg : the information message to fill with the left camera informations * \param right_cam_info_msg : the information message to fill with the right camera informations * \param left_frame_id : the id of the reference frame of the left camera * \param right_frame_id : the id of the reference frame of the right camera */ void fillCamInfo(sl::Camera* zed, sensor_msgs::CameraInfoPtr left_cam_info_msg, sensor_msgs::CameraInfoPtr right_cam_info_msg, string left_frame_id, string right_frame_id) { int width = zed->getResolution().width; int height = zed->getResolution().height; sl::CameraInformation zedParam = zed->getCameraInformation(); float baseline = zedParam.calibration_parameters.T[0] * 0.001; // baseline converted in meters float fx = zedParam.calibration_parameters.left_cam.fx; float fy = zedParam.calibration_parameters.left_cam.fy; float cx = zedParam.calibration_parameters.left_cam.cx; float cy = zedParam.calibration_parameters.left_cam.cy; // There is no distorsions since the images are rectified double k1 = 0; double k2 = 0; double k3 = 0; double p1 = 0; double p2 = 0; left_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; right_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; left_cam_info_msg->D.resize(5); right_cam_info_msg->D.resize(5); left_cam_info_msg->D[0] = right_cam_info_msg->D[0] = 
k1; left_cam_info_msg->D[1] = right_cam_info_msg->D[1] = k2; left_cam_info_msg->D[2] = right_cam_info_msg->D[2] = k3; left_cam_info_msg->D[3] = right_cam_info_msg->D[3] = p1; left_cam_info_msg->D[4] = right_cam_info_msg->D[4] = p2; left_cam_info_msg->K.fill(0.0); right_cam_info_msg->K.fill(0.0); left_cam_info_msg->K[0] = right_cam_info_msg->K[0] = fx; left_cam_info_msg->K[2] = right_cam_info_msg->K[2] = cx; left_cam_info_msg->K[4] = right_cam_info_msg->K[4] = fy; left_cam_info_msg->K[5] = right_cam_info_msg->K[5] = cy; left_cam_info_msg->K[8] = right_cam_info_msg->K[8] = 1.0; left_cam_info_msg->R.fill(0.0); right_cam_info_msg->R.fill(0.0); left_cam_info_msg->P.fill(0.0); right_cam_info_msg->P.fill(0.0); left_cam_info_msg->P[0] = right_cam_info_msg->P[0] = fx; left_cam_info_msg->P[2] = right_cam_info_msg->P[2] = cx; left_cam_info_msg->P[5] = right_cam_info_msg->P[5] = fy; left_cam_info_msg->P[6] = right_cam_info_msg->P[6] = cy; left_cam_info_msg->P[10] = right_cam_info_msg->P[10] = 1.0; right_cam_info_msg->P[3] = (-1 * fx * baseline); left_cam_info_msg->width = right_cam_info_msg->width = width; left_cam_info_msg->height = right_cam_info_msg->height = height; left_cam_info_msg->header.frame_id = left_frame_id; right_cam_info_msg->header.frame_id = right_frame_id; } void callback(autobot::AutobotConfig &config, uint32_t level) { NODELET_INFO("Reconfigure confidence : %d", config.confidence); confidence = config.confidence; } void device_poll() { ros::Rate loop_rate(rate); ros::Time old_t = ros::Time::now(); bool old_image = false; bool tracking_activated = false; // Get the parameters of the ZED images int width = zed->getResolution().width; int height = zed->getResolution().height; NODELET_DEBUG_STREAM("Image size : " << width << "x" << height); cv::Size cvSize(width, height); cv::Mat leftImRGB(cvSize, CV_8UC3); cv::Mat rightImRGB(cvSize, CV_8UC3); // Create and fill the camera information messages //sensor_msgs::CameraInfoPtr rgb_cam_info_msg(new 
sensor_msgs::CameraInfo()); sensor_msgs::CameraInfoPtr left_cam_info_msg(new sensor_msgs::CameraInfo()); sensor_msgs::CameraInfoPtr right_cam_info_msg(new sensor_msgs::CameraInfo()); sensor_msgs::CameraInfoPtr depth_cam_info_msg(new sensor_msgs::CameraInfo()); fillCamInfo(zed.get(), left_cam_info_msg, right_cam_info_msg, left_frame_id, right_frame_id); //rgb_cam_info_msg = depth_cam_info_msg = left_cam_info_msg; // the reference camera is the Left one (next to the ZED logo) sl::RuntimeParameters runParams; runParams.sensing_mode = static_cast<sl::SENSING_MODE> (sensing_mode); sl::TrackingParameters trackParams; trackParams.area_file_path = odometry_DB.c_str(); sl::Mat leftZEDMat, rightZEDMat, depthZEDMat; // Main loop while (nh_ns.ok()) { // Check for subscribers int rgb_SubNumber = pub_rgb.getNumSubscribers(); int rgb_raw_SubNumber = pub_raw_rgb.getNumSubscribers(); int left_SubNumber = pub_left.getNumSubscribers(); int left_raw_SubNumber = pub_raw_left.getNumSubscribers();<|fim▁hole|> int right_SubNumber = pub_right.getNumSubscribers(); int right_raw_SubNumber = pub_raw_right.getNumSubscribers(); int depth_SubNumber = pub_depth.getNumSubscribers(); int compound_SubNumber = pub_compound_img.getNumSubscribers(); int cloud_SubNumber = pub_cloud.getNumSubscribers(); int odom_SubNumber = pub_odom.getNumSubscribers(); bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber + left_raw_SubNumber + right_SubNumber + right_raw_SubNumber + depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0; runParams.enable_point_cloud = false; if (cloud_SubNumber > 0) runParams.enable_point_cloud = true; ros::Time t = ros::Time::now(); // Get current time // Run the loop only if there is some subscribers if (true) { if (odom_SubNumber > 0 && !tracking_activated) { //Start the tracking if (odometry_DB != "" && !file_exist(odometry_DB)) { odometry_DB = ""; NODELET_WARN("odometry_DB path doesn't exist or is unreachable."); } zed->enableTracking(trackParams); tracking_activated 
= true; } else if (odom_SubNumber == 0 && tracking_activated) { //Stop the tracking zed->disableTracking(); tracking_activated = false; } computeDepth = (depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0; // Detect if one of the subscriber need to have the depth information grabbing = true; if (computeDepth) { int actual_confidence = zed->getConfidenceThreshold(); if (actual_confidence != confidence) zed->setConfidenceThreshold(confidence); runParams.enable_depth = true; // Ask to compute the depth } else runParams.enable_depth = false; old_image = zed->grab(runParams); // Ask to not compute the depth grabbing = false; if (old_image) { // Detect if a error occurred (for example: the zed have been disconnected) and re-initialize the ZED NODELET_DEBUG("Wait for a new image to proceed"); std::this_thread::sleep_for(std::chrono::milliseconds(2)); if ((t - old_t).toSec() > 5) { // delete the old object before constructing a new one zed.reset(); zed.reset(new sl::Camera()); NODELET_INFO("Re-openning the ZED"); sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; while (err != sl::SUCCESS) { err = zed->open(param); // Try to initialize the ZED NODELET_INFO_STREAM(errorCode2str(err)); std::this_thread::sleep_for(std::chrono::milliseconds(2000)); } tracking_activated = false; if (odom_SubNumber > 0) { //Start the tracking if (odometry_DB != "" && !file_exist(odometry_DB)) { odometry_DB = ""; NODELET_WARN("odometry_DB path doesn't exist or is unreachable."); } zed->enableTracking(trackParams); tracking_activated = true; } } continue; } old_t = ros::Time::now(); // Publish the left == rgb image if someone has subscribed to if (left_SubNumber > 0 || rgb_SubNumber > 0) { // Retrieve RGBA Left image zed->retrieveImage(leftZEDMat, sl::VIEW_LEFT); cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); if (left_SubNumber > 0) { publishCamInfo(left_cam_info_msg, pub_left_cam_info, t); publishImage(leftImRGB, pub_left, left_frame_id, t); } //if (rgb_SubNumber > 0) { 
//publishCamInfo(rgb_cam_info_msg, pub_rgb_cam_info, t); //publishImage(leftImRGB, pub_rgb, rgb_frame_id, t); // rgb is the left image //} } // Publish the left_raw == rgb_raw image if someone has subscribed to //if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) { //// Retrieve RGBA Left image //zed->retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED); //cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); //if (left_raw_SubNumber > 0) { //publishCamInfo(left_cam_info_msg, pub_left_cam_info, t); //publishImage(leftImRGB, pub_raw_left, left_frame_id, t); //} //if (rgb_raw_SubNumber > 0) { //publishCamInfo(rgb_cam_info_msg, pub_rgb_cam_info, t); //publishImage(leftImRGB, pub_raw_rgb, rgb_frame_id, t); //} //} // Publish the right image if someone has subscribed to if (right_SubNumber > 0) { // Retrieve RGBA Right image zed->retrieveImage(rightZEDMat, sl::VIEW_RIGHT); cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); publishCamInfo(right_cam_info_msg, pub_right_cam_info, t); publishImage(rightImRGB, pub_right, right_frame_id, t); } // Publish the right image if someone has subscribed to //if (right_raw_SubNumber > 0) { //// Retrieve RGBA Right image //zed->retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED); //cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); //publishCamInfo(right_cam_info_msg, pub_right_cam_info, t); //publishImage(rightImRGB, pub_raw_right, right_frame_id, t); //} //Publish the depth image if someone has subscribed to //if (depth_SubNumber > 0) { if (depth_SubNumber > 0) { zed->retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH); publishCamInfo(depth_cam_info_msg, pub_depth_cam_info, t); publishDepth(toCVMat(depthZEDMat), pub_depth, depth_frame_id, t); // in meters } // subscription-based publishing not working with vanilla ros::publisher so turning off // for compound image // if (compound_SubNumber > 0) { zed->retrieveImage(leftZEDMat, sl::VIEW_LEFT); cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); 
zed->retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH); publishDepthPlusImage(leftImRGB, toCVMat(depthZEDMat), pub_compound_img, left_frame_id, depth_frame_id, t); // in meters //} // Publish the point cloud if someone has subscribed to //if (cloud_SubNumber > 0) { //// Run the point cloud convertion asynchronously to avoid slowing down all the program //// Retrieve raw pointCloud data //zed->retrieveMeasure(cloud, sl::MEASURE_XYZBGRA); //point_cloud_frame_id = cloud_frame_id; //point_cloud_time = t; //publishPointCloud(width, height, pub_cloud); //} // Publish the odometry if someone has subscribed to //if (odom_SubNumber > 0) { //zed->getPosition(pose); //publishOdom(pose, pub_odom, odometry_frame_id, t); //} //Note, the frame is published, but its values will only change if someone has subscribed to odom //publishTrackedFrame(pose, transform_odom_broadcaster, odometry_transform_frame_id, t); //publish the tracked Frame loop_rate.sleep(); } else { //publishTrackedFrame(pose, transform_odom_broadcaster, odometry_transform_frame_id, ros::Time::now()); //publish the tracked Frame before the sleep std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait } } // while loop zed.reset(); } void onInit() { // Launch file parameters resolution = sl::RESOLUTION_HD720; quality = sl::DEPTH_MODE_PERFORMANCE; //sensing_mode = sl::SENSING_MODE_STANDARD; sensing_mode = sl::SENSING_MODE_FILL; rate = 30; gpu_id = -1; zed_id = 0; odometry_DB = ""; std::string img_topic = "image_rect_color"; std::string img_raw_topic = "image_raw_color"; // Set the default topic names string rgb_topic = "rgb/" + img_topic; string rgb_raw_topic = "rgb/" + img_raw_topic; string rgb_cam_info_topic = "rgb/camera_info"; rgb_frame_id = "/zed_current_frame"; string left_topic = "left/" + img_topic; string left_raw_topic = "left/" + img_raw_topic; string left_cam_info_topic = "left/camera_info"; left_frame_id = "/zed_current_frame"; string right_topic = "right/" + img_topic; 
string right_raw_topic = "right/" + img_raw_topic; string right_cam_info_topic = "right/camera_info"; right_frame_id = "/zed_current_frame"; string depth_topic = "depth/"; if (openniDepthMode) depth_topic += "depth_raw_registered"; else depth_topic += "depth_registered"; string compound_topic = "compound_img/"; string depth_cam_info_topic = "depth/camera_info"; depth_frame_id = "/zed_depth_frame"; string point_cloud_topic = "point_cloud/cloud_registered"; cloud_frame_id = "/zed_current_frame"; string odometry_topic = "odom"; odometry_frame_id = "/zed_initial_frame"; odometry_transform_frame_id = "/zed_current_frame"; nh = getMTNodeHandle(); nh_ns = getMTPrivateNodeHandle(); // Get parameters from launch file nh_ns.getParam("resolution", resolution); nh_ns.getParam("quality", quality); nh_ns.getParam("sensing_mode", sensing_mode); nh_ns.getParam("frame_rate", rate); nh_ns.getParam("odometry_DB", odometry_DB); nh_ns.getParam("openni_depth_mode", openniDepthMode); nh_ns.getParam("gpu_id", gpu_id); nh_ns.getParam("zed_id", zed_id); if (openniDepthMode) NODELET_INFO_STREAM("Openni depth mode activated"); nh_ns.getParam("rgb_topic", rgb_topic); nh_ns.getParam("rgb_raw_topic", rgb_raw_topic); nh_ns.getParam("rgb_cam_info_topic", rgb_cam_info_topic); nh_ns.getParam("left_topic", left_topic); nh_ns.getParam("left_raw_topic", left_raw_topic); nh_ns.getParam("left_cam_info_topic", left_cam_info_topic); nh_ns.getParam("right_topic", right_topic); nh_ns.getParam("right_raw_topic", right_raw_topic); nh_ns.getParam("right_cam_info_topic", right_cam_info_topic); nh_ns.getParam("depth_topic", depth_topic); nh_ns.getParam("depth_cam_info_topic", depth_cam_info_topic); nh_ns.getParam("point_cloud_topic", point_cloud_topic); nh_ns.getParam("odometry_topic", odometry_topic); nh_ns.param<std::string>("svo_filepath", svo_filepath, std::string()); // Create the ZED object zed.reset(new sl::Camera()); // Try to initialize the ZED if (!svo_filepath.empty()) param.svo_input_filename = 
svo_filepath.c_str(); else { param.camera_fps = rate; param.camera_resolution = static_cast<sl::RESOLUTION> (resolution); param.camera_linux_id = zed_id; } param.coordinate_units = sl::UNIT_METER; param.coordinate_system = sl::COORDINATE_SYSTEM_IMAGE; param.depth_mode = static_cast<sl::DEPTH_MODE> (quality); param.sdk_verbose = true; param.sdk_gpu_id = gpu_id; sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; while (err != sl::SUCCESS) { err = zed->open(param); NODELET_INFO_STREAM(errorCode2str(err)); std::this_thread::sleep_for(std::chrono::milliseconds(2000)); } cout << "ZED OPENED" << endl; //ERRCODE display dynamic_reconfigure::Server<autobot::AutobotConfig> server; dynamic_reconfigure::Server<autobot::AutobotConfig>::CallbackType f; f = boost::bind(&ZEDWrapperNodelet::callback, this, _1, _2); server.setCallback(f); confidence = 80; // Create all the publishers // Image publishers image_transport::ImageTransport it_zed(nh); //pub_rgb = it_zed.advertise(rgb_topic, 1); //rgb //NODELET_INFO_STREAM("Advertized on topic " << rgb_topic); //pub_raw_rgb = it_zed.advertise(rgb_raw_topic, 1); //rgb raw //NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic); pub_left = it_zed.advertise(left_topic, 2); //left NODELET_INFO_STREAM("Advertized on topic " << left_topic); //pub_raw_left = it_zed.advertise(left_raw_topic, 1); //left raw //NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic); pub_right = it_zed.advertise(right_topic, 2); //right NODELET_INFO_STREAM("Advertized on topic " << right_topic); //pub_raw_right = it_zed.advertise(right_raw_topic, 1); //right raw //NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic); pub_depth = it_zed.advertise(depth_topic, 2); //depth NODELET_INFO_STREAM("Advertized on topic " << depth_topic); pub_compound_img = nh.advertise<autobot::compound_img>(compound_topic, 2); //depth NODELET_INFO_STREAM("Advertized on topic " << compound_topic); ////PointCloud publisher //pub_cloud = 
nh.advertise<sensor_msgs::PointCloud2> (point_cloud_topic, 1); //NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic); // Camera info publishers //pub_rgb_cam_info = nh.advertise<sensor_msgs::CameraInfo>(rgb_cam_info_topic, 1); //rgb //NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic); pub_left_cam_info = nh.advertise<sensor_msgs::CameraInfo>(left_cam_info_topic, 1); //left NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic); pub_right_cam_info = nh.advertise<sensor_msgs::CameraInfo>(right_cam_info_topic, 1); //right NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic); pub_depth_cam_info = nh.advertise<sensor_msgs::CameraInfo>(depth_cam_info_topic, 1); //depth NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic); //Odometry publisher //pub_odom = nh.advertise<nav_msgs::Odometry>(odometry_topic, 1); //NODELET_INFO_STREAM("Advertized on topic " << odometry_topic); device_poll_thread = boost::shared_ptr<boost::thread> (new boost::thread(boost::bind(&ZEDWrapperNodelet::device_poll, this))); } }; // class ZEDROSWrapperNodelet } // namespace #include <pluginlib/class_list_macros.h> PLUGINLIB_EXPORT_CLASS(autobot::ZEDWrapperNodelet, nodelet::Nodelet);<|fim▁end|>
<|file_name|>PickerIOS.ios.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @providesModule PickerIOS * * This is a controlled component version of RCTPickerIOS */ 'use strict'; var NativeMethodsMixin = require('NativeMethodsMixin'); var React = require('React'); var ReactChildren = require('ReactChildren'); var ReactNativeViewAttributes = require('ReactNativeViewAttributes'); var RCTPickerIOSConsts = require('NativeModules').UIManager.RCTPicker.Constants; var StyleSheet = require('StyleSheet'); var View = require('View'); var requireNativeComponent = require('requireNativeComponent'); var merge = require('merge'); var PICKER = 'picker'; var PickerIOS = React.createClass({ mixins: [NativeMethodsMixin], propTypes: { onValueChange: React.PropTypes.func, selectedValue: React.PropTypes.any, // string or integer basically<|fim▁hole|> return this._stateFromProps(this.props); }, componentWillReceiveProps: function(nextProps) { this.setState(this._stateFromProps(nextProps)); }, // Translate PickerIOS prop and children into stuff that RCTPickerIOS understands. 
_stateFromProps: function(props) { var selectedIndex = 0; var items = []; ReactChildren.forEach(props.children, function (child, index) { if (child.props.value === props.selectedValue) { selectedIndex = index; } items.push({value: child.props.value, label: child.props.label}); }); return {selectedIndex, items}; }, render: function() { return ( <View style={this.props.style}> <RCTPickerIOS ref={PICKER} style={styles.pickerIOS} items={this.state.items} selectedIndex={this.state.selectedIndex} onChange={this._onChange} /> </View> ); }, _onChange: function(event) { if (this.props.onChange) { this.props.onChange(event); } if (this.props.onValueChange) { this.props.onValueChange(event.nativeEvent.newValue); } // The picker is a controlled component. This means we expect the // on*Change handlers to be in charge of updating our // `selectedValue` prop. That way they can also // disallow/undo/mutate the selection of certain values. In other // words, the embedder of this component should be the source of // truth, not the native component. if (this.state.selectedIndex !== event.nativeEvent.newIndex) { this.refs[PICKER].setNativeProps({ selectedIndex: this.state.selectedIndex }); } }, }); PickerIOS.Item = React.createClass({ propTypes: { value: React.PropTypes.any, // string or integer basically label: React.PropTypes.string, }, render: function() { // These items don't get rendered directly. return null; }, }); var styles = StyleSheet.create({ pickerIOS: { // The picker will conform to whatever width is given, but we do // have to set the component's height explicitly on the // surrounding view to ensure it gets rendered. height: RCTPickerIOSConsts.ComponentHeight, }, }); var RCTPickerIOS = requireNativeComponent('RCTPicker', PickerIOS, { nativeOnly: { items: true, onChange: true, selectedIndex: true, }, }); module.exports = PickerIOS;<|fim▁end|>
}, getInitialState: function() {
<|file_name|>font_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #[derive(Clone, HeapSizeOf, Debug)] pub struct FontContextHandle; impl FontContextHandle { pub fn new() -> FontContextHandle { FontContextHandle }<|fim▁hole|><|fim▁end|>
}
<|file_name|>cmp.rs<|end_file_name|><|fim▁begin|>//! Tests for comparison operators. mod less { use util::*; #[test] fn constants() { assert_eval_true("1 < 2"); assert_eval_true("-5 < 0"); assert_eval_true("1.5 < 2"); assert_eval_true("8 < 10.0"); assert_eval_true("-3.14 < 3.14"); assert_eval_false("1 < 1"); assert_eval_false("0 < -10"); assert_eval_error("0 < foo"); assert_eval_error("foo < 42"); assert_eval_error("bar < true"); assert_eval_error("[] < []"); assert_eval_error("{} < {}"); } // TODO(xion): inputs } mod less_or_equal { use util::*; #[test] fn constants() { assert_eval_true("1 <= 2"); assert_eval_true("-5 <= 0"); assert_eval_true("1.5 <= 2"); assert_eval_true("8 <= 10.0"); assert_eval_true("-3.14 <= 3.14"); assert_eval_true("1 <= 1"); assert_eval_false("0 <= -10"); assert_eval_false("-8 <= -12"); assert_eval_error("0 <= foo"); assert_eval_error("foo <= 42"); assert_eval_error("bar <= true"); assert_eval_error("[] <= []"); assert_eval_error("{} <= {}"); } // TODO(xion): inputs } mod greater { use util::*; #[test] fn constants() { assert_eval_true("2 > 1"); assert_eval_true("0 > -5"); assert_eval_true("2 > 1.5"); assert_eval_true("10.0 > 8"); assert_eval_true("3.14 > -3.14"); assert_eval_false("1 > 1"); assert_eval_false("-10 > 0"); assert_eval_false("-12 > -8"); assert_eval_error("0 > foo"); assert_eval_error("foo > 42"); assert_eval_error("bar > true"); assert_eval_error("[] > []"); assert_eval_error("{} > {}"); } // TODO(xion): inputs } mod greater_or_equal { use util::*;<|fim▁hole|> assert_eval_true("0 >= -5"); assert_eval_true("2 >= 1.5"); assert_eval_true("10.0 >= 8"); assert_eval_true("3.14 >= -3.14"); assert_eval_true("1 >= 1"); assert_eval_false("-10 >= 0"); assert_eval_false("-12 >= -8"); assert_eval_error("0 >= foo"); assert_eval_error("foo >= 42"); assert_eval_error("bar >= true"); assert_eval_error("[] >= []"); assert_eval_error("{} >= {}"); } // TODO(xion): inputs } mod equal { use util::*; #[test] fn constants() { assert_eval_false("2 == 
1"); assert_eval_false("0 == -5"); assert_eval_false("2 == 1.5"); assert_eval_false("10.0 == 8"); assert_eval_false("3.14 == -3.14"); assert_eval_false("-10 == 0"); assert_eval_false("-12 == -8"); assert_eval_true("1 == 1"); assert_eval_true("2.0 == 2"); assert_eval_true("3.0 == 3.0"); assert_eval_true("4 == 4.0"); assert_eval_true("[] == []"); assert_eval_true("{} == {}"); assert_eval_error("0 == foo"); assert_eval_error("foo == 42"); assert_eval_error("bar == true"); } // TODO(xion): inputs } mod not_equal { use util::*; #[test] fn constants() { assert_eval_true("2 != 1"); assert_eval_true("0 != -5"); assert_eval_true("2 != 1.5"); assert_eval_true("10.0 != 8"); assert_eval_true("3.14 != -3.14"); assert_eval_true("-10 != 0"); assert_eval_true("-12 != -8"); assert_eval_false("1 != 1"); assert_eval_false("2.0 != 2"); assert_eval_false("3.0 != 3.0"); assert_eval_false("4 != 4.0"); assert_eval_false("[] != []"); assert_eval_false("{} != {}"); assert_eval_error("0 != foo"); assert_eval_error("foo != 42"); assert_eval_error("bar != true"); } // TODO(xion): inputs }<|fim▁end|>
#[test] fn constants() { assert_eval_true("2 >= 1");
<|file_name|>wrong.js<|end_file_name|><|fim▁begin|>import { expect } from 'chai'; import Urlsparser from '../src/app.js'; describe('wrong', () => { it('...', () => { const caps = () => {new Urlsparser('...')}; expect(caps).to.throw(); }); <|fim▁hole|> }); it('0:5:92', () => { const caps = () => {new Urlsparser('0:5:92')}; expect(caps).to.throw(); }); });<|fim▁end|>
it('+++abs+', () => { const caps = () => {new Urlsparser('+++abs+')}; expect(caps).to.throw();
<|file_name|>en-MH.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js const u = undefined; function plural(n: number): number { let i = Math.floor(Math.abs(n)), v = n.toString().replace(/^[^.]*\.?/, '').length; if (i === 1 && v === 0) return 1; return 5; } export default [ 'en-MH', [['a', 'p'], ['AM', 'PM'], u], [['AM', 'PM'], u, u], [ ['S', 'M', 'T', 'W', 'T', 'F', 'S'], ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'], ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'], ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'] ], u, [ ['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'], ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'], [ 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ] ],<|fim▁hole|> [['B', 'A'], ['BC', 'AD'], ['Before Christ', 'Anno Domini']], 0, [6, 0], ['M/d/yy', 'MMM d, y', 'MMMM d, y', 'EEEE, MMMM d, y'], ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'], ['{1}, {0}', u, '{1} \'at\' {0}', u], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##0.###', '#,##0%', '¤#,##0.00', '#E0'], 'USD', '$', 'US Dollar', {}, 'ltr', plural ];<|fim▁end|>
u,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from .hu_psi_constraints import HuPsiConstraints
<|file_name|>aws.py<|end_file_name|><|fim▁begin|>""" This is the default template for our main set of AWS servers. """ # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 import json from .common import * from logsettings import get_logger_config import os # specified as an environment variable. Typically this is set # in the service's upstart script and corresponds exactly to the service name. # Service variants apply config differences via env and auth JSON files, # the names of which correspond to the variant. SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None) # when not variant is specified we attempt to load an unvaried # config set. CONFIG_PREFIX = "" if SERVICE_VARIANT: CONFIG_PREFIX = SERVICE_VARIANT + "." ############### ALWAYS THE SAME ################################ DEBUG = False TEMPLATE_DEBUG = False EMAIL_BACKEND = 'django_ses.SESBackend' SESSION_ENGINE = 'django.contrib.sessions.backends.cache'<|fim▁hole|># Don't use a connection pool, since connections are dropped by ELB. BROKER_POOL_LIMIT = 0 BROKER_CONNECTION_TIMEOUT = 1 # For the Result Store, use the django cache named 'celery' CELERY_RESULT_BACKEND = 'cache' CELERY_CACHE_BACKEND = 'celery' # When the broker is behind an ELB, use a heartbeat to refresh the # connection and to detect if it has been dropped. 
BROKER_HEARTBEAT = 10.0 BROKER_HEARTBEAT_CHECKRATE = 2 # Each worker should only fetch one message at a time CELERYD_PREFETCH_MULTIPLIER = 1 # Skip djcelery migrations, since we don't use the database as the broker SOUTH_MIGRATION_MODULES = { 'djcelery': 'ignore', } # Rename the exchange and queues for each variant QUEUE_VARIANT = CONFIG_PREFIX.lower() CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT) HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT) DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT) LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT) CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE CELERY_QUEUES = { HIGH_PRIORITY_QUEUE: {}, LOW_PRIORITY_QUEUE: {}, DEFAULT_PRIORITY_QUEUE: {} } ############# NON-SECURE ENV CONFIG ############################## # Things like server locations, ports, etc. with open(ENV_ROOT / CONFIG_PREFIX + "env.json") as env_file: ENV_TOKENS = json.load(env_file) EMAIL_BACKEND = ENV_TOKENS.get('EMAIL_BACKEND', EMAIL_BACKEND) EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None) LMS_BASE = ENV_TOKENS.get('LMS_BASE') # Note that MITX_FEATURES['PREVIEW_LMS_BASE'] gets read in from the environment file. SITE_NAME = ENV_TOKENS['SITE_NAME'] LOG_DIR = ENV_TOKENS['LOG_DIR'] CACHES = ENV_TOKENS['CACHES'] SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN') SESSION_ENGINE = ENV_TOKENS.get('SESSION_ENGINE', SESSION_ENGINE) # allow for environments to specify what cookie name our login subsystem should use # this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can # happen with some browsers (e.g. 
Firefox) if ENV_TOKENS.get('SESSION_COOKIE_NAME', None): # NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str() SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME')) #Email overrides DEFAULT_FROM_EMAIL = ENV_TOKENS.get('DEFAULT_FROM_EMAIL', DEFAULT_FROM_EMAIL) DEFAULT_FEEDBACK_EMAIL = ENV_TOKENS.get('DEFAULT_FEEDBACK_EMAIL', DEFAULT_FEEDBACK_EMAIL) ADMINS = ENV_TOKENS.get('ADMINS', ADMINS) SERVER_EMAIL = ENV_TOKENS.get('SERVER_EMAIL', SERVER_EMAIL) MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS) TECH_SUPPORT_EMAIL = ENV_TOKENS.get('TECH_SUPPORT_EMAIL', TECH_SUPPORT_EMAIL) COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", []) #Timezone overrides TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE) for feature, value in ENV_TOKENS.get('MITX_FEATURES', {}).items(): MITX_FEATURES[feature] = value LOGGING = get_logger_config(LOG_DIR, logging_env=ENV_TOKENS['LOGGING_ENV'], syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514), debug=False, service_variant=SERVICE_VARIANT) #theming start: PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'edX') # Event Tracking if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS: TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS") ################ SECURE AUTH ITEMS ############################### # Secret things: passwords, access keys, etc. with open(ENV_ROOT / CONFIG_PREFIX + "auth.json") as auth_file: AUTH_TOKENS = json.load(auth_file) # If Segment.io key specified, load it and turn on Segment.io if the feature flag is set # Note that this is the Studio key. There is a separate key for the LMS. 
SEGMENT_IO_KEY = AUTH_TOKENS.get('SEGMENT_IO_KEY') if SEGMENT_IO_KEY: MITX_FEATURES['SEGMENT_IO'] = ENV_TOKENS.get('SEGMENT_IO', False) AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"] AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"] DATABASES = AUTH_TOKENS['DATABASES'] MODULESTORE = AUTH_TOKENS['MODULESTORE'] CONTENTSTORE = AUTH_TOKENS['CONTENTSTORE'] # Datadog for events! DATADOG = AUTH_TOKENS.get("DATADOG", {}) DATADOG.update(ENV_TOKENS.get("DATADOG", {})) # TODO: deprecated (compatibility with previous settings) if 'DATADOG_API' in AUTH_TOKENS: DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API'] # Celery Broker CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "") CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "") CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "") CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "") CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "") BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT, CELERY_BROKER_USER, CELERY_BROKER_PASSWORD, CELERY_BROKER_HOSTNAME, CELERY_BROKER_VHOST) # Event tracking TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))<|fim▁end|>
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' ###################################### CELERY ################################
<|file_name|>java_wire_library.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import logging from pants.backend.jvm.targets.exportable_jvm_library import ExportableJvmLibrary from pants.base.payload import Payload from pants.base.payload_field import PrimitiveField logger = logging.getLogger(__name__) class JavaWireLibrary(ExportableJvmLibrary): """Generates a stub Java library from protobuf IDL files.""" def __init__(self,<|fim▁hole|> service_writer=None, service_writer_options=None, roots=None, registry_class=None, enum_options=None, no_options=None, **kwargs): """ :param string service_writer: the name of the class to pass as the --service_writer option to the Wire compiler. :param list service_writer_options: A list of options to pass to the service writer :param list roots: passed through to the --roots option of the Wire compiler :param string registry_class: fully qualified class name of RegistryClass to create. 
If in doubt, specify com.squareup.wire.SimpleServiceWriter :param list enum_options: list of enums to pass to as the --enum-enum_options option, # optional :param boolean no_options: boolean that determines if --no_options flag is passed """ payload = payload or Payload() payload.add_fields({ 'service_writer': PrimitiveField(service_writer or None), 'service_writer_options': PrimitiveField(service_writer_options or []), 'roots': PrimitiveField(roots or []), 'registry_class': PrimitiveField(registry_class or None), 'enum_options': PrimitiveField(enum_options or []), 'no_options': PrimitiveField(no_options or False), }) if service_writer_options: logger.warn('The service_writer_options flag is ignored.') super(JavaWireLibrary, self).__init__(payload=payload, **kwargs) self.add_labels('codegen')<|fim▁end|>
payload=None,
<|file_name|>nk.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>% Nonlinear New Keynesian Model % Reference: Foerster, Rubio-Ramirez, Waggoner and Zha (2013) % Perturbation Methods for Markov Switching Models. %------------------------------------------------------------- endogenous PAI, "Inflation", Y, "Output gap", R, "Interest rate" exogenous EPS_R "Monetary policy shock" parameters betta, eta, kappa, rhor sigr a_tp_1_2, a_tp_2_1 parameters(a,2) mu, psi model 1=betta*(1-.5*kappa*(PAI-1)^2)*Y*R/((1-.5*kappa*(PAI{+1}-1)^2)*Y{+1}*exp(mu)*PAI{+1}); 1-eta+eta*(1-.5*kappa*(PAI-1)^2)*Y+betta*kappa*(1-.5*kappa*(PAI-1)^2)*(PAI{+1}-1)*PAI{+1}/(1-.5*kappa*(PAI{+1}-1)^2) -kappa*(PAI-1)*PAI; (R{-1}/R{stst})^rhor*(PAI/PAI{stst})^((1-rhor)*psi)*exp(sigr*EPS_R)-R/R{stst}; steady_state_model PAI=1; Y=(eta-1)/eta; R=exp(mu)/betta*PAI; parameterization a_tp_1_2,1-.9; a_tp_2_1,1-.9; betta, .9976; kappa, 161; eta, 10; rhor, .8; sigr, 0.0025; mu(a,1), 0.005+0.0025; mu(a,2), 0.005-0.0025; psi(a,1), 3.1; psi(a,2), 0.9;<|fim▁end|>
%-------------------------------------------------------------
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Foremast - Pipeline Tooling #<|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .api_gateway_event import *<|fim▁end|>
# Copyright 2018 Gogo, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>V2MainFragModel.java<|end_file_name|><|fim▁begin|>package com.stdnull.v2api.model; import android.os.Bundle; import android.os.Parcelable; import java.util.ArrayList; import java.util.List; /** * Created by chen on 2017/8/20. */ public class V2MainFragModel { private static final String KEY_V2EXBEAN = "KEY_V2EXBEAN"; private List<V2ExBean> mContentListModel = new ArrayList<>(); <|fim▁hole|> } public void addContentListModel(List<V2ExBean> contentListModel) { if(contentListModel != null) { this.mContentListModel.addAll(contentListModel); } } public boolean isModelEmpty(){ return mContentListModel.isEmpty() ; } public void clearModel(){ mContentListModel.clear(); } public void save(Bundle bundle){ bundle.putParcelableArrayList(KEY_V2EXBEAN, (ArrayList<? extends Parcelable>) mContentListModel); } public boolean restore(Bundle bundle){ if(bundle == null){ return false; } mContentListModel = bundle.getParcelableArrayList(KEY_V2EXBEAN); return mContentListModel != null && !mContentListModel.isEmpty(); } }<|fim▁end|>
public List<V2ExBean> getContentListModel() { return mContentListModel;
<|file_name|>test_settings.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S+00:00'
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url, include from django.contrib import admin<|fim▁hole|> from .views import UploadBlackListView, DemoView, UdateBlackListView urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^upload-blacklist$', login_required(UploadBlackListView.as_view()), name='upload-blacklist'), url(r'^update-blacklist$', UdateBlackListView.as_view(), name='update-blacklist'), url(r'^profile/', include('n_profile.urls')), url(r'^demo$', DemoView.as_view(), name='demo'), ]<|fim▁end|>
from django.contrib.auth.decorators import login_required
<|file_name|>group_by_results_test.go<|end_file_name|><|fim▁begin|>package search import ( "github.com/aliyun/aliyun-tablestore-go-sdk/tablestore/otsprotocol" "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" "math" "math/rand" "testing" ) func genPBGroupBysResult() *otsprotocol.GroupBysResult { pbGroupBysResults := otsprotocol.GroupBysResult{} { items := []*otsprotocol.GroupByFieldResultItem { { Key: proto.String("k1"), RowCount: proto.Int64(6), }, { Key: proto.String("k2"), RowCount: proto.Int64(9), }, } groupByBodyBytes, _ := proto.Marshal(&otsprotocol.GroupByFieldResult { GroupByFieldResultItems: items, }) groupByResult := otsprotocol.GroupByResult{ Name: proto.String("group_by1"), Type: otsprotocol.GroupByType_GROUP_BY_FIELD.Enum(), GroupByResult: groupByBodyBytes, } pbGroupBysResults.GroupByResults = append(pbGroupBysResults.GroupByResults, &groupByResult) } { items := []*otsprotocol.GroupByFilterResultItem { { RowCount: proto.Int64(3), }, { RowCount: proto.Int64(5), }, } groupByBodyBytes, _ := proto.Marshal(&otsprotocol.GroupByFilterResult { GroupByFilterResultItems: items, }) groupByResult := otsprotocol.GroupByResult{ Name: proto.String("group_by2"), Type: otsprotocol.GroupByType_GROUP_BY_FILTER.Enum(), GroupByResult: groupByBodyBytes, } pbGroupBysResults.GroupByResults = append(pbGroupBysResults.GroupByResults, &groupByResult) } { items := []*otsprotocol.GroupByRangeResultItem { { From: proto.Float64(math.Inf(-1)), To: proto.Float64(3), RowCount: proto.Int64(333), }, { From: proto.Float64(3), To: proto.Float64(5), RowCount: proto.Int64(666), }, { From: proto.Float64(5), To: proto.Float64(math.Inf(1)), RowCount: proto.Int64(999), }, } groupByBodyBytes, _ := proto.Marshal(&otsprotocol.GroupByRangeResult { GroupByRangeResultItems: items, }) groupByResult := otsprotocol.GroupByResult{ Name: proto.String("group_by3"), Type: otsprotocol.GroupByType_GROUP_BY_RANGE.Enum(), GroupByResult: groupByBodyBytes, } pbGroupBysResults.GroupByResults = 
append(pbGroupBysResults.GroupByResults, &groupByResult) } { items := []*otsprotocol.GroupByGeoDistanceResultItem { { From: proto.Float64(math.Inf(-1)), To: proto.Float64(3), RowCount: proto.Int64(333), }, { From: proto.Float64(3), To: proto.Float64(5), RowCount: proto.Int64(666), }, { From: proto.Float64(5), To: proto.Float64(math.Inf(1)), RowCount: proto.Int64(999), }, } groupByBodyBytes, _ := proto.Marshal(&otsprotocol.GroupByGeoDistanceResult { GroupByGeoDistanceResultItems: items, }) groupByResult := otsprotocol.GroupByResult{ Name: proto.String("group_by4"), Type: otsprotocol.GroupByType_GROUP_BY_GEO_DISTANCE.Enum(), GroupByResult: groupByBodyBytes, } pbGroupBysResults.GroupByResults = append(pbGroupBysResults.GroupByResults, &groupByResult) } { var value int64 = 1 var key = rand.Int63() items := []*otsprotocol.GroupByHistogramItem{ { Key: VTInteger(key), Value: &value, }, } groupByBodyBytes, _ := proto.Marshal(&otsprotocol.GroupByHistogramResult{ GroupByHistograItems: items, }) groupByResult := otsprotocol.GroupByResult{ Name: proto.String("group_by5"), Type: otsprotocol.GroupByType_GROUP_BY_HISTOGRAM.Enum(), GroupByResult: groupByBodyBytes, } pbGroupBysResults.GroupByResults = append(pbGroupBysResults.GroupByResults, &groupByResult) } <|fim▁hole|>} func TestParseGroupByResultsFromPB(t *testing.T) { pbGroupBysResult := genPBGroupBysResult() groupByResults, _ := ParseGroupByResultsFromPB(pbGroupBysResult.GroupByResults) assert.Equal(t, 5, len(groupByResults.resultMap)) assert.Equal(t, false, groupByResults.Empty()) { groupByResult, err := groupByResults.GroupByField("group_by1") assert.Nil(t, err) assert.Equal(t, 2, len(groupByResult.Items)) assert.Equal(t, "k1", groupByResult.Items[0].Key) assert.Equal(t, int64(6), groupByResult.Items[0].RowCount) assert.Equal(t, "k2", groupByResult.Items[1].Key) assert.Equal(t, int64(9), groupByResult.Items[1].RowCount) } { groupByResult, err := groupByResults.GroupByFilter("group_by2") assert.Nil(t, err) assert.Equal(t, 2, 
len(groupByResult.Items)) assert.Equal(t, int64(3), groupByResult.Items[0].RowCount) assert.Equal(t, int64(5), groupByResult.Items[1].RowCount) } { groupByResult, err := groupByResults.GroupByRange("group_by3") assert.Nil(t, err) assert.Equal(t, 3, len(groupByResult.Items)) assert.Equal(t, math.Inf(-1), groupByResult.Items[0].From) assert.Equal(t, float64(3), groupByResult.Items[0].To) assert.Equal(t, int64(333), groupByResult.Items[0].RowCount) assert.Equal(t, float64(3), groupByResult.Items[1].From) assert.Equal(t, float64(5), groupByResult.Items[1].To) assert.Equal(t, int64(666), groupByResult.Items[1].RowCount) assert.Equal(t, float64(5), groupByResult.Items[2].From) assert.Equal(t, math.Inf(1), groupByResult.Items[2].To) assert.Equal(t, int64(999), groupByResult.Items[2].RowCount) } { groupByResult, err := groupByResults.GroupByGeoDistance("group_by4") assert.Nil(t, err) assert.Equal(t, 3, len(groupByResult.Items)) assert.Equal(t, math.Inf(-1), groupByResult.Items[0].From) assert.Equal(t, float64(3), groupByResult.Items[0].To) assert.Equal(t, int64(333), groupByResult.Items[0].RowCount) assert.Equal(t, float64(3), groupByResult.Items[1].From) assert.Equal(t, float64(5), groupByResult.Items[1].To) assert.Equal(t, int64(666), groupByResult.Items[1].RowCount) assert.Equal(t, float64(5), groupByResult.Items[2].From) assert.Equal(t, math.Inf(1), groupByResult.Items[2].To) assert.Equal(t, int64(999), groupByResult.Items[2].RowCount) } { groupByResult, err := groupByResults.GroupByHistogram("group_by5") assert.Nil(t, err) assert.Equal(t, 1, len(groupByResult.Items)) assert.Equal(t, int64(1), groupByResult.Items[0].Value) } }<|fim▁end|>
return &pbGroupBysResults
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # This file is part of REANA. # Copyright (C) 2021, 2022 CERN. # # REANA is free software; you can redistribute it and/or modify it # under the terms of the MIT License; see LICENSE file for more details. """REANA client validation utilities.""" import sys from typing import Dict, NoReturn, Union import click from reana_commons.errors import REANAValidationError from reana_commons.validation.operational_options import validate_operational_options from reana_commons.validation.utils import validate_reana_yaml, validate_workflow_name from reana_client.printer import display_message from reana_client.validation.compute_backends import validate_compute_backends from reana_client.validation.environments import validate_environment from reana_client.validation.parameters import validate_parameters from reana_client.validation.workspace import _validate_workspace def validate_reana_spec( reana_yaml, filepath, access_token=None, skip_validation=False, skip_validate_environments=True, pull_environment_image=False, server_capabilities=False, ): """Validate REANA specification file.""" if "options" in reana_yaml.get("inputs", {}): workflow_type = reana_yaml["workflow"]["type"] workflow_options = reana_yaml["inputs"]["options"] try: reana_yaml["inputs"]["options"] = validate_operational_options( workflow_type, workflow_options ) except REANAValidationError as e: display_message(e.message, msg_type="error") sys.exit(1) if not skip_validation: display_message( f"Verifying REANA specification file... 
{filepath}", msg_type="info", ) validate_reana_yaml(reana_yaml) display_message( "Valid REANA specification file.", msg_type="success", indented=True, ) validate_parameters(reana_yaml) if server_capabilities: _validate_server_capabilities(reana_yaml, access_token) if not skip_validate_environments: display_message( "Verifying environments in REANA specification file...", msg_type="info", ) validate_environment(reana_yaml, pull=pull_environment_image) def _validate_server_capabilities(reana_yaml: Dict, access_token: str) -> None: """Validate server capabilities in REANA specification file. :param reana_yaml: dictionary which represents REANA specification file. :param access_token: access token of the current user. """ from reana_client.api.client import info info_response = info(access_token) display_message( "Verifying compute backends in REANA specification file...", msg_type="info", ) supported_backends = info_response.get("compute_backends", {}).get("value") validate_compute_backends(reana_yaml, supported_backends) root_path = reana_yaml.get("workspace", {}).get("root_path") available_workspaces = info_response.get("workspaces_available", {}).get("value") _validate_workspace(root_path, available_workspaces) def validate_input_parameters(live_parameters, original_parameters): """Return validated input parameters.""" parsed_input_parameters = dict(live_parameters)<|fim▁hole|> "Given parameter - {0}, is not in reana.yaml".format(parameter), msg_type="error", ) del live_parameters[parameter] return live_parameters def validate_workflow_name_parameter( ctx: click.core.Context, _: click.core.Option, workflow_name: str ) -> Union[str, NoReturn]: """Validate workflow name parameter.""" try: return validate_workflow_name(workflow_name) except ValueError as e: display_message(str(e), msg_type="error") sys.exit(1)<|fim▁end|>
for parameter in parsed_input_parameters.keys(): if parameter not in original_parameters: display_message(
<|file_name|>board.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- import re from user import make_anonymous_user from exeptions import HttpStatusError, RegexError def make_subject_url(url): if url.endswith("/"): return url + "subject.txt" else: return url + "/subject.txt" def parse_board(string): if not isinstance(string, unicode): raise TypeError("unsupported string type:" + str(type(string))) thread_expressions = re.compile( r"^(?P<dat>\d+\.dat)<>(?P<title>.*) \((?P<n_comments>\d*)\)$") results = [] for thread_string in string.split("\n"): thread_data = thread_expressions.search(thread_string) if thread_data: results.append({ "title": thread_data.group("title"), "n_comments": int(thread_data.group("n_comments")), "dat": thread_data.group("dat"), }) elif len(thread_string) != 0: raise RegexError( "Regex unmatched in parsing the thread's data", thread_expressions) return results def retrieve_board(board_url, user=None): my_user = user if user else make_anonymous_user() subject_url = make_subject_url(board_url) response = my_user.urlopen(subject_url, gzip=False) if response.code == 200: retrieved_string = unicode(response.read(), "Shift_JIS", "ignore") print type(retrieved_string) return parse_board(retrieved_string) else:<|fim▁hole|> raise HttpStatusError(message, response)<|fim▁end|>
message = "HTTP status is invalid: " + str(response.code)
<|file_name|>element-rendering.ts<|end_file_name|><|fim▁begin|>const { registerHtml, useStore } = require('../../src/tram-one'); const html = registerHtml(); /** * This page has an input that changes the total number of elements on the page */ export default () => { const pageStore = useStore({ queue: '1000', elements: '1000', startTimer: 0, endTimer: 0, renders: 0 }); const updateCount = (event: any) => { pageStore.queue = event.target.value; }; const render = () => { pageStore.renders++; pageStore.startTimer = performance.now(); pageStore.elements = pageStore.queue; pageStore.endTimer = performance.now(); }; const numberOfElements = Number.parseInt(pageStore.elements, 10); const newSpan = () => html`<span>-</span>`; const elements = [...new Array(Number.isNaN(numberOfElements) ? 0 : numberOfElements)].map(newSpan);<|fim▁hole|> <h1>Element Rendering Example</h1> <figure>Wait: ${pageStore.endTimer - pageStore.startTimer}</figure> <label for="element-count">Element Count</label> <input id="element-count" value=${pageStore.queue} onkeyup=${updateCount} /> <button onclick=${render} renders=${pageStore.renders}>Render</button> <br /> ${elements} </section> `; };<|fim▁end|>
return html` <section>
<|file_name|>authz.py<|end_file_name|><|fim▁begin|>from buildbot.status.web.auth import IAuth class Authz(object): """Decide who can do what.""" knownActions = [ # If you add a new action here, be sure to also update the documentation # at docs/cfg-statustargets.texinfo 'gracefulShutdown', 'forceBuild', 'forceAllBuilds', 'pingBuilder', 'stopBuild', 'stopAllBuilds', 'cancelPendingBuild', ] def __init__(self, default_action=False, auth=None, **kwargs): self.auth = auth if auth: assert IAuth.providedBy(auth) self.config = dict( (a, default_action) for a in self.knownActions ) for act in self.knownActions: if act in kwargs: self.config[act] = kwargs[act] del kwargs[act]<|fim▁hole|> if kwargs: raise ValueError("unknown authorization action(s) " + ", ".join(kwargs.keys())) def advertiseAction(self, action): """Should the web interface even show the form for ACTION?""" if action not in self.knownActions: raise KeyError("unknown action") cfg = self.config.get(action, False) if cfg: return True return False def needAuthForm(self, action): """Does this action require an authentication form?""" if action not in self.knownActions: raise KeyError("unknown action") cfg = self.config.get(action, False) if cfg == 'auth' or callable(cfg): return True return False def actionAllowed(self, action, request, *args): """Is this ACTION allowed, given this http REQUEST?""" if action not in self.knownActions: raise KeyError("unknown action") cfg = self.config.get(action, False) if cfg: if cfg == 'auth' or callable(cfg): if not self.auth: return False user = request.args.get("username", ["<unknown>"])[0] passwd = request.args.get("passwd", ["<no-password>"])[0] if user == "<unknown>" or passwd == "<no-password>": return False if self.auth.authenticate(user, passwd): if callable(cfg) and not cfg(user, *args): return False return True return False else: return True # anyone can do this..<|fim▁end|>
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>// MIT License // // Copyright (c) 2016 Fingercomp // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
#include <iostream> #include <cmath> #include <map> #include <string> #include <vector> #include <SFML/Graphics.hpp> #include "board.hpp" #include "graphics.hpp" #include "main.hpp" void resizeWindow(sf::RenderWindow &window, sf::Vector2u &windowSize, float zoom) { sf::FloatRect visibleArea(0, 0, zoom * static_cast<float>(windowSize.x), zoom * static_cast<float>(windowSize.y)); window.setView(sf::View(visibleArea)); } inline void resizeTilemap(CellTilemap &cellTilemap, sf::Vector2u &windowSize, float zoom) { cellTilemap.resize(ceil(zoom * static_cast<float>(windowSize.x) / graphicsSettings::cellWidth), ceil(zoom * static_cast<float>(windowSize.y) / graphicsSettings::cellHeight)); } int main() { Board board(10, 10); CellTilemap cellTilemap(board); sf::Texture texture; std::vector<std::pair<Tile, sf::Color>> tilesetNumbers; sf::Uint8 *tilesetBytes = nullptr; createTileset(graphicsSettings::colors, tilesetNumbers, tilesetBytes, texture); Tilemap tilemap(cellTilemap, texture, tilesetNumbers); sf::RenderWindow window(sf::VideoMode(800, 600), "Game of Life"); window.setFramerateLimit(30); // no need for high FPS int zoomPos = 4; // 1.0f float zoom = graphicsSettings::zoomLevels.at(zoomPos); sf::Vector2u windowSize = window.getSize(); cellTilemap.resize(ceil(zoom * static_cast<float>(windowSize.x) / graphicsSettings::cellWidth), ceil(zoom * static_cast<float>(windowSize.y) / graphicsSettings::cellHeight)); resizeWindow(window, windowSize, zoom); State state = State::PAUSED; int speed = 7; sf::Time updateInterval = graphicsSettings::speed.at(speed); sf::Clock clock; while (window.isOpen()) { sf::Event event; while (window.pollEvent(event)) { switch (event.type) { case sf::Event::Closed: window.close(); break; case sf::Event::Resized: { sf::Vector2u windowSize(event.size.width, event.size.height); resizeWindow(window, windowSize, zoom); resizeTilemap(cellTilemap, windowSize, zoom); } case sf::Event::MouseButtonPressed: { switch (event.mouseButton.button) { case 
sf::Mouse::Left: { sf::Vector2i point(event.mouseButton.x, event.mouseButton.y); sf::Vector2f pos = window.mapPixelToCoords(point); int x = static_cast<int>(pos.x); int y = static_cast<int>(pos.y); x /= graphicsSettings::cellWidth; y /= graphicsSettings::cellHeight; if (x >= 0 && y >= 0 && x < cellTilemap.getWidth() && y < cellTilemap.getHeight()) { cellTilemap.set(x, y, true); } break; } case sf::Mouse::Right: { sf::Vector2i point(event.mouseButton.x, event.mouseButton.y); sf::Vector2f pos = window.mapPixelToCoords(point); int x = static_cast<int>(pos.x); int y = static_cast<int>(pos.y); x /= graphicsSettings::cellWidth; y /= graphicsSettings::cellHeight; if (x >= 0 && y >= 0 && x < cellTilemap.getWidth() && y < cellTilemap.getHeight()) { cellTilemap.set(x, y, false); } break; } case sf::Mouse::Middle: { sf::Vector2i point(event.mouseButton.x, event.mouseButton.y); sf::Vector2f pos = window.mapPixelToCoords(point); int x = static_cast<int>(pos.x); int y = static_cast<int>(pos.y); x /= graphicsSettings::cellWidth; y /= graphicsSettings::cellHeight; if (x >= 0 && y >= 0 && x < cellTilemap.getWidth() && y < cellTilemap.getHeight()) { std::cout << "DEBUG INFO FOR {x=" << x << ", y=" << y << "}:\n"; std::cout << "Neighbors: " << board.getNeighborCount(x, y) << "\n"; } break; } default: break; } } case sf::Event::KeyPressed: { switch (event.key.code) { case sf::Keyboard::Space: if (state == State::PAUSED) { state = State::RUNNING; } else if (state == State::RUNNING) { state = State::PAUSED; } break; case sf::Keyboard::Period: // Speed + if (speed != 0) { --speed; updateInterval = graphicsSettings::speed.at(speed); } break; case sf::Keyboard::Comma: // Speed - if (speed + 1 != static_cast<int>(graphicsSettings::speed.size())) { ++speed; updateInterval = graphicsSettings::speed.at(speed); } break; case sf::Keyboard::PageUp: // Zoom In if (zoomPos != 0) { --zoomPos; zoom = graphicsSettings::zoomLevels[zoomPos]; sf::Vector2u windowSize = window.getSize(); 
resizeWindow(window, windowSize, zoom); resizeTilemap(cellTilemap, windowSize, zoom); } break; case sf::Keyboard::PageDown: // Zoom Out if (zoomPos < static_cast<int>(graphicsSettings::zoomLevels.size()) - 1) { ++zoomPos; zoom = graphicsSettings::zoomLevels[zoomPos]; sf::Vector2u windowSize = window.getSize(); resizeWindow(window, windowSize, zoom); resizeTilemap(cellTilemap, windowSize, zoom); } break; case sf::Keyboard::BackSpace: // Clear board.clear(); break; case sf::Keyboard::Return: // Pause and step if (state == State::RUNNING) { state = State::PAUSED; }<|fim▁hole|> board.step(); clock.restart(); default: break; } break; } case sf::Event::MouseMoved: { if (sf::Mouse::isButtonPressed(sf::Mouse::Left) || sf::Mouse::isButtonPressed(sf::Mouse::Right) || sf::Mouse::isButtonPressed(sf::Mouse::Middle)) { sf::Vector2i point(event.mouseMove.x, event.mouseMove.y); sf::Vector2f pos = window.mapPixelToCoords(point); int x = static_cast<int>(pos.x); int y = static_cast<int>(pos.y); x /= graphicsSettings::cellWidth; y /= graphicsSettings::cellHeight; if (x >= 0 && y >= 0 && x < cellTilemap.getWidth() && y < cellTilemap.getHeight()) { if (sf::Mouse::isButtonPressed(sf::Mouse::Left)) { cellTilemap.set(x, y, true); } if (sf::Mouse::isButtonPressed(sf::Mouse::Right)) { cellTilemap.set(x, y, false); } } } } default: break; } } if (state == State::RUNNING) { if (clock.getElapsedTime() >= updateInterval) { board.step(); clock.restart(); } } window.clear(); if (board.modified()) { tilemap.update(); board.modified(true); } window.draw(tilemap); window.display(); } delete[] tilesetBytes; return 0; }<|fim▁end|>
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import js from './js'<|fim▁hole|><|fim▁end|>
js();
<|file_name|>any_of_test.go<|end_file_name|><|fim▁begin|>package match import ( "reflect" "testing" ) func TestAnyOfIndex(t *testing.T) { for id, test := range []struct { matchers Matchers fixture string index int segments []int }{ { Matchers{ Any{}, NewText("b"), NewText("c"), }, "abc", 0, []int{0, 1, 2, 3}, }, { Matchers{ Prefix{"b"}, Suffix{"c"}, }, "abc", 0, []int{3}, }, { Matchers{ List{"[def]", false}, List{"[abc]", false}, }, "abcdef",<|fim▁hole|> }, } { everyOf := AnyOf{test.matchers} index, segments := everyOf.Index(test.fixture) if index != test.index { t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) } if !reflect.DeepEqual(segments, test.segments) { t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) } } }<|fim▁end|>
0, []int{1},
<|file_name|>sale_order.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from openerp import models, api class sale_order_line(models.Model): _inherit = "sale.order.line"<|fim▁hole|> @api.one def button_confirm(self): if self.product_id.recurring_invoice and self.order_id.project_id: invoice_line_ids = [((0, 0, { 'product_id': self.product_id.id, 'analytic_account_id': self.order_id.project_id.id, 'name': self.name, 'quantity': self.product_uom_qty, 'uom_id': self.product_uom.id, 'price_unit': self.price_unit, 'price_subtotal': self.price_subtotal }))] analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids} if not self.order_id.project_id.partner_id: analytic_values['partner_id'] = self.order_id.partner_id.id self.order_id.project_id.write(analytic_values) return super(sale_order_line, self).button_confirm()<|fim▁end|>
<|file_name|>dump.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" ) func dumpCmd() command { return command{fn: func([]string) error { return fmt.Errorf("vegeta dump has been deprecated and succeeded by the vegeta encode command") }}<|fim▁hole|><|fim▁end|>
}
<|file_name|>ex1.py<|end_file_name|><|fim▁begin|>#Working with variables<|fim▁hole|>import pyaudiogame spk = pyaudiogame.speak MyApp = pyaudiogame.App("My Application") #Here are some variables #Lets first write one line of text my_name = "Frastlin" #now lets write a number my_age = 42 #now lets write several lines of text my_song = """ My application tis to be, the coolest you've ever seen! """ #Magic time! def logic(actions): key = actions['key'] if key == "a": #Here is our one line of text, it will speak when we press a spk(my_name) elif key == "s": #Here is our number, it will speak when we press s spk(my_age) elif key == "d": #Here is our multiline text example. It will speak when we press d spk(my_song) MyApp.logic = logic MyApp.run()<|fim▁end|>
<|file_name|>CString.java<|end_file_name|><|fim▁begin|>/***************************************************************************** * This file is part of the Prolog Development Tool (PDT) * * Author: Lukas Degener (among others) * WWW: http://sewiki.iai.uni-bonn.de/research/pdt/start * Mail: pdt@lists.iai.uni-bonn.de * Copyright (C): 2004-2012, CS Dept. III, University of Bonn * * All rights reserved. This program is made available under the terms * of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html * <|fim▁hole|> ****************************************************************************/ package org.cs3.prolog.connector.cterm; import org.cs3.prolog.connector.internal.cterm.parser.ASTNode; /** * Represents a Prolog string. */ public class CString extends CTerm { public CString(ASTNode node) { super(node); } }<|fim▁end|>
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># Copyright 2013 Answers for AWS LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import logging import sys from backup_monkey.core import BackupMonkey, Logging from backup_monkey import __version__ from backup_monkey.exceptions import BackupMonkeyException from boto.utils import get_instance_metadata __all__ = ('run', ) log = logging.getLogger(__name__) LIMIT_LABEL = 32 # Label is added to description when created snapshot. # The description limit in aws is 255 def _fail(message="Unknown failure", code=1): log.error(message) sys.exit(code) def run(): parser = argparse.ArgumentParser(description='Loops through all EBS volumes, and snapshots them, then loops through all snapshots, and removes the oldest ones.') parser.add_argument('--region', metavar='REGION', help='the region to loop through and snapshot (default is current region of EC2 instance this is running on). E.g. us-east-1') parser.add_argument('--max-snapshots-per-volume', metavar='SNAPSHOTS', default=3, type=int, help='the maximum number of snapshots to keep per EBS volume. The oldest snapshots will be deleted. 
Default: 3') parser.add_argument('--snapshot-only', action='store_true', default=False, help='Only snapshot EBS volumes, do not remove old snapshots') parser.add_argument('--remove-only', action='store_true', default=False, help='Only remove old snapshots, do not create new snapshots') parser.add_argument('--verbose', '-v', action='count', help='enable verbose output (-vvv for more)') parser.add_argument('--version', action='version', version='%(prog)s ' + __version__, help='display version number and exit') parser.add_argument('--tags', nargs="+", help='Only snapshot instances that match passed in tags. E.g. --tag Name:foo will snapshot all instances with a tag `Name` and value is `foo`') parser.add_argument('--reverse-tags', action='store_true', default=False, help='Do a reverse match on the passed in tags. E.g. --tag Name:foo --reverse-tags will snapshot all instances that do not have a `Name` tag with the value `foo`') parser.add_argument('--label', action='store', help='Only snapshot instances that match passed in label are created or deleted. Default: None. Selected all snapshot. You have the posibility of create a different strategies for daily, weekly and monthly for example. Label daily won\'t deleted label weekly') parser.add_argument('--cross-account-number', action='store', help='Do a cross-account snapshot (this is the account number to do snapshots on). NOTE: This requires that you pass in the --cross-account-role parameter. E.g. --cross-account-number 111111111111 --cross-account-role Snapshot') parser.add_argument('--cross-account-role', action='store', help='The name of the role that backup-monkey will assume when doing a cross-account snapshot. E.g. 
--cross-account-role Snapshot') args = parser.parse_args() if args.cross_account_number and not args.cross_account_role: parser.error('The --cross-account-role parameter is required if you specify --cross-account-number (doing a cross-account snapshot)')<|fim▁hole|> if args.reverse_tags and not args.tags: parser.error('The --tags parameter is required if you specify --reverse-tags (doing a blacklist filter)') if args.label and len(args.label) > LIMIT_LABEL: parser.error('The --label parameter lenght should be less than 32') Logging().configure(args.verbose) log.debug("CLI parse args: %s", args) if args.region: region = args.region else: # If no region was specified, assume this is running on an EC2 instance # and work out what region it is in log.debug("Figure out which region I am running in...") instance_metadata = get_instance_metadata(timeout=5) log.debug('Instance meta-data: %s', instance_metadata) if not instance_metadata: _fail('Could not determine region. This script is either not running on an EC2 instance (in which case you should use the --region option), or the meta-data service is down') region = instance_metadata['placement']['availability-zone'][:-1] log.debug("Running in region: %s", region) try: monkey = BackupMonkey(region, args.max_snapshots_per_volume, args.tags, args.reverse_tags, args.label, args.cross_account_number, args.cross_account_role) if not args.remove_only: monkey.snapshot_volumes() if not args.snapshot_only: monkey.remove_old_snapshots() except BackupMonkeyException as e: _fail(e.message) log.info('Backup Monkey completed successfully!') sys.exit(0)<|fim▁end|>
if args.cross_account_role and not args.cross_account_number: parser.error('The --cross-account-number parameter is required if you specify --cross-account-role (doing a cross-account snapshot)')
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// String types for sending to/from the jvm mod ffi_str; pub use self::ffi_str::*;<|fim▁hole|> mod java_str; pub use self::java_str::*;<|fim▁end|>
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>""" Tests for Discussion API views """ from datetime import datetime import json from urlparse import urlparse import ddt import httpretty import mock from pytz import UTC from django.core.urlresolvers import reverse from rest_framework.test import APIClient from discussion_api.tests.utils import ( CommentsServiceMockMixin, make_minimal_cs_comment, make_minimal_cs_thread, ) from student.tests.factories import CourseEnrollmentFactory, UserFactory from util.testing import UrlResetMixin from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory class DiscussionAPIViewTestMixin(CommentsServiceMockMixin, UrlResetMixin): """ Mixin for common code in tests of Discussion API views. This includes creation of common structures (e.g. a course, user, and enrollment), logging in the test client, utility functions, and a test case for unauthenticated requests. Subclasses must set self.url in their setUp methods. 
""" client_class = APIClient @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(DiscussionAPIViewTestMixin, self).setUp() self.maxDiff = None # pylint: disable=invalid-name self.course = CourseFactory.create( org="x", course="y", run="z", start=datetime.now(UTC), discussion_topics={"Test Topic": {"id": "test_topic"}} ) self.password = "password" self.user = UserFactory.create(password=self.password) CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id) self.client.login(username=self.user.username, password=self.password) def assert_response_correct(self, response, expected_status, expected_content): """ Assert that the response has the given status code and parsed content """ self.assertEqual(response.status_code, expected_status) parsed_content = json.loads(response.content) self.assertEqual(parsed_content, expected_content) def test_not_authenticated(self): self.client.logout() response = self.client.get(self.url) self.assert_response_correct( response, 401, {"developer_message": "Authentication credentials were not provided."} ) class CourseViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CourseView""" def setUp(self): super(CourseViewTest, self).setUp() self.url = reverse("discussion_course", kwargs={"course_id": unicode(self.course.id)}) def test_404(self): response = self.client.get( reverse("course_topics", kwargs={"course_id": "non/existent/course"}) ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_get_success(self): response = self.client.get(self.url) self.assert_response_correct( response, 200, { "id": unicode(self.course.id), "blackouts": [], "thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz", "following_thread_list_url": ( "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&following=True" ), "topics_url": 
"http://testserver/api/discussion/v1/course_topics/x/y/z", } ) class CourseTopicsViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CourseTopicsView""" def setUp(self): super(CourseTopicsViewTest, self).setUp() self.url = reverse("course_topics", kwargs={"course_id": unicode(self.course.id)}) def test_404(self): response = self.client.get( reverse("course_topics", kwargs={"course_id": "non/existent/course"}) ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_get_success(self): response = self.client.get(self.url) self.assert_response_correct( response, 200, { "courseware_topics": [], "non_courseware_topics": [{ "id": "test_topic", "name": "Test Topic", "children": [], "thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&topic_id=test_topic", }], } ) @ddt.ddt @httpretty.activate class ThreadViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet list""" def setUp(self): super(ThreadViewSetListTest, self).setUp() self.author = UserFactory.create() self.url = reverse("thread-list") def test_course_id_missing(self): response = self.client.get(self.url) self.assert_response_correct( response, 400, {"field_errors": {"course_id": {"developer_message": "This field is required."}}} ) def test_404(self): response = self.client.get(self.url, {"course_id": unicode("non/existent/course")}) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_basic(self): self.register_get_user_response(self.user, upvoted_ids=["test_thread"]) source_threads = [{ "type": "thread", "id": "test_thread", "course_id": unicode(self.course.id), "commentable_id": "test_topic", "group_id": None, "user_id": str(self.author.id), "username": self.author.username, "anonymous": False, "anonymous_to_peers": False, "created_at": "2015-04-28T00:00:00Z", "updated_at": "2015-04-28T11:11:11Z", "thread_type": "discussion", "title": "Test Title", 
"body": "Test body", "pinned": False, "closed": False, "abuse_flaggers": [], "votes": {"up_count": 4}, "comments_count": 5, "unread_comments_count": 3, "read": False, "endorsed": False }] expected_threads = [{ "id": "test_thread", "course_id": unicode(self.course.id), "topic_id": "test_topic", "group_id": None, "group_name": None, "author": self.author.username, "author_label": None, "created_at": "2015-04-28T00:00:00Z", "updated_at": "2015-04-28T11:11:11Z", "type": "discussion", "title": "Test Title", "raw_body": "Test body", "rendered_body": "<p>Test body</p>", "pinned": False, "closed": False, "following": False, "abuse_flagged": False, "voted": True, "vote_count": 4, "comment_count": 5, "unread_comment_count": 3, "comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread", "endorsed_comment_list_url": None, "non_endorsed_comment_list_url": None, "editable_fields": ["abuse_flagged", "following", "voted"], "read": False, "has_endorsed": False }] self.register_get_threads_response(source_threads, page=1, num_pages=2) response = self.client.get(self.url, {"course_id": unicode(self.course.id)}) self.assert_response_correct( response, 200, { "results": expected_threads, "next": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&page=2", "previous": None, "text_search_rewrite": None, } ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_key": ["date"], "sort_order": ["desc"], "page": ["1"], "per_page": ["10"], "recursive": ["False"], }) @ddt.data("unread", "unanswered") def test_view_query(self, query): threads = [make_minimal_cs_thread()] self.register_get_user_response(self.user) self.register_get_threads_response(threads, page=1, num_pages=1) self.client.get( self.url, { "course_id": unicode(self.course.id), "view": query, } ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_key": ["date"], 
"sort_order": ["desc"], "recursive": ["False"], "page": ["1"], "per_page": ["10"], query: ["true"], }) def test_pagination(self): self.register_get_user_response(self.user) self.register_get_threads_response([], page=1, num_pages=1) response = self.client.get( self.url, {"course_id": unicode(self.course.id), "page": "18", "page_size": "4"} ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_key": ["date"], "sort_order": ["desc"], "page": ["18"], "per_page": ["4"], "recursive": ["False"], }) def test_text_search(self): self.register_get_user_response(self.user) self.register_get_threads_search_response([], None) response = self.client.get( self.url, {"course_id": unicode(self.course.id), "text_search": "test search string"} ) self.assert_response_correct( response, 200, {"results": [], "next": None, "previous": None, "text_search_rewrite": None} ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_key": ["date"], "sort_order": ["desc"], "page": ["1"], "per_page": ["10"], "recursive": ["False"], "text": ["test search string"], }) def test_following(self): self.register_get_user_response(self.user) self.register_subscribed_threads_response(self.user, [], page=1, num_pages=1) response = self.client.get( self.url, { "course_id": unicode(self.course.id), "page": "1", "page_size": "4", "following": "True", } ) self.assert_response_correct( response, 200, {"results": [], "next": None, "previous": None, "text_search_rewrite": None} ) self.assertEqual( urlparse(httpretty.last_request().path).path, "/api/v1/users/{}/subscribed_threads".format(self.user.id) ) @ddt.data( ("last_activity_at", "date"), ("comment_count", "comments"), ("vote_count", "votes") ) @ddt.unpack def test_order_by(self, http_query, cc_query): """ Tests the order_by parameter Arguments: http_query 
(str): Query string sent in the http request cc_query (str): Query string used for the comments client service """ threads = [make_minimal_cs_thread()] self.register_get_user_response(self.user) self.register_get_threads_response(threads, page=1, num_pages=1) self.client.get( self.url, { "course_id": unicode(self.course.id), "order_by": http_query, } ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_order": ["desc"], "recursive": ["False"], "page": ["1"], "per_page": ["10"], "sort_key": [cc_query], }) @ddt.data("asc", "desc") def test_order_direction(self, query): threads = [make_minimal_cs_thread()] self.register_get_user_response(self.user) self.register_get_threads_response(threads, page=1, num_pages=1) self.client.get( self.url, { "course_id": unicode(self.course.id), "order_direction": query, } ) self.assert_last_query_params({ "user_id": [unicode(self.user.id)], "course_id": [unicode(self.course.id)], "sort_key": ["date"], "recursive": ["False"], "page": ["1"], "per_page": ["10"], "sort_order": [query], }) @httpretty.activate class ThreadViewSetCreateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet create""" def setUp(self): super(ThreadViewSetCreateTest, self).setUp() self.url = reverse("thread-list") def test_basic(self): self.register_get_user_response(self.user) self.register_post_thread_response({ "id": "test_thread", "username": self.user.username, "created_at": "2015-05-19T00:00:00Z", "updated_at": "2015-05-19T00:00:00Z", }) request_data = { "course_id": unicode(self.course.id), "topic_id": "test_topic", "type": "discussion", "title": "Test Title", "raw_body": "Test body", } expected_response_data = { "id": "test_thread", "course_id": unicode(self.course.id), "topic_id": "test_topic", "group_id": None, "group_name": None, "author": self.user.username, "author_label": None, "created_at": "2015-05-19T00:00:00Z", "updated_at": "2015-05-19T00:00:00Z", "type": 
"discussion", "title": "Test Title", "raw_body": "Test body", "rendered_body": "<p>Test body</p>", "pinned": False, "closed": False, "following": False, "abuse_flagged": False, "voted": False, "vote_count": 0, "comment_count": 0, "unread_comment_count": 0, "comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread", "endorsed_comment_list_url": None, "non_endorsed_comment_list_url": None, "editable_fields": ["abuse_flagged", "following", "raw_body", "title", "topic_id", "type", "voted"], "read": False, "has_endorsed": False } response = self.client.post( self.url, json.dumps(request_data), content_type="application/json" ) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) self.assertEqual(<|fim▁hole|> httpretty.last_request().parsed_body, { "course_id": [unicode(self.course.id)], "commentable_id": ["test_topic"], "thread_type": ["discussion"], "title": ["Test Title"], "body": ["Test body"], "user_id": [str(self.user.id)], } ) def test_error(self): request_data = { "topic_id": "dummy", "type": "discussion", "title": "dummy", "raw_body": "dummy", } response = self.client.post( self.url, json.dumps(request_data), content_type="application/json" ) expected_response_data = { "field_errors": {"course_id": {"developer_message": "This field is required."}} } self.assertEqual(response.status_code, 400) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) @httpretty.activate class ThreadViewSetPartialUpdateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet partial_update""" def setUp(self): super(ThreadViewSetPartialUpdateTest, self).setUp() self.url = reverse("thread-detail", kwargs={"thread_id": "test_thread"}) def test_basic(self): self.register_get_user_response(self.user) cs_thread = make_minimal_cs_thread({ "id": "test_thread", "course_id": unicode(self.course.id), 
"commentable_id": "original_topic", "username": self.user.username, "user_id": str(self.user.id), "created_at": "2015-05-29T00:00:00Z", "updated_at": "2015-05-29T00:00:00Z", "thread_type": "discussion", "title": "Original Title", "body": "Original body", }) self.register_get_thread_response(cs_thread) self.register_put_thread_response(cs_thread) request_data = {"raw_body": "Edited body"} expected_response_data = { "id": "test_thread", "course_id": unicode(self.course.id), "topic_id": "original_topic", "group_id": None, "group_name": None, "author": self.user.username, "author_label": None, "created_at": "2015-05-29T00:00:00Z", "updated_at": "2015-05-29T00:00:00Z", "type": "discussion", "title": "Original Title", "raw_body": "Edited body", "rendered_body": "<p>Edited body</p>", "pinned": False, "closed": False, "following": False, "abuse_flagged": False, "voted": False, "vote_count": 0, "comment_count": 0, "unread_comment_count": 0, "comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread", "endorsed_comment_list_url": None, "non_endorsed_comment_list_url": None, "editable_fields": ["abuse_flagged", "following", "raw_body", "title", "topic_id", "type", "voted"], "read": False, "has_endorsed": False } response = self.client.patch( # pylint: disable=no-member self.url, json.dumps(request_data), content_type="application/json" ) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) self.assertEqual( httpretty.last_request().parsed_body, { "course_id": [unicode(self.course.id)], "commentable_id": ["original_topic"], "thread_type": ["discussion"], "title": ["Original Title"], "body": ["Edited body"], "user_id": [str(self.user.id)], "anonymous": ["False"], "anonymous_to_peers": ["False"], "closed": ["False"], "pinned": ["False"], } ) def test_error(self): self.register_get_user_response(self.user) cs_thread = make_minimal_cs_thread({ "id": 
"test_thread", "course_id": unicode(self.course.id), "user_id": str(self.user.id), }) self.register_get_thread_response(cs_thread) request_data = {"title": ""} response = self.client.patch( # pylint: disable=no-member self.url, json.dumps(request_data), content_type="application/json" ) expected_response_data = { "field_errors": {"title": {"developer_message": "This field is required."}} } self.assertEqual(response.status_code, 400) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) @httpretty.activate class ThreadViewSetDeleteTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet delete""" def setUp(self): super(ThreadViewSetDeleteTest, self).setUp() self.url = reverse("thread-detail", kwargs={"thread_id": "test_thread"}) self.thread_id = "test_thread" def test_basic(self): self.register_get_user_response(self.user) cs_thread = make_minimal_cs_thread({ "id": self.thread_id, "course_id": unicode(self.course.id), "username": self.user.username, "user_id": str(self.user.id), }) self.register_get_thread_response(cs_thread) self.register_delete_thread_response(self.thread_id) response = self.client.delete(self.url) self.assertEqual(response.status_code, 204) self.assertEqual(response.content, "") self.assertEqual( urlparse(httpretty.last_request().path).path, "/api/v1/threads/{}".format(self.thread_id) ) self.assertEqual(httpretty.last_request().method, "DELETE") def test_delete_nonexistent_thread(self): self.register_get_thread_error_response(self.thread_id, 404) response = self.client.delete(self.url) self.assertEqual(response.status_code, 404) @httpretty.activate class CommentViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CommentViewSet list""" def setUp(self): super(CommentViewSetListTest, self).setUp() self.author = UserFactory.create() self.url = reverse("comment-list") self.thread_id = "test_thread" def test_thread_id_missing(self): response = 
self.client.get(self.url) self.assert_response_correct( response, 400, {"field_errors": {"thread_id": {"developer_message": "This field is required."}}} ) def test_404(self): self.register_get_thread_error_response(self.thread_id, 404) response = self.client.get(self.url, {"thread_id": self.thread_id}) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_basic(self): self.register_get_user_response(self.user, upvoted_ids=["test_comment"]) source_comments = [{ "type": "comment", "id": "test_comment", "thread_id": self.thread_id, "parent_id": None, "user_id": str(self.author.id), "username": self.author.username, "anonymous": False, "anonymous_to_peers": False, "created_at": "2015-05-11T00:00:00Z", "updated_at": "2015-05-11T11:11:11Z", "body": "Test body", "endorsed": False, "abuse_flaggers": [], "votes": {"up_count": 4}, "children": [], }] expected_comments = [{ "id": "test_comment", "thread_id": self.thread_id, "parent_id": None, "author": self.author.username, "author_label": None, "created_at": "2015-05-11T00:00:00Z", "updated_at": "2015-05-11T11:11:11Z", "raw_body": "Test body", "rendered_body": "<p>Test body</p>", "endorsed": False, "endorsed_by": None, "endorsed_by_label": None, "endorsed_at": None, "abuse_flagged": False, "voted": True, "vote_count": 4, "children": [], "editable_fields": ["abuse_flagged", "voted"], }] self.register_get_thread_response({ "id": self.thread_id, "course_id": unicode(self.course.id), "thread_type": "discussion", "children": source_comments, "resp_total": 100, }) response = self.client.get(self.url, {"thread_id": self.thread_id}) self.assert_response_correct( response, 200, { "results": expected_comments, "next": "http://testserver/api/discussion/v1/comments/?thread_id={}&page=2".format( self.thread_id ), "previous": None, } ) self.assert_query_params_equal( httpretty.httpretty.latest_requests[-2], { "recursive": ["True"], "resp_skip": ["0"], "resp_limit": ["10"], "user_id": 
[str(self.user.id)], "mark_as_read": ["False"], } ) def test_pagination(self): """ Test that pagination parameters are correctly plumbed through to the comments service and that a 404 is correctly returned if a page past the end is requested """ self.register_get_user_response(self.user) self.register_get_thread_response(make_minimal_cs_thread({ "id": self.thread_id, "course_id": unicode(self.course.id), "thread_type": "discussion", "children": [], "resp_total": 10, })) response = self.client.get( self.url, {"thread_id": self.thread_id, "page": "18", "page_size": "4"} ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) self.assert_query_params_equal( httpretty.httpretty.latest_requests[-2], { "recursive": ["True"], "resp_skip": ["68"], "resp_limit": ["4"], "user_id": [str(self.user.id)], "mark_as_read": ["False"], } ) @httpretty.activate class CommentViewSetDeleteTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet delete""" def setUp(self): super(CommentViewSetDeleteTest, self).setUp() self.url = reverse("comment-detail", kwargs={"comment_id": "test_comment"}) self.comment_id = "test_comment" def test_basic(self): self.register_get_user_response(self.user) cs_thread = make_minimal_cs_thread({ "id": "test_thread", "course_id": unicode(self.course.id), }) self.register_get_thread_response(cs_thread) cs_comment = make_minimal_cs_comment({ "id": self.comment_id, "course_id": cs_thread["course_id"], "thread_id": cs_thread["id"], "username": self.user.username, "user_id": str(self.user.id), }) self.register_get_comment_response(cs_comment) self.register_delete_comment_response(self.comment_id) response = self.client.delete(self.url) self.assertEqual(response.status_code, 204) self.assertEqual(response.content, "") self.assertEqual( urlparse(httpretty.last_request().path).path, "/api/v1/comments/{}".format(self.comment_id) ) self.assertEqual(httpretty.last_request().method, "DELETE") def 
test_delete_nonexistent_comment(self): self.register_get_comment_error_response(self.comment_id, 404) response = self.client.delete(self.url) self.assertEqual(response.status_code, 404) @httpretty.activate class CommentViewSetCreateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CommentViewSet create""" def setUp(self): super(CommentViewSetCreateTest, self).setUp() self.url = reverse("comment-list") def test_basic(self): self.register_get_user_response(self.user) self.register_get_thread_response( make_minimal_cs_thread({ "id": "test_thread", "course_id": unicode(self.course.id), "commentable_id": "test_topic", }) ) self.register_post_comment_response( { "id": "test_comment", "username": self.user.username, "created_at": "2015-05-27T00:00:00Z", "updated_at": "2015-05-27T00:00:00Z", }, thread_id="test_thread" ) request_data = { "thread_id": "test_thread", "raw_body": "Test body", } expected_response_data = { "id": "test_comment", "thread_id": "test_thread", "parent_id": None, "author": self.user.username, "author_label": None, "created_at": "2015-05-27T00:00:00Z", "updated_at": "2015-05-27T00:00:00Z", "raw_body": "Test body", "rendered_body": "<p>Test body</p>", "endorsed": False, "endorsed_by": None, "endorsed_by_label": None, "endorsed_at": None, "abuse_flagged": False, "voted": False, "vote_count": 0, "children": [], "editable_fields": ["abuse_flagged", "raw_body", "voted"], } response = self.client.post( self.url, json.dumps(request_data), content_type="application/json" ) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) self.assertEqual( urlparse(httpretty.last_request().path).path, "/api/v1/threads/test_thread/comments" ) self.assertEqual( httpretty.last_request().parsed_body, { "course_id": [unicode(self.course.id)], "body": ["Test body"], "user_id": [str(self.user.id)], } ) def test_error(self): response = self.client.post( self.url, 
json.dumps({}), content_type="application/json" ) expected_response_data = { "field_errors": {"thread_id": {"developer_message": "This field is required."}} } self.assertEqual(response.status_code, 400) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data) class CommentViewSetPartialUpdateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CommentViewSet partial_update""" def setUp(self): super(CommentViewSetPartialUpdateTest, self).setUp() httpretty.reset() httpretty.enable() self.addCleanup(httpretty.disable) self.register_get_user_response(self.user) self.url = reverse("comment-detail", kwargs={"comment_id": "test_comment"}) cs_thread = make_minimal_cs_thread({ "id": "test_thread", "course_id": unicode(self.course.id), }) self.register_get_thread_response(cs_thread) cs_comment = make_minimal_cs_comment({ "id": "test_comment", "course_id": cs_thread["course_id"], "thread_id": cs_thread["id"], "username": self.user.username, "user_id": str(self.user.id), "created_at": "2015-06-03T00:00:00Z", "updated_at": "2015-06-03T00:00:00Z", "body": "Original body", }) self.register_get_comment_response(cs_comment) self.register_put_comment_response(cs_comment) def test_basic(self): request_data = {"raw_body": "Edited body"} expected_response_data = { "id": "test_comment", "thread_id": "test_thread", "parent_id": None, "author": self.user.username, "author_label": None, "created_at": "2015-06-03T00:00:00Z", "updated_at": "2015-06-03T00:00:00Z", "raw_body": "Edited body", "rendered_body": "<p>Edited body</p>", "endorsed": False, "endorsed_by": None, "endorsed_by_label": None, "endorsed_at": None, "abuse_flagged": False, "voted": False, "vote_count": 0, "children": [], "editable_fields": ["abuse_flagged", "raw_body", "voted"], } response = self.client.patch( # pylint: disable=no-member self.url, json.dumps(request_data), content_type="application/json" ) self.assertEqual(response.status_code, 200) response_data = 
json.loads(response.content) self.assertEqual(response_data, expected_response_data) self.assertEqual( httpretty.last_request().parsed_body, { "body": ["Edited body"], "course_id": [unicode(self.course.id)], "user_id": [str(self.user.id)], "anonymous": ["False"], "anonymous_to_peers": ["False"], "endorsed": ["False"], } ) def test_error(self): request_data = {"raw_body": ""} response = self.client.patch( # pylint: disable=no-member self.url, json.dumps(request_data), content_type="application/json" ) expected_response_data = { "field_errors": {"raw_body": {"developer_message": "This field is required."}} } self.assertEqual(response.status_code, 400) response_data = json.loads(response.content) self.assertEqual(response_data, expected_response_data)<|fim▁end|>
<|file_name|>example.rs<|end_file_name|><|fim▁begin|>extern crate int_range_check; use std::fmt::Display; use std::num::Int; use int_range_check::uncovered_and_overlapped; use int_range_check::IntRange; use int_range_check::IntRange::*; fn main() { example_driver("Example 1", vec![Bound(0i32, 5i32), From(3)]); example_driver("Example 2a", vec![To(5u8), From(250)]); example_driver("Example 2b", vec![Bound(0u8, 5), Bound(250, 255)]); } <|fim▁hole|>fn example_driver<T: Display+Int>(title: &str, ranges: Vec<IntRange<T>>) { let (uncovered, overlapped) = uncovered_and_overlapped(&ranges); println!("{} input ranges: {}", title, ranges); println!("{} uncovered ranges: {}", title, uncovered); println!("{} overlapping ranges: {}", title, overlapped); }<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from __future__ import division from typing import Any, Dict, List, Tuple from django.db import connection from django.template import RequestContext, loader from django.core import urlresolvers from django.http import HttpResponseNotFound from jinja2 import Markup as mark_safe from zerver.decorator import has_request_variables, REQ, zulip_internal from zerver.models import get_realm, UserActivity, UserActivityInterval, Realm from zerver.lib.timestamp import timestamp_to_datetime from collections import defaultdict from datetime import datetime, timedelta import itertools import time import re import pytz from six.moves import filter from six.moves import map from six.moves import range from six.moves import zip eastern_tz = pytz.timezone('US/Eastern') from zproject.jinja2 import render_to_response def make_table(title, cols, rows, has_row_class=False): if not has_row_class: def fix_row(row): return dict(cells=row, row_class=None) rows = list(map(fix_row, rows)) data = dict(title=title, cols=cols, rows=rows) content = loader.render_to_string( 'analytics/ad_hoc_query.html', dict(data=data) ) return content def dictfetchall(cursor): "Returns all rows from a cursor as a dict" desc = cursor.description return [ dict(list(zip([col[0] for col in desc], row))) for row in cursor.fetchall() ] def get_realm_day_counts(): query = ''' select r.domain, (now()::date - pub_date::date) age, count(*) cnt from zerver_message m join zerver_userprofile up on up.id = m.sender_id join zerver_realm r on r.id = up.realm_id join zerver_client c on c.id = m.sending_client_id where (not up.is_bot) and pub_date > now()::date - interval '8 day' and c.name not in ('zephyr_mirror', 'ZulipMonitoring') group by r.domain, age order by r.domain, age ''' cursor = connection.cursor() cursor.execute(query) rows = dictfetchall(cursor) cursor.close() counts = defaultdict(dict) # type: Dict[str, Dict[int, int]] for 
row in rows: counts[row['domain']][row['age']] = row['cnt'] result = {} for domain in counts: raw_cnts = [counts[domain].get(age, 0) for age in range(8)] min_cnt = min(raw_cnts) max_cnt = max(raw_cnts) def format_count(cnt): if cnt == min_cnt: good_bad = 'bad' elif cnt == max_cnt: good_bad = 'good' else: good_bad = 'neutral' return '<td class="number %s">%s</td>' % (good_bad, cnt) cnts = ''.join(map(format_count, raw_cnts)) result[domain] = dict(cnts=cnts) return result def realm_summary_table(realm_minutes): query = ''' SELECT realm.domain, coalesce(user_counts.active_user_count, 0) active_user_count, coalesce(at_risk_counts.at_risk_count, 0) at_risk_count, ( SELECT count(*) FROM zerver_userprofile up WHERE up.realm_id = realm.id AND is_active AND not is_bot ) user_profile_count, ( SELECT count(*) FROM zerver_userprofile up WHERE up.realm_id = realm.id AND is_active AND is_bot ) bot_count FROM zerver_realm realm LEFT OUTER JOIN ( SELECT up.realm_id realm_id, count(distinct(ua.user_profile_id)) active_user_count FROM zerver_useractivity ua JOIN zerver_userprofile up ON up.id = ua.user_profile_id WHERE query in ( '/json/send_message', 'send_message_backend', '/api/v1/send_message', '/json/update_pointer', '/json/users/me/pointer' ) AND last_visit > now() - interval '1 day' AND not is_bot GROUP BY realm_id ) user_counts ON user_counts.realm_id = realm.id LEFT OUTER JOIN ( SELECT realm_id, count(*) at_risk_count FROM ( SELECT realm.id as realm_id, up.email FROM zerver_useractivity ua JOIN zerver_userprofile up ON up.id = ua.user_profile_id JOIN zerver_realm realm ON realm.id = up.realm_id WHERE up.is_active AND (not up.is_bot) AND ua.query in ( '/json/send_message', 'send_message_backend', '/api/v1/send_message', '/json/update_pointer', '/json/users/me/pointer' ) GROUP by realm.id, up.email HAVING max(last_visit) between now() - interval '7 day' and now() - interval '1 day' ) as at_risk_users GROUP BY realm_id ) at_risk_counts ON at_risk_counts.realm_id = realm.id 
WHERE EXISTS ( SELECT * FROM zerver_useractivity ua JOIN zerver_userprofile up ON up.id = ua.user_profile_id WHERE query in ( '/json/send_message', '/api/v1/send_message', 'send_message_backend', '/json/update_pointer', '/json/users/me/pointer' ) AND up.realm_id = realm.id AND last_visit > now() - interval '2 week' ) ORDER BY active_user_count DESC, domain ASC ''' cursor = connection.cursor() cursor.execute(query) rows = dictfetchall(cursor) cursor.close() # get messages sent per day counts = get_realm_day_counts() for row in rows: try: row['history'] = counts[row['domain']]['cnts'] except: row['history'] = '' # augment data with realm_minutes total_hours = 0 for row in rows: domain = row['domain'] minutes = realm_minutes.get(domain, 0) hours = minutes / 60.0 total_hours += hours row['hours'] = str(int(hours)) try: row['hours_per_user'] = '%.1f' % (hours / row['active_user_count'],) except: pass # formatting for row in rows: row['domain'] = realm_activity_link(row['domain']) # Count active sites def meets_goal(row): return row['active_user_count'] >= 5 num_active_sites = len(list(filter(meets_goal, rows))) # create totals total_active_user_count = 0 total_user_profile_count = 0 total_bot_count = 0 total_at_risk_count = 0 for row in rows: total_active_user_count += int(row['active_user_count']) total_user_profile_count += int(row['user_profile_count']) total_bot_count += int(row['bot_count']) total_at_risk_count += int(row['at_risk_count']) rows.append(dict( domain='Total', active_user_count=total_active_user_count, user_profile_count=total_user_profile_count, bot_count=total_bot_count, hours=int(total_hours), at_risk_count=total_at_risk_count, )) content = loader.render_to_string( 'analytics/realm_summary_table.html', dict(rows=rows, num_active_sites=num_active_sites) ) return content def user_activity_intervals(): day_end = timestamp_to_datetime(time.time()) day_start = day_end - timedelta(hours=24) output = "Per-user online duration for the last 24 hours:\n" 
total_duration = timedelta(0) all_intervals = UserActivityInterval.objects.filter( end__gte=day_start, start__lte=day_end ).select_related( 'user_profile', 'user_profile__realm' ).only( 'start', 'end', 'user_profile__email', 'user_profile__realm__domain' ).order_by( 'user_profile__realm__domain', 'user_profile__email' ) by_domain = lambda row: row.user_profile.realm.domain by_email = lambda row: row.user_profile.email realm_minutes = {} for domain, realm_intervals in itertools.groupby(all_intervals, by_domain): realm_duration = timedelta(0) output += '<hr>%s\n' % (domain,) for email, intervals in itertools.groupby(realm_intervals, by_email): duration = timedelta(0) for interval in intervals: start = max(day_start, interval.start) end = min(day_end, interval.end) duration += end - start total_duration += duration realm_duration += duration output += " %-*s%s\n" % (37, email, duration) realm_minutes[domain] = realm_duration.total_seconds() / 60 output += "\nTotal Duration: %s\n" % (total_duration,) output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,) output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. 
/ 60.,) content = mark_safe('<pre>' + output + '</pre>') return content, realm_minutes def sent_messages_report(realm): title = 'Recently sent messages for ' + realm cols = [ 'Date', 'Humans', 'Bots' ] query = ''' select series.day::date, humans.cnt, bots.cnt from ( select generate_series( (now()::date - interval '2 week'), now()::date, interval '1 day' ) as day ) as series left join ( select pub_date::date pub_date, count(*) cnt from zerver_message m join zerver_userprofile up on up.id = m.sender_id join zerver_realm r on r.id = up.realm_id where r.domain = %s and (not up.is_bot) and pub_date > now() - interval '2 week' group by pub_date::date order by pub_date::date ) humans on series.day = humans.pub_date left join ( select pub_date::date pub_date, count(*) cnt from zerver_message m join zerver_userprofile up on up.id = m.sender_id join zerver_realm r on r.id = up.realm_id where r.domain = %s and up.is_bot and pub_date > now() - interval '2 week' group by pub_date::date order by pub_date::date ) bots on series.day = bots.pub_date ''' cursor = connection.cursor() cursor.execute(query, [realm, realm]) rows = cursor.fetchall() cursor.close() return make_table(title, cols, rows) def ad_hoc_queries(): def get_page(query, cols, title): cursor = connection.cursor() cursor.execute(query) rows = cursor.fetchall() rows = list(map(list, rows)) cursor.close() def fix_rows(i, fixup_func): for row in rows: row[i] = fixup_func(row[i]) for i, col in enumerate(cols): if col == 'Domain': fix_rows(i, realm_activity_link) elif col in ['Last time', 'Last visit']: fix_rows(i, format_date_for_activity_reports) content = make_table(title, cols, rows) return dict( content=content, title=title ) pages = [] ### for mobile_type in ['Android', 'ZulipiOS']: title = '%s usage' % (mobile_type,) query = ''' select realm.domain, up.id user_id, client.name, sum(count) as hits, max(last_visit) as last_time from zerver_useractivity ua join zerver_client client on client.id = ua.client_id join 
zerver_userprofile up on up.id = ua.user_profile_id join zerver_realm realm on realm.id = up.realm_id where client.name like '%s' group by domain, up.id, client.name having max(last_visit) > now() - interval '2 week' order by domain, up.id, client.name ''' % (mobile_type,) cols = [ 'Domain', 'User id', 'Name', 'Hits', 'Last time' ] pages.append(get_page(query, cols, title)) ### title = 'Desktop users' query = ''' select realm.domain, client.name, sum(count) as hits, max(last_visit) as last_time from zerver_useractivity ua join zerver_client client on client.id = ua.client_id join zerver_userprofile up on up.id = ua.user_profile_id join zerver_realm realm on realm.id = up.realm_id where client.name like 'desktop%%' group by domain, client.name having max(last_visit) > now() - interval '2 week' order by domain, client.name ''' cols = [ 'Domain', 'Client', 'Hits', 'Last time' ] pages.append(get_page(query, cols, title)) ### title = 'Integrations by domain' query = ''' select realm.domain, case when query like '%%external%%' then split_part(query, '/', 5) else client.name end client_name, sum(count) as hits, max(last_visit) as last_time from zerver_useractivity ua join zerver_client client on client.id = ua.client_id join zerver_userprofile up on up.id = ua.user_profile_id join zerver_realm realm on realm.id = up.realm_id where (query in ('send_message_backend', '/api/v1/send_message') and client.name not in ('Android', 'ZulipiOS') and client.name not like 'test: Zulip%%' ) or query like '%%external%%' group by domain, client_name having max(last_visit) > now() - interval '2 week' order by domain, client_name ''' cols = [ 'Domain', 'Client', 'Hits',<|fim▁hole|> 'Last time' ] pages.append(get_page(query, cols, title)) ### title = 'Integrations by client' query = ''' select case when query like '%%external%%' then split_part(query, '/', 5) else client.name end client_name, realm.domain, sum(count) as hits, max(last_visit) as last_time from zerver_useractivity ua join 
zerver_client client on client.id = ua.client_id join zerver_userprofile up on up.id = ua.user_profile_id join zerver_realm realm on realm.id = up.realm_id where (query in ('send_message_backend', '/api/v1/send_message') and client.name not in ('Android', 'ZulipiOS') and client.name not like 'test: Zulip%%' ) or query like '%%external%%' group by client_name, domain having max(last_visit) > now() - interval '2 week' order by client_name, domain ''' cols = [ 'Client', 'Domain', 'Hits', 'Last time' ] pages.append(get_page(query, cols, title)) return pages @zulip_internal @has_request_variables def get_activity(request): duration_content, realm_minutes = user_activity_intervals() counts_content = realm_summary_table(realm_minutes) data = [ ('Counts', counts_content), ('Durations', duration_content), ] for page in ad_hoc_queries(): data.append((page['title'], page['content'])) title = 'Activity' return render_to_response( 'analytics/activity.html', dict(data=data, title=title, is_home=True), request=request ) def get_user_activity_records_for_realm(realm, is_bot): fields = [ 'user_profile__full_name', 'user_profile__email', 'query', 'client__name', 'count', 'last_visit', ] records = UserActivity.objects.filter( user_profile__realm__domain=realm, user_profile__is_active=True, user_profile__is_bot=is_bot ) records = records.order_by("user_profile__email", "-last_visit") records = records.select_related('user_profile', 'client').only(*fields) return records def get_user_activity_records_for_email(email): fields = [ 'user_profile__full_name', 'query', 'client__name', 'count', 'last_visit' ] records = UserActivity.objects.filter( user_profile__email=email ) records = records.order_by("-last_visit") records = records.select_related('user_profile', 'client').only(*fields) return records def raw_user_activity_table(records): cols = [ 'query', 'client', 'count', 'last_visit' ] def row(record): return [ record.query, record.client.name, record.count, 
format_date_for_activity_reports(record.last_visit) ] rows = list(map(row, records)) title = 'Raw Data' return make_table(title, cols, rows) def get_user_activity_summary(records): # type: (Any) -> Any summary = {} # type: Dict[str, Dict[str, Any]] def update(action, record): if action not in summary: summary[action] = dict( count=record.count, last_visit=record.last_visit ) else: summary[action]['count'] += record.count summary[action]['last_visit'] = max( summary[action]['last_visit'], record.last_visit ) if records: summary['name'] = records[0].user_profile.full_name for record in records: client = record.client.name query = record.query update('use', record) if client == 'API': m = re.match('/api/.*/external/(.*)', query) if m: client = m.group(1) update(client, record) if client.startswith('desktop'): update('desktop', record) if client == 'website': update('website', record) if ('send_message' in query) or re.search('/api/.*/external/.*', query): update('send', record) if query in ['/json/update_pointer', '/json/users/me/pointer', '/api/v1/update_pointer']: update('pointer', record) update(client, record) return summary def format_date_for_activity_reports(date): if date: return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M') else: return '' def user_activity_link(email): url_name = 'analytics.views.get_user_activity' url = urlresolvers.reverse(url_name, kwargs=dict(email=email)) email_link = '<a href="%s">%s</a>' % (url, email) return mark_safe(email_link) def realm_activity_link(realm): url_name = 'analytics.views.get_realm_activity' url = urlresolvers.reverse(url_name, kwargs=dict(realm=realm)) realm_link = '<a href="%s">%s</a>' % (url, realm) return mark_safe(realm_link) def realm_client_table(user_summaries): exclude_keys = [ 'internal', 'name', 'use', 'send', 'pointer', 'website', 'desktop', ] rows = [] for email, user_summary in user_summaries.items(): email_link = user_activity_link(email) name = user_summary['name'] for k, v in 
user_summary.items(): if k in exclude_keys: continue client = k count = v['count'] last_visit = v['last_visit'] row = [ format_date_for_activity_reports(last_visit), client, name, email_link, count, ] rows.append(row) rows = sorted(rows, key=lambda r: r[0], reverse=True) cols = [ 'Last visit', 'Client', 'Name', 'Email', 'Count', ] title = 'Clients' return make_table(title, cols, rows) def user_activity_summary_table(user_summary): rows = [] for k, v in user_summary.items(): if k == 'name': continue client = k count = v['count'] last_visit = v['last_visit'] row = [ format_date_for_activity_reports(last_visit), client, count, ] rows.append(row) rows = sorted(rows, key=lambda r: r[0], reverse=True) cols = [ 'last_visit', 'client', 'count', ] title = 'User Activity' return make_table(title, cols, rows) def realm_user_summary_table(all_records, admin_emails): user_records = {} def by_email(record): return record.user_profile.email for email, records in itertools.groupby(all_records, by_email): user_records[email] = get_user_activity_summary(list(records)) def get_last_visit(user_summary, k): if k in user_summary: return user_summary[k]['last_visit'] else: return None def get_count(user_summary, k): if k in user_summary: return user_summary[k]['count'] else: return '' def is_recent(val): age = datetime.now(val.tzinfo) - val return age.total_seconds() < 5 * 60 rows = [] for email, user_summary in user_records.items(): email_link = user_activity_link(email) sent_count = get_count(user_summary, 'send') cells = [user_summary['name'], email_link, sent_count] row_class = '' for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']: val = get_last_visit(user_summary, field) if field == 'use': if val and is_recent(val): row_class += ' recently_active' if email in admin_emails: row_class += ' admin' val = format_date_for_activity_reports(val) cells.append(val) row = dict(cells=cells, row_class=row_class) rows.append(row) def by_used_time(row): return 
row['cells'][3] rows = sorted(rows, key=by_used_time, reverse=True) cols = [ 'Name', 'Email', 'Total sent', 'Heard from', 'Message sent', 'Pointer motion', 'Desktop', 'ZulipiOS', 'Android' ] title = 'Summary' content = make_table(title, cols, rows, has_row_class=True) return user_records, content @zulip_internal def get_realm_activity(request, realm): # type: (Any, Any) -> Any data = [] # type: List[Tuple[str, str]] all_user_records = {} # type: Dict[str, Any] try: admins = get_realm(realm).get_admin_users() except Realm.DoesNotExist: return HttpResponseNotFound("Realm %s does not exist" % (realm,)) admin_emails = {admin.email for admin in admins} for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]: all_records = list(get_user_activity_records_for_realm(realm, is_bot)) user_records, content = realm_user_summary_table(all_records, admin_emails) all_user_records.update(user_records) data += [(page_title, content)] page_title = 'Clients' content = realm_client_table(all_user_records) data += [(page_title, content)] page_title = 'History' content = sent_messages_report(realm) data += [(page_title, content)] fix_name = lambda realm: realm.replace('.', '_') realm_link = 'https://stats1.zulip.net:444/render/?from=-7days' realm_link += '&target=stats.gauges.staging.users.active.%s.0_16hr' % (fix_name(realm),) title = realm return render_to_response( 'analytics/activity.html', dict(data=data, realm_link=realm_link, title=title), request=request ) @zulip_internal def get_user_activity(request, email): records = get_user_activity_records_for_email(email) data = [] # type: List[Tuple[str, str]] user_summary = get_user_activity_summary(records) content = user_activity_summary_table(user_summary) data += [('Summary', content)] content = raw_user_activity_table(records) data += [('Info', content)] title = email return render_to_response( 'analytics/activity.html', dict(data=data, title=title), request=request )<|fim▁end|>
<|file_name|>arguments.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import argparse ###basic parser for parent help statement### def parentArgs(): parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description='''\ Suzanne's pipeline to identify somatic CNVs from single-cell whole-genome sequencing data ========================================================================================= You must specify a function to perform: *preprocess (trim fastq reads to the appropriate length) *map (map fastq files to the hg38 or mm10 genome) *count (count number of reads in 25,000 genomic bins) *segment (run CBS -- requires Matlab!) *interpret (perform QC assessment and removal of low-quality CNV calls) # [More functions coming soon...] ''') parser.print_help() raise SystemExit ###interpret arguments needed to perform preprocessing of fastq files### def preprocessArgs(): parser = argparse.ArgumentParser(description='Trim fastq reads to the appropriate length') #required arguments# parser.add_argument('FastqDirectory', help = 'The path to the folder that contains fastq files to be processed') #optional arguments# parser.add_argument('-5', '--trim5', metavar='X', type=int, default=0, help = "Number of 5' bases to trim from fastq reads") parser.add_argument('-l', '--length', metavar='X', type=int, default=36, help = 'The desired read length') parser.add_argument('-r', '--remove', action='store_true', help = 'Set this flag if you want to delete the full length fastq files (UNTESTED)') parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False, help='Path to a file containing a list of fastq files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)') return parser ###interpret arguments needed to perform mapping of fastq files### def mapArgs(): parser = argparse.ArgumentParser(description='Map fastq files to the appropriate reference genome') #required arguments# 
parser.add_argument('FastqDirectory', help = 'The path to the folder that contains fastq files to be processed') parser.add_argument('MapIndex', help='The path to the bowtie (v1) mapping references, as you would input if running bowtie directly -- MUST BE HG38 OR MM10') # parser.add_argument('species', choices=['hg38', 'mm10'], # help = 'The genome build of the species being assessed') #optional arguments# parser.add_argument('-t', '--trim', metavar='X', nargs=2, type=int, default=[0, 0], help = "Number of 5' and 3' bases to trim from fastq reads during mapping") parser.add_argument('-o', '--output', metavar='/path/to/output_directory/', default=False, help = 'A filepath to the desired directory where you would like sam files saved, if not in the same parent directory as the fastq files (UNTESTED)')<|fim▁hole|> parser.add_argument('-x', '--statdir', metavar='/path/to/statistics_directory/', default=False, help = 'A filepath to the desired directory where you would like mapping statistics saved, if not in the same parent directory as the fastq files (UNTESTED)') parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False, help='Path to a file containing a list of fastq files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)') parser.add_argument('-b', '--bowtie', metavar='/path/to/bowtie1', default='bowtie', help='Path to the bowtie binary, if not in your PATH variable (UNTESTED)') parser.add_argument('-m', '--samtools', metavar='/path/to/samtools0.1.19', default='samtools', help='Path to the samtools (v0.1.19) binary, if not in your PATH variable (UNTESTED)') return parser ###interpret arguments needed to perform counting of unique.sam files### def countArgs(): parser = argparse.ArgumentParser(description='Count the reads per genomic bin from unique sam files') #required arguments# parser.add_argument('AnalysisDirectory', help = 'The path to the analysis directory, which contains the Sam/ directory with 
unique.sam files to be processed') parser.add_argument('species', choices=['hg38', 'mm10'], help = 'The genome build of the species being assessed') #optional arguments# parser.add_argument('-m', '--mapdir', metavar='/path/to/output_directory/', default=False, help = 'A filepath to the directory containing the sam files, if not AnalysisDirectory/Sam/ (UNTESTED)') parser.add_argument('-x', '--statdir', metavar='/path/to/statistics_directory/', default=False, help = 'A filepath to the desired directory where you would like mapping statistics saved, if not in the same parent directory as the sam files (UNTESTED)') parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False, help='Path to a file containing a list of unique.sam files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)') return parser ###interpret arguments needed to perform normalization and segmentation of bincounts.txt files### def segmentArgs(): parser = argparse.ArgumentParser(description='Normalize and segment bincounts files to begin CNV identification process') #required arguments# parser.add_argument('AnalysisDirectory', help = 'The path to the analysis directory, which contains the BinCounts/ directory with bincounts.txt files to be processed') parser.add_argument('species', choices=['hg38', 'mm10'], help = 'The genome build of the species being assessed') #optional arguments# parser.add_argument('-b', '--bincountdir', metavar='/path/to/output_directory/', default=False, help = 'A filepath to the folder containing the bincount files, if not AnalysisDirectory/BinCounts (UNTESTED)') parser.add_argument('-i', '--infofile', metavar='/path/to/sample.info.txt', default=False, help='Path to a .txt file containing information about the samples to be processed (unique name, amplification method, number of cells)\n\tIf not all are identical. 
This file should not have a header row (UNTESTED)') parser.add_argument('-c', '--columns', metavar='X X X', default=[0, 1, 2], type=int, nargs=3, help='The zero-indexed locations of the columns to import from the infofile in the order: name, method, cell number (if not the first 3 columns) (UNTESTED)') parser.add_argument('-g', '--gconly', action='store_true', help = 'Set this flag if you only want GC-correction to be performed during normalization (UNTESTED)') parser.add_argument('-n', '--normalizeonly', action='store_true', help = 'Set this flag if you do not want CBS to be performed (UNTESTED)') parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False, help='Path to a file containing a list of bincounts.txt files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)') return parser ###interpret arguments needed to perform QC and CNV analysis of each single cell sample### def interpretArgs(): parser = argparse.ArgumentParser(description='Assess sample quality, filter unreliable CNVs, and generate user-friendly output files') #required arguments# parser.add_argument('AnalysisDirectory', help = 'The path to the folder to save output files') parser.add_argument('species', choices=['hg38', 'mm10'], help = 'The genome build of the species being assessed') #optional arguments# parser.add_argument('-f', '--nofilter', action='store_true', help = 'Set this flag if you do not want to perform FUnC filtering of low-quality CNV calls (UNTESTED)') # parser.add_argument('-i', '--infofile', metavar='/path/to/sample.info.txt', default=False, # help='Path to a .txt file containing information about the samples to be processed (unique name, number of cells, group)\n\tIf not all are identical. 
This file should not have a header row (UNTESTED)') # parser.add_argument('-c', '--columns', metavar='X X X', default=[0, 1, 2], type=int, nargs=3, # help='The zero-indexed locations of the columns to import from the infofile in the order: name, cell number, group (if not the first 3 columns) (UNTESTED)') parser.add_argument('-l', '--lowess', metavar='/path/to/lowess.txt/files/', default=False, help = 'A filepath to the desired directory where all lowess.txt files are saved, if not AnalysisDirectory/Lowess/ (UNTESTED)') parser.add_argument('-g', '--segments', metavar='/path/to/segments.txt/files/', default=False, help = 'A filepath to the desired directory where all segments.txt files are saved, if not AnalysisDirectory/Segments/ (UNTESTED)') parser.add_argument('-r', '--countstats', metavar='/path/to/bincounts.stats.txt/files/', default=False, help = 'A filepath to the desired directory where all bincounts.stats.txt files are saved, if not AnalysisDirectory/PipelineStats/ (UNTESTED)') parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False, help='Path to a file containing a list of sample names to be processed\n\tno path or file extension needed (UNTESTED)') return parser def fullParser(input): functionDict = { '-h': parentArgs, '--help': parentArgs, 'preprocess': preprocessArgs, 'map': mapArgs, 'count': countArgs, 'segment': segmentArgs, 'interpret': interpretArgs, } if input == []: parentArgs() if input[0] not in functionDict.keys(): return input[0], False parser = functionDict[input[0]]() args = parser.parse_args(input[1:]) return input[0], args<|fim▁end|>
<|file_name|>preferences.js<|end_file_name|><|fim▁begin|>$(document).ready(function() { //Note: default min/max ranges are defined outside of this JS file //include "js/BoulderRouteGradingSystems.js" before running this script //generate Bouldering rating selection upon click //Find which boulder grading system is selected and update the difficulty range //********************************************** $("select[name='boulder-rating-select']").click(function() { var boulderGradingSelect = document.getElementById("boulder-rating-select"); boulderGradingID = boulderGradingSelect.options[boulderGradingSelect.selectedIndex].value; console.log(boulderGradingID); var maxBoulder = boulderRatings[boulderGradingID].length - 1; var boulderingMinRange = "<div id='boulderprefs'><p>Minimum bouldering rating: <select name='minBoulderRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxBoulder;i++) { if (i==selectMinBoulder) { boulderingMinRange += "<option value="+i+" selected>"+boulderRatings[boulderGradingID][i]+"</option>"; } else { boulderingMinRange += "<option value="+i+">"+boulderRatings[boulderGradingID][i]+"</option>"; } } boulderingMinRange += "</option></select>"; //get max value of bouldering range var boulderingMaxRange = "<p>Maximum bouldering rating: <select name='maxBoulderRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxBoulder;i++) { if (i==Math.min(maxBoulder,selectMaxBoulder)) { boulderingMaxRange += "<option value="+i+" selected>"+boulderRatings[boulderGradingID][i]+"</option>"; } else { boulderingMaxRange += "<option value="+i+">"+boulderRatings[boulderGradingID][i]+"</option>"; } } boulderingMaxRange += "</option></select></div>"; document.getElementById("boulderprefs").innerHTML = boulderingMinRange + boulderingMaxRange; }); //********************************************** <|fim▁hole|> var boulderingMinRange = "<div id='boulderprefs'><p>Minimum bouldering rating: <select name='minBoulderRange' 
class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxBoulder;i++) { if (i==selectMinBoulder) { boulderingMinRange += "<option value="+i+" selected>"+boulderRatings[boulderGradingID][i]+"</option>"; } else { boulderingMinRange += "<option value="+i+">"+boulderRatings[boulderGradingID][i]+"</option>"; } } boulderingMinRange += "</option></select>"; //get max value of bouldering range var boulderingMaxRange = "<p>Maximum bouldering rating: <select name='maxBoulderRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxBoulder;i++) { if (i==Math.min(maxBoulder,selectMaxBoulder)) { boulderingMaxRange += "<option value="+i+" selected>"+boulderRatings[boulderGradingID][i]+"</option>"; } else { boulderingMaxRange += "<option value="+i+">"+boulderRatings[boulderGradingID][i]+"</option>"; } } boulderingMaxRange += "</option></select></div>"; //************************ //Update TR and lead when clicked //****************************** $("select[name='route-rating-select']").click(function() { var routeGradingSelect = document.getElementById("route-rating-select"); routeGradingID = routeGradingSelect.options[routeGradingSelect.selectedIndex].value; console.log(routeGradingID); var maxRoute = routeRatings[routeGradingID].length - 1; //generate TR rating selection var TRMinRange = "<div id='trprefs'><p>Minimum top-rope rating: <select name='minTRRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==selectMinTR) { TRMinRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { TRMinRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } TRMinRange += "</option></select>"; var TRMaxRange = "<p>Maximum top-rope rating: <select name='maxTRRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==Math.min(maxRoute,selectMaxTR)) { TRMaxRange +="<option value="+i+" selected>"+ 
routeRatings[routeGradingID][i]+"</option>"; } else { TRMaxRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } TRMaxRange += "</option></select></div>"; //generate lead rating selection var LeadMinRange = "<div id='leadprefs'><p>Minimum lead rating: <select name='minLeadRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==selectMinL) { LeadMinRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { LeadMinRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } LeadMinRange += "</option></select>"; var LeadMaxRange = "<p>Maximum lead rating: <select name='maxLeadRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==Math.min(maxRoute,selectMaxL)) { LeadMaxRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { LeadMaxRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } LeadMaxRange += "</option></select></div>"; document.getElementById("trprefs").innerHTML = TRMinRange + TRMaxRange; document.getElementById("leadprefs").innerHTML = LeadMinRange + LeadMaxRange; }); //********************************** //Initialize TR and Lead selections var maxRoute = routeRatings[routeGradingID].length - 1; //generate TR rating selection var TRMinRange = "<div id='trprefs'><p>Minimum top-rope rating: <select name='minTRRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==selectMinTR) { TRMinRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { TRMinRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } TRMinRange += "</option></select>"; var TRMaxRange = "<p>Maximum top-rope rating: <select name='maxTRRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==Math.min(maxRoute,selectMaxTR)) { 
TRMaxRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { TRMaxRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } TRMaxRange += "</option></select></div>"; //generate lead rating selection var LeadMinRange = "<div id='leadprefs'><p>Minimum lead rating: <select name='minLeadRange' class='form-control' id='rating-range-select'>"; for (var i = 0;i<=maxRoute;i++) { if (i==selectMinL) { LeadMinRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { LeadMinRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } LeadMinRange += "</option></select>"; var LeadMaxRange = "<p>Maximum lead rating: <select name='maxLeadRange' class='form-control' id='rating-range-select'> "; for (var i = 0;i<=maxRoute;i++) { if (i==Math.min(maxRoute,selectMaxL)) { LeadMaxRange +="<option value="+i+" selected>"+ routeRatings[routeGradingID][i]+"</option>"; } else { LeadMaxRange += "<option value="+i+">"+ routeRatings[routeGradingID][i]+"</option>"; } } LeadMaxRange += "</option></select></div>"; //write rating preferences to html document.getElementById("ratingrange").innerHTML = boulderingMinRange + boulderingMaxRange + TRMinRange + TRMaxRange + LeadMinRange + LeadMaxRange; $("select[name='country-select']").click(function() { //get country value var country = document.getElementById("country-select"); var countryID = country.options[country.selectedIndex].value; console.log(countryID); //determine best guess for grading system? 
}); $("input[name='showBoulder']").click(function() { if ($(this).is(':checked')) { $("#boulderprefs").show(); } else { $("#boulderprefs").hide(); } }); $("input[name='showTR']").click(function() { if ($(this).is(':checked')) { $("#trprefs").show(); } else { $("#trprefs").hide(); } }); $("input[name='showLead']").click(function() { if ($(this).is(':checked')) { $("#leadprefs").show(); } else { $("#leadprefs").hide(); } }); });<|fim▁end|>
//initialize the boulder grading system var maxBoulder = boulderRatings[boulderGradingID].length - 1;
<|file_name|>account.js<|end_file_name|><|fim▁begin|>Session.setDefault('isLoggingIn', false); Template.account.helpers({ signedUp: function() { return Package["brettle:accounts-login-state"].LoginState.signedUp() } }); Template.account.events({ 'click .logout': function(evt) { Meteor.logout(); evt.preventDefault(); }, 'click .login': function(evt) {<|fim▁hole|>});<|fim▁end|>
Session.set('isLoggingIn', true); evt.preventDefault(); }