prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>_delspecial.py<|end_file_name|><|fim▁begin|>#importacion de librerias import sys reload(sys) sys.setdefaultencoding('UTF8') if len(sys.argv)>1: #si el argumento existe archive = open(sys.argv[1],"r") <|fim▁hole|> for i in archive: text.append(i) for j in i: print unicode(j)<|fim▁end|>
text = []
<|file_name|>0003_auto_20141114_1441.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('imagr_app', '0002_auto_20141106_1538'), ] operations = [ migrations.AlterField( model_name='album', name='date_published', field=models.DateField(blank=True), preserve_default=True, ), migrations.AlterField( model_name='album', name='title', field=models.CharField(max_length=60), preserve_default=True, ),<|fim▁hole|> migrations.AlterField( model_name='photo', name='date_published', field=models.DateField(blank=True), preserve_default=True, ), migrations.AlterField( model_name='photo', name='title', field=models.CharField(max_length=60), preserve_default=True, ), ]<|fim▁end|>
<|file_name|>key_templates.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Tink-Rust Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// //! This module contains pre-generated [`KeyTemplate`] instances for PRF. use tink_proto::{prost::Message, KeyTemplate}; /// Return a [`KeyTemplate`] that generates an HMAC key with the following parameters: /// - Key size: 32 bytes /// - Hash function: SHA256 pub fn hmac_sha256_prf_key_template() -> KeyTemplate { create_hmac_prf_key_template(32, tink_proto::HashType::Sha256) } /// Return a [`KeyTemplate`] that generates an HMAC key with the following parameters: /// - Key size: 64 bytes /// - Hash function: SHA512 pub fn hmac_sha512_prf_key_template() -> KeyTemplate { create_hmac_prf_key_template(64, tink_proto::HashType::Sha512) } /// Return a [`KeyTemplate`] that generates an HKDF key with the following parameters: /// - Key size: 32 bytes /// - Salt: empty /// - Hash function: SHA256 pub fn hkdf_sha256_prf_key_template() -> KeyTemplate { create_hkdf_prf_key_template(32, tink_proto::HashType::Sha256, &[]) } /// Return a [`KeyTemplate`] that generates an AES-CMAC key with the following parameters: /// - Key size: 32 bytes pub fn aes_cmac_prf_key_template() -> KeyTemplate { create_aes_cmac_prf_key_template(32) } /// Create a new [`KeyTemplate`] for HMAC using the given parameters. 
fn create_hmac_prf_key_template(key_size: u32, hash_type: tink_proto::HashType) -> KeyTemplate { let params = tink_proto::HmacPrfParams { hash: hash_type as i32, }; let format = tink_proto::HmacPrfKeyFormat { params: Some(params), key_size, version: super::HMAC_PRF_KEY_VERSION, }; let mut serialized_format = Vec::new(); format.encode(&mut serialized_format).unwrap(); // safe: proto-encode KeyTemplate { type_url: super::HMAC_PRF_TYPE_URL.to_string(), output_prefix_type: tink_proto::OutputPrefixType::Raw as i32,<|fim▁hole|> } } /// Creates a new [`KeyTemplate`] for HKDF using the given parameters. fn create_hkdf_prf_key_template( key_size: u32, hash_type: tink_proto::HashType, salt: &[u8], ) -> KeyTemplate { let params = tink_proto::HkdfPrfParams { hash: hash_type as i32, salt: salt.to_vec(), }; let format = tink_proto::HkdfPrfKeyFormat { params: Some(params), key_size, version: super::HKDF_PRF_KEY_VERSION, }; let mut serialized_format = Vec::new(); format.encode(&mut serialized_format).unwrap(); // safe: proto-encode KeyTemplate { type_url: super::HKDF_PRF_TYPE_URL.to_string(), output_prefix_type: tink_proto::OutputPrefixType::Raw as i32, value: serialized_format, } } // Create a new [`KeyTemplate`] for AES-CMAC using the given parameters. fn create_aes_cmac_prf_key_template(key_size: u32) -> KeyTemplate { let format = tink_proto::AesCmacPrfKeyFormat { key_size, version: super::AES_CMAC_PRF_KEY_VERSION, }; let mut serialized_format = Vec::new(); format.encode(&mut serialized_format).unwrap(); // safe: proto-encode KeyTemplate { type_url: super::AES_CMAC_PRF_TYPE_URL.to_string(), output_prefix_type: tink_proto::OutputPrefixType::Raw as i32, value: serialized_format, } }<|fim▁end|>
value: serialized_format,
<|file_name|>textdecoder.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::TextDecoderBinding; use dom::bindings::codegen::Bindings::TextDecoderBinding::TextDecoderMethods; use dom::bindings::error::{Error, Fallible}; use dom::bindings::global::GlobalRef; use dom::bindings::js::Root; use dom::bindings::str::USVString; use dom::bindings::trace::JSTraceable; use dom::bindings::utils::{Reflector, reflect_dom_object}; use util::str::DOMString; use encoding::Encoding; use encoding::types::{EncodingRef, DecoderTrap}; use encoding::label::encoding_from_whatwg_label; use js::jsapi::{JSContext, JSObject}; use js::jsapi::JS_GetObjectAsArrayBufferView; use std::borrow::ToOwned; use std::ptr; use std::slice; #[dom_struct] pub struct TextDecoder { reflector_: Reflector,<|fim▁hole|>impl TextDecoder { fn new_inherited(encoding: EncodingRef, fatal: bool) -> TextDecoder { TextDecoder { reflector_: Reflector::new(), encoding: encoding, fatal: fatal, } } fn make_range_error() -> Fallible<Root<TextDecoder>> { Err(Error::Range("The given encoding is not supported.".to_owned())) } pub fn new(global: GlobalRef, encoding: EncodingRef, fatal: bool) -> Root<TextDecoder> { reflect_dom_object(box TextDecoder::new_inherited(encoding, fatal), global, TextDecoderBinding::Wrap) } /// https://encoding.spec.whatwg.org/#dom-textdecoder pub fn Constructor(global: GlobalRef, label: DOMString, options: &TextDecoderBinding::TextDecoderOptions) -> Fallible<Root<TextDecoder>> { let encoding = match encoding_from_whatwg_label(&label) { None => return TextDecoder::make_range_error(), Some(enc) => enc }; // The rust-encoding crate has WHATWG compatibility, so we are // guaranteed to have a whatwg_name because we successfully got // the encoding from encoding_from_whatwg_label. 
// Use match + panic! instead of unwrap for better error message match encoding.whatwg_name() { None => panic!("Label {} fits valid encoding without valid name", label), Some("replacement") => return TextDecoder::make_range_error(), _ => () }; Ok(TextDecoder::new(global, encoding, options.fatal)) } } impl<'a> TextDecoderMethods for &'a TextDecoder { fn Encoding(self) -> DOMString { self.encoding.whatwg_name().unwrap().to_owned() } fn Fatal(self) -> bool { self.fatal } #[allow(unsafe_code)] fn Decode(self, _cx: *mut JSContext, input: Option<*mut JSObject>) -> Fallible<USVString> { let input = match input { Some(input) => input, None => return Ok(USVString("".to_owned())), }; let mut length = 0; let mut data = ptr::null_mut(); if unsafe { JS_GetObjectAsArrayBufferView(input, &mut length, &mut data).is_null() } { return Err(Error::Type("Argument to TextDecoder.decode is not an ArrayBufferView".to_owned())); } let buffer = unsafe { slice::from_raw_parts(data as *const _, length as usize) }; let trap = if self.fatal { DecoderTrap::Strict } else { DecoderTrap::Replace }; match self.encoding.decode(buffer, trap) { Ok(s) => Ok(USVString(s)), Err(_) => Err(Error::Type("Decoding failed".to_owned())), } } }<|fim▁end|>
encoding: EncodingRef, fatal: bool, }
<|file_name|>creatortypes.py<|end_file_name|><|fim▁begin|>############################################################################ # # Copyright (C) 2015 The Qt Company Ltd. # Contact: http://www.qt.io/licensing # # This file is part of Qt Creator. # # Commercial License Usage # Licensees holding valid commercial Qt licenses may use this file in # accordance with the commercial license agreement provided with the # Software or, alternatively, in accordance with the terms contained in # a written agreement between you and The Qt Company. For licensing terms and # conditions see http://www.qt.io/terms-conditions. For further information # use the contact form at http://www.qt.io/contact-us. # # GNU Lesser General Public License Usage # Alternatively, this file may be used under the terms of the GNU Lesser # General Public License version 2.1 or version 3 as published by the Free # Software Foundation and appearing in the file LICENSE.LGPLv21 and # LICENSE.LGPLv3 included in the packaging of this file. Please review the # following information to ensure the GNU Lesser General Public License # requirements will be met: https://www.gnu.org/licenses/lgpl.html and # http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. # # In addition, as a special exception, The Qt Company gives you certain additional # rights. These rights are described in The Qt Company LGPL Exception # version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
# ############################################################################# from dumper import * def dumpLiteral(d, value): d.putSimpleCharArray(value["_chars"], value["_size"]) def qdump__Core__Id(d, value): try: name = d.parseAndEvaluate("Core::nameForId(%d)" % value["m_id"]) d.putSimpleCharArray(name) except: d.putValue(value["m_id"]) d.putPlainChildren(value) def qdump__Debugger__Internal__GdbMi(d, value): str = d.encodeByteArray(value["m_name"]) + "3a20" \ + d.encodeByteArray(value["m_data"]) d.putValue(str, Hex2EncodedLatin1) d.putPlainChildren(value) def qdump__Debugger__Internal__DisassemblerLine(d, value): d.putByteArrayValue(value["m_data"]) d.putPlainChildren(value) def qdump__Debugger__Internal__WatchData(d, value): d.putByteArrayValue(value["iname"]) d.putPlainChildren(value) def qdump__Debugger__Internal__WatchItem(d, value): d.putByteArrayValue(value["d"]["iname"]) d.putPlainChildren(value) def qdump__Debugger__Internal__BreakpointModelId(d, value): d.putValue("%s.%s" % (int(value["m_majorPart"]), int(value["m_minorPart"]))) d.putPlainChildren(value) def qdump__Debugger__Internal__ThreadId(d, value): d.putValue("%s" % value["m_id"]) d.putPlainChildren(value) def qdump__CPlusPlus__ByteArrayRef(d, value): d.putSimpleCharArray(value["m_start"], value["m_length"]) d.putPlainChildren(value) def qdump__CPlusPlus__Identifier(d, value): d.putSimpleCharArray(value["_chars"], value["_size"]) d.putPlainChildren(value) def qdump__CPlusPlus__Symbol(d, value): name = d.downcast(value["_name"]) dumpLiteral(d, name) d.putBetterType(value.type) d.putPlainChildren(value) def qdump__CPlusPlus__IntegerType(d, value): d.putValue(value["_kind"]) d.putPlainChildren(value) def qdump__CPlusPlus__NamedType(d, value): literal = d.downcast(value["_name"]) dumpLiteral(d, literal) d.putBetterType(value.type) d.putPlainChildren(value) def qdump__CPlusPlus__TemplateNameId(d, value): dumpLiteral(d, value["_identifier"].dereference()) d.putBetterType(value.type) 
d.putPlainChildren(value) def qdump__CPlusPlus__Literal(d, value): dumpLiteral(d, value) d.putPlainChildren(value) <|fim▁hole|>def qdump__CPlusPlus__Internal__Value(d, value): d.putValue(value["l"]) d.putPlainChildren(value) def qdump__Utils__FileName(d, value): d.putStringValue(value) d.putPlainChildren(value) def qdump__Utils__ElfSection(d, value): d.putByteArrayValue(value["name"]) d.putPlainChildren(value) def qdump__CPlusPlus__Token(d, value): k = value["f"]["kind"] if int(k) == 6: d.putValue("T_IDENTIFIER. offset: %d, len: %d" % (value["utf16charOffset"], value["f"]["utf16chars"])) elif int(k) == 7: d.putValue("T_NUMERIC_LITERAL. offset: %d, len: %d" % (value["utf16charOffset"], value["f"]["utf16chars"])) else: val = str(k.cast(d.lookupType("CPlusPlus::Kind"))) d.putValue(val[11:]) # Strip "CPlusPlus::" d.putPlainChildren(value) def qdump__CPlusPlus__Internal__PPToken(d, value): data, size, alloc = d.byteArrayData(value["m_src"]) length = int(value["f"]["utf16chars"]) offset = int(value["utf16charOffset"]) #warn("size: %s, alloc: %s, offset: %s, length: %s, data: %s" # % (size, alloc, offset, length, data)) d.putValue(d.readMemory(data + offset, min(100, length)), Hex2EncodedLatin1) d.putPlainChildren(value)<|fim▁end|>
def qdump__CPlusPlus__StringLiteral(d, value): d.putSimpleCharArray(value["_chars"], value["_size"]) d.putPlainChildren(value)
<|file_name|>phe_exceptions.py<|end_file_name|><|fim▁begin|>''' Base module for all of the exceptions classes used internally. Created on 10 Dec 2013 @author: alex ''' class PheException(Exception): ''' This is the top level class that EVERYTHING must be derived from. In particular, this class contains an abstract property called 'phe_return_code'. This property must be implemented and the individual implementation will have it's own exit code. which will be propogated to the calling functions, if needs be. PheException must not be passed as is.<|fim▁hole|> ''' Constructor ''' super(Exception, self).__init__(msg) self._phe_return_code = phe_return_code self._cause = cause @property def phe_return_code(self): ''' Read-only attribute that holds the return status that should be exited with. ''' return self._phe_return_code @property def cause(self): ''' Read-only attribute that indicates the root cause of the exception raised. ''' return self._cause class PheExternalError(PheException): ''' Exception class designed to be raised when an external command/process fails. Instead of falling over quietly, this exception can be raised. The exception includes the message to be put into the logs and the cause of the exception. In this case, the cause should generally be subprocess.CallerProcessError. The particulars of the failed command can be found inside the cause. If the catcher of this exception choses to exit the code, 'phe_return_code' should be used to indicate the cause of it all. ''' def __init__(self, msg, cause): ''' Constructor for the PheExternalError @param msg: Message to be displayed with the exception. @type msg: str. @param cause: Cause of this exception, usually subprocess.CalledProcessError. @type cause: class. ''' super(PheExternalError, self).__init__(msg, cause, 55)<|fim▁end|>
''' def __init__(self, msg, cause, phe_return_code=255):
<|file_name|>version.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. <|fim▁hole|>package version // Heapster version. Update this whenever making a new release. // The version is of the format Major.Minor.Patch // Increment major number for new feature additions and behavioral changes. // Increment minor number for bug fixes and performance enhancements. // Increment patch number for critical fixes to existing releases. const HeapsterVersion = "0.18.0"<|fim▁end|>
<|file_name|>altcoin_nb.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="nb" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="14"/><|fim▁hole|> <message> <location filename="../forms/aboutdialog.ui" line="53"/> <source>&lt;b&gt;Altcoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Altcoin&lt;/b&gt; versjon</translation> </message> <message> <location filename="../forms/aboutdialog.ui" line="97"/> <source>Copyright © 2009-2012 Altcoin Developers This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source> <translation>Copyright © 2009-2012 Altcoin Utviklerne Dette er eksperimentell programvare. Distribuert under MIT/X11 programvarelisensen, se medfølgende fil license.txt eller http://www.opensource.org/licenses/mit-license.php. Dette produktet inneholder programvare utviklet av OpenSSL prosjektet for bruk i OpenSSL Toolkit (http://www.openssl.org/) og kryptografisk programvare skrevet av Eric Young (eay@cryptsoft.com) og UPnP programvare skrevet av Thomas Bernard.</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="14"/> <source>Address Book</source> <translation>Adressebok</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="20"/> <source>These are your Altcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Dette er dine Altcoin adresser for å motta betalinger. 
Du kan gi en separat adresse til hver avsender slik at du kan holde oversikt over hvem som betaler deg.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="36"/> <source>Double-click to edit address or label</source> <translation>Dobbeltklikk for å redigere adresse eller merkelapp</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="63"/> <source>Create a new address</source> <translation>Lag en ny adresse</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="77"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopier den valgte adressen til systemets utklippstavle</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="66"/> <source>&amp;New Address</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/addressbookpage.ui" line="80"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/addressbookpage.ui" line="91"/> <source>Show &amp;QR Code</source> <translation>Vis &amp;QR Kode</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="102"/> <source>Sign a message to prove you own this address</source> <translation>Signér en melding for å bevise at du eier denne adressen</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="105"/> <source>&amp;Sign Message</source> <translation>&amp;Signér Melding</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="116"/> <source>Delete the currently selected address from the list. Only sending addresses can be deleted.</source> <translation>Slett den valgte adressen fra listen. 
Bare adresser for sending kan slettes.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="119"/> <source>&amp;Delete</source> <translation>&amp;Slett</translation> </message> <message> <location filename="../addressbookpage.cpp" line="63"/> <source>Copy &amp;Label</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="65"/> <source>&amp;Edit</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="292"/> <source>Export Address Book Data</source> <translation>Eksporter adressebok</translation> </message> <message> <location filename="../addressbookpage.cpp" line="293"/> <source>Comma separated file (*.csv)</source> <translation>Kommaseparert fil (*.csv)</translation> </message> <message> <location filename="../addressbookpage.cpp" line="306"/> <source>Error exporting</source> <translation>Feil ved eksportering</translation> </message> <message> <location filename="../addressbookpage.cpp" line="306"/> <source>Could not write to file %1.</source> <translation>Kunne ikke skrive til filen %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="142"/> <source>Label</source> <translation>Merkelapp</translation> </message> <message> <location filename="../addresstablemodel.cpp" line="142"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location filename="../addresstablemodel.cpp" line="178"/> <source>(no label)</source> <translation>(ingen merkelapp)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/askpassphrasedialog.ui" line="47"/> <source>Enter passphrase</source> 
<translation>Angi adgangsfrase</translation> </message> <message> <location filename="../forms/askpassphrasedialog.ui" line="61"/> <source>New passphrase</source> <translation>Ny adgangsfrase</translation> </message> <message> <location filename="../forms/askpassphrasedialog.ui" line="75"/> <source>Repeat new passphrase</source> <translation>Gjenta ny adgangsfrase</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Skriv inn den nye adgangsfrasen for lommeboken.&lt;br/&gt;Vennligst bruk en adgangsfrase med &lt;b&gt;10 eller flere tilfeldige tegn&lt;/b&gt;, eller &lt;b&gt;åtte eller flere ord&lt;/b&gt;.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="34"/> <source>Encrypt wallet</source> <translation>Krypter lommebok</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="37"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Denne operasjonen krever adgangsfrasen til lommeboken for å låse den opp.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="42"/> <source>Unlock wallet</source> <translation>Lås opp lommebok</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="45"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Denne operasjonen krever adgangsfrasen til lommeboken for å dekryptere den.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="50"/> <source>Decrypt wallet</source> <translation>Dekrypter lommebok</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="53"/> <source>Change passphrase</source> 
<translation>Endre adgangsfrase</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="54"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Skriv inn gammel og ny adgangsfrase for lommeboken.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="100"/> <source>Confirm wallet encryption</source> <translation>Bekreft kryptering av lommebok</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="101"/> <source>WARNING: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR ALTCOINS&lt;/b&gt;! Are you sure you wish to encrypt your wallet?</source> <translation>ADVARSEL: Hvis du krypterer lommeboken og mister adgangsfrasen vil du &lt;b&gt;MISTE ALLE DINE ALTCOINS&lt;/b&gt;! Er du sikker på at du vil kryptere lommeboken?</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="110"/> <location filename="../askpassphrasedialog.cpp" line="159"/> <source>Wallet encrypted</source> <translation>Lommebok kryptert</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="111"/> <source>Altcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your altcoins from being stolen by malware infecting your computer.</source> <translation>Altcoin vil nå lukkes for å fullføre krypteringsprosessen. 
Husk at kryptering av lommeboken ikke fullt ut kan beskytte dine altcoins fra å bli stjålet om skadevare infiserer datamaskinen.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="207"/> <location filename="../askpassphrasedialog.cpp" line="231"/> <source>Warning: The Caps Lock key is on.</source> <translation>Advarsel: Caps lock tasten er på.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="116"/> <location filename="../askpassphrasedialog.cpp" line="123"/> <location filename="../askpassphrasedialog.cpp" line="165"/> <location filename="../askpassphrasedialog.cpp" line="171"/> <source>Wallet encryption failed</source> <translation>Kryptering av lommebok feilet</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="117"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Kryptering av lommebok feilet på grunn av en intern feil. 
Din lommebok ble ikke kryptert.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="124"/> <location filename="../askpassphrasedialog.cpp" line="172"/> <source>The supplied passphrases do not match.</source> <translation>De angitte adgangsfrasene er ulike.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="135"/> <source>Wallet unlock failed</source> <translation>Opplåsing av lommebok feilet</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="136"/> <location filename="../askpassphrasedialog.cpp" line="147"/> <location filename="../askpassphrasedialog.cpp" line="166"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Adgangsfrasen angitt for dekryptering av lommeboken var feil.</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="146"/> <source>Wallet decryption failed</source> <translation>Dekryptering av lommebok feilet</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="160"/> <source>Wallet passphrase was succesfully changed.</source> <translation>Lommebokens adgangsfrase ble endret.</translation> </message> </context> <context> <name>AltcoinGUI</name> <message> <location filename="../altcoingui.cpp" line="73"/> <source>Altcoin Wallet</source> <translation>Altcoin Lommebok</translation> </message> <message> <location filename="../altcoingui.cpp" line="215"/> <source>Sign &amp;message...</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="248"/> <source>Show/Hide &amp;Altcoin</source> <translation>Gjem/vis &amp;Altcoin</translation> </message> <message> <location filename="../altcoingui.cpp" line="515"/> <source>Synchronizing with network...</source> <translation>Synkroniserer med nettverk...</translation> </message> <message> <location filename="../altcoingui.cpp" line="185"/> 
<source>&amp;Overview</source> <translation>&amp;Oversikt</translation> </message> <message> <location filename="../altcoingui.cpp" line="186"/> <source>Show general overview of wallet</source> <translation>Vis generell oversikt over lommeboken</translation> </message> <message> <location filename="../altcoingui.cpp" line="191"/> <source>&amp;Transactions</source> <translation>&amp;Transaksjoner</translation> </message> <message> <location filename="../altcoingui.cpp" line="192"/> <source>Browse transaction history</source> <translation>Vis transaksjonshistorikk</translation> </message> <message> <location filename="../altcoingui.cpp" line="197"/> <source>&amp;Address Book</source> <translation>&amp;Adressebok</translation> </message> <message> <location filename="../altcoingui.cpp" line="198"/> <source>Edit the list of stored addresses and labels</source> <translation>Rediger listen over adresser og deres merkelapper</translation> </message> <message> <location filename="../altcoingui.cpp" line="203"/> <source>&amp;Receive coins</source> <translation>&amp;Motta altcoins</translation> </message> <message> <location filename="../altcoingui.cpp" line="204"/> <source>Show the list of addresses for receiving payments</source> <translation>Vis listen over adresser for mottak av betalinger</translation> </message> <message> <location filename="../altcoingui.cpp" line="209"/> <source>&amp;Send coins</source> <translation>&amp;Send altcoins</translation> </message> <message> <location filename="../altcoingui.cpp" line="216"/> <source>Prove you control an address</source> <translation>Bevis at du kontrollerer en adresse</translation> </message> <message> <location filename="../altcoingui.cpp" line="235"/> <source>E&amp;xit</source> <translation>&amp;Avslutt</translation> </message> <message> <location filename="../altcoingui.cpp" line="236"/> <source>Quit application</source> <translation>Avslutt applikasjonen</translation> </message> <message> <location 
filename="../altcoingui.cpp" line="239"/> <source>&amp;About %1</source> <translation> &amp;Om %1</translation> </message> <message> <location filename="../altcoingui.cpp" line="240"/> <source>Show information about Altcoin</source> <translation>Vis informasjon om Altcoin</translation> </message> <message> <location filename="../altcoingui.cpp" line="242"/> <source>About &amp;Qt</source> <translation>Om &amp;Qt</translation> </message> <message> <location filename="../altcoingui.cpp" line="243"/> <source>Show information about Qt</source> <translation>Vis informasjon om Qt</translation> </message> <message> <location filename="../altcoingui.cpp" line="245"/> <source>&amp;Options...</source> <translation>&amp;Innstillinger...</translation> </message> <message> <location filename="../altcoingui.cpp" line="252"/> <source>&amp;Encrypt Wallet...</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="255"/> <source>&amp;Backup Wallet...</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="257"/> <source>&amp;Change Passphrase...</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="517"/> <source>~%n block(s) remaining</source> <translation><numerusform>~%n blokk gjenstår</numerusform><numerusform>~%n blokker gjenstår</numerusform></translation> </message> <message> <location filename="../altcoingui.cpp" line="528"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation>Lastet ned %1 av %2 blokker med transaksjonshistorikk (%3% ferdig).</translation> </message> <message> <location filename="../altcoingui.cpp" line="250"/> <source>&amp;Export...</source> <translation>&amp;Eksporter...</translation> </message> <message> <location filename="../altcoingui.cpp" line="210"/> <source>Send coins to a Altcoin address</source> <translation type="unfinished"/> </message> 
<message> <location filename="../altcoingui.cpp" line="246"/> <source>Modify configuration options for Altcoin</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="249"/> <source>Show or hide the Altcoin window</source> <translation>Vis eller gjem Altcoinvinduet</translation> </message> <message> <location filename="../altcoingui.cpp" line="251"/> <source>Export the data in the current tab to a file</source> <translation>Eksporter data fra nåværende fane til fil</translation> </message> <message> <location filename="../altcoingui.cpp" line="253"/> <source>Encrypt or decrypt wallet</source> <translation>Krypter eller dekrypter lommebok</translation> </message> <message> <location filename="../altcoingui.cpp" line="256"/> <source>Backup wallet to another location</source> <translation>Sikkerhetskopiér lommebok til annet sted</translation> </message> <message> <location filename="../altcoingui.cpp" line="258"/> <source>Change the passphrase used for wallet encryption</source> <translation>Endre adgangsfrasen brukt for kryptering av lommebok</translation> </message> <message> <location filename="../altcoingui.cpp" line="259"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="260"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="261"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="262"/> <source>Verify a message signature</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="286"/> <source>&amp;File</source> <translation>&amp;Fil</translation> </message> <message> <location filename="../altcoingui.cpp" line="296"/> <source>&amp;Settings</source> 
<translation>&amp;Innstillinger</translation> </message> <message> <location filename="../altcoingui.cpp" line="302"/> <source>&amp;Help</source> <translation>&amp;Hjelp</translation> </message> <message> <location filename="../altcoingui.cpp" line="311"/> <source>Tabs toolbar</source> <translation>Verktøylinje for faner</translation> </message> <message> <location filename="../altcoingui.cpp" line="322"/> <source>Actions toolbar</source> <translation>Verktøylinje for handlinger</translation> </message> <message> <location filename="../altcoingui.cpp" line="334"/> <location filename="../altcoingui.cpp" line="343"/> <source>[testnet]</source> <translation>[testnett]</translation> </message> <message> <location filename="../altcoingui.cpp" line="343"/> <location filename="../altcoingui.cpp" line="399"/> <source>Altcoin client</source> <translation>Altcoinklient</translation> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="492"/> <source>%n active connection(s) to Altcoin network</source> <translation><numerusform>%n aktiv forbindelse til Altcoin-nettverket</numerusform><numerusform>%n aktive forbindelser til Altcoin-nettverket</numerusform></translation> </message> <message> <location filename="../altcoingui.cpp" line="540"/> <source>Downloaded %1 blocks of transaction history.</source> <translation>Lastet ned %1 blokker med transaksjonshistorikk.</translation> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="555"/> <source>%n second(s) ago</source> <translation><numerusform>for %n sekund siden</numerusform><numerusform>for %n sekunder siden</numerusform></translation> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="559"/> <source>%n minute(s) ago</source> <translation><numerusform>for %n minutt siden</numerusform><numerusform>for %n minutter siden</numerusform></translation> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="563"/> <source>%n 
hour(s) ago</source> <translation><numerusform>for %n time siden</numerusform><numerusform>for %n timer siden</numerusform></translation> </message> <message numerus="yes"> <location filename="../altcoingui.cpp" line="567"/> <source>%n day(s) ago</source> <translation><numerusform>for %n dag siden</numerusform><numerusform>for %n dager siden</numerusform></translation> </message> <message> <location filename="../altcoingui.cpp" line="573"/> <source>Up to date</source> <translation>Ajour</translation> </message> <message> <location filename="../altcoingui.cpp" line="580"/> <source>Catching up...</source> <translation>Kommer ajour...</translation> </message> <message> <location filename="../altcoingui.cpp" line="590"/> <source>Last received block was generated %1.</source> <translation>Siste mottatte blokk ble generert %1.</translation> </message> <message> <location filename="../altcoingui.cpp" line="649"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Denne transaksjonen overstiger størrelsesbegrensningen. Du kan likevel sende den med et gebyr på %1, som går til nodene som prosesserer transaksjonen din og støtter nettverket. 
Vil du betale gebyret?</translation> </message> <message> <location filename="../altcoingui.cpp" line="654"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoingui.cpp" line="681"/> <source>Sent transaction</source> <translation>Sendt transaksjon</translation> </message> <message> <location filename="../altcoingui.cpp" line="682"/> <source>Incoming transaction</source> <translation>Innkommende transaksjon</translation> </message> <message> <location filename="../altcoingui.cpp" line="683"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Dato: %1 Beløp: %2 Type: %3 Adresse: %4 </translation> </message> <message> <location filename="../altcoingui.cpp" line="804"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Lommeboken er &lt;b&gt;kryptert&lt;/b&gt; og for tiden &lt;b&gt;ulåst&lt;/b&gt;</translation> </message> <message> <location filename="../altcoingui.cpp" line="812"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Lommeboken er &lt;b&gt;kryptert&lt;/b&gt; og for tiden &lt;b&gt;låst&lt;/b&gt;</translation> </message> <message> <location filename="../altcoingui.cpp" line="835"/> <source>Backup Wallet</source> <translation>Sikkerhetskopiér Lommebok</translation> </message> <message> <location filename="../altcoingui.cpp" line="835"/> <source>Wallet Data (*.dat)</source> <translation>Lommeboksdata (*.dat)</translation> </message> <message> <location filename="../altcoingui.cpp" line="838"/> <source>Backup Failed</source> <translation>Sikkerhetskopiering feilet</translation> </message> <message> <location filename="../altcoingui.cpp" line="838"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>En feil oppstod ved lagring av lommebok til nytt sted</translation> </message> <message> <location 
filename="../altcoin.cpp" line="112"/> <source>A fatal error occured. Altcoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="84"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>DisplayOptionsPage</name> <message> <location filename="../optionsdialog.cpp" line="246"/> <source>Display</source> <translation>Visning</translation> </message> <message> <location filename="../optionsdialog.cpp" line="257"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="263"/> <source>The user interface language can be set here. This setting will only take effect after restarting Altcoin.</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="252"/> <source>User Interface &amp;Language:</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="273"/> <source>&amp;Unit to show amounts in:</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="277"/> <source>Choose the default subdivision unit to show in the interface, and when sending coins</source> <translation>Velg standard underenhet som skal vises i grensesnittet og ved sending av mynter</translation> </message> <message> <location filename="../optionsdialog.cpp" line="284"/> <source>&amp;Display addresses in transaction list</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="285"/> <source>Whether to show Altcoin addresses in the transaction list</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="303"/> <source>Warning</source> <translation type="unfinished"/> </message> 
<message> <location filename="../optionsdialog.cpp" line="303"/> <source>This setting will take effect after restarting Altcoin.</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="14"/> <source>Edit Address</source> <translation>Rediger adresse</translation> </message> <message> <location filename="../forms/editaddressdialog.ui" line="25"/> <source>&amp;Label</source> <translation>&amp;Merkelapp</translation> </message> <message> <location filename="../forms/editaddressdialog.ui" line="35"/> <source>The label associated with this address book entry</source> <translation>Merkelappen koblet til denne adressen i adresseboken</translation> </message> <message> <location filename="../forms/editaddressdialog.ui" line="42"/> <source>&amp;Address</source> <translation>&amp;Adresse</translation> </message> <message> <location filename="../forms/editaddressdialog.ui" line="52"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adressen til denne oppføringen i adresseboken. 
Denne kan kun endres for utsendingsadresser.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="20"/> <source>New receiving address</source> <translation>Ny mottaksadresse</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="24"/> <source>New sending address</source> <translation>Ny utsendingsadresse</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="27"/> <source>Edit receiving address</source> <translation>Rediger mottaksadresse</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="31"/> <source>Edit sending address</source> <translation>Rediger utsendingsadresse</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="91"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Den oppgitte adressen &quot;%1&quot; er allerede i adresseboken.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="96"/> <source>The entered address &quot;%1&quot; is not a valid Altcoin address.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="101"/> <source>Could not unlock wallet.</source> <translation>Kunne ikke låse opp lommeboken.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="106"/> <source>New key generation failed.</source> <translation>Generering av ny nøkkel feilet.</translation> </message> </context> <context> <name>HelpMessageBox</name> <message> <location filename="../altcoin.cpp" line="133"/> <location filename="../altcoin.cpp" line="143"/> <source>Altcoin-Qt</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoin.cpp" line="133"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoin.cpp" line="135"/> <source>Usage:</source> 
<translation>Bruk:</translation> </message> <message> <location filename="../altcoin.cpp" line="136"/> <source>options</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoin.cpp" line="138"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoin.cpp" line="139"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Sett språk, for eksempel &quot;nb_NO&quot; (standardverdi: fra operativsystem)</translation> </message> <message> <location filename="../altcoin.cpp" line="140"/> <source>Start minimized</source> <translation>Start minimert </translation> </message> <message> <location filename="../altcoin.cpp" line="141"/> <source>Show splash screen on startup (default: 1)</source> <translation>Vis splashskjerm ved oppstart (standardverdi: 1)</translation> </message> </context> <context> <name>MainOptionsPage</name> <message> <location filename="../optionsdialog.cpp" line="227"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="212"/> <source>Pay transaction &amp;fee</source> <translation>Betal transaksjons&amp;gebyr</translation> </message> <message> <location filename="../optionsdialog.cpp" line="204"/> <source>Main</source> <translation>Hoved</translation> </message> <message> <location filename="../optionsdialog.cpp" line="206"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation>Valgfritt transaksjonsgebyr per kB som sikrer at dine transaksjoner blir raskt prosessert. De fleste transaksjoner er 1 kB. 
Et gebyr på 0.01 er anbefalt.</translation> </message> <message> <location filename="../optionsdialog.cpp" line="222"/> <source>&amp;Start Altcoin on system login</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="223"/> <source>Automatically start Altcoin after logging in to the system</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="226"/> <source>&amp;Detach databases at shutdown</source> <translation type="unfinished"/> </message> </context> <context> <name>MessagePage</name> <message> <location filename="../forms/messagepage.ui" line="14"/> <source>Sign Message</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/messagepage.ui" line="20"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Du kan signere meldinger med dine adresser for å bevise at du eier dem. Ikke signér vage meldinger da phishing-angrep kan prøve å lure deg til å signere din identitet over til andre. Signér kun fullt detaljerte utsagn som du er enig i.</translation> </message> <message> <location filename="../forms/messagepage.ui" line="38"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Adressen meldingen skal signeres med (f.eks. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location filename="../forms/messagepage.ui" line="48"/> <source>Choose adress from address book</source> <translation>Velg adresse fra adresseboken</translation> </message> <message> <location filename="../forms/messagepage.ui" line="58"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location filename="../forms/messagepage.ui" line="71"/> <source>Paste address from clipboard</source> <translation>Lim inn adresse fra utklippstavlen</translation> </message> <message> <location filename="../forms/messagepage.ui" line="81"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location filename="../forms/messagepage.ui" line="93"/> <source>Enter the message you want to sign here</source> <translation>Skriv inn meldingen du vil signere her</translation> </message> <message> <location filename="../forms/messagepage.ui" line="128"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/messagepage.ui" line="131"/> <source>&amp;Copy Signature</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/messagepage.ui" line="142"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/messagepage.ui" line="145"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location filename="../messagepage.cpp" line="31"/> <source>Click &quot;Sign Message&quot; to get signature</source> <translation>Klikk &quot;Signér Melding&quot; for signatur</translation> </message> <message> <location filename="../forms/messagepage.ui" line="114"/> <source>Sign a message to prove you own this address</source> <translation>Signér en melding for å bevise at du eier denne adressen</translation> </message> <message> <location 
filename="../forms/messagepage.ui" line="117"/> <source>&amp;Sign Message</source> <translation>&amp;Signér Melding</translation> </message> <message> <location filename="../messagepage.cpp" line="30"/> <source>Enter a Altcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Skriv inn en Altcoin adresse (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location filename="../messagepage.cpp" line="83"/> <location filename="../messagepage.cpp" line="90"/> <location filename="../messagepage.cpp" line="105"/> <location filename="../messagepage.cpp" line="117"/> <source>Error signing</source> <translation>Feil ved signering</translation> </message> <message> <location filename="../messagepage.cpp" line="83"/> <source>%1 is not a valid address.</source> <translation>%1 er ikke en gyldig adresse</translation> </message> <message> <location filename="../messagepage.cpp" line="90"/> <source>%1 does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location filename="../messagepage.cpp" line="105"/> <source>Private key for %1 is not available.</source> <translation>Privat nøkkel for %1 er ikke tilgjengelig.</translation> </message> <message> <location filename="../messagepage.cpp" line="117"/> <source>Sign failed</source> <translation>Signering feilet</translation> </message> </context> <context> <name>NetworkOptionsPage</name> <message> <location filename="../optionsdialog.cpp" line="345"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="347"/> <source>Map port using &amp;UPnP</source> <translation>Sett opp port vha. &amp;UPnP</translation> </message> <message> <location filename="../optionsdialog.cpp" line="348"/> <source>Automatically open the Altcoin client port on the router. 
This only works when your router supports UPnP and it is enabled.</source> <translation>Åpne automatisk Altcoin klientporten på ruteren. Dette virker kun om din ruter støtter UPnP og dette er påslått.</translation> </message> <message> <location filename="../optionsdialog.cpp" line="351"/> <source>&amp;Connect through SOCKS4 proxy:</source> <translation>&amp;Koble til gjennom SOCKS4 proxy:</translation> </message> <message> <location filename="../optionsdialog.cpp" line="352"/> <source>Connect to the Bitcon network through a SOCKS4 proxy (e.g. when connecting through Tor)</source> <translation>Koble til Altcoin nettverket gjennom en SOCKS4 mellomtjener (f.eks. for tilkobling gjennom Tor)</translation> </message> <message> <location filename="../optionsdialog.cpp" line="357"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="366"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="363"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP-adresse for mellomtjener (f.eks. 127.0.0.1)</translation> </message> <message> <location filename="../optionsdialog.cpp" line="372"/> <source>Port of the proxy (e.g. 1234)</source> <translation>Port for mellomtjener (f.eks. 1234)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../optionsdialog.cpp" line="135"/> <source>Options</source> <translation>Innstillinger</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="14"/> <source>Form</source> <translation>Skjema</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="47"/> <location filename="../forms/overviewpage.ui" line="204"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Altcoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/overviewpage.ui" line="89"/> <source>Balance:</source> <translation>Saldo:</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="147"/> <source>Number of transactions:</source> <translation>Antall transaksjoner:</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="118"/> <source>Unconfirmed:</source> <translation>Ubekreftet:</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="40"/> <source>Wallet</source> <translation>Lommebok</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="197"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Siste transaksjoner&lt;/b&gt;</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="105"/> <source>Your current balance</source> <translation>Din nåværende saldo</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="134"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Totalt antall ubekreftede transaksjoner som ikke telles med i saldo enda</translation> </message> <message> <location filename="../forms/overviewpage.ui" line="154"/> <source>Total number of transactions in wallet</source> <translation>Totalt antall transaksjoner i lommeboken</translation> </message> <message> <location filename="../overviewpage.cpp" line="110"/> <location filename="../overviewpage.cpp" line="111"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="14"/> <source>QR Code Dialog</source> 
<translation type="unfinished"/> </message> <message> <location filename="../forms/qrcodedialog.ui" line="32"/> <source>QR Code</source> <translation>QR Kode</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="55"/> <source>Request Payment</source> <translation>Etterspør Betaling</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="70"/> <source>Amount:</source> <translation>Beløp:</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="105"/> <source>BTC</source> <translation>BTC</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="121"/> <source>Label:</source> <translation>Merkelapp:</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="144"/> <source>Message:</source> <translation>Melding:</translation> </message> <message> <location filename="../forms/qrcodedialog.ui" line="186"/> <source>&amp;Save As...</source> <translation>&amp;Lagre Som...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="45"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="63"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Resulterende URI for lang, prøv å redusere teksten for merkelapp / melding.</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="120"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="120"/> <source>PNG Images (*.png)</source> <translation>PNG bilder (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="14"/> <source>Altcoin debug window</source> <translation type="unfinished"/> </message> <message> <location 
filename="../forms/rpcconsole.ui" line="46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="56"/> <location filename="../forms/rpcconsole.ui" line="79"/> <location filename="../forms/rpcconsole.ui" line="102"/> <location filename="../forms/rpcconsole.ui" line="125"/> <location filename="../forms/rpcconsole.ui" line="161"/> <location filename="../forms/rpcconsole.ui" line="214"/> <location filename="../forms/rpcconsole.ui" line="237"/> <location filename="../forms/rpcconsole.ui" line="260"/> <location filename="../rpcconsole.cpp" line="245"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="69"/> <source>Client version</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="24"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="39"/> <source>Client</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="115"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="144"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="151"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="174"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="197"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="204"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location 
filename="../forms/rpcconsole.ui" line="227"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="250"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="292"/> <source>Debug logfile</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="299"/> <source>Open the Altcoin debug logfile from the current data directory. This can take a few seconds for large logfiles.</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="302"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="323"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="92"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/rpcconsole.ui" line="372"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="212"/> <source>Welcome to the Altcoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="213"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="214"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="14"/> <location filename="../sendcoinsdialog.cpp" line="122"/> <location 
filename="../sendcoinsdialog.cpp" line="127"/> <location filename="../sendcoinsdialog.cpp" line="132"/> <location filename="../sendcoinsdialog.cpp" line="137"/> <location filename="../sendcoinsdialog.cpp" line="143"/> <location filename="../sendcoinsdialog.cpp" line="148"/> <location filename="../sendcoinsdialog.cpp" line="153"/> <source>Send Coins</source> <translation>Send Altcoins</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="64"/> <source>Send to multiple recipients at once</source> <translation>Send til flere enn én mottaker</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="67"/> <source>&amp;Add Recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="84"/> <source>Remove all transaction fields</source> <translation>Fjern alle transaksjonsfelter</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="87"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="106"/> <source>Balance:</source> <translation>Saldo:</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="113"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="144"/> <source>Confirm the send action</source> <translation>Bekreft sending</translation> </message> <message> <location filename="../forms/sendcoinsdialog.ui" line="147"/> <source>&amp;Send</source> <translation>&amp;Send</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="94"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; til %2 (%3)</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="99"/> <source>Confirm send coins</source> 
<translation>Bekreft sending av altcoins</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="100"/> <source>Are you sure you want to send %1?</source> <translation>Er du sikker på at du vil sende %1?</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="100"/> <source> and </source> <translation> og </translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="123"/> <source>The recepient address is not valid, please recheck.</source> <translation>Mottaksadressen er ugyldig, prøv igjen.</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="128"/> <source>The amount to pay must be larger than 0.</source> <translation>Beløpet som skal betales må være over 0.</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="133"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="138"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="144"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="149"/> <source>Error: Transaction creation failed.</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="154"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="14"/> <source>Form</source> <translation>Skjema</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="29"/> <source>A&amp;mount:</source> <translation>&amp;Beløp:</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="42"/> <source>Pay &amp;To:</source> <translation>Betal &amp;Til:</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="66"/> <location filename="../sendcoinsentry.cpp" line="25"/> <source>Enter a label for this address to add it to your address book</source> <translation>Skriv inn en merkelapp for denne adressen for å legge den til i din adressebok</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="75"/> <source>&amp;Label:</source> <translation>&amp;Merkelapp:</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="93"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Adressen betalingen skal sendes til (f.eks. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="103"/> <source>Choose address from address book</source> <translation>Velg adresse fra adresseboken</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="113"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="120"/> <source>Paste address from clipboard</source> <translation>Lim inn adresse fra utklippstavlen</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="130"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location filename="../forms/sendcoinsentry.ui" line="137"/> <source>Remove this recipient</source> <translation>Fjern denne mottakeren</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="26"/> <source>Enter a Altcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Skriv inn en Altcoin adresse (f.eks. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="21"/> <source>Open for %1 blocks</source> <translation>Åpen for %1 blokker</translation> </message> <message> <location filename="../transactiondesc.cpp" line="23"/> <source>Open until %1</source> <translation>Åpen til %1</translation> </message> <message> <location filename="../transactiondesc.cpp" line="29"/> <source>%1/offline?</source> <translation>%1/frakoblet?</translation> </message> <message> <location filename="../transactiondesc.cpp" line="31"/> <source>%1/unconfirmed</source> <translation>%1/ubekreftet</translation> </message> <message> <location filename="../transactiondesc.cpp" line="33"/> <source>%1 confirmations</source> <translation>%1 bekreftelser</translation> </message> <message> <location filename="../transactiondesc.cpp" line="51"/> <source>&lt;b&gt;Status:&lt;/b&gt; </source> <translation>&lt;b&gt;Status:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="56"/> <source>, has not been successfully broadcast yet</source> <translation>, har ikke blitt kringkastet uten problemer enda.</translation> </message> <message> <location filename="../transactiondesc.cpp" line="58"/> <source>, broadcast through %1 node</source> <translation>, kringkast gjennom %1 node</translation> </message> <message> <location filename="../transactiondesc.cpp" line="60"/> <source>, broadcast through %1 nodes</source> <translation>, kringkast gjennom %1 noder</translation> </message> <message> <location filename="../transactiondesc.cpp" line="64"/> <source>&lt;b&gt;Date:&lt;/b&gt; </source> <translation>&lt;b&gt;Dato:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="71"/> <source>&lt;b&gt;Source:&lt;/b&gt; Generated&lt;br&gt;</source> <translation>&lt;b&gt;Kilde:&lt;/b&gt; Generert&lt;br&gt;</translation> 
</message> <message> <location filename="../transactiondesc.cpp" line="77"/> <location filename="../transactiondesc.cpp" line="94"/> <source>&lt;b&gt;From:&lt;/b&gt; </source> <translation>&lt;b&gt;Fra:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="94"/> <source>unknown</source> <translation>ukjent</translation> </message> <message> <location filename="../transactiondesc.cpp" line="95"/> <location filename="../transactiondesc.cpp" line="118"/> <location filename="../transactiondesc.cpp" line="178"/> <source>&lt;b&gt;To:&lt;/b&gt; </source> <translation>&lt;b&gt;Til:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="98"/> <source> (yours, label: </source> <translation> (din, merkelapp: </translation> </message> <message> <location filename="../transactiondesc.cpp" line="100"/> <source> (yours)</source> <translation> (din)</translation> </message> <message> <location filename="../transactiondesc.cpp" line="136"/> <location filename="../transactiondesc.cpp" line="150"/> <location filename="../transactiondesc.cpp" line="195"/> <location filename="../transactiondesc.cpp" line="212"/> <source>&lt;b&gt;Credit:&lt;/b&gt; </source> <translation>&lt;b&gt;Kredit:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="138"/> <source>(%1 matures in %2 more blocks)</source> <translation>(%1 modnes om %2 flere blokker)</translation> </message> <message> <location filename="../transactiondesc.cpp" line="142"/> <source>(not accepted)</source> <translation>(ikke akseptert)</translation> </message> <message> <location filename="../transactiondesc.cpp" line="186"/> <location filename="../transactiondesc.cpp" line="194"/> <location filename="../transactiondesc.cpp" line="209"/> <source>&lt;b&gt;Debit:&lt;/b&gt; </source> <translation>&lt;b&gt;Debet:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="200"/> 
<source>&lt;b&gt;Transaction fee:&lt;/b&gt; </source> <translation>&lt;b&gt;Transaksjonsgebyr:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="216"/> <source>&lt;b&gt;Net amount:&lt;/b&gt; </source> <translation>&lt;b&gt;Nettobeløp:&lt;/b&gt; </translation> </message> <message> <location filename="../transactiondesc.cpp" line="222"/> <source>Message:</source> <translation>Melding:</translation> </message> <message> <location filename="../transactiondesc.cpp" line="224"/> <source>Comment:</source> <translation>Kommentar:</translation> </message> <message> <location filename="../transactiondesc.cpp" line="226"/> <source>Transaction ID:</source> <translation>Transaksjons-ID:</translation> </message> <message> <location filename="../transactiondesc.cpp" line="229"/> <source>Generated coins must wait 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, it will change to &quot;not accepted&quot; and not be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Genererte mynter må vente 120 blokker før de kan brukes. Da du genererte denne blokken ble den kringkastet på nettverket for å bli lagt til i kjeden av blokker. Hvis den ikke kommer med i kjeden vil den endre seg til &quot;ikke akseptert&quot; og pengene vil ikke kunne brukes. 
Dette vil noen ganger skje hvis en annen node genererer en blokk noen sekunder i tid fra din egen.</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="14"/> <source>Transaction details</source> <translation>Transaksjonsdetaljer</translation> </message> <message> <location filename="../forms/transactiondescdialog.ui" line="20"/> <source>This pane shows a detailed description of the transaction</source> <translation>Her vises en detaljert beskrivelse av transaksjonen</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="226"/> <source>Date</source> <translation>Dato</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="226"/> <source>Type</source> <translation>Type</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="226"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="226"/> <source>Amount</source> <translation>Beløp</translation> </message> <message numerus="yes"> <location filename="../transactiontablemodel.cpp" line="281"/> <source>Open for %n block(s)</source> <translation><numerusform>Åpen for %n blokk</numerusform><numerusform>Åpen for %n blokker</numerusform></translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="284"/> <source>Open until %1</source> <translation>Åpen til %1</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="287"/> <source>Offline (%1 confirmations)</source> <translation>Frakoblet (%1 bekreftelser)</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="290"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Ubekreftet (%1 av %2 bekreftelser)</translation> 
</message> <message> <location filename="../transactiontablemodel.cpp" line="293"/> <source>Confirmed (%1 confirmations)</source> <translation>Bekreftet (%1 bekreftelser)</translation> </message> <message numerus="yes"> <location filename="../transactiontablemodel.cpp" line="301"/> <source>Mined balance will be available in %n more blocks</source> <translation><numerusform>Utvunnet saldo vil bli tilgjengelig om %n blokk</numerusform><numerusform>Utvunnet saldo vil bli tilgjengelig om %n blokker</numerusform></translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="307"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Denne blokken har ikke blitt mottatt av noen andre noder og vil sannsynligvis ikke bli akseptert!</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="310"/> <source>Generated but not accepted</source> <translation>Generert men ikke akseptert</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="353"/> <source>Received with</source> <translation>Mottatt med</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="355"/> <source>Received from</source> <translation>Mottatt fra</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="358"/> <source>Sent to</source> <translation>Sendt til</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="360"/> <source>Payment to yourself</source> <translation>Betaling til deg selv</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="362"/> <source>Mined</source> <translation>Utvunnet</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="400"/> <source>(n/a)</source> <translation>-</translation> </message> <message> <location filename="../transactiontablemodel.cpp" 
line="599"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transaksjonsstatus. Hold muspekeren over dette feltet for å se antall bekreftelser.</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="601"/> <source>Date and time that the transaction was received.</source> <translation>Dato og tid da transaksjonen ble mottatt.</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="603"/> <source>Type of transaction.</source> <translation>Type transaksjon.</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="605"/> <source>Destination address of transaction.</source> <translation>Mottaksadresse for transaksjonen.</translation> </message> <message> <location filename="../transactiontablemodel.cpp" line="607"/> <source>Amount removed from or added to balance.</source> <translation>Beløp fjernet eller lagt til saldo.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="55"/> <location filename="../transactionview.cpp" line="71"/> <source>All</source> <translation>Alle</translation> </message> <message> <location filename="../transactionview.cpp" line="56"/> <source>Today</source> <translation>I dag</translation> </message> <message> <location filename="../transactionview.cpp" line="57"/> <source>This week</source> <translation>Denne uken</translation> </message> <message> <location filename="../transactionview.cpp" line="58"/> <source>This month</source> <translation>Denne måneden</translation> </message> <message> <location filename="../transactionview.cpp" line="59"/> <source>Last month</source> <translation>Forrige måned</translation> </message> <message> <location filename="../transactionview.cpp" line="60"/> <source>This year</source> <translation>Dette året</translation> </message> <message> <location 
filename="../transactionview.cpp" line="61"/> <source>Range...</source> <translation>Intervall...</translation> </message> <message> <location filename="../transactionview.cpp" line="72"/> <source>Received with</source> <translation>Mottatt med</translation> </message> <message> <location filename="../transactionview.cpp" line="74"/> <source>Sent to</source> <translation>Sendt til</translation> </message> <message> <location filename="../transactionview.cpp" line="76"/> <source>To yourself</source> <translation>Til deg selv</translation> </message> <message> <location filename="../transactionview.cpp" line="77"/> <source>Mined</source> <translation>Utvunnet</translation> </message> <message> <location filename="../transactionview.cpp" line="78"/> <source>Other</source> <translation>Andre</translation> </message> <message> <location filename="../transactionview.cpp" line="85"/> <source>Enter address or label to search</source> <translation>Skriv inn adresse eller merkelapp for søk</translation> </message> <message> <location filename="../transactionview.cpp" line="92"/> <source>Min amount</source> <translation>Minimumsbeløp</translation> </message> <message> <location filename="../transactionview.cpp" line="126"/> <source>Copy address</source> <translation>Kopier adresse</translation> </message> <message> <location filename="../transactionview.cpp" line="127"/> <source>Copy label</source> <translation>Kopier merkelapp</translation> </message> <message> <location filename="../transactionview.cpp" line="128"/> <source>Copy amount</source> <translation>Kopiér beløp</translation> </message> <message> <location filename="../transactionview.cpp" line="129"/> <source>Edit label</source> <translation>Rediger merkelapp</translation> </message> <message> <location filename="../transactionview.cpp" line="130"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location filename="../transactionview.cpp" line="270"/> <source>Export 
Transaction Data</source> <translation>Eksporter transaksjonsdata</translation> </message> <message> <location filename="../transactionview.cpp" line="271"/> <source>Comma separated file (*.csv)</source> <translation>Kommaseparert fil (*.csv)</translation> </message> <message> <location filename="../transactionview.cpp" line="279"/> <source>Confirmed</source> <translation>Bekreftet</translation> </message> <message> <location filename="../transactionview.cpp" line="280"/> <source>Date</source> <translation>Dato</translation> </message> <message> <location filename="../transactionview.cpp" line="281"/> <source>Type</source> <translation>Type</translation> </message> <message> <location filename="../transactionview.cpp" line="282"/> <source>Label</source> <translation>Merkelapp</translation> </message> <message> <location filename="../transactionview.cpp" line="283"/> <source>Address</source> <translation>Adresse</translation> </message> <message> <location filename="../transactionview.cpp" line="284"/> <source>Amount</source> <translation>Beløp</translation> </message> <message> <location filename="../transactionview.cpp" line="285"/> <source>ID</source> <translation>ID</translation> </message> <message> <location filename="../transactionview.cpp" line="289"/> <source>Error exporting</source> <translation>Feil ved eksport</translation> </message> <message> <location filename="../transactionview.cpp" line="289"/> <source>Could not write to file %1.</source> <translation>Kunne ikke skrive til filen %1.</translation> </message> <message> <location filename="../transactionview.cpp" line="384"/> <source>Range:</source> <translation>Intervall:</translation> </message> <message> <location filename="../transactionview.cpp" line="392"/> <source>to</source> <translation>til</translation> </message> </context> <context> <name>VerifyMessageDialog</name> <message> <location filename="../forms/verifymessagedialog.ui" line="14"/> <source>Verify Signed Message</source> <translation 
type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="20"/> <source>Enter the message and signature below (be careful to correctly copy newlines, spaces, tabs and other invisible characters) to obtain the Altcoin address used to sign the message.</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="62"/> <source>Verify a message and obtain the Altcoin address used to sign the message</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="65"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="79"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopier den valgte adressen til systemets utklippstavle</translation> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="82"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="93"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/verifymessagedialog.ui" line="96"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="28"/> <source>Enter Altcoin signature</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="29"/> <source>Click &quot;Verify Message&quot; to obtain address</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="55"/> <location filename="../verifymessagedialog.cpp" line="62"/> <source>Invalid Signature</source> <translation type="unfinished"/> </message> <message> <location 
filename="../verifymessagedialog.cpp" line="55"/> <source>The signature could not be decoded. Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="62"/> <source>The signature did not match the message digest. Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="72"/> <source>Address not found in address book.</source> <translation type="unfinished"/> </message> <message> <location filename="../verifymessagedialog.cpp" line="72"/> <source>Address found in address book: %1</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="158"/> <source>Sending...</source> <translation>Sender...</translation> </message> </context> <context> <name>WindowOptionsPage</name> <message> <location filename="../optionsdialog.cpp" line="313"/> <source>Window</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="316"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimer til systemkurv istedenfor oppgavelinjen</translation> </message> <message> <location filename="../optionsdialog.cpp" line="317"/> <source>Show only a tray icon after minimizing the window</source> <translation>Vis kun ikon i systemkurv etter minimering av vinduet</translation> </message> <message> <location filename="../optionsdialog.cpp" line="320"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="321"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimerer vinduet istedenfor å avslutte applikasjonen når vinduet lukkes. Når dette er slått på avsluttes applikasjonen kun ved å velge avslutt i menyen.</translation> </message> </context> <context> <name>altcoin-core</name> <message> <location filename="../altcoinstrings.cpp" line="43"/> <source>Altcoin version</source> <translation>Altcoin versjon</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="44"/> <source>Usage:</source> <translation>Bruk:</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="45"/> <source>Send command to -server or altcoind</source> <translation>Send kommando til -server eller altcoind</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="46"/> <source>List commands</source> <translation>List opp kommandoer</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="47"/> <source>Get help for a command</source> <translation>Vis hjelpetekst for en kommando</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="49"/> <source>Options:</source> <translation>Innstillinger:</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="50"/> <source>Specify configuration file (default: altcoin.conf)</source> <translation>Angi konfigurasjonsfil (standardverdi: altcoin.conf)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="51"/> <source>Specify pid file (default: altcoind.pid)</source> <translation>Angi pid-fil (standardverdi: altcoind.pid)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="52"/> <source>Generate coins</source> <translation>Generér altcoins</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="53"/> <source>Don&apos;t generate coins</source> 
<translation>Ikke generér altcoins</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="54"/> <source>Specify data directory</source> <translation>Angi mappe for datafiler</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="55"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Sett størrelse på mellomlager for database i megabytes (standardverdi: 25)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="56"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>Sett størrelse på disklogg for database i megabytes (standardverdi: 100)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="57"/> <source>Specify connection timeout (in milliseconds)</source> <translation>Angi tidsavbrudd for forbindelse (i millisekunder)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="63"/> <source>Listen for connections on &lt;port&gt; (default: 8333 or testnet: 18333)</source> <translation>Lytt etter tilkoblinger på &lt;port&gt; (standardverdi: 8333 eller testnet: 18333)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="64"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Hold maks &lt;n&gt; koblinger åpne til andre noder (standardverdi: 125)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="66"/> <source>Connect only to the specified node</source> <translation>Koble kun til angitt node</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="67"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="68"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> 
<message> <location filename="../altcoinstrings.cpp" line="69"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4 or IPv6)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="70"/> <source>Try to discover public IP address (default: 1)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="73"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="75"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Grenseverdi for å koble fra noder med dårlig oppførsel (standardverdi: 100)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="76"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Antall sekunder noder med dårlig oppførsel hindres fra å koble til på nytt (standardverdi: 86400)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="79"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 10000)</source> <translation>Maksimum mottaksbuffer per tilkobling, &lt;n&gt;*1000 bytes (standardverdi: 10000)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="80"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 10000)</source> <translation>Maksimum sendebuffer per tilkobling, &lt;n&gt;*1000 bytes (standardverdi: 10000)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="83"/> <source>Detach block and address databases. 
Increases shutdown time (default: 0)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="86"/> <source>Accept command line and JSON-RPC commands</source> <translation>Ta imot kommandolinje- og JSON-RPC-kommandoer</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="87"/> <source>Run in the background as a daemon and accept commands</source> <translation>Kjør i bakgrunnen som daemon og ta imot kommandoer</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="88"/> <source>Use the test network</source> <translation>Bruk testnettverket</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="89"/> <source>Output extra debugging information</source> <translation>Gi ut ekstra debuginformasjon</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="90"/> <source>Prepend debug output with timestamp</source> <translation>Sett tidsstempel på debugmeldinger</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="91"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Send spor/debug informasjon til konsollet istedenfor debug.log filen</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="92"/> <source>Send trace/debug info to debugger</source> <translation>Send spor/debug informasjon til debugger</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="93"/> <source>Username for JSON-RPC connections</source> <translation>Brukernavn for JSON-RPC forbindelser</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="94"/> <source>Password for JSON-RPC connections</source> <translation>Passord for JSON-RPC forbindelser</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="95"/> <source>Listen for JSON-RPC connections on 
&lt;port&gt; (default: 8332)</source> <translation>Lytt etter JSON-RPC tilkoblinger på &lt;port&gt; (standardverdi: 8332)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="96"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Tillat JSON-RPC tilkoblinger fra angitt IP-adresse</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="97"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Send kommandoer til node på &lt;ip&gt; (standardverdi: 127.0.0.1)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="98"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Eksekvér kommando når beste blokk endrer seg (%s i kommandoen erstattes med blokkens hash)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="101"/> <source>Upgrade wallet to latest format</source> <translation>Oppgradér lommebok til nyeste format</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="102"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Angi størrelse på nøkkel-lager til &lt;n&gt; (standardverdi: 100)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="103"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Se gjennom blokk-kjeden etter manglende lommeboktransaksjoner</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="104"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>Hvor mange blokker som skal sjekkes ved oppstart (standardverdi: 2500, 0 = alle)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="105"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>Hvor 
grundig verifisering av blokker gjøres (0-6, standardverdi: 1)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="106"/> <source>Imports blocks from external blk000?.dat file</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="108"/> <source> SSL options: (see the Altcoin Wiki for SSL setup instructions)</source> <translation> SSL innstillinger: (se Altcoin Wiki for instruksjoner om SSL oppsett)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="111"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Bruk OpenSSL (https) for JSON-RPC forbindelser</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="112"/> <source>Server certificate file (default: server.cert)</source> <translation>Servers sertifikat (standardverdi: server.cert)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="113"/> <source>Server private key (default: server.pem)</source> <translation>Servers private nøkkel (standardverdi: server.pem)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="114"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Akseptable krypteringsmetoder (standardverdi: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="145"/> <source>Warning: Disk space is low</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="107"/> <source>This help message</source> <translation>Denne hjelpemeldingen</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="121"/> <source>Cannot obtain a lock on data directory %s. Altcoin is probably already running.</source> <translation>Kunne ikke låse datamappen %s. 
Altcoin kjører sannsynligvis allerede.</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="48"/> <source>Altcoin</source> <translation>Altcoin</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="30"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="58"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="59"/> <source>Select the version of socks proxy to use (4 or 5, 5 is default)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="60"/> <source>Do not use proxy for connections to network &lt;net&gt; (IPv4 or IPv6)</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="61"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="62"/> <source>Pass DNS requests to (SOCKS5) proxy</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="142"/> <source>Loading addresses...</source> <translation>Laster adresser...</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="132"/> <source>Error loading blkindex.dat</source> <translation>Feil ved lasting av blkindex.dat</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="134"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Feil ved lasting av wallet.dat: Lommeboken er skadet</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="135"/> <source>Error loading wallet.dat: Wallet requires newer version of Altcoin</source> 
<translation>Feil ved lasting av wallet.dat: Lommeboken krever en nyere versjon av Altcoin</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="136"/> <source>Wallet needed to be rewritten: restart Altcoin to complete</source> <translation>Lommeboken måtte skrives om: start Altcoin på nytt for å fullføre</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="137"/> <source>Error loading wallet.dat</source> <translation>Feil ved lasting av wallet.dat</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="124"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="125"/> <source>Unknown network specified in -noproxy: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="127"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="126"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="128"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="129"/> <source>Not listening on any port</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="130"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="117"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="143"/> 
<source>Error: could not start node</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="31"/> <source>Error: Wallet locked, unable to create transaction </source> <translation>Feil: Lommebok låst, kan ikke opprette transaksjon </translation> </message> <message> <location filename="../altcoinstrings.cpp" line="32"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation>Feil: Denne transaksjonen krever et gebyr på minst %s pga. beløpet, kompleksiteten, eller bruk av nylig mottatte midler </translation> </message> <message> <location filename="../altcoinstrings.cpp" line="35"/> <source>Error: Transaction creation failed </source> <translation>Feil: Opprettelse av transaksjon feilet </translation> </message> <message> <location filename="../altcoinstrings.cpp" line="36"/> <source>Sending...</source> <translation>Sender...</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="37"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Feil: Transaksjonen ble avvist. Dette kan skje hvis noen av myntene i lommeboken allerede var brukt, f.eks. 
hvis du kopierte wallet.dat og mynter ble brukt i kopien uten å bli markert brukt her.</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="41"/> <source>Invalid amount</source> <translation>Ugyldig beløp</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="42"/> <source>Insufficient funds</source> <translation>Utilstrekkelige midler</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="131"/> <source>Loading block index...</source> <translation>Laster blokkindeks...</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="65"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Legg til node for tilkobling og hold forbindelsen åpen</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="28"/> <source>Unable to bind to %s on this computer. Altcoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="71"/> <source>Find peers using internet relay chat (default: 0)</source> <translation>Finn andre noder via internet relay chat (standardverdi: 0)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="72"/> <source>Accept connections from outside (default: 1)</source> <translation>Ta imot innkommende forbindelser fra nettet (standardverdi: 1)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="74"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>Finn andre noder gjennom DNS-oppslag (standardverdi: 1)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="81"/> <source>Use Universal Plug and Play to map the listening port (default: 1)</source> <translation>Bruk Universal Plug and Play for å sette opp lytteporten (standardverdi :1)</translation> </message> <message> <location 
filename="../altcoinstrings.cpp" line="82"/> <source>Use Universal Plug and Play to map the listening port (default: 0)</source> <translation>Bruk Universal Plug and Play for å sette opp lytteporten (standardverdi :0)</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="85"/> <source>Fee per KB to add to transactions you send</source> <translation>Gebyr per KB for transaksjoner du sender</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="118"/> <source>Warning: -paytxfee is set very high. This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location filename="../altcoinstrings.cpp" line="133"/> <source>Loading wallet...</source> <translation>Laster lommebok...</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="138"/> <source>Cannot downgrade wallet</source> <translation>Kan ikke nedgradere lommebok</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="139"/> <source>Cannot initialize keypool</source> <translation>Kan ikke initialisere nøkkellager</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="140"/> <source>Cannot write default address</source> <translation>Kan ikke skrive standardadresse</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="141"/> <source>Rescanning...</source> <translation>Leser gjennom...</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="144"/> <source>Done loading</source> <translation>Ferdig med lasting</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="8"/> <source>To use the %s option</source> <translation>For å bruke %s opsjonen</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="9"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you 
use the following random password: rpcuser=altcoinrpc rpcpassword=%s (you do not need to remember this password) If the file does not exist, create it with owner-readable-only file permissions. </source> <translation>%s, du må sette et rpcpassord i konfigurasjonsfilen: %s Det anbefales at du bruker følgende tilfeldige passord: rpcuser=altcoinrpc rpcpassword=%s (du trenger ikke huske dette passordet) Hvis filen ikke finnes, opprett den med leserettighet kun for eier av filen. </translation> </message> <message> <location filename="../altcoinstrings.cpp" line="18"/> <source>Error</source> <translation>Feil</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="19"/> <source>An error occured while setting up the RPC port %i for listening: %s</source> <translation>En feil oppstod ved oppsett av RPC port %i for lytting: %s</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="20"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Du må sette rpcpassword=&lt;passord&gt; i konfigurasjonsfilen: %s Hvis filen ikke finnes, opprett den med leserettighet kun for eier av filen.</translation> </message> <message> <location filename="../altcoinstrings.cpp" line="25"/> <source>Warning: Please check that your computer&apos;s date and time are correct. If your clock is wrong Altcoin will not work properly.</source> <translation>Advarsel: Vennligst sjekk at dato og klokke er riktig innstilt på datamaskinen. Hvis klokken er feil vil ikke Altcoin fungere ordentlig.</translation> </message> </context> </TS><|fim▁end|>
<source>About Altcoin</source> <translation>Om Altcoin</translation> </message>
<|file_name|>display_list_builder.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Builds display lists from flows and fragments. //! //! Other browser engines sometimes call this "painting", but it is more accurately called display //! list building, as the actual painting does not happen here—only deciding *what* we're going to //! paint. #![deny(unsafe_code)] use app_units::{Au, AU_PER_PX}; use azure::azure_hl::Color; use block::BlockFlow; use canvas_traits::{CanvasMsg, FromLayoutMsg}; use context::LayoutContext; use euclid::num::Zero; use euclid::{Matrix4, Point2D, Point3D, Rect, SideOffsets2D, Size2D}; use flex::FlexFlow; use flow::{self, BaseFlow, Flow, IS_ABSOLUTELY_POSITIONED}; use flow_ref; use fragment::{CoordinateSystem, Fragment, HAS_LAYER, ImageFragmentInfo, ScannedTextFragmentInfo}; use fragment::{SpecificFragmentInfo}; use gfx::display_list::{BLUR_INFLATION_FACTOR, BaseDisplayItem, BorderDisplayItem}; use gfx::display_list::{BorderRadii, BoxShadowClipMode, BoxShadowDisplayItem, ClippingRegion}; use gfx::display_list::{DisplayItem, DisplayItemMetadata, DisplayList}; use gfx::display_list::{GradientDisplayItem}; use gfx::display_list::{GradientStop, ImageDisplayItem, LayeredItem, LayerInfo}; use gfx::display_list::{LineDisplayItem, OpaqueNode, SolidColorDisplayItem}; use gfx::display_list::{StackingContext, TextDisplayItem, TextOrientation}; use gfx::paint_task::THREAD_TINT_COLORS; use gfx::text::glyph::CharIndex; use gfx_traits::color; use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFlow, LAST_FRAGMENT_OF_ELEMENT}; use ipc_channel::ipc::{self, IpcSharedMemory}; use list_item::ListItemFlow; use model::{self, MaybeAuto, ToGfxMatrix}; use msg::compositor_msg::ScrollPolicy; use net_traits::image::base::{Image, PixelFormat}; use 
net_traits::image_cache_task::UsePlaceholder; use std::default::Default; use std::sync::Arc; use std::sync::mpsc::channel; use std::{cmp, f32}; use style::computed_values::filter::Filter; use style::computed_values::{background_attachment, background_clip, background_origin}; use style::computed_values::{background_repeat, background_size}; use style::computed_values::{border_style, image_rendering, overflow_x, position}; use style::computed_values::{transform, transform_style, visibility}; use style::properties::style_structs::Border; use style::properties::{self, ComputedValues}; use style::values::RGBA; use style::values::computed; use style::values::computed::LinearGradient; use style::values::computed::{LengthOrNone, LengthOrPercentage, LengthOrPercentageOrAuto}; use style::values::specified::{AngleOrCorner, HorizontalDirection, VerticalDirection}; use table_cell::CollapsedBordersForCell; use url::Url; use util::cursor::Cursor; use util::geometry::ZERO_POINT; use util::logical_geometry::{LogicalPoint, LogicalRect, LogicalSize, WritingMode}; use util::opts; use util::range::Range; /// The logical width of an insertion point: at the moment, a one-pixel-wide line. const INSERTION_POINT_LOGICAL_WIDTH: Au = Au(1 * AU_PER_PX); pub trait FragmentDisplayListBuilding { /// Adds the display items necessary to paint the background of this fragment to the display /// list if necessary. fn build_display_list_for_background_if_applicable(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion); /// Computes the background size for an image with the given background area according to the /// rules in CSS-BACKGROUNDS § 3.9. 
    fn compute_background_image_size(&self,
                                     style: &ComputedValues,
                                     bounds: &Rect<Au>,
                                     image: &Image)
                                     -> Size2D<Au>;

    /// Adds the display items necessary to paint the background image of this fragment to the
    /// display list at the appropriate stacking level.
    fn build_display_list_for_background_image(&self,
                                               style: &ComputedValues,
                                               display_list: &mut DisplayList,
                                               layout_context: &LayoutContext,
                                               level: StackingLevel,
                                               absolute_bounds: &Rect<Au>,
                                               clip: &ClippingRegion,
                                               image_url: &Url);

    /// Adds the display items necessary to paint the background linear gradient of this fragment
    /// to the display list at the appropriate stacking level.
    fn build_display_list_for_background_linear_gradient(&self,
                                                         display_list: &mut DisplayList,
                                                         level: StackingLevel,
                                                         absolute_bounds: &Rect<Au>,
                                                         clip: &ClippingRegion,
                                                         gradient: &LinearGradient,
                                                         style: &ComputedValues);

    /// Adds the display items necessary to paint the borders of this fragment to a display list if
    /// necessary.
    fn build_display_list_for_borders_if_applicable(
            &self,
            style: &ComputedValues,
            border_painting_mode: BorderPaintingMode,
            display_list: &mut DisplayList,
            bounds: &Rect<Au>,
            level: StackingLevel,
            clip: &ClippingRegion);

    /// Adds the display items necessary to paint the outline of this fragment to the display list
    /// if necessary.
    fn build_display_list_for_outline_if_applicable(&self,
                                                    style: &ComputedValues,
                                                    display_list: &mut DisplayList,
                                                    bounds: &Rect<Au>,
                                                    clip: &ClippingRegion);

    /// Adds the display items necessary to paint the box shadow of this fragment to the display
    /// list if necessary.
    fn build_display_list_for_box_shadow_if_applicable(&self,
                                                       style: &ComputedValues,
                                                       list: &mut DisplayList,
                                                       layout_context: &LayoutContext,
                                                       level: StackingLevel,
                                                       absolute_bounds: &Rect<Au>,
                                                       clip: &ClippingRegion);

    /// Adds display items necessary to draw debug boxes around a scanned text fragment.
    fn build_debug_borders_around_text_fragments(&self,
                                                 style: &ComputedValues,
                                                 display_list: &mut DisplayList,
                                                 stacking_relative_border_box: &Rect<Au>,
                                                 stacking_relative_content_box: &Rect<Au>,
                                                 text_fragment: &ScannedTextFragmentInfo,
                                                 clip: &ClippingRegion);

    /// Adds display items necessary to draw debug boxes around this fragment.
    fn build_debug_borders_around_fragment(&self,
                                           display_list: &mut DisplayList,
                                           stacking_relative_border_box: &Rect<Au>,
                                           clip: &ClippingRegion);

    /// Adds the display items for this fragment to the given display list.
    ///
    /// Arguments:
    ///
    /// * `display_list`: The display list to add display items to.
    /// * `layout_context`: The layout context.
    /// * `stacking_relative_flow_origin`: Position of the origin of the owning flow with respect
    ///   to its nearest ancestor stacking context.
    /// * `relative_containing_block_size`: The size of the containing block that
    ///   `position: relative` makes use of.
    /// * `relative_containing_block_mode`: The writing mode of that containing block.
    /// * `border_painting_mode`: How (or whether) the fragment's borders are to be painted
    ///   (see `BorderPaintingMode`).
    /// * `background_and_border_level`: The stacking level used for this fragment's background
    ///   and borders.
    /// * `clip`: The region to clip the display items to.
    /// * `stacking_relative_display_port`: The position and size of the display port with respect
    ///   to the nearest ancestor stacking context.
    fn build_display_list(&mut self,
                          display_list: &mut DisplayList,
                          layout_context: &LayoutContext,
                          stacking_relative_flow_origin: &Point2D<Au>,
                          relative_containing_block_size: &LogicalSize<Au>,
                          relative_containing_block_mode: WritingMode,
                          border_painting_mode: BorderPaintingMode,
                          background_and_border_level: BackgroundAndBorderLevel,
                          clip: &ClippingRegion,
                          stacking_relative_display_port: &Rect<Au>);

    /// Returns the appropriate clipping region for descendants of this fragment.
    fn clipping_region_for_children(&self,
                                    current_clip: &ClippingRegion,
                                    stacking_relative_border_box: &Rect<Au>,
                                    is_absolutely_positioned: bool)
                                    -> ClippingRegion;

    /// Calculates the clipping rectangle for a fragment, taking the `clip` property into account
    /// per CSS 2.1 § 11.1.2.
fn calculate_style_specified_clip(&self, parent_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>) -> ClippingRegion; /// Builds the display items necessary to paint the selection and/or caret for this fragment, /// if any. fn build_display_items_for_selection_if_necessary(&self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, level: StackingLevel, clip: &ClippingRegion); /// Creates the text display item for one text fragment. This can be called multiple times for /// one fragment if there are text shadows. /// /// `shadow_blur_radius` will be `Some` if this is a shadow, even if the blur radius is zero. fn build_display_list_for_text_fragment(&self, display_list: &mut DisplayList, text_fragment: &ScannedTextFragmentInfo, text_color: RGBA, stacking_relative_content_box: &Rect<Au>, shadow_blur_radius: Option<Au>, offset: &Point2D<Au>, clip: &ClippingRegion); /// Creates the display item for a text decoration: underline, overline, or line-through. fn build_display_list_for_text_decoration(&self, display_list: &mut DisplayList, color: &RGBA, stacking_relative_box: &LogicalRect<Au>, clip: &ClippingRegion, blur_radius: Au); /// A helper method that `build_display_list` calls to create per-fragment-type display items. fn build_fragment_type_specific_display_items(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion); /// Creates a stacking context for associated fragment. fn create_stacking_context(&self, base_flow: &BaseFlow, display_list: Box<DisplayList>, scroll_policy: ScrollPolicy, mode: StackingContextCreationMode) -> Arc<StackingContext>; } fn handle_overlapping_radii(size: &Size2D<Au>, radii: &BorderRadii<Au>) -> BorderRadii<Au> { // No two corners' border radii may add up to more than the length of the edge // between them. To prevent that, all radii are scaled down uniformly. 
fn scale_factor(radius_a: Au, radius_b: Au, edge_length: Au) -> f32 { let required = radius_a + radius_b; if required <= edge_length { 1.0 } else { edge_length.to_f32_px() / required.to_f32_px() } } let top_factor = scale_factor(radii.top_left.width, radii.top_right.width, size.width); let bottom_factor = scale_factor(radii.bottom_left.width, radii.bottom_right.width, size.width); let left_factor = scale_factor(radii.top_left.height, radii.bottom_left.height, size.height); let right_factor = scale_factor(radii.top_right.height, radii.bottom_right.height, size.height); let min_factor = top_factor.min(bottom_factor).min(left_factor).min(right_factor); if min_factor < 1.0 { radii.scale_by(min_factor) } else { *radii } } fn build_border_radius(abs_bounds: &Rect<Au>, border_style: &Border) -> BorderRadii<Au> { // TODO(cgaebel): Support border radii even in the case of multiple border widths. // This is an extension of supporting elliptical radii. For now, all percentage // radii will be relative to the width. handle_overlapping_radii(&abs_bounds.size, &BorderRadii { top_left: model::specified_border_radius(border_style.border_top_left_radius, abs_bounds.size.width), top_right: model::specified_border_radius(border_style.border_top_right_radius, abs_bounds.size.width), bottom_right: model::specified_border_radius(border_style.border_bottom_right_radius, abs_bounds.size.width), bottom_left: model::specified_border_radius(border_style.border_bottom_left_radius, abs_bounds.size.width), }) } impl FragmentDisplayListBuilding for Fragment { fn build_display_list_for_background_if_applicable(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion) { // Adjust the clipping region as necessary to account for `border-radius`. 
let border_radii = build_border_radius(absolute_bounds, style.get_border()); let mut clip = (*clip).clone(); if !border_radii.is_square() { clip = clip.intersect_with_rounded_rect(absolute_bounds, &border_radii) } // FIXME: This causes a lot of background colors to be displayed when they are clearly not // needed. We could use display list optimization to clean this up, but it still seems // inefficient. What we really want is something like "nearest ancestor element that // doesn't have a fragment". let background_color = style.resolve_color(style.get_background().background_color); // 'background-clip' determines the area within which the background is painted. // http://dev.w3.org/csswg/css-backgrounds-3/#the-background-clip let mut bounds = *absolute_bounds; match style.get_background().background_clip { background_clip::T::border_box => {} background_clip::T::padding_box => { let border = style.logical_border_width().to_physical(style.writing_mode); bounds.origin.x = bounds.origin.x + border.left; bounds.origin.y = bounds.origin.y + border.top; bounds.size.width = bounds.size.width - border.horizontal(); bounds.size.height = bounds.size.height - border.vertical(); } background_clip::T::content_box => { let border_padding = self.border_padding.to_physical(style.writing_mode); bounds.origin.x = bounds.origin.x + border_padding.left; bounds.origin.y = bounds.origin.y + border_padding.top; bounds.size.width = bounds.size.width - border_padding.horizontal(); bounds.size.height = bounds.size.height - border_padding.vertical(); } } display_list.push(DisplayItem::SolidColorClass(box SolidColorDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip.clone()), color: background_color.to_gfx_color(), }), level); // The background image is painted on top of the background color. 
// Implements background image, per spec: // http://www.w3.org/TR/CSS21/colors.html#background let background = style.get_background(); match background.background_image.0 { None => {} Some(computed::Image::LinearGradient(ref gradient)) => { self.build_display_list_for_background_linear_gradient(display_list, level, absolute_bounds, &clip, gradient, style) } Some(computed::Image::Url(ref image_url)) => { self.build_display_list_for_background_image(style, display_list, layout_context, level, absolute_bounds, &clip, image_url) } } } fn compute_background_image_size(&self, style: &ComputedValues, bounds: &Rect<Au>, image: &Image) -> Size2D<Au> { // If `image_aspect_ratio` < `bounds_aspect_ratio`, the image is tall; otherwise, it is // wide. let image_aspect_ratio = (image.width as f64) / (image.height as f64); let bounds_aspect_ratio = bounds.size.width.to_f64_px() / bounds.size.height.to_f64_px(); let intrinsic_size = Size2D::new(Au::from_px(image.width as i32), Au::from_px(image.height as i32)); match (style.get_background().background_size.clone(), image_aspect_ratio < bounds_aspect_ratio) { (background_size::T::Contain, false) | (background_size::T::Cover, true) => { Size2D::new(bounds.size.width, Au::from_f64_px(bounds.size.width.to_f64_px() / image_aspect_ratio)) } (background_size::T::Contain, true) | (background_size::T::Cover, false) => { Size2D::new(Au::from_f64_px(bounds.size.height.to_f64_px() * image_aspect_ratio), bounds.size.height) } (background_size::T::Explicit(background_size::ExplicitSize { width, height: LengthOrPercentageOrAuto::Auto, }), _) => { let width = MaybeAuto::from_style(width, bounds.size.width) .specified_or_default(intrinsic_size.width); Size2D::new(width, Au::from_f64_px(width.to_f64_px() / image_aspect_ratio)) } (background_size::T::Explicit(background_size::ExplicitSize { width: LengthOrPercentageOrAuto::Auto, height }), _) => { let height = MaybeAuto::from_style(height, bounds.size.height) 
.specified_or_default(intrinsic_size.height); Size2D::new(Au::from_f64_px(height.to_f64_px() * image_aspect_ratio), height) } (background_size::T::Explicit(background_size::ExplicitSize { width, height }), _) => { Size2D::new(MaybeAuto::from_style(width, bounds.size.width) .specified_or_default(intrinsic_size.width), MaybeAuto::from_style(height, bounds.size.height) .specified_or_default(intrinsic_size.height)) } } } fn build_display_list_for_background_image(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, image_url: &Url) { let background = style.get_background(); let image = layout_context.get_or_request_image(image_url.clone(), UsePlaceholder::No); if let Some(image) = image { debug!("(building display list) building background image"); // Use `background-size` to get the size. let mut bounds = *absolute_bounds; let image_size = self.compute_background_image_size(style, &bounds, &*image); // Clip. // // TODO: Check the bounds to see if a clip item is actually required. let clip = clip.clone().intersect_rect(&bounds); // Background image should be positioned on the padding box basis. let border = style.logical_border_width().to_physical(style.writing_mode); // Use 'background-origin' to get the origin value. 
let (mut origin_x, mut origin_y) = match background.background_origin { background_origin::T::padding_box => { (Au(0), Au(0)) } background_origin::T::border_box => { (-border.left, -border.top) } background_origin::T::content_box => { let border_padding = self.border_padding.to_physical(self.style.writing_mode); (border_padding.left - border.left, border_padding.top - border.top) } }; // Use `background-attachment` to get the initial virtual origin let (virtual_origin_x, virtual_origin_y) = match background.background_attachment { background_attachment::T::scroll => { (absolute_bounds.origin.x, absolute_bounds.origin.y) } background_attachment::T::fixed => { // If the ‘background-attachment’ value for this image is ‘fixed’, then // 'background-origin' has no effect. origin_x = Au(0); origin_y = Au(0); (Au(0), Au(0)) } }; // Use `background-position` to get the offset. let horizontal_position = model::specified(background.background_position.horizontal, bounds.size.width - image_size.width); let vertical_position = model::specified(background.background_position.vertical, bounds.size.height - image_size.height); let abs_x = border.left + virtual_origin_x + horizontal_position + origin_x; let abs_y = border.top + virtual_origin_y + vertical_position + origin_y; // Adjust origin and size based on background-repeat match background.background_repeat { background_repeat::T::no_repeat => { bounds.origin.x = abs_x; bounds.origin.y = abs_y; bounds.size.width = image_size.width; bounds.size.height = image_size.height; } background_repeat::T::repeat_x => { bounds.origin.y = abs_y; bounds.size.height = image_size.height; ImageFragmentInfo::tile_image(&mut bounds.origin.x, &mut bounds.size.width, abs_x, image_size.width.to_nearest_px() as u32); } background_repeat::T::repeat_y => { bounds.origin.x = abs_x; bounds.size.width = image_size.width; ImageFragmentInfo::tile_image(&mut bounds.origin.y, &mut bounds.size.height, abs_y, image_size.height.to_nearest_px() as u32); } 
background_repeat::T::repeat => { ImageFragmentInfo::tile_image(&mut bounds.origin.x, &mut bounds.size.width, abs_x, image_size.width.to_nearest_px() as u32); ImageFragmentInfo::tile_image(&mut bounds.origin.y, &mut bounds.size.height, abs_y, image_size.height.to_nearest_px() as u32); } }; // Create the image display item. display_list.push(DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip), image: image.clone(), stretch_size: Size2D::new(image_size.width, image_size.height), image_rendering: style.get_effects().image_rendering.clone(), }), level); } } fn build_display_list_for_background_linear_gradient(&self, display_list: &mut DisplayList, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, gradient: &LinearGradient, style: &ComputedValues) { let clip = clip.clone().intersect_rect(absolute_bounds); // This is the distance between the center and the ending point; i.e. half of the distance // between the starting point and the ending point. 
let delta = match gradient.angle_or_corner { AngleOrCorner::Angle(angle) => { // Get correct gradient line length, based on: // https://drafts.csswg.org/css-images-3/#linear-gradients let dir = Point2D::new(angle.radians().sin(), -angle.radians().cos()); let line_length = (dir.x * absolute_bounds.size.width.to_f32_px()).abs() + (dir.y * absolute_bounds.size.height.to_f32_px()).abs(); let inv_dir_length = 1.0 / (dir.x * dir.x + dir.y * dir.y).sqrt(); Point2D::new(Au::from_f32_px(dir.x * inv_dir_length * line_length / 2.0), Au::from_f32_px(dir.y * inv_dir_length * line_length / 2.0)) } AngleOrCorner::Corner(horizontal, vertical) => { let x_factor = match horizontal { HorizontalDirection::Left => -1, HorizontalDirection::Right => 1, }; let y_factor = match vertical { VerticalDirection::Top => -1, VerticalDirection::Bottom => 1, }; Point2D::new(absolute_bounds.size.width * x_factor / 2, absolute_bounds.size.height * y_factor / 2) } }; // This is the length of the gradient line. let length = Au::from_f32_px( (delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0)); // Determine the position of each stop per CSS-IMAGES § 3.4. // // FIXME(#3908, pcwalton): Make sure later stops can't be behind earlier stops. let (mut stops, mut stop_run) = (Vec::new(), None); for (i, stop) in gradient.stops.iter().enumerate() { let offset = match stop.position { None => { if stop_run.is_none() { // Initialize a new stop run. let start_offset = if i == 0 { 0.0 } else { // `unwrap()` here should never fail because this is the beginning of // a stop run, which is always bounded by a length or percentage. position_to_offset(gradient.stops[i - 1].position.unwrap(), length) }; let (end_index, end_offset) = match gradient.stops[i..] 
.iter() .enumerate() .find(|&(_, ref stop)| stop.position.is_some()) { None => (gradient.stops.len() - 1, 1.0), Some((end_index, end_stop)) => { // `unwrap()` here should never fail because this is the end of // a stop run, which is always bounded by a length or // percentage. (end_index, position_to_offset(end_stop.position.unwrap(), length)) } }; stop_run = Some(StopRun { start_offset: start_offset, end_offset: end_offset, start_index: i, stop_count: end_index - i, }) } let stop_run = stop_run.unwrap(); let stop_run_length = stop_run.end_offset - stop_run.start_offset; if stop_run.stop_count == 0 { stop_run.end_offset } else { stop_run.start_offset + stop_run_length * (i - stop_run.start_index) as f32 / (stop_run.stop_count as f32) } } Some(position) => { stop_run = None; position_to_offset(position, length) } }; stops.push(GradientStop { offset: offset, color: style.resolve_color(stop.color).to_gfx_color() }) } let center = Point2D::new(absolute_bounds.origin.x + absolute_bounds.size.width / 2, absolute_bounds.origin.y + absolute_bounds.size.height / 2); let gradient_display_item = DisplayItem::GradientClass(box GradientDisplayItem { base: BaseDisplayItem::new(*absolute_bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip), start_point: center - delta, end_point: center + delta, stops: stops, }); display_list.push(gradient_display_item, level) } fn build_display_list_for_box_shadow_if_applicable(&self, style: &ComputedValues, list: &mut DisplayList, _layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion) { // NB: According to CSS-BACKGROUNDS, box shadows render in *reverse* order (front to back). for box_shadow in style.get_effects().box_shadow.0.iter().rev() { let bounds = shadow_bounds(&absolute_bounds.translate(&Point2D::new(box_shadow.offset_x, box_shadow.offset_y)), box_shadow.blur_radius, box_shadow.spread_radius); // TODO(pcwalton): Multiple border radii; elliptical border radii. 
            // Emit one box-shadow display item per shadow. `bounds` (computed
            // above from the shadow's offset/blur/spread) is the item's paint
            // area, while `box_bounds` is the box the shadow is cast from.
            list.push(DisplayItem::BoxShadowClass(box BoxShadowDisplayItem {
                base: BaseDisplayItem::new(bounds,
                                           DisplayItemMetadata::new(self.node,
                                                                    style,
                                                                    Cursor::DefaultCursor),
                                           (*clip).clone()),
                box_bounds: *absolute_bounds,
                color: style.resolve_color(box_shadow.color).to_gfx_color(),
                offset: Point2D::new(box_shadow.offset_x, box_shadow.offset_y),
                blur_radius: box_shadow.blur_radius,
                spread_radius: box_shadow.spread_radius,
                // Only the top-left radius is consulted for now; per the TODO
                // above, multiple/elliptical border radii are not yet handled.
                border_radius: model::specified_border_radius(style.get_border()
                                                                   .border_top_left_radius,
                                                              absolute_bounds.size.width).width,
                clip_mode: if box_shadow.inset {
                    BoxShadowClipMode::Inset
                } else {
                    BoxShadowClipMode::Outset
                },
            }), level);
        }
    }

    /// Emits a border display item for this fragment, if it has any nonzero
    /// border widths.
    ///
    /// `border_painting_mode` selects between normal (separate) borders,
    /// collapsed table borders (which adjust widths, colors, styles, and the
    /// painted bounds), and hidden borders (which paint nothing at all).
    fn build_display_list_for_borders_if_applicable(
            &self,
            style: &ComputedValues,
            border_painting_mode: BorderPaintingMode,
            display_list: &mut DisplayList,
            bounds: &Rect<Au>,
            level: StackingLevel,
            clip: &ClippingRegion) {
        let mut border = style.logical_border_width();

        match border_painting_mode {
            BorderPaintingMode::Separate => {}
            BorderPaintingMode::Collapse(collapsed_borders) => {
                collapsed_borders.adjust_border_widths_for_painting(&mut border)
            }
            BorderPaintingMode::Hidden => return,
        }
        // Nothing to paint if every border width is zero.
        if border.is_zero() {
            return
        }

        let border_style_struct = style.get_border();
        let mut colors = SideOffsets2D::new(border_style_struct.border_top_color,
                                            border_style_struct.border_right_color,
                                            border_style_struct.border_bottom_color,
                                            border_style_struct.border_left_color);
        let mut border_style = SideOffsets2D::new(border_style_struct.border_top_style,
                                                  border_style_struct.border_right_style,
                                                  border_style_struct.border_bottom_style,
                                                  border_style_struct.border_left_style);

        // Collapsed table borders may override per-side colors and styles.
        if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode {
            collapsed_borders.adjust_border_colors_and_styles_for_painting(&mut colors,
                                                                           &mut border_style,
                                                                           style.writing_mode);
        }

        // Resolve the style colors (e.g. `currentColor`) into concrete colors.
        let colors = SideOffsets2D::new(style.resolve_color(colors.top),
                                        style.resolve_color(colors.right),
                                        style.resolve_color(colors.bottom),
                                        style.resolve_color(colors.left));

        // If this border collapses, then we draw outside the boundaries we were given.
        let mut bounds = *bounds;
        if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode {
            collapsed_borders.adjust_border_bounds_for_painting(&mut bounds, style.writing_mode)
        }

        // Append the border to the display list.
        display_list.push(DisplayItem::BorderClass(box BorderDisplayItem {
            base: BaseDisplayItem::new(bounds,
                                       DisplayItemMetadata::new(self.node,
                                                                style,
                                                                Cursor::DefaultCursor),
                                       (*clip).clone()),
            border_widths: border.to_physical(style.writing_mode),
            color: SideOffsets2D::new(colors.top.to_gfx_color(),
                                      colors.right.to_gfx_color(),
                                      colors.bottom.to_gfx_color(),
                                      colors.left.to_gfx_color()),
            style: border_style,
            radius: build_border_radius(&bounds, border_style_struct),
        }), level);
    }

    /// Emits a display item for this fragment's outline, if `outline-width`
    /// is nonzero and `outline-style` is not `none`.
    fn build_display_list_for_outline_if_applicable(&self,
                                                    style: &ComputedValues,
                                                    display_list: &mut DisplayList,
                                                    bounds: &Rect<Au>,
                                                    clip: &ClippingRegion) {
        let width = style.get_outline().outline_width;
        if width == Au(0) {
            return
        }

        let outline_style = style.get_outline().outline_style;
        if outline_style == border_style::T::none {
            return
        }

        // Outlines are not accounted for in the dimensions of the border box, so adjust the
        // absolute bounds: inflate by the outline width plus `outline-offset` on every side.
        let mut bounds = *bounds;
        let offset = width + style.get_outline().outline_offset;
        bounds.origin.x = bounds.origin.x - offset;
        bounds.origin.y = bounds.origin.y - offset;
        bounds.size.width = bounds.size.width + offset + offset;
        bounds.size.height = bounds.size.height + offset + offset;

        // Append the outline to the display list.
        let color = style.resolve_color(style.get_outline().outline_color).to_gfx_color();
        // Outlines are drawn as a border item with the same width/color/style
        // on all four sides, pushed onto the dedicated `outlines` list (they
        // paint over everything else, per CSS-UI).
        display_list.outlines.push_back(DisplayItem::BorderClass(box BorderDisplayItem {
            base: BaseDisplayItem::new(bounds,
                                       DisplayItemMetadata::new(self.node,
                                                                style,
                                                                Cursor::DefaultCursor),
                                       (*clip).clone()),
            border_widths: SideOffsets2D::new_all_same(width),
            color: SideOffsets2D::new_all_same(color),
            style: SideOffsets2D::new_all_same(outline_style),
            radius: Default::default(),
        }))
    }

    /// Debug aid: draws a solid blue border around a scanned text fragment's
    /// border box (and, continued below, a dashed green line at its baseline).
    /// Only invoked when `show_debug_fragment_borders` is enabled.
    fn build_debug_borders_around_text_fragments(&self,
                                                 style: &ComputedValues,
                                                 display_list: &mut DisplayList,
                                                 stacking_relative_border_box: &Rect<Au>,
                                                 stacking_relative_content_box: &Rect<Au>,
                                                 text_fragment: &ScannedTextFragmentInfo,
                                                 clip: &ClippingRegion) {
        // FIXME(pcwalton, #2795): Get the real container size.
        let container_size = Size2D::zero();

        // Compute the text fragment bounds and draw a border surrounding them.
        display_list.content.push_back(DisplayItem::BorderClass(box BorderDisplayItem {
            base: BaseDisplayItem::new(*stacking_relative_border_box,
                                       DisplayItemMetadata::new(self.node,
                                                                style,
                                                                Cursor::DefaultCursor),
                                       (*clip).clone()),
            border_widths: SideOffsets2D::new_all_same(Au::from_px(1)),
            color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)),
            style: SideOffsets2D::new_all_same(border_style::T::solid),
            radius: Default::default(),
        }));

        // Draw a rectangle representing the baselines.
        // Build a zero-block-size logical rect positioned at the run's ascent:
        // this degenerate rect is the baseline line we render below.
        let mut baseline = LogicalRect::from_physical(self.style.writing_mode,
                                                      *stacking_relative_content_box,
                                                      container_size);
        baseline.start.b = baseline.start.b + text_fragment.run.ascent();
        baseline.size.block = Au(0);
        let baseline = baseline.to_physical(self.style.writing_mode, container_size);

        // Dashed green line marks the baseline.
        let line_display_item = box LineDisplayItem {
            base: BaseDisplayItem::new(baseline,
                                       DisplayItemMetadata::new(self.node,
                                                                style,
                                                                Cursor::DefaultCursor),
                                       (*clip).clone()),
            color: color::rgb(0, 200, 0),
            style: border_style::T::dashed,
        };
        display_list.content.push_back(DisplayItem::LineClass(line_display_item));
    }

    /// Debug aid: draws a solid blue one-pixel border around this fragment's
    /// border box. Only invoked when `show_debug_fragment_borders` is enabled.
    fn build_debug_borders_around_fragment(&self,
                                           display_list: &mut DisplayList,
                                           stacking_relative_border_box: &Rect<Au>,
                                           clip: &ClippingRegion) {
        // This prints a debug border around the border of this fragment.
        display_list.content.push_back(DisplayItem::BorderClass(box BorderDisplayItem {
            base: BaseDisplayItem::new(*stacking_relative_border_box,
                                       DisplayItemMetadata::new(self.node,
                                                                &*self.style,
                                                                Cursor::DefaultCursor),
                                       (*clip).clone()),
            border_widths: SideOffsets2D::new_all_same(Au::from_px(1)),
            color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)),
            style: SideOffsets2D::new_all_same(border_style::T::solid),
            radius: Default::default(),
        }));
    }

    /// Intersects `parent_clip` with any style-specified `clip` rectangle.
    ///
    /// The `clip` property only applies to absolutely positioned elements; for
    /// everything else the parent clip is returned unchanged.
    fn calculate_style_specified_clip(&self,
                                      parent_clip: &ClippingRegion,
                                      stacking_relative_border_box: &Rect<Au>)
                                      -> ClippingRegion {
        // Account for `clip` per CSS 2.1 § 11.1.2.
        let style_clip_rect = match (self.style().get_box().position,
                                     self.style().get_effects().clip.0) {
            (position::T::absolute, Some(style_clip_rect)) => style_clip_rect,
            _ => return (*parent_clip).clone(),
        };

        // FIXME(pcwalton, #2795): Get the real container size.
let clip_origin = Point2D::new(stacking_relative_border_box.origin.x + style_clip_rect.left, stacking_relative_border_box.origin.y + style_clip_rect.top); let right = style_clip_rect.right.unwrap_or(stacking_relative_border_box.size.width); let bottom = style_clip_rect.bottom.unwrap_or(stacking_relative_border_box.size.height); let clip_size = Size2D::new(right - clip_origin.x, bottom - clip_origin.y); (*parent_clip).clone().intersect_rect(&Rect::new(clip_origin, clip_size)) } fn build_display_items_for_selection_if_necessary(&self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, level: StackingLevel, clip: &ClippingRegion) { let scanned_text_fragment_info = match self.specific { SpecificFragmentInfo::ScannedText(ref scanned_text_fragment_info) => { scanned_text_fragment_info } _ => return, }; let insertion_point_index = match scanned_text_fragment_info.insertion_point { Some(insertion_point_index) => insertion_point_index, None => return, }; let range = Range::new(CharIndex(0), insertion_point_index); let advance = scanned_text_fragment_info.run.advance_for_range(&range); let insertion_point_bounds; let cursor; if !self.style.writing_mode.is_vertical() { insertion_point_bounds = Rect::new(Point2D::new(stacking_relative_border_box.origin.x + advance, stacking_relative_border_box.origin.y), Size2D::new(INSERTION_POINT_LOGICAL_WIDTH, stacking_relative_border_box.size.height)); cursor = Cursor::TextCursor; } else { insertion_point_bounds = Rect::new(Point2D::new(stacking_relative_border_box.origin.x, stacking_relative_border_box.origin.y + advance), Size2D::new(stacking_relative_border_box.size.width, INSERTION_POINT_LOGICAL_WIDTH)); cursor = Cursor::VerticalTextCursor; }; display_list.push(DisplayItem::SolidColorClass(box SolidColorDisplayItem { base: BaseDisplayItem::new(insertion_point_bounds, DisplayItemMetadata::new(self.node, &*self.style, cursor), clip.clone()), color: self.style().get_color().color.to_gfx_color(), }), level); } fn 
build_display_list(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, stacking_relative_flow_origin: &Point2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, border_painting_mode: BorderPaintingMode, background_and_border_level: BackgroundAndBorderLevel, clip: &ClippingRegion, stacking_relative_display_port: &Rect<Au>) { if self.style().get_inheritedbox().visibility != visibility::T::visible { return } // Compute the fragment position relative to the parent stacking context. If the fragment // itself establishes a stacking context, then the origin of its position will be (0, 0) // for the purposes of this computation. let stacking_relative_border_box = self.stacking_relative_border_box(stacking_relative_flow_origin, relative_containing_block_size, relative_containing_block_mode, CoordinateSystem::Own); debug!("Fragment::build_display_list at rel={:?}, abs={:?}, flow origin={:?}: {:?}", self.border_box, stacking_relative_border_box, stacking_relative_flow_origin, self); if !stacking_relative_border_box.intersects(stacking_relative_display_port) { debug!("Fragment::build_display_list: outside display port"); return } // Calculate the clip rect. If there's nothing to render at all, don't even construct // display list items. let clip = self.calculate_style_specified_clip(clip, &stacking_relative_border_box); if !clip.might_intersect_rect(&stacking_relative_border_box) { return; } debug!("Fragment::build_display_list: intersected. Adding display item..."); if self.is_primary_fragment() { let level = StackingLevel::from_background_and_border_level(background_and_border_level); // Add shadows, background, borders, and outlines, if applicable. 
if let Some(ref inline_context) = self.inline_context { for node in inline_context.nodes.iter().rev() { self.build_display_list_for_background_if_applicable( &*node.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_box_shadow_if_applicable( &*node.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); let mut style = node.style.clone(); properties::modify_border_style_for_inline_sides( &mut style, node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT), node.flags.contains(LAST_FRAGMENT_OF_ELEMENT)); self.build_display_list_for_borders_if_applicable( &*style, border_painting_mode, display_list, &stacking_relative_border_box, level, &clip); self.build_display_list_for_outline_if_applicable( &*node.style, display_list, &stacking_relative_border_box, &clip); } } if !self.is_scanned_text_fragment() { self.build_display_list_for_background_if_applicable(&*self.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_box_shadow_if_applicable(&*self.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_borders_if_applicable(&*self.style, border_painting_mode, display_list, &stacking_relative_border_box, level, &clip); self.build_display_list_for_outline_if_applicable(&*self.style, display_list, &stacking_relative_border_box, &clip); } // Paint the selection point if necessary. self.build_display_items_for_selection_if_necessary(display_list, &stacking_relative_border_box, level, &clip); } // Create special per-fragment-type display items. 
self.build_fragment_type_specific_display_items(display_list, layout_context, &stacking_relative_border_box, &clip); if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_fragment(display_list, &stacking_relative_border_box, &clip) } } fn build_fragment_type_specific_display_items(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion) { // Compute the context box position relative to the parent stacking context. let stacking_relative_content_box = self.stacking_relative_content_box(stacking_relative_border_box); match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment) => { // Create items for shadows. // // NB: According to CSS-BACKGROUNDS, text shadows render in *reverse* order (front // to back). let text_color = self.style().get_color().color; for text_shadow in self.style.get_effects().text_shadow.0.iter().rev() { let offset = &Point2D::new(text_shadow.offset_x, text_shadow.offset_y); let color = self.style().resolve_color(text_shadow.color); self.build_display_list_for_text_fragment(display_list, &**text_fragment, color, &stacking_relative_content_box, Some(text_shadow.blur_radius), offset, clip); } // Create the main text display item. self.build_display_list_for_text_fragment(display_list, &**text_fragment, text_color, &stacking_relative_content_box, None, &Point2D::new(Au(0), Au(0)), clip); if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_text_fragments(self.style(), display_list, stacking_relative_border_box, &stacking_relative_content_box, &**text_fragment, clip) } } SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(..) 
| SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) => { if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_fragment(display_list, stacking_relative_border_box, clip); } } SpecificFragmentInfo::Iframe(ref fragment_info) => { // TODO(mrobinson): When https://github.com/servo/euclid/issues/109 is fixed this // check can just become stacking_relative_content_box.is_empty(). if stacking_relative_content_box.size.width != Zero::zero() && stacking_relative_content_box.size.height != Zero::zero() { let layer_id = self.layer_id(); display_list.content.push_back(DisplayItem::LayeredItemClass(box LayeredItem { item: DisplayItem::NoopClass( box BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone())), layer_id: layer_id })); display_list.layer_info.push_back(LayerInfo::new(layer_id, ScrollPolicy::Scrollable, Some(fragment_info.pipeline_id))); } } SpecificFragmentInfo::Image(ref mut image_fragment) => { // Place the image into the display list. 
if let Some(ref image) = image_fragment.image { display_list.content.push_back(DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone()), image: image.clone(), stretch_size: stacking_relative_content_box.size, image_rendering: self.style.get_effects().image_rendering.clone(), })); } } SpecificFragmentInfo::Canvas(ref canvas_fragment_info) => { // TODO(ecoal95): make the canvas with a renderer use the custom layer let width = canvas_fragment_info.replaced_image_fragment_info .computed_inline_size.map_or(0, |w| w.to_px() as usize); let height = canvas_fragment_info.replaced_image_fragment_info .computed_block_size.map_or(0, |h| h.to_px() as usize); if width > 0 && height > 0 { let layer_id = self.layer_id(); let canvas_data = match canvas_fragment_info.ipc_renderer { Some(ref ipc_renderer) => { let ipc_renderer = ipc_renderer.lock().unwrap(); let (sender, receiver) = ipc::channel().unwrap(); ipc_renderer.send(CanvasMsg::FromLayout( FromLayoutMsg::SendPixelContents(sender))).unwrap(); let data = receiver.recv().unwrap(); // Propagate the layer and the renderer to the paint task. 
layout_context.shared.canvas_layers_sender.lock().unwrap().send( (layer_id, (*ipc_renderer).clone())).unwrap(); data }, None => IpcSharedMemory::from_byte(0xFFu8, width * height * 4), }; let display_item = DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone()), image: Arc::new(Image { width: width as u32, height: height as u32, format: PixelFormat::RGBA8, bytes: canvas_data, }), stretch_size: stacking_relative_content_box.size, image_rendering: image_rendering::T::Auto, }); display_list.content.push_back(DisplayItem::LayeredItemClass(box LayeredItem { item: display_item, layer_id: layer_id })); display_list.layer_info.push_back( LayerInfo::new(layer_id, ScrollPolicy::Scrollable, None)); } } SpecificFragmentInfo::UnscannedText(_) => { panic!("Shouldn't see unscanned fragments here.") } SpecificFragmentInfo::TableColumn(_) => { panic!("Shouldn't see table column fragments here.") } } } fn create_stacking_context(&self, base_flow: &BaseFlow, display_list: Box<DisplayList>, scroll_policy: ScrollPolicy, mode: StackingContextCreationMode) -> Arc<StackingContext> { let border_box = match mode { StackingContextCreationMode::Normal | StackingContextCreationMode::OuterScrollWrapper => { self.stacking_relative_border_box(&base_flow.stacking_relative_position, &base_flow.early_absolute_position_info .relative_containing_block_size, base_flow.early_absolute_position_info .relative_containing_block_mode, CoordinateSystem::Parent) } StackingContextCreationMode::InnerScrollWrapper => { Rect::new(ZERO_POINT, base_flow.overflow.size) } }; let overflow = match mode { StackingContextCreationMode::Normal => { // First, compute the offset of our border box (including relative positioning) // from our flow origin, since that is what `BaseFlow::overflow` is relative to. 
let border_box_offset = border_box.translate(&-base_flow.stacking_relative_position).origin; // Then, using that, compute our overflow region relative to our border box. base_flow.overflow.translate(&-border_box_offset) } StackingContextCreationMode::InnerScrollWrapper | StackingContextCreationMode::OuterScrollWrapper => { Rect::new(ZERO_POINT, border_box.size) } }; let mut transform = Matrix4::identity(); if let Some(ref operations) = self.style().get_effects().transform.0 { let transform_origin = self.style().get_effects().transform_origin; let transform_origin = Point3D::new(model::specified(transform_origin.horizontal, border_box.size.width).to_f32_px(), model::specified(transform_origin.vertical, border_box.size.height).to_f32_px(), transform_origin.depth.to_f32_px()); let pre_transform = Matrix4::create_translation(transform_origin.x, transform_origin.y, transform_origin.z); let post_transform = Matrix4::create_translation(-transform_origin.x, -transform_origin.y, -transform_origin.z); for operation in operations { let matrix = match *operation { transform::ComputedOperation::Rotate(ax, ay, az, theta) => { let theta = 2.0f32 * f32::consts::PI - theta.radians(); Matrix4::create_rotation(ax, ay, az, theta) } transform::ComputedOperation::Perspective(d) => { Matrix4::create_perspective(d.to_f32_px()) } transform::ComputedOperation::Scale(sx, sy, sz) => { Matrix4::create_scale(sx, sy, sz) } transform::ComputedOperation::Translate(tx, ty, tz) => { let tx = model::specified(tx, border_box.size.width).to_f32_px(); let ty = model::specified(ty, border_box.size.height).to_f32_px(); let tz = tz.to_f32_px(); Matrix4::create_translation(tx, ty, tz) } transform::ComputedOperation::Matrix(m) => { m.to_gfx_matrix() } transform::ComputedOperation::Skew(theta_x, theta_y) => { Matrix4::create_skew(theta_x.radians(), theta_y.radians()) } }; transform = transform.mul(&matrix); } transform = pre_transform.mul(&transform).mul(&post_transform); } let perspective = match 
self.style().get_effects().perspective { LengthOrNone::Length(d) => { let perspective_origin = self.style().get_effects().perspective_origin; let perspective_origin = Point2D::new(model::specified(perspective_origin.horizontal, border_box.size.width).to_f32_px(), model::specified(perspective_origin.vertical, border_box.size.height).to_f32_px()); let pre_transform = Matrix4::create_translation(perspective_origin.x, perspective_origin.y, 0.0); let post_transform = Matrix4::create_translation(-perspective_origin.x, -perspective_origin.y, 0.0); let perspective_matrix = Matrix4::create_perspective(d.to_f32_px()); pre_transform.mul(&perspective_matrix).mul(&post_transform) } LengthOrNone::None => { Matrix4::identity() } }; // Create the filter pipeline. let effects = self.style().get_effects(); let mut filters = effects.filter.clone(); if effects.opacity != 1.0 { filters.push(Filter::Opacity(effects.opacity)) } // There are two situations that need layers: when the fragment has the HAS_LAYER // flag and when we are building a layer tree for overflow scrolling. 
let layer_info = if mode == StackingContextCreationMode::InnerScrollWrapper { Some(LayerInfo::new(self.layer_id_for_overflow_scroll(), scroll_policy, None)) } else if self.flags.contains(HAS_LAYER) { Some(LayerInfo::new(self.layer_id(), scroll_policy, None)) } else { None }; let scrolls_overflow_area = mode == StackingContextCreationMode::OuterScrollWrapper; let transform_style = self.style().get_used_transform_style(); let establishes_3d_context = scrolls_overflow_area || transform_style == transform_style::T::flat; Arc::new(StackingContext::new(display_list, &border_box, &overflow, self.effective_z_index(), filters, self.style().get_effects().mix_blend_mode, transform, perspective, establishes_3d_context, scrolls_overflow_area, layer_info)) } fn clipping_region_for_children(&self, current_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>, is_absolutely_positioned: bool) -> ClippingRegion { // Don't clip if we're text. if self.is_scanned_text_fragment() { return (*current_clip).clone() } // Account for style-specified `clip`. let mut current_clip = self.calculate_style_specified_clip(current_clip, stacking_relative_border_box); // Clip according to the values of `overflow-x` and `overflow-y`. // // TODO(pcwalton): Support scrolling of non-absolutely-positioned elements. // FIXME(pcwalton): This may be more complex than it needs to be, since it seems to be // impossible with the computed value rules as they are to have `overflow-x: visible` with // `overflow-y: <scrolling>` or vice versa! 
        // Clip the inline axis: `hidden` always clips; `auto`/`scroll` clip
        // only when the element is not absolutely positioned (scrolling of
        // non-abs-pos elements is not yet supported, per the TODO above).
        match (self.style.get_box().overflow_x, is_absolutely_positioned) {
            (overflow_x::T::hidden, _) |
            (overflow_x::T::auto, false) |
            (overflow_x::T::scroll, false) => {
                // Clip the bounding rect to the border box horizontally.
                let mut bounds = current_clip.bounding_rect();
                let max_x = cmp::min(bounds.max_x(), stacking_relative_border_box.max_x());
                bounds.origin.x = cmp::max(bounds.origin.x,
                                           stacking_relative_border_box.origin.x);
                bounds.size.width = max_x - bounds.origin.x;
                current_clip = current_clip.intersect_rect(&bounds)
            }
            _ => {}
        }
        // Same for the block axis. NOTE(review): the patterns reuse
        // `overflow_x::T` variants — presumably `overflow_y.0` wraps the
        // `overflow_x` enum; confirm against the style crate definitions.
        match (self.style.get_box().overflow_y.0, is_absolutely_positioned) {
            (overflow_x::T::hidden, _) |
            (overflow_x::T::auto, false) |
            (overflow_x::T::scroll, false) => {
                // Clip the bounding rect to the border box vertically.
                let mut bounds = current_clip.bounding_rect();
                let max_y = cmp::min(bounds.max_y(), stacking_relative_border_box.max_y());
                bounds.origin.y = cmp::max(bounds.origin.y,
                                           stacking_relative_border_box.origin.y);
                bounds.size.height = max_y - bounds.origin.y;
                current_clip = current_clip.intersect_rect(&bounds)
            }
            _ => {}
        }

        current_clip
    }

    /// Emits the display items for a run of scanned text: the text itself plus
    /// (continued below) any text decorations. Also used for text shadows, in
    /// which case `shadow_blur_radius` is `Some` and `offset` shifts the text.
    fn build_display_list_for_text_fragment(&self,
                                            display_list: &mut DisplayList,
                                            text_fragment: &ScannedTextFragmentInfo,
                                            text_color: RGBA,
                                            stacking_relative_content_box: &Rect<Au>,
                                            shadow_blur_radius: Option<Au>,
                                            offset: &Point2D<Au>,
                                            clip: &ClippingRegion) {
        // Determine the orientation and cursor to use.
        let (orientation, cursor) = if self.style.writing_mode.is_vertical() {
            if self.style.writing_mode.is_sideways_left() {
                (TextOrientation::SidewaysLeft, Cursor::VerticalTextCursor)
            } else {
                (TextOrientation::SidewaysRight, Cursor::VerticalTextCursor)
            }
        } else {
            (TextOrientation::Upright, Cursor::TextCursor)
        };

        // Compute location of the baseline.
        //
        // FIXME(pcwalton): Get the real container size.
let container_size = Size2D::zero(); let metrics = &text_fragment.run.font_metrics; let stacking_relative_content_box = stacking_relative_content_box.translate(offset); let baseline_origin = stacking_relative_content_box.origin + LogicalPoint::new(self.style.writing_mode, Au(0), metrics.ascent).to_physical(self.style.writing_mode, container_size); // Create the text display item. display_list.content.push_back(DisplayItem::TextClass(box TextDisplayItem { base: BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, self.style(), cursor), (*clip).clone()), text_run: text_fragment.run.clone(), range: text_fragment.range, text_color: text_color.to_gfx_color(), orientation: orientation, baseline_origin: baseline_origin, blur_radius: shadow_blur_radius.unwrap_or(Au(0)), })); // Create display items for text decorations. let mut text_decorations = self.style() .get_inheritedtext() ._servo_text_decorations_in_effect; if shadow_blur_radius.is_some() { // If we're painting a shadow, paint the decorations the same color as the shadow. 
text_decorations.underline = text_decorations.underline.map(|_| text_color); text_decorations.overline = text_decorations.overline.map(|_| text_color); text_decorations.line_through = text_decorations.line_through.map(|_| text_color); } let stacking_relative_content_box = LogicalRect::from_physical(self.style.writing_mode, stacking_relative_content_box, container_size); if let Some(ref underline_color) = text_decorations.underline { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.start.b = stacking_relative_content_box.start.b + metrics.ascent - metrics.underline_offset; stacking_relative_box.size.block = metrics.underline_size; self.build_display_list_for_text_decoration(display_list, underline_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } if let Some(ref overline_color) = text_decorations.overline { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.size.block = metrics.underline_size; self.build_display_list_for_text_decoration(display_list, overline_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } if let Some(ref line_through_color) = text_decorations.line_through { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.start.b = stacking_relative_box.start.b + metrics.ascent - metrics.strikeout_offset; stacking_relative_box.size.block = metrics.strikeout_size; self.build_display_list_for_text_decoration(display_list, line_through_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } } fn build_display_list_for_text_decoration(&self, display_list: &mut DisplayList, color: &RGBA, stacking_relative_box: &LogicalRect<Au>, clip: &ClippingRegion, blur_radius: Au) { // Perhaps surprisingly, text decorations are box shadows. 
This is because they may need // to have blur in the case of `text-shadow`, and this doesn't hurt performance because box // shadows are optimized into essentially solid colors if there is no need for the blur. // // FIXME(pcwalton, #2795): Get the real container size. let container_size = Size2D::zero(); let stacking_relative_box = stacking_relative_box.to_physical(self.style.writing_mode, container_size); let metadata = DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor); display_list.content.push_back(DisplayItem::BoxShadowClass(box BoxShadowDisplayItem { base: BaseDisplayItem::new(shadow_bounds(&stacking_relative_box, blur_radius, Au(0)), metadata, (*clip).clone()), box_bounds: stacking_relative_box, color: color.to_gfx_color(), offset: ZERO_POINT, blur_radius: blur_radius, spread_radius: Au(0), border_radius: Au(0), clip_mode: BoxShadowClipMode::None, })) } } pub trait BlockFlowDisplayListBuilding { fn build_display_list_for_block_base(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel); fn build_display_list_for_static_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel); fn build_display_list_for_absolutely_positioned_block( &mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); fn build_display_list_for_floating_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); fn build_display_list_for_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); } impl BlockFlowDisplayListBuilding for BlockFlow { fn build_display_list_for_block_base(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, 
border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel) { // Add the box that starts the block context. let clip = if self.fragment.establishes_stacking_context() { self.base.clip.translate(&-self.base.stacking_relative_position) } else { self.base.clip.clone() }; self.fragment .build_display_list(display_list, layout_context, &self.base.stacking_relative_position, &self.base .early_absolute_position_info .relative_containing_block_size, self.base .early_absolute_position_info .relative_containing_block_mode, border_painting_mode, background_border_level, &clip, &self.base.stacking_relative_position_of_display_port); // Add children. for kid in self.base.children.iter_mut() { display_list.append_from(&mut flow::mut_base(kid).display_list_building_result); } self.base.build_display_items_for_debugging_tint(display_list, self.fragment.node); } fn build_display_list_for_static_block(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel) { self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, background_border_level); self.base.display_list_building_result = if self.fragment.establishes_stacking_context() { let scroll_policy = if self.is_fixed() { ScrollPolicy::FixedPosition } else { ScrollPolicy::Scrollable }; Some(DisplayList::new_with_stacking_context( self.fragment.create_stacking_context(&self.base, display_list, scroll_policy, StackingContextCreationMode::Normal))) } else { if self.fragment.style.get_box().position != position::T::static_ { display_list.form_pseudo_stacking_context_for_positioned_content(); } Some(display_list) } } fn build_display_list_for_absolutely_positioned_block( &mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { // If `overflow: scroll` is in effect, we add this fragment's 
display items to a new // stacking context. let outer_display_list_for_overflow_scroll = match (self.fragment.style().get_box().overflow_x, self.fragment.style().get_box().overflow_y.0) { (overflow_x::T::auto, _) | (overflow_x::T::scroll, _) | (_, overflow_x::T::auto) | (_, overflow_x::T::scroll) => { // Create a separate display list for our own fragment. let mut outer_display_list_for_overflow_scroll = box DisplayList::new(); let clip = self.base.clip.translate(&-self.base.stacking_relative_position); self.fragment.build_display_list( &mut outer_display_list_for_overflow_scroll, layout_context, &self.base.stacking_relative_position, &self.base.early_absolute_position_info.relative_containing_block_size, self.base.early_absolute_position_info.relative_containing_block_mode, border_painting_mode, BackgroundAndBorderLevel::RootOfStackingContext, &clip, &self.base.stacking_relative_position_of_display_port); // Add the fragments of our children to the display list we'll use for the inner // stacking context. for kid in self.base.children.iter_mut() { display_list.append_from(&mut flow::mut_base(kid).display_list_building_result); } Some(outer_display_list_for_overflow_scroll) } _ => { let establishes_stacking_context = self.fragment.establishes_stacking_context(); let background_and_border_level = if establishes_stacking_context { BackgroundAndBorderLevel::RootOfStackingContext } else { BackgroundAndBorderLevel::Block }; self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, background_and_border_level); None } }; if !self.fragment.flags.contains(HAS_LAYER) && !self.fragment.establishes_stacking_context() { display_list.form_pseudo_stacking_context_for_positioned_content(); self.base.display_list_building_result = Some(display_list); return; } // If we got here, then we need a new layer. 
let scroll_policy = if self.is_fixed() { ScrollPolicy::FixedPosition } else { ScrollPolicy::Scrollable }; let stacking_context = match outer_display_list_for_overflow_scroll { Some(mut outer_display_list) => { outer_display_list.positioned_content.push_back( DisplayItem::StackingContextClass(self.fragment.create_stacking_context( &self.base, display_list, scroll_policy, StackingContextCreationMode::InnerScrollWrapper))); self.fragment.create_stacking_context( &self.base, outer_display_list, scroll_policy, StackingContextCreationMode::OuterScrollWrapper) } None => { self.fragment.create_stacking_context( &self.base, display_list, scroll_policy, StackingContextCreationMode::Normal) } }; self.base.display_list_building_result = Some(DisplayList::new_with_stacking_context(stacking_context)); } fn build_display_list_for_floating_block(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, BackgroundAndBorderLevel::RootOfStackingContext); display_list.form_float_pseudo_stacking_context();<|fim▁hole|> Some(DisplayList::new_with_stacking_context( self.fragment.create_stacking_context(&self.base, display_list, ScrollPolicy::Scrollable, StackingContextCreationMode::Normal))) } else { if self.fragment.style.get_box().position != position::T::static_ { display_list.form_pseudo_stacking_context_for_positioned_content(); } Some(display_list) } } fn build_display_list_for_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { if self.base.flags.is_float() { // TODO(#2009, pcwalton): This is a pseudo-stacking context. We need to merge `z-index: // auto` kids into the parent stacking context, when that is supported. 
self.build_display_list_for_floating_block(display_list, layout_context, border_painting_mode); } else if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { self.build_display_list_for_absolutely_positioned_block(display_list, layout_context, border_painting_mode); } else { self.build_display_list_for_static_block(display_list, layout_context, border_painting_mode, BackgroundAndBorderLevel::Block); } } } pub trait InlineFlowDisplayListBuilding { fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext); } impl InlineFlowDisplayListBuilding for InlineFlow { fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext) { // TODO(#228): Once we form lines and have their cached bounds, we can be smarter and // not recurse on a line if nothing in it can intersect the dirty region. debug!("Flow: building display list for {} inline fragments", self.fragments.len()); let mut display_list = box DisplayList::new(); let mut has_stacking_context = false; for fragment in &mut self.fragments.fragments { fragment.build_display_list(&mut *display_list, layout_context, &self.base.stacking_relative_position, &self.base .early_absolute_position_info .relative_containing_block_size, self.base .early_absolute_position_info .relative_containing_block_mode, BorderPaintingMode::Separate, BackgroundAndBorderLevel::Content, &self.base.clip, &self.base.stacking_relative_position_of_display_port); has_stacking_context = fragment.establishes_stacking_context(); match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); display_list.append_from( &mut flow::mut_base(block_flow).display_list_building_result) } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); display_list.append_from( &mut flow::mut_base(block_flow).display_list_building_result) } SpecificFragmentInfo::InlineAbsolute(ref 
mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); display_list.append_from( &mut flow::mut_base(block_flow).display_list_building_result) } _ => {} } } if !self.fragments.fragments.is_empty() { self.base.build_display_items_for_debugging_tint(&mut *display_list, self.fragments.fragments[0].node); } // FIXME(Savago): fix Fragment::establishes_stacking_context() for absolute positioned item // and remove the check for filter presence. Further details on #5812. // // FIXME(#7424, pcwalton): This is terribly bogus! What is even going on here? if has_stacking_context { match self.fragments.fragments[0].specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) => {} _ => { has_stacking_context = !self.fragments.fragments[0].style().get_effects().filter.is_empty() } } } self.base.display_list_building_result = if has_stacking_context { Some(DisplayList::new_with_stacking_context( self.fragments.fragments[0].create_stacking_context( &self.base, display_list, ScrollPolicy::Scrollable, StackingContextCreationMode::Normal))) } else { Some(display_list) }; if opts::get().validate_display_list_geometry { self.base.validate_display_list_geometry(); } } } pub trait ListItemFlowDisplayListBuilding { fn build_display_list_for_list_item(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext); } impl ListItemFlowDisplayListBuilding for ListItemFlow { fn build_display_list_for_list_item(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext) { // Draw the marker, if applicable. 
for marker in &mut self.marker_fragments { marker.build_display_list(&mut *display_list, layout_context, &self.block_flow.base.stacking_relative_position, &self.block_flow .base .early_absolute_position_info .relative_containing_block_size, self.block_flow .base .early_absolute_position_info .relative_containing_block_mode, BorderPaintingMode::Separate, BackgroundAndBorderLevel::Content, &self.block_flow.base.clip, &self.block_flow .base .stacking_relative_position_of_display_port); } // Draw the rest of the block. self.block_flow.build_display_list_for_block(display_list, layout_context, BorderPaintingMode::Separate) } } pub trait FlexFlowDisplayListBuilding { fn build_display_list_for_flex(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext); } impl FlexFlowDisplayListBuilding for FlexFlow { fn build_display_list_for_flex(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext) { // Draw the rest of the block. self.as_mut_block().build_display_list_for_block(display_list, layout_context, BorderPaintingMode::Separate) } } trait BaseFlowDisplayListBuilding { fn build_display_items_for_debugging_tint(&self, display_list: &mut DisplayList, node: OpaqueNode); } impl BaseFlowDisplayListBuilding for BaseFlow { fn build_display_items_for_debugging_tint(&self, display_list: &mut DisplayList, node: OpaqueNode) { if !opts::get().show_debug_parallel_layout { return } let thread_id = self.thread_id; let stacking_context_relative_bounds = Rect::new(self.stacking_relative_position, self.position.size.to_physical(self.writing_mode)); let mut color = THREAD_TINT_COLORS[thread_id as usize % THREAD_TINT_COLORS.len()]; color.a = 1.0; display_list.push(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(stacking_context_relative_bounds.inflate(Au::from_px(2), Au::from_px(2)), DisplayItemMetadata { node: node, pointing: None, }, self.clip.clone()), border_widths: SideOffsets2D::new_all_same(Au::from_px(2)), color: 
SideOffsets2D::new_all_same(color), style: SideOffsets2D::new_all_same(border_style::T::solid), radius: BorderRadii::all_same(Au(0)), }), StackingLevel::Content); } } // A helper data structure for gradients. #[derive(Copy, Clone)] struct StopRun { start_offset: f32, end_offset: f32, start_index: usize, stop_count: usize, } fn fmin(a: f32, b: f32) -> f32 { if a < b { a } else { b } } fn position_to_offset(position: LengthOrPercentage, Au(total_length): Au) -> f32 { match position { LengthOrPercentage::Length(Au(length)) => { fmin(1.0, (length as f32) / (total_length as f32)) } LengthOrPercentage::Percentage(percentage) => percentage as f32, LengthOrPercentage::Calc(calc) => fmin(1.0, calc.percentage() + (calc.length().0 as f32) / (total_length as f32)), } } /// "Steps" as defined by CSS 2.1 § E.2. #[derive(Clone, PartialEq, Debug, Copy)] pub enum StackingLevel { /// The border and backgrounds for the root of this stacking context: steps 1 and 2. BackgroundAndBorders, /// Borders and backgrounds for block-level descendants: step 4. BlockBackgroundsAndBorders, /// All non-positioned content. Content, } impl StackingLevel { #[inline] pub fn from_background_and_border_level(level: BackgroundAndBorderLevel) -> StackingLevel { match level { BackgroundAndBorderLevel::RootOfStackingContext => StackingLevel::BackgroundAndBorders, BackgroundAndBorderLevel::Block => StackingLevel::BlockBackgroundsAndBorders, BackgroundAndBorderLevel::Content => StackingLevel::Content, } } } /// Which level to place backgrounds and borders in. pub enum BackgroundAndBorderLevel { RootOfStackingContext, Block, Content, } trait StackingContextConstruction { /// Adds the given display item at the specified level to this display list. 
fn push(&mut self, display_item: DisplayItem, level: StackingLevel); } impl StackingContextConstruction for DisplayList { fn push(&mut self, display_item: DisplayItem, level: StackingLevel) { match level { StackingLevel::BackgroundAndBorders => { self.background_and_borders.push_back(display_item) } StackingLevel::BlockBackgroundsAndBorders => { self.block_backgrounds_and_borders.push_back(display_item) } StackingLevel::Content => self.content.push_back(display_item), } } } /// Adjusts `content_rect` as necessary for the given spread, and blur so that the resulting /// bounding rect contains all of a shadow's ink. fn shadow_bounds(content_rect: &Rect<Au>, blur_radius: Au, spread_radius: Au) -> Rect<Au> { let inflation = spread_radius + blur_radius * BLUR_INFLATION_FACTOR; content_rect.inflate(inflation, inflation) } /// Allows a CSS color to be converted into a graphics color. pub trait ToGfxColor { /// Converts a CSS color to a graphics color. fn to_gfx_color(&self) -> Color; } impl ToGfxColor for RGBA { fn to_gfx_color(&self) -> Color { color::rgba(self.red, self.green, self.blue, self.alpha) } } /// Describes how to paint the borders. #[derive(Copy, Clone)] pub enum BorderPaintingMode<'a> { /// Paint borders separately (`border-collapse: separate`). Separate, /// Paint collapsed borders. Collapse(&'a CollapsedBordersForCell), /// Paint no borders. Hidden, } #[derive(Copy, Clone, PartialEq)] pub enum StackingContextCreationMode { Normal, OuterScrollWrapper, InnerScrollWrapper, }<|fim▁end|>
self.base.display_list_building_result = if self.fragment.establishes_stacking_context() {
<|file_name|>test_etags.py<|end_file_name|><|fim▁begin|>import cherrypy from cherrypy.test import helper class ETagTest(helper.CPWebCase): def setup_server(): class Root: def resource(self): return "Oh wah ta goo Siam." resource.exposed = True def fail(self, code): code = int(code) if 300 <= code <= 399: raise cherrypy.HTTPRedirect([], code) else: raise cherrypy.HTTPError(code) fail.exposed = True def unicoded(self): return u'I am a \u1ee4nicode string.' unicoded.exposed = True unicoded._cp_config = {'tools.encode.on': True} conf = {'/': {'tools.etags.on': True, 'tools.etags.autotags': True, }} cherrypy.tree.mount(Root(), config=conf) setup_server = staticmethod(setup_server) def test_etags(self): self.getPage("/resource") self.assertStatus('200 OK') self.assertHeader('Content-Type', 'text/html;charset=utf-8') self.assertBody('Oh wah ta goo Siam.') etag = self.assertHeader('ETag') # Test If-Match (both valid and invalid) self.getPage("/resource", headers=[('If-Match', etag)]) self.assertStatus("200 OK") self.getPage("/resource", headers=[('If-Match', "*")]) self.assertStatus("200 OK") self.getPage("/resource", headers=[('If-Match', "*")], method="POST") self.assertStatus("200 OK")<|fim▁hole|> self.getPage("/resource", headers=[('If-None-Match', etag)]) self.assertStatus(304) self.getPage("/resource", method='POST', headers=[('If-None-Match', etag)]) self.assertStatus("412 Precondition Failed") self.getPage("/resource", headers=[('If-None-Match', "*")]) self.assertStatus(304) self.getPage("/resource", headers=[('If-None-Match', "a bogus tag")]) self.assertStatus("200 OK") def test_errors(self): self.getPage("/resource") self.assertStatus(200) etag = self.assertHeader('ETag') # Test raising errors in page handler self.getPage("/fail/412", headers=[('If-Match', etag)]) self.assertStatus(412) self.getPage("/fail/304", headers=[('If-Match', etag)]) self.assertStatus(304) self.getPage("/fail/412", headers=[('If-None-Match', "*")]) self.assertStatus(412) 
self.getPage("/fail/304", headers=[('If-None-Match', "*")]) self.assertStatus(304) def test_unicode_body(self): self.getPage("/unicoded") self.assertStatus(200) etag1 = self.assertHeader('ETag') self.getPage("/unicoded", headers=[('If-Match', etag1)]) self.assertStatus(200) self.assertHeader('ETag', etag1)<|fim▁end|>
self.getPage("/resource", headers=[('If-Match', "a bogus tag")]) self.assertStatus("412 Precondition Failed") # Test If-None-Match (both valid and invalid)
<|file_name|>plane-details.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input, Output } from '@angular/core'; import {PlanesService} from '../planes/planes.service'; import {IPlane, AppStore} from '../interfaces'; @Component({ moduleId: module.id, selector: 'app-plane-details', // templateUrl: 'plane-details.component.html', template: ` <div class="plane-details" *ngIf="plane"> <h2>{{plane.model}} Details</h2> <table class="plain-table"> <tr> <th>Model</th> <th>Sits Count</th> <th>Flights</th> </tr> <tr> <td>{{plane.model}}</td> <td>{{plane.sitsCount}}</td> <td>No Available Flights</td> </tr><|fim▁hole|> </div> `, styleUrls: ['plane-details.component.css'], providers: [PlanesService] }) export class PlaneDetailsComponent implements OnInit { @Input('plane') plane: any; @Output('upPlane') upPlane: any; sPlane: IPlane; constructor(private planesService: PlanesService) { // this.plane = this.planesService.selectedPlane; this.sPlane = planesService.selectedPlane; } ngOnInit() { } }<|fim▁end|>
</table>
<|file_name|>test_manual_mqtt.py<|end_file_name|><|fim▁begin|>"""The tests for the manual_mqtt Alarm Control Panel component.""" from datetime import timedelta import unittest from unittest.mock import patch from homeassistant.setup import setup_component from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED) from homeassistant.components import alarm_control_panel import homeassistant.util.dt as dt_util from tests.common import ( fire_time_changed, get_test_home_assistant, mock_mqtt_component, fire_mqtt_message, assert_setup_component) CODE = 'HELLO_CODE' class TestAlarmControlPanelManualMqtt(unittest.TestCase): """Test the manual_mqtt alarm module.""" def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.mock_publish = mock_mqtt_component(self.hass) def tearDown(self): # pylint: disable=invalid-name """Stop down everything that was started.""" self.hass.stop() def test_fail_setup_without_state_topic(self): """Test for failing with no state topic.""" with assert_setup_component(0) as config: assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt_alarm', 'command_topic': 'alarm/command' } }) assert not config[alarm_control_panel.DOMAIN] def test_fail_setup_without_command_topic(self): """Test failing with no command topic.""" with assert_setup_component(0): assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt_alarm', 'state_topic': 'alarm/state' } }) def test_arm_home_no_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 
'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_home(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_HOME, self.hass.states.get(entity_id).state) def test_arm_home_with_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_home(self.hass, CODE, entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) state = self.hass.states.get(entity_id) assert state.attributes['post_pending_state'] == STATE_ALARM_ARMED_HOME future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_HOME, self.hass.states.get(entity_id).state) def test_arm_home_with_invalid_code(self): """Attempt to arm home without a valid code.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_home(self.hass, CODE + '2') self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_arm_away_no_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE, entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) def test_arm_home_with_template_code(self): """Attempt to arm with a template-based code.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code_template': '{{ "abc" }}', 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.hass.start() self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, 
self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_home(self.hass, 'abc') self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_ARMED_HOME, state.state) def test_arm_away_with_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) state = self.hass.states.get(entity_id) assert state.attributes['post_pending_state'] == STATE_ALARM_ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) def test_arm_away_with_invalid_code(self): """Attempt to arm away without a valid code.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE + '2') self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_arm_night_no_pending(self): """Test arm night method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_night(self.hass, CODE, entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_NIGHT, self.hass.states.get(entity_id).state) def test_arm_night_with_pending(self): """Test arm night method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_night(self.hass, CODE) 
self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) state = self.hass.states.get(entity_id) assert state.attributes['post_pending_state'] == \ STATE_ALARM_ARMED_NIGHT future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_NIGHT, self.hass.states.get(entity_id).state) # Do not go to the pending state when updating to the same state alarm_control_panel.alarm_arm_night(self.hass, CODE, entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_NIGHT, self.hass.states.get(entity_id).state) def test_arm_night_with_invalid_code(self): """Attempt to arm night without a valid code.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_night(self.hass, CODE + '2') self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_no_pending(self): """Test triggering when no pending submitted method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 1, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() 
self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=60) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) def test_trigger_with_delay(self): """Test trigger method and switch from pending to triggered.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'delay_time': 1, 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_TRIGGERED, state.attributes['post_pending_state']) future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_TRIGGERED, state.state) def test_trigger_zero_trigger_time(self): """Test disabled trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 0, 'trigger_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_zero_trigger_time_with_pending(self): """Test disabled trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 2, 'trigger_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_with_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 2, 'trigger_time': 3, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() 
self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) state = self.hass.states.get(entity_id) assert state.attributes['post_pending_state'] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_with_disarm_after_trigger(self): """Test disarm after trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 5, 'pending_time': 0, 'disarm_after_trigger': True, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_with_zero_specific_trigger_time(self): """Test trigger method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 5, 'disarmed': { 'trigger_time': 0 }, 'pending_time': 0, 'disarm_after_trigger': True, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_with_unused_zero_specific_trigger_time(self): """Test disarm after trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 5, 'armed_home': { 'trigger_time': 0 }, 'pending_time': 0, 'disarm_after_trigger': True, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_trigger_with_specific_trigger_time(self): """Test disarm after trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'disarmed': { 'trigger_time': 5 }, 'pending_time': 0, 'disarm_after_trigger': True, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_back_to_back_trigger_with_no_disarm_after_trigger(self): """Test no disarm after back to back trigger.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 5, 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE, entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, 
self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) def test_disarm_while_pending_trigger(self): """Test disarming while pending state.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'trigger_time': 5, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_disarm(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_disarm_during_trigger_with_invalid_code(self): """Test disarming while code is invalid.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 5, 'code': CODE + '2', 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_disarm(self.hass, entity_id=entity_id) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) def test_trigger_with_unused_specific_delay(self): """Test trigger method and switch from pending to triggered.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'delay_time': 5, 'pending_time': 0, 'armed_home': { 'delay_time': 10 }, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_TRIGGERED, state.attributes['post_pending_state']) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED def test_trigger_with_specific_delay(self): """Test trigger method and switch from pending to triggered.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'delay_time': 10, 'pending_time': 0, 'armed_away': { 'delay_time': 1 }, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_TRIGGERED, state.attributes['post_pending_state']) future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED def test_trigger_with_pending_and_delay(self): """Test trigger method and switch from pending to triggered.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'delay_time': 1, 'pending_time': 0, 'triggered': { 'pending_time': 1 }, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes['post_pending_state'] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes['post_pending_state'] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED def test_trigger_with_pending_and_specific_delay(self): """Test trigger method and switch from pending to triggered.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'delay_time': 10, 'pending_time': 0, 'armed_away': { 'delay_time': 1 }, 'triggered': { 'pending_time': 1 }, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state' }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes['post_pending_state'] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes['post_pending_state'] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED def test_armed_home_with_specific_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 10, 'armed_home': { 'pending_time': 2 }, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' alarm_control_panel.alarm_arm_home(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=2) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_HOME, self.hass.states.get(entity_id).state) def test_armed_away_with_specific_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 10, 'armed_away': { 'pending_time': 2 }, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' alarm_control_panel.alarm_arm_away(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=2) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) def test_armed_night_with_specific_pending(self): """Test arm home method."""<|fim▁hole|> 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 10, 'armed_night': { 'pending_time': 2 }, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' alarm_control_panel.alarm_arm_night(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=2) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_NIGHT, self.hass.states.get(entity_id).state) def test_trigger_with_specific_pending(self): """Test arm home method.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 10, 'triggered': { 'pending_time': 2 }, 'trigger_time': 3, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=2) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_TRIGGERED, self.hass.states.get(entity_id).state) future = dt_util.utcnow() + timedelta(seconds=5) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_arm_away_after_disabled_disarmed(self): """Test pending state with and without zero trigger time.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code': CODE, 'pending_time': 0, 'delay_time': 1, 'armed_away': { 'pending_time': 1, }, 'disarmed': { 'trigger_time': 0 }, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_away(self.hass, CODE) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_DISARMED, state.attributes['pre_pending_state']) self.assertEqual(STATE_ALARM_ARMED_AWAY, state.attributes['post_pending_state']) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_DISARMED, state.attributes['pre_pending_state']) self.assertEqual(STATE_ALARM_ARMED_AWAY, state.attributes['post_pending_state']) future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_ARMED_AWAY, state.state) alarm_control_panel.alarm_trigger(self.hass, entity_id=entity_id) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_PENDING, state.state) self.assertEqual(STATE_ALARM_ARMED_AWAY, state.attributes['pre_pending_state']) self.assertEqual(STATE_ALARM_TRIGGERED, state.attributes['post_pending_state']) future += timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_TRIGGERED, state.state) def test_disarm_with_template_code(self): """Attempt to disarm with a valid or invalid template-based code.""" self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': { 'platform': 'manual_mqtt', 'name': 'test', 'code_template': '{{ "" if from_state == "disarmed" else "abc" }}', 'pending_time': 0, 'disarm_after_trigger': False, 'command_topic': 'alarm/command', 'state_topic': 'alarm/state', }})) entity_id = 'alarm_control_panel.test' self.hass.start() self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_arm_home(self.hass, 'def') self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_ARMED_HOME, state.state) alarm_control_panel.alarm_disarm(self.hass, 'def') self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_ARMED_HOME, state.state) alarm_control_panel.alarm_disarm(self.hass, 'abc') self.hass.block_till_done() state = self.hass.states.get(entity_id) self.assertEqual(STATE_ALARM_DISARMED, state.state) def 
test_arm_home_via_command_topic(self): """Test arming home via command topic.""" assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 1, 'state_topic': 'alarm/state', 'command_topic': 'alarm/command', 'payload_arm_home': 'ARM_HOME', } }) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) # Fire the arm command via MQTT; ensure state changes to pending fire_mqtt_message(self.hass, 'alarm/command', 'ARM_HOME') self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_HOME, self.hass.states.get(entity_id).state) def test_arm_away_via_command_topic(self): """Test arming away via command topic.""" assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 1, 'state_topic': 'alarm/state', 'command_topic': 'alarm/command', 'payload_arm_away': 'ARM_AWAY', } }) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) # Fire the arm command via MQTT; ensure state changes to pending fire_mqtt_message(self.hass, 'alarm/command', 'ARM_AWAY') self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_AWAY, self.hass.states.get(entity_id).state) def test_arm_night_via_command_topic(self): """Test arming night via command topic.""" assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 1, 'state_topic': 'alarm/state', 'command_topic': 'alarm/command', 'payload_arm_night': 'ARM_NIGHT', } }) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) # Fire the arm command via MQTT; ensure state changes to pending fire_mqtt_message(self.hass, 'alarm/command', 'ARM_NIGHT') self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 
'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.assertEqual(STATE_ALARM_ARMED_NIGHT, self.hass.states.get(entity_id).state) def test_disarm_pending_via_command_topic(self): """Test disarming pending alarm via command topic.""" assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 1, 'state_topic': 'alarm/state', 'command_topic': 'alarm/command', 'payload_disarm': 'DISARM', } }) entity_id = 'alarm_control_panel.test' self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) alarm_control_panel.alarm_trigger(self.hass) self.hass.block_till_done() self.assertEqual(STATE_ALARM_PENDING, self.hass.states.get(entity_id).state) # Now that we're pending, receive a command to disarm fire_mqtt_message(self.hass, 'alarm/command', 'DISARM') self.hass.block_till_done() self.assertEqual(STATE_ALARM_DISARMED, self.hass.states.get(entity_id).state) def test_state_changes_are_published_to_mqtt(self): """Test publishing of MQTT messages when state changes.""" assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'manual_mqtt', 'name': 'test', 'pending_time': 1, 'trigger_time': 1, 'state_topic': 'alarm/state', 'command_topic': 'alarm/command', } }) # Component should send disarmed alarm state on startup self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_DISARMED, 0, True) self.mock_publish.async_publish.reset_mock() # Arm in home mode alarm_control_panel.alarm_arm_home(self.hass) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_PENDING, 0, True) self.mock_publish.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with 
patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_ARMED_HOME, 0, True) self.mock_publish.async_publish.reset_mock() # Arm in away mode alarm_control_panel.alarm_arm_away(self.hass) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_PENDING, 0, True) self.mock_publish.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_ARMED_AWAY, 0, True) self.mock_publish.async_publish.reset_mock() # Arm in night mode alarm_control_panel.alarm_arm_night(self.hass) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_PENDING, 0, True) self.mock_publish.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch(('homeassistant.components.alarm_control_panel.manual_mqtt.' 'dt_util.utcnow'), return_value=future): fire_time_changed(self.hass, future) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_ARMED_NIGHT, 0, True) self.mock_publish.async_publish.reset_mock() # Disarm alarm_control_panel.alarm_disarm(self.hass) self.hass.block_till_done() self.mock_publish.async_publish.assert_called_once_with( 'alarm/state', STATE_ALARM_DISARMED, 0, True)<|fim▁end|>
self.assertTrue(setup_component( self.hass, alarm_control_panel.DOMAIN, {'alarm_control_panel': {
<|file_name|>documentation.py<|end_file_name|><|fim▁begin|>import sys import requests from urllib.parse import urlparse docs_repos = [ "frappe_docs", "erpnext_documentation", "erpnext_com", "frappe_io", ] def uri_validator(x): result = urlparse(x) return all([result.scheme, result.netloc, result.path]) def docs_link_exists(body): for line in body.splitlines(): for word in line.split(): if word.startswith('http') and uri_validator(word): parsed_url = urlparse(word) if parsed_url.netloc == "github.com": parts = parsed_url.path.split('/') if len(parts) == 5 and parts[1] == "frappe" and parts[2] in docs_repos: return True if __name__ == "__main__": pr = sys.argv[1] response = requests.get("https://api.github.com/repos/frappe/frappe/pulls/{}".format(pr)) if response.ok: payload = response.json() title = payload.get("title", "").lower() head_sha = payload.get("head", {}).get("sha") body = payload.get("body", "").lower()<|fim▁hole|> if docs_link_exists(body): print("Documentation Link Found. You're Awesome! 🎉") else: print("Documentation Link Not Found! ⚠️") sys.exit(1) else: print("Skipping documentation checks... 🏃")<|fim▁end|>
if title.startswith("feat") and head_sha and "no-docs" not in body:
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import threading import numpy as np def ros_ensure_valid_name(name): return name.replace('-','_') def lineseg_box(xmin, ymin, xmax, ymax): return [ [xmin,ymin,xmin,ymax], [xmin,ymax,xmax,ymax], [xmax,ymax,xmax,ymin], [xmax,ymin,xmin,ymin], ] def lineseg_circle(x,y,radius,N=64): draw_linesegs = [] theta = np.arange(N)*2*np.pi/N xdraw = x+np.cos(theta)*radius ydraw = y+np.sin(theta)*radius for i in range(N-1): draw_linesegs.append( (xdraw[i],ydraw[i],xdraw[i+1],ydraw[i+1])) draw_linesegs.append( (xdraw[-1],ydraw[-1],xdraw[0],ydraw[0])) return draw_linesegs class SharedValue: def __init__(self): self.evt = threading.Event() self._val = None def set(self,value): # called from producer thread self._val = value self.evt.set() def is_new_value_waiting(self): return self.evt.isSet() def get(self,*args,**kwargs): # called from consumer thread self.evt.wait(*args,**kwargs) val = self._val self.evt.clear() return val def get_nowait(self): # XXX TODO this is not atomic and is thus dangerous.<|fim▁hole|> self.evt.clear() return val class SharedValue1(object): def __init__(self,initial_value): self._val = initial_value self.lock = threading.Lock() def get(self): self.lock.acquire() try: val = self._val finally: self.lock.release() return val def set(self,new_value): self.lock.acquire() try: self._val = new_value finally: self.lock.release()<|fim▁end|>
# (The value could get read, then another thread could set it, # and only then might it get flagged as clear by this thread, # even though a new value is waiting.) val = self._val
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() {<|fim▁hole|>}<|fim▁end|>
let x:f32 = std::f32::MAX; let y:f64 = std::f64::MAX; println!("{:?}",x ); println!("{:?}",y );
<|file_name|>IneoRealTimeFileFormat.cpp<|end_file_name|><|fim▁begin|>/** IneoRealTimeFileFormat class implementation. @file IneoRealTimeFileFormat.cpp This file belongs to the SYNTHESE project (public transportation specialized software) Copyright (C) 2002 Hugues Romain - RCSmobility <contact@rcsmobility.com> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ #include "IneoRealTimeFileFormat.hpp" #include "Import.hpp" #include "DataSource.h" #include "DataSourceTableSync.h" #include "DBTransaction.hpp" #include "ImportableTableSync.hpp" #include "ScheduledServiceTableSync.h" #include "StopPointTableSync.hpp" #include "CommercialLineTableSync.h" #include "JourneyPatternTableSync.hpp" #include "DesignatedLinePhysicalStop.hpp" #include "LineStopTableSync.h" #include <boost/lexical_cast.hpp> using namespace std; using namespace boost; using namespace boost::posix_time; using namespace gregorian; namespace synthese { using namespace data_exchange; using namespace pt; using namespace server; using namespace util; using namespace impex; using namespace db; using namespace graph; using namespace util; namespace util { template<> const string FactorableTemplate<FileFormat, IneoRealTimeFileFormat>::FACTORY_KEY("ineo_temps_reel"); } namespace data_exchange { const string IneoRealTimeFileFormat::Importer_::PARAMETER_PLANNED_DATASOURCE_ID("ps"); const string 
IneoRealTimeFileFormat::Importer_::PARAMETER_COURSE_ID("ci"); const string IneoRealTimeFileFormat::Importer_::PARAMETER_DB_CONN_STRING("conn_string"); const string IneoRealTimeFileFormat::Importer_::PARAMETER_STOP_CODE_PREFIX("stop_code_prefix"); bool IneoRealTimeFileFormat::Importer_::_read( ) const { if(_database.empty() || !_plannedDataSource.get()) { return false; } DataSource& dataSource(*_import.get<DataSource>()); boost::shared_ptr<DB> db; if(_dbConnString) { db = DBModule::GetDBForStandaloneUse(*_dbConnString); } else { db = DBModule::GetDBSPtr(); } date today(day_clock::local_day()); string todayStr("'"+ to_iso_extended_string(today) +"'"); // Services linked to the planned source ImportableTableSync::ObjectBySource<StopPointTableSync> stops(*_plannedDataSource, _env); ImportableTableSync::ObjectBySource<CommercialLineTableSync> lines(*_plannedDataSource, _env); if(!_courseId) { BOOST_FOREACH(const ImportableTableSync::ObjectBySource<CommercialLineTableSync>::Map::value_type& itLine, lines.getMap()) { BOOST_FOREACH(const ImportableTableSync::ObjectBySource<CommercialLineTableSync>::Map::mapped_type::value_type& line, itLine.second) { JourneyPatternTableSync::Search(_env, line->getKey()); ScheduledServiceTableSync::Search(_env, optional<RegistryKeyType>(), line->getKey()); BOOST_FOREACH(const Path* route, line->getPaths()) { LineStopTableSync::Search(_env, route->getKey()); } } } // 1 : clean the old references to the current source ImportableTableSync::ObjectBySource<ScheduledServiceTableSync> sourcedServices(dataSource, _env); BOOST_FOREACH(const ImportableTableSync::ObjectBySource<ScheduledServiceTableSync>::Map::value_type& itService, sourcedServices.getMap()) { BOOST_FOREACH(const ImportableTableSync::ObjectBySource<ScheduledServiceTableSync>::Map::mapped_type::value_type& obj, itService.second) { obj->removeSourceLinks(dataSource); } } } else { // 1 : clean the old references to the current source 
ImportableTableSync::ObjectBySource<ScheduledServiceTableSync> sourcedServices(dataSource, _env); set<ScheduledService*> services(sourcedServices.get(*_courseId)); BOOST_FOREACH(ScheduledService* service, services) { service->removeSourceLinks(dataSource); _services.insert(service); } } // 2 : loop on the services present in the database and link to existing or new services stringstream query; query << "SELECT c.ref, c.chainage, c.ligne, l.mnemo as ligne_ref FROM " << _database << ".COURSE c " << "INNER JOIN " << _database << ".LIGNE l on c.ligne=l.ref AND l.jour=c.jour " << "WHERE c.jour=" << todayStr << " AND c.type='C'"; if(_courseId) { query << " AND c.ref=" << *_courseId; } DBResultSPtr result(db->execQuery(query.str())); while(result->next()) { string serviceRef(result->getText("ref")); string chainage(result->getText("chainage")); string ligneRef(result->getText("ligne_ref")); _logDebug( "Processing serviceRef="+ serviceRef +" chainage="+ chainage +" ligneRef="+ ligneRef ); CommercialLine* line( _getLine( lines, ligneRef, *_plannedDataSource ) ); if(!line) { _logWarning( "Line "+ ligneRef +" was not found for service "+ serviceRef ); continue; } stringstream chainageQuery; chainageQuery << "SELECT a.mnemol AS mnemol, h.htd AS htd, h.hta AS hta, h.type AS type, c.pos AS pos FROM " << _database << ".ARRETCHN c " << "INNER JOIN " << _database << ".ARRET a ON a.ref=c.arret AND a.jour=c.jour " << "INNER JOIN " << _database << ".HORAIRE h ON h.arretchn=c.ref AND h.jour=a.jour " << "INNER JOIN " << _database << ".COURSE o ON o.chainage=c.chainage AND o.ref=h.course AND c.jour=o.jour " << "WHERE h.course='" << serviceRef << "' AND h.jour=" << todayStr << " ORDER BY c.pos"; DBResultSPtr chainageResult(db->execQuery(chainageQuery.str())); JourneyPattern::StopsWithDepartureArrivalAuthorization servedStops; SchedulesBasedService::Schedules departureSchedules; SchedulesBasedService::Schedules arrivalSchedules; while(chainageResult->next()) { string 
type(chainageResult->getText("type")); string stopCode(chainageResult->getText("mnemol")); time_duration departureTime(duration_from_string(chainageResult->getText("htd"))); time_duration arrivalTime(duration_from_string(chainageResult->getText("hta"))); MetricOffset stopPos(chainageResult->getInt("pos")); bool referenceStop(type != "N"); std::set<StopPoint*> stopsSet( _getStopPoints( stops, _stopCodePrefix + stopCode, boost::optional<const std::string&>() ) ); if(stopsSet.empty()) { _logWarning( "Can't find stops for code "+ _stopCodePrefix + stopCode ); continue; } servedStops.push_back( JourneyPattern::StopWithDepartureArrivalAuthorization( stopsSet, stopPos, (type != "A"), (type != "D"), referenceStop ) ); // Ignoring interpolated times if(referenceStop) { // If the bus leaves after midnight, the hours are stored as 0 instead of 24 if( !departureSchedules.empty() && departureTime < *departureSchedules.rbegin()) { departureTime += hours(24); } if( !arrivalSchedules.empty() && arrivalTime < *arrivalSchedules.rbegin()) { arrivalTime += hours(24); } // round of the seconds departureTime -= seconds(departureTime.seconds()); if(arrivalTime.seconds()) { arrivalTime += seconds(60 - arrivalTime.seconds()); } // storage of the times departureSchedules.push_back(departureTime); arrivalSchedules.push_back(arrivalTime); } } set<JourneyPattern*> routes( _getRoutes( *line, servedStops, *_plannedDataSource ) ); if(routes.empty()) { stringstream routeQuery; routeQuery << "SELECT * FROM " << _database << ".CHAINAGE c " << "WHERE c.ref='" << chainage << "' AND c.jour=" << todayStr; DBResultSPtr routeResult(db->execQuery(routeQuery.str())); if(routeResult->next()) { string routeName(routeResult->getText("nom")); bool wayBack(routeResult->getText("sens") != "A"); _logCreation( "Creation of route "+ routeName ); JourneyPattern* result = new JourneyPattern( JourneyPatternTableSync::getId() ); result->setCommercialLine(line); line->addPath(result); result->setName(routeName); 
result->setWayBack(wayBack); result->addCodeBySource(*_plannedDataSource, string()); _env.getEditableRegistry<JourneyPattern>().add(boost::shared_ptr<JourneyPattern>(result)); routes.insert(result); size_t rank(0); BOOST_FOREACH(const JourneyPattern::StopWithDepartureArrivalAuthorization stop, servedStops) { boost::shared_ptr<LineStop> ls( new LineStop( LineStopTableSync::getId(), result, rank, rank+1 < servedStops.size() && stop._departure, rank > 0 && stop._arrival, *stop._metricOffset, **stop._stop.begin() ) ); ls->set<ScheduleInput>(stop._withTimes ? *stop._withTimes : true); ls->link(_env, true); _env.getEditableRegistry<LineStop>().add(ls); ++rank; } } } assert(!routes.empty()); ScheduledService* service(NULL); BOOST_FOREACH(JourneyPattern* route, routes) { boost::shared_lock<util::shared_recursive_mutex> sharedServicesLock( *route->sharedServicesMutex ); BOOST_FOREACH(Service* sservice, route->getAllServices()) { service = dynamic_cast<ScheduledService*>(sservice); if(!service) { continue; } if( service->isActive(today) && service->comparePlannedSchedules(departureSchedules, arrivalSchedules) ){ _logLoad( "Use of service "+ lexical_cast<string>(service->getKey()) +" ("+ lexical_cast<string>(departureSchedules[0]) +") on route "+ lexical_cast<string>(route->getKey()) +" ("+ route->getName() +")" ); service->addCodeBySource(dataSource, serviceRef); _services.insert(service); break; } service = NULL; } if(service) { break; } } if(!service) { if (!departureSchedules.empty() && !arrivalSchedules.empty()) { JourneyPattern* route(*routes.begin()); service = new ScheduledService( ScheduledServiceTableSync::getId(), string(), route ); service->setDataSchedules(departureSchedules, arrivalSchedules); service->setPath(route); service->addCodeBySource(dataSource, serviceRef); service->setActive(today); route->addService(*service, false); _env.getEditableRegistry<ScheduledService>().add(boost::shared_ptr<ScheduledService>(service)); _services.insert(service); 
_logCreation( "Creation of service ("+ lexical_cast<string>(departureSchedules[0]) +") on route "+ lexical_cast<string>(route->getKey()) +" ("+ route->getName() +")" ); } else { _logWarning( "Service (ref="+ serviceRef +") has empty departure or arrival schedules, not creating" ); } } } // 3 : loop on the planned services and remove current day of run if not linked to current source BOOST_FOREACH(const ImportableTableSync::ObjectBySource<CommercialLineTableSync>::Map::value_type& itLine, lines.getMap()) { BOOST_FOREACH(const ImportableTableSync::ObjectBySource<CommercialLineTableSync>::Map::mapped_type::value_type& obj, itLine.second) { BOOST_FOREACH(Path* route, obj->getPaths()) { // Avoid junctions if(!dynamic_cast<JourneyPattern*>(route)) { continue; } JourneyPattern* jp(static_cast<JourneyPattern*>(route)); if(!jp->hasLinkWithSource(*_plannedDataSource)) { continue; } boost::shared_lock<util::shared_recursive_mutex> sharedServicesLock( *jp->sharedServicesMutex ); BOOST_FOREACH(const Service* service, jp->getAllServices()) { const ScheduledService* sservice(dynamic_cast<const ScheduledService*>(service)); if( sservice && sservice->isActive(today) && !sservice->hasLinkWithSource(dataSource) ){ const_cast<ScheduledService*>(sservice)->setInactive(today); _logInfo( "Deactivating unlinked service "+ lexical_cast<string>(sservice->getKey()) + " on route "+ lexical_cast<string>(sservice->getRoute()->getKey()) +" (" + sservice->getRoute()->getName() +")" ); } } } } } return true; } IneoRealTimeFileFormat::Importer_::Importer_( util::Env& env, const impex::Import& import, impex::ImportLogLevel minLogLevel, const std::string& logPath, boost::optional<std::ostream&> outputStream, util::ParametersMap& pm ): Importer(env, import, minLogLevel, logPath, outputStream, pm), DatabaseReadImporter<IneoRealTimeFileFormat>(env, import, minLogLevel, logPath, outputStream, pm), PTFileFormat(env, import, minLogLevel, logPath, outputStream, pm) {} util::ParametersMap 
IneoRealTimeFileFormat::Importer_::_getParametersMap() const { ParametersMap map; if(_plannedDataSource.get()) { map.insert(PARAMETER_PLANNED_DATASOURCE_ID, _plannedDataSource->getKey()); } if(_courseId) { map.insert(PARAMETER_COURSE_ID, *_courseId); } if(_dbConnString) { map.insert(PARAMETER_DB_CONN_STRING, *_dbConnString); } if(!_stopCodePrefix.empty()) { map.insert(PARAMETER_STOP_CODE_PREFIX, _stopCodePrefix); } return map; } void IneoRealTimeFileFormat::Importer_::_setFromParametersMap( const util::ParametersMap& map ) { if(map.isDefined(PARAMETER_PLANNED_DATASOURCE_ID)) try { _plannedDataSource = DataSourceTableSync::Get(map.get<RegistryKeyType>(PARAMETER_PLANNED_DATASOURCE_ID), _env); } catch(ObjectNotFoundException<DataSource>&) { throw Exception("No such planned data source"); } _courseId = map.getOptional<string>(PARAMETER_COURSE_ID); _dbConnString = map.getOptional<string>(PARAMETER_DB_CONN_STRING); _stopCodePrefix = map.getDefault<string>(PARAMETER_STOP_CODE_PREFIX, ""); } db::DBTransaction IneoRealTimeFileFormat::Importer_::_save() const { DBTransaction transaction; if(_courseId) { BOOST_FOREACH(ScheduledService* service, _services) { JourneyPatternTableSync::Save(static_cast<JourneyPattern*>(service->getPath()), transaction); BOOST_FOREACH(LineStop* edge, static_cast<JourneyPattern*>(service->getPath())->getLineStops()) { LineStopTableSync::Save(edge, transaction); } ScheduledServiceTableSync::Save(service, transaction); } } else { BOOST_FOREACH(const Registry<JourneyPattern>::value_type& journeyPattern, _env.getRegistry<JourneyPattern>()) { JourneyPatternTableSync::Save(journeyPattern.second.get(), transaction); } BOOST_FOREACH(Registry<LineStop>::value_type lineStop, _env.getRegistry<LineStop>()) { LineStopTableSync::Save(lineStop.second.get(), transaction); } BOOST_FOREACH(const Registry<ScheduledService>::value_type& service, _env.getRegistry<ScheduledService>()) { ScheduledServiceTableSync::Save(service.second.get(), transaction); } 
}<|fim▁hole|><|fim▁end|>
return transaction; } } }
<|file_name|>response.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package handlers import ( "context" "fmt" "net/http" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/meta" metainternalversion "k8s.io/apimachinery/pkg/apis/meta/internalversion" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" metav1beta1 "k8s.io/apimachinery/pkg/apis/meta/v1beta1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apiserver/pkg/endpoints/handlers/negotiation" "k8s.io/apiserver/pkg/endpoints/handlers/responsewriters" ) // transformResponseObject takes an object loaded from storage and performs any necessary transformations. // Will write the complete response object. 
func transformResponseObject(ctx context.Context, scope RequestScope, req *http.Request, w http.ResponseWriter, statusCode int, mediaType negotiation.MediaTypeOptions, result runtime.Object) { // status objects are ignored for transformation if _, ok := result.(*metav1.Status); ok { responsewriters.WriteObject(statusCode, scope.Kind.GroupVersion(), scope.Serializer, result, w, req) return } // ensure the self link and empty list array are set if err := setObjectSelfLink(ctx, result, req, scope.Namer); err != nil { scope.err(err, w, req) return } trace := scope.Trace // If conversion was allowed by the scope, perform it before writing the response switch target := mediaType.Convert; { case target == nil: trace.Step("Writing response") responsewriters.WriteObject(statusCode, scope.Kind.GroupVersion(), scope.Serializer, result, w, req) case target.Kind == "PartialObjectMetadata" && target.GroupVersion() == metav1beta1.SchemeGroupVersion: partial, err := asV1Beta1PartialObjectMetadata(result) if err != nil { scope.err(err, w, req) return } if err := writeMetaInternalVersion(partial, statusCode, w, req, &scope, target.GroupVersion()); err != nil { scope.err(err, w, req) return } case target.Kind == "PartialObjectMetadataList" && target.GroupVersion() == metav1beta1.SchemeGroupVersion: trace.Step("Processing list items") partial, err := asV1Beta1PartialObjectMetadataList(result) if err != nil { scope.err(err, w, req) return } if err := writeMetaInternalVersion(partial, statusCode, w, req, &scope, target.GroupVersion()); err != nil { scope.err(err, w, req) return } case target.Kind == "Table" && target.GroupVersion() == metav1beta1.SchemeGroupVersion: opts := &metav1beta1.TableOptions{} trace.Step("Decoding parameters") if err := metav1beta1.ParameterCodec.DecodeParameters(req.URL.Query(), metav1beta1.SchemeGroupVersion, opts); err != nil { scope.err(err, w, req) return } table, err := asV1Beta1Table(ctx, result, opts, scope) if err != nil { scope.err(err, w, req) return 
} if err := writeMetaInternalVersion(table, statusCode, w, req, &scope, target.GroupVersion()); err != nil { scope.err(err, w, req) return } default: // this block should only be hit if scope AllowsConversion is incorrect accepted, _ := negotiation.MediaTypesForSerializer(metainternalversion.Codecs) err := negotiation.NewNotAcceptableError(accepted) scope.err(err, w, req) } } // errNotAcceptable indicates Accept negotiation has failed type errNotAcceptable struct { message string } func newNotAcceptableError(message string) error { return errNotAcceptable{message} } func (e errNotAcceptable) Error() string { return e.message } func (e errNotAcceptable) Status() metav1.Status { return metav1.Status{ Status: metav1.StatusFailure, Code: http.StatusNotAcceptable, Reason: metav1.StatusReason("NotAcceptable"),<|fim▁hole|>func asV1Beta1Table(ctx context.Context, result runtime.Object, opts *metav1beta1.TableOptions, scope RequestScope) (runtime.Object, error) { trace := scope.Trace trace.Step("Converting to table") table, err := scope.TableConvertor.ConvertToTable(ctx, result, opts) if err != nil { return nil, err } trace.Step("Processing rows") for i := range table.Rows { item := &table.Rows[i] switch opts.IncludeObject { case metav1beta1.IncludeObject: item.Object.Object, err = scope.Convertor.ConvertToVersion(item.Object.Object, scope.Kind.GroupVersion()) if err != nil { return nil, err } // TODO: rely on defaulting for the value here? case metav1beta1.IncludeMetadata, "": m, err := meta.Accessor(item.Object.Object) if err != nil { return nil, err } // TODO: turn this into an internal type and do conversion in order to get object kind automatically set? partial := meta.AsPartialObjectMetadata(m) partial.GetObjectKind().SetGroupVersionKind(metav1beta1.SchemeGroupVersion.WithKind("PartialObjectMetadata")) item.Object.Object = partial case metav1beta1.IncludeNone: item.Object.Object = nil default: // TODO: move this to validation on the table options? 
err = errors.NewBadRequest(fmt.Sprintf("unrecognized includeObject value: %q", opts.IncludeObject)) return nil, err } } return table, nil } func asV1Beta1PartialObjectMetadata(result runtime.Object) (runtime.Object, error) { if meta.IsListType(result) { // TODO: this should be calculated earlier err := newNotAcceptableError(fmt.Sprintf("you requested PartialObjectMetadata, but the requested object is a list (%T)", result)) return nil, err } m, err := meta.Accessor(result) if err != nil { return nil, err } partial := meta.AsPartialObjectMetadata(m) partial.GetObjectKind().SetGroupVersionKind(metav1beta1.SchemeGroupVersion.WithKind("PartialObjectMetadata")) return partial, nil } func asV1Beta1PartialObjectMetadataList(result runtime.Object) (runtime.Object, error) { if !meta.IsListType(result) { // TODO: this should be calculated earlier return nil, newNotAcceptableError(fmt.Sprintf("you requested PartialObjectMetadataList, but the requested object is not a list (%T)", result)) } list := &metav1beta1.PartialObjectMetadataList{} err := meta.EachListItem(result, func(obj runtime.Object) error { m, err := meta.Accessor(obj) if err != nil { return err } partial := meta.AsPartialObjectMetadata(m) partial.GetObjectKind().SetGroupVersionKind(metav1beta1.SchemeGroupVersion.WithKind("PartialObjectMetadata")) list.Items = append(list.Items, partial) return nil }) if err != nil { return nil, err } return list, nil } func writeMetaInternalVersion(obj runtime.Object, statusCode int, w http.ResponseWriter, req *http.Request, restrictions negotiation.EndpointRestrictions, target schema.GroupVersion) error { // renegotiate under the internal version _, info, err := negotiation.NegotiateOutputMediaType(req, metainternalversion.Codecs, restrictions) if err != nil { return err } encoder := metainternalversion.Codecs.EncoderForVersion(info.Serializer, target) responsewriters.SerializeObject(info.MediaType, encoder, w, req, statusCode, obj) return nil }<|fim▁end|>
Message: e.Error(), } }
<|file_name|>value.py<|end_file_name|><|fim▁begin|># # Copyright 2009-2010 Goran Sterjov # This file is part of Myelin. # # Myelin is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Myelin is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Myelin. If not, see <http://www.gnu.org/licenses/>. # import ctypes from type import Type # get library import myelin.library _lib = myelin.library.get_library() _types = [] def add_type (klass): _types.append (klass) def get_type (type): for klass in _types: if klass._class.get_type().get_atom() == type.get_atom(): return klass return None def get_types (): return _types class Value (object): def __init__ (self, ptr = None): if ptr is None: ptr = _lib.myelin_value_new () self._ptr = ptr def __del__ (self): _lib.myelin_value_unref (self) def __repr__ (self): return ("<%s.%s object at %#x with an instance of type %s at %#x>" % (self.__module__, self.__class__.__name__, id(self), self.get_type().get_name(), self.as_pointer())) @classmethod def from_pointer (cls, ptr): if ptr is None: raise ValueError ("Value pointer cannot be 'None'") instance = cls (ptr) _lib.myelin_value_ref (instance) return instance def from_param (self): return self._ptr def get (self): # empty value if self.is_empty(): return None # get value type<|fim▁hole|> # convert value types if not type.is_pointer() and not type.is_reference(): # fundamental types if atom == Type.type_bool (): return self.get_bool () elif atom == Type.type_char (): return self.get_char () elif atom == Type.type_uchar (): return 
self.get_uchar () elif atom == Type.type_int (): return self.get_int () elif atom == Type.type_uint (): return self.get_uint () elif atom == Type.type_long (): return self.get_long () elif atom == Type.type_ulong (): return self.get_ulong () elif atom == Type.type_int64 (): return self.get_int64 () elif atom == Type.type_uint64 (): return self.get_uint64 () elif atom == Type.type_float (): return self.get_float () elif atom == Type.type_double (): return self.get_double () # elif atom == Type.type_string (): return self.get_string () # convert value to meta class instance class_type = get_type (type) if class_type is not None: return class_type (instance = self) # dont know how to convert value so just return it as is else: return self def set (self, value, atom = None): from myelin.module import MetaObject # convert python types if type(value) is bool: self.set_bool (value) # set the right integer type elif type(value) is int or type(value) is long: if atom is not None: if atom == Type.type_char(): self.set_char (value) elif atom == Type.type_uchar(): self.set_uchar (value) elif atom == Type.type_int(): self.set_int (value) elif atom == Type.type_uint(): self.set_uint (value) elif atom == Type.type_long(): self.set_long (value) elif atom == Type.type_ulong(): self.set_ulong (value) # for long only elif type(value) is long: if atom == Type.type_int64(): self.set_int64 (value) elif atom == Type.type_uint64(): self.set_uint64 (value) else: if type(value) is int: self.set_long (value) else: self.set_int64 (value) elif type(value) is float: if atom is not None: if atom == Type.type_float(): self.set_float (value) elif atom == Type.type_double(): self.set_double (value) else: self.set_double (value) elif type(value) is str: self.set_string (value) # set meta object instance elif isinstance(value, MetaObject): val = value._object.get_instance() self.set_pointer (val.get_type(), val.as_pointer()) else: raise TypeError ("Cannot determine an equivalent type for the " \ 
"value type '%s'. Conversion failed." % type(value)) def get_type (self): type = _lib.myelin_value_get_type (self) return Type.from_pointer (type) def is_empty (self): return _lib.myelin_value_is_empty (self) def clear (self): _lib.myelin_value_clear (self) def get_bool (self): return _lib.myelin_value_get_bool (self) def set_bool (self, value): _lib.myelin_value_set_bool (self, value) def get_char (self): return _lib.myelin_value_get_char (self) def set_char (self, value): _lib.myelin_value_set_char (self, value) def get_uchar (self): return _lib.myelin_value_get_uchar (self) def set_uchar (self, value): _lib.myelin_value_set_uchar (self, value) def get_int (self): return _lib.myelin_value_get_int (self) def set_int (self, value): _lib.myelin_value_set_int (self, value) def get_uint (self): return _lib.myelin_value_get_uint (self) def set_uint (self, value): _lib.myelin_value_set_uint (self, value) def get_long (self): return _lib.myelin_value_get_long (self) def set_long (self, value): _lib.myelin_value_set_long (self, value) def get_ulong (self): return _lib.myelin_value_get_ulong (self) def set_ulong (self, value): _lib.myelin_value_set_ulong (self, value) def get_int64 (self): return _lib.myelin_value_get_int64 (self) def set_int64 (self, value): _lib.myelin_value_set_int64 (self, value) def get_uint64 (self): return _lib.myelin_value_get_uint64 (self) def set_uint64 (self, value): _lib.myelin_value_set_uint64 (self, value) def get_float (self): return _lib.myelin_value_get_float (self) def set_float (self, value): _lib.myelin_value_set_float (self, value) def get_double (self): return _lib.myelin_value_get_double (self) def set_double (self, value): _lib.myelin_value_set_double (self, value) def get_string (self): return _lib.myelin_value_get_string (self) def set_string (self, value): _lib.myelin_value_set_string (self, value) def as_pointer (self): return _lib.myelin_value_as_pointer (self) def set_pointer (self, type, pointer): 
_lib.myelin_value_set_pointer (self, type, pointer) ############################################### # Prototypes # ############################################### _lib.myelin_value_new.argtypes = None _lib.myelin_value_new.restype = ctypes.c_void_p _lib.myelin_value_ref.argtypes = [Value] _lib.myelin_value_ref.restype = ctypes.c_void_p _lib.myelin_value_unref.argtypes = [Value] _lib.myelin_value_unref.restype = None _lib.myelin_value_get_type.argtypes = [Value] _lib.myelin_value_get_type.restype = ctypes.c_void_p _lib.myelin_value_is_empty.argtypes = [Value] _lib.myelin_value_is_empty.restype = ctypes.c_bool _lib.myelin_value_clear.argtypes = [Value] _lib.myelin_value_clear.restype = None # boolean _lib.myelin_value_get_bool.argtypes = [Value] _lib.myelin_value_get_bool.restype = ctypes.c_bool _lib.myelin_value_set_bool.argtypes = [Value, ctypes.c_bool] _lib.myelin_value_set_bool.restype = None # char _lib.myelin_value_get_char.argtypes = [Value] _lib.myelin_value_get_char.restype = ctypes.c_char _lib.myelin_value_set_char.argtypes = [Value, ctypes.c_char] _lib.myelin_value_set_char.restype = None # uchar _lib.myelin_value_get_uchar.argtypes = [Value] _lib.myelin_value_get_uchar.restype = ctypes.c_ubyte _lib.myelin_value_set_uchar.argtypes = [Value, ctypes.c_ubyte] _lib.myelin_value_set_uchar.restype = None # integer _lib.myelin_value_get_int.argtypes = [Value] _lib.myelin_value_get_int.restype = ctypes.c_int _lib.myelin_value_set_int.argtypes = [Value, ctypes.c_int] _lib.myelin_value_set_int.restype = None # uint _lib.myelin_value_get_uint.argtypes = [Value] _lib.myelin_value_get_uint.restype = ctypes.c_bool _lib.myelin_value_set_uint.argtypes = [Value, ctypes.c_uint] _lib.myelin_value_set_uint.restype = None # long _lib.myelin_value_get_long.argtypes = [Value] _lib.myelin_value_get_long.restype = ctypes.c_long _lib.myelin_value_set_long.argtypes = [Value, ctypes.c_long] _lib.myelin_value_set_long.restype = None # ulong _lib.myelin_value_get_ulong.argtypes = 
[Value] _lib.myelin_value_get_ulong.restype = ctypes.c_ulong _lib.myelin_value_set_ulong.argtypes = [Value, ctypes.c_ulong] _lib.myelin_value_set_ulong.restype = None # 64bit integer _lib.myelin_value_get_int64.argtypes = [Value] _lib.myelin_value_get_int64.restype = ctypes.c_int64 _lib.myelin_value_set_int64.argtypes = [Value, ctypes.c_int64] _lib.myelin_value_set_int64.restype = None # unsigned 64bit integer _lib.myelin_value_get_uint64.argtypes = [Value] _lib.myelin_value_get_uint64.restype = ctypes.c_uint64 _lib.myelin_value_set_uint64.argtypes = [Value, ctypes.c_uint64] _lib.myelin_value_set_uint64.restype = None # float _lib.myelin_value_get_float.argtypes = [Value] _lib.myelin_value_get_float.restype = ctypes.c_float _lib.myelin_value_set_float.argtypes = [Value, ctypes.c_float] _lib.myelin_value_set_float.restype = None # double _lib.myelin_value_get_double.argtypes = [Value] _lib.myelin_value_get_double.restype = ctypes.c_double _lib.myelin_value_set_double.argtypes = [Value, ctypes.c_double] _lib.myelin_value_set_double.restype = None # string _lib.myelin_value_get_string.argtypes = [Value] _lib.myelin_value_get_string.restype = ctypes.c_char_p _lib.myelin_value_set_string.argtypes = [Value, ctypes.c_char_p] _lib.myelin_value_set_string.restype = None # pointer _lib.myelin_value_as_pointer.argtypes = [Value] _lib.myelin_value_as_pointer.restype = ctypes.c_void_p _lib.myelin_value_set_pointer.argtypes = [Value, Type, ctypes.c_void_p] _lib.myelin_value_set_pointer.restype = None<|fim▁end|>
type = self.get_type() atom = type.get_atom()
<|file_name|>pack.cc<|end_file_name|><|fim▁begin|>/* * nextpnr -- Next Generation Place and Route * * Copyright (C) 2018-19 gatecat <gatecat@ds0.me> * Copyright (C) 2020 Pepijn de Vos <pepijn@symbioticeda.com> * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * */ #include <algorithm> #include <iostream> #include <iterator> #include "cells.h" #include "design_utils.h" #include "log.h" #include "util.h" NEXTPNR_NAMESPACE_BEGIN static void make_dummy_alu(Context *ctx, int alu_idx, CellInfo *ci, CellInfo *packed_head, std::vector<std::unique_ptr<CellInfo>> &new_cells) { if ((alu_idx % 2) == 0) { return; } std::unique_ptr<CellInfo> dummy = create_generic_cell(ctx, id_SLICE, ci->name.str(ctx) + "_DUMMY_ALULC"); if (ctx->verbose) { log_info("packed dummy ALU %s.\n", ctx->nameOf(dummy.get())); } dummy->params[id_ALU_MODE] = std::string("C2L"); // add to cluster dummy->cluster = packed_head->name; dummy->constr_z = alu_idx % 6; dummy->constr_x = alu_idx / 6; dummy->constr_y = 0; packed_head->constr_children.push_back(dummy.get()); new_cells.push_back(std::move(dummy)); } // replace ALU with LUT static void pack_alus(Context *ctx) { log_info("Packing ALUs..\n"); // cell name, CIN net name pool<std::pair<IdString, IdString>> alu_heads; // collect heads for (auto &cell : ctx->cells) { CellInfo *ci = cell.second.get(); 
if (is_alu(ctx, ci)) { NetInfo *cin = ci->ports.at(id_CIN).net; CellInfo *cin_ci = cin->driver.cell; if (cin == nullptr || cin_ci == nullptr) { log_error("CIN disconnected at ALU:%s\n", ctx->nameOf(ci)); continue; } if (!is_alu(ctx, cin_ci) || cin->users.size() > 1) { if (ctx->verbose) { log_info("ALU head found %s. CIN net is %s\n", ctx->nameOf(ci), ctx->nameOf(cin)); } alu_heads.insert(std::make_pair(ci->name, cin->name)); } } } pool<IdString> packed_cells; pool<IdString> delete_nets; std::vector<std::unique_ptr<CellInfo>> new_cells; for (auto &head : alu_heads) { CellInfo *ci = ctx->cells[head.first].get(); IdString cin_netId = head.second; if (ctx->verbose) { log_info("cell '%s' is of type '%s'\n", ctx->nameOf(ci), ci->type.c_str(ctx)); } std::unique_ptr<CellInfo> packed_head = create_generic_cell(ctx, id_SLICE, ci->name.str(ctx) + "_HEAD_ALULC"); if (ctx->verbose) { log_info("packed ALU head into %s. CIN net is %s\n", ctx->nameOf(packed_head.get()), ctx->nameOf(cin_netId)); } connect_port(ctx, ctx->nets[ctx->id("$PACKER_VCC_NET")].get(), packed_head.get(), id_C); if (cin_netId == ctx->id("$PACKER_GND_NET")) { // CIN = 0 packed_head->params[id_ALU_MODE] = std::string("C2L"); } else { if (cin_netId == ctx->id("$PACKER_VCC_NET")) { // CIN = 1 packed_head->params[id_ALU_MODE] = std::string("ONE2C");<|fim▁hole|> packed_head->params[id_ALU_MODE] = std::string("0"); // ADD } } int alu_idx = 1; do { // go through the ALU chain auto alu_bel = ci->attrs.find(ctx->id("BEL")); if (alu_bel != ci->attrs.end()) { log_error("ALU %s placement restrictions are not supported.\n", ctx->nameOf(ci)); return; } // remove cell packed_cells.insert(ci->name); // CIN/COUT are hardwired, delete disconnect_port(ctx, ci, id_CIN); NetInfo *cout = ci->ports.at(id_COUT).net; disconnect_port(ctx, ci, id_COUT); std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, id_SLICE, ci->name.str(ctx) + "_ALULC"); if (ctx->verbose) { log_info("packed ALU into %s. 
COUT net is %s\n", ctx->nameOf(packed.get()), ctx->nameOf(cout)); } int mode = int_or_default(ci->params, id_ALU_MODE); packed->params[id_ALU_MODE] = mode; if (mode == 9) { // MULT connect_port(ctx, ctx->nets[ctx->id("$PACKER_GND_NET")].get(), packed.get(), id_C); } else { connect_port(ctx, ctx->nets[ctx->id("$PACKER_VCC_NET")].get(), packed.get(), id_C); } // add to cluster packed->cluster = packed_head->name; packed->constr_z = alu_idx % 6; packed->constr_x = alu_idx / 6; packed->constr_y = 0; packed_head->constr_children.push_back(packed.get()); ++alu_idx; // connect all remainig ports replace_port(ci, id_SUM, packed.get(), id_F); switch (mode) { case 0: // ADD replace_port(ci, id_I0, packed.get(), id_B); replace_port(ci, id_I1, packed.get(), id_D); break; case 1: // SUB replace_port(ci, id_I0, packed.get(), id_A); replace_port(ci, id_I1, packed.get(), id_D); break; case 5: // LE replace_port(ci, id_I0, packed.get(), id_A); replace_port(ci, id_I1, packed.get(), id_B); break; case 9: // MULT replace_port(ci, id_I0, packed.get(), id_A); replace_port(ci, id_I1, packed.get(), id_B); disconnect_port(ctx, packed.get(), id_D); connect_port(ctx, ctx->nets[ctx->id("$PACKER_VCC_NET")].get(), packed.get(), id_D); break; default: replace_port(ci, id_I0, packed.get(), id_A); replace_port(ci, id_I1, packed.get(), id_B); replace_port(ci, id_I3, packed.get(), id_D); } new_cells.push_back(std::move(packed)); if (cout != nullptr && cout->users.size() > 0) { // if COUT used by logic if ((cout->users.size() > 1) || (!is_alu(ctx, cout->users.at(0).cell))) { if (ctx->verbose) { log_info("COUT is used by logic\n"); } // make gate C->logic std::unique_ptr<CellInfo> packed_tail = create_generic_cell(ctx, id_SLICE, ci->name.str(ctx) + "_TAIL_ALULC"); if (ctx->verbose) { log_info("packed ALU tail into %s. 
COUT net is %s\n", ctx->nameOf(packed_tail.get()), ctx->nameOf(cout)); } packed_tail->params[id_ALU_MODE] = std::string("C2L"); connect_port(ctx, cout, packed_tail.get(), id_F); // add to cluster packed_tail->cluster = packed_head->name; packed_tail->constr_z = alu_idx % 6; packed_tail->constr_x = alu_idx / 6; packed_tail->constr_y = 0; ++alu_idx; packed_head->constr_children.push_back(packed_tail.get()); new_cells.push_back(std::move(packed_tail)); make_dummy_alu(ctx, alu_idx, ci, packed_head.get(), new_cells); break; } // next ALU ci = cout->users.at(0).cell; // if ALU is too big if (alu_idx == (ctx->gridDimX - 2) * 6 - 1) { log_error("ALU %s is the %dth in the chain. Such long chains are not supported.\n", ctx->nameOf(ci), alu_idx); break; } } else { // COUT is unused if (ctx->verbose) { log_info("cell is the ALU tail. Index is %d\n", alu_idx); } make_dummy_alu(ctx, alu_idx, ci, packed_head.get(), new_cells); break; } } while (1); // add head to the cluster packed_head->cluster = packed_head->name; new_cells.push_back(std::move(packed_head)); } // actual delete, erase and move cells/nets for (auto pcell : packed_cells) { ctx->cells.erase(pcell); } for (auto dnet : delete_nets) { ctx->nets.erase(dnet); } for (auto &ncell : new_cells) { ctx->cells[ncell->name] = std::move(ncell); } } // pack MUX2_LUT5 static void pack_mux2_lut5(Context *ctx, CellInfo *ci, pool<IdString> &packed_cells, pool<IdString> &delete_nets, std::vector<std::unique_ptr<CellInfo>> &new_cells) { if (bool_or_default(ci->attrs, ctx->id("SINGLE_INPUT_MUX"))) { // find the muxed LUT NetInfo *i1 = ci->ports.at(id_I1).net; CellInfo *lut1 = net_driven_by(ctx, i1, is_lut, id_F); if (lut1 == nullptr) { log_error("MUX2_LUT5 '%s' port I1 isn't connected to the LUT\n", ctx->nameOf(ci)); return; } if (ctx->verbose) { log_info("found attached lut1 %s\n", ctx->nameOf(lut1)); } // XXX enable the placement constraints auto mux_bel = ci->attrs.find(ctx->id("BEL")); auto lut1_bel = 
lut1->attrs.find(ctx->id("BEL")); if (lut1_bel != lut1->attrs.end() || mux_bel != ci->attrs.end()) { log_error("MUX2_LUT5 '%s' placement restrictions are not supported yet\n", ctx->nameOf(ci)); return; } std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, ctx->id("GW_MUX2_LUT5"), ci->name.str(ctx) + "_LC"); if (ctx->verbose) { log_info("packed cell %s into %s\n", ctx->nameOf(ci), ctx->nameOf(packed.get())); } // mux is the cluster root packed->cluster = packed->name; lut1->cluster = packed->name; lut1->constr_z = -ctx->mux_0_z + 1; packed->constr_children.clear(); // reconnect MUX ports replace_port(ci, id_O, packed.get(), id_OF); replace_port(ci, id_I1, packed.get(), id_I1); // remove cells packed_cells.insert(ci->name); // new MUX cell new_cells.push_back(std::move(packed)); } else { // find the muxed LUTs NetInfo *i0 = ci->ports.at(id_I0).net; NetInfo *i1 = ci->ports.at(id_I1).net; CellInfo *lut0 = net_driven_by(ctx, i0, is_lut, id_F); CellInfo *lut1 = net_driven_by(ctx, i1, is_lut, id_F); if (lut0 == nullptr || lut1 == nullptr) { log_error("MUX2_LUT5 '%s' port I0 or I1 isn't connected to the LUT\n", ctx->nameOf(ci)); return; } if (ctx->verbose) { log_info("found attached lut0 %s\n", ctx->nameOf(lut0)); log_info("found attached lut1 %s\n", ctx->nameOf(lut1)); } // XXX enable the placement constraints auto mux_bel = ci->attrs.find(ctx->id("BEL")); auto lut0_bel = lut0->attrs.find(ctx->id("BEL")); auto lut1_bel = lut1->attrs.find(ctx->id("BEL")); if (lut0_bel != lut0->attrs.end() || lut1_bel != lut1->attrs.end() || mux_bel != ci->attrs.end()) { log_error("MUX2_LUT5 '%s' placement restrictions are not supported yet\n", ctx->nameOf(ci)); return; } std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, ctx->id("GW_MUX2_LUT5"), ci->name.str(ctx) + "_LC"); if (ctx->verbose) { log_info("packed cell %s into %s\n", ctx->nameOf(ci), ctx->nameOf(packed.get())); } // mux is the cluster root packed->cluster = packed->name; lut0->cluster = packed->name; 
lut0->constr_z = -ctx->mux_0_z; lut1->cluster = packed->name; lut1->constr_z = -ctx->mux_0_z + 1; packed->constr_children.clear(); // reconnect MUX ports replace_port(ci, id_O, packed.get(), id_OF); replace_port(ci, id_S0, packed.get(), id_SEL); replace_port(ci, id_I0, packed.get(), id_I0); replace_port(ci, id_I1, packed.get(), id_I1); // remove cells packed_cells.insert(ci->name); // new MUX cell new_cells.push_back(std::move(packed)); } } // Common MUX2 packing routine static void pack_mux2_lut(Context *ctx, CellInfo *ci, bool (*pred)(const BaseCtx *, const CellInfo *), char const type_suffix, IdString const type_id, int const x[2], int const z[2], pool<IdString> &packed_cells, pool<IdString> &delete_nets, std::vector<std::unique_ptr<CellInfo>> &new_cells) { // find the muxed LUTs NetInfo *i0 = ci->ports.at(id_I0).net; NetInfo *i1 = ci->ports.at(id_I1).net; CellInfo *mux0 = net_driven_by(ctx, i0, pred, id_OF); CellInfo *mux1 = net_driven_by(ctx, i1, pred, id_OF); if (mux0 == nullptr || mux1 == nullptr) { log_error("MUX2_LUT%c '%s' port I0 or I1 isn't connected to the MUX\n", type_suffix, ctx->nameOf(ci)); return; } if (ctx->verbose) { log_info("found attached mux0 %s\n", ctx->nameOf(mux0)); log_info("found attached mux1 %s\n", ctx->nameOf(mux1)); } // XXX enable the placement constraints auto mux_bel = ci->attrs.find(ctx->id("BEL")); auto mux0_bel = mux0->attrs.find(ctx->id("BEL")); auto mux1_bel = mux1->attrs.find(ctx->id("BEL")); if (mux0_bel != mux0->attrs.end() || mux1_bel != mux1->attrs.end() || mux_bel != ci->attrs.end()) { log_error("MUX2_LUT%c '%s' placement restrictions are not supported yet\n", type_suffix, ctx->nameOf(ci)); return; } std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, type_id, ci->name.str(ctx) + "_LC"); if (ctx->verbose) { log_info("packed cell %s into %s\n", ctx->nameOf(ci), ctx->nameOf(packed.get())); } // mux is the cluster root packed->cluster = packed->name; mux0->cluster = packed->name; mux0->constr_x = x[0]; 
mux0->constr_y = 0; mux0->constr_z = z[0]; for (auto &child : mux0->constr_children) { child->cluster = packed->name; child->constr_x += mux0->constr_x; child->constr_z += mux0->constr_z; packed->constr_children.push_back(child); } mux0->constr_children.clear(); mux1->cluster = packed->name; mux1->constr_x = x[1]; mux0->constr_y = 0; mux1->constr_z = z[1]; for (auto &child : mux1->constr_children) { child->cluster = packed->name; child->constr_x += mux1->constr_x; child->constr_z += mux1->constr_z; packed->constr_children.push_back(child); } mux1->constr_children.clear(); packed->constr_children.push_back(mux0); packed->constr_children.push_back(mux1); // reconnect MUX ports replace_port(ci, id_O, packed.get(), id_OF); replace_port(ci, id_S0, packed.get(), id_SEL); replace_port(ci, id_I0, packed.get(), id_I0); replace_port(ci, id_I1, packed.get(), id_I1); // remove cells packed_cells.insert(ci->name); // new MUX cell new_cells.push_back(std::move(packed)); } // pack MUX2_LUT6 static void pack_mux2_lut6(Context *ctx, CellInfo *ci, pool<IdString> &packed_cells, pool<IdString> &delete_nets, std::vector<std::unique_ptr<CellInfo>> &new_cells) { static int x[] = {0, 0}; static int z[] = {+1, -1}; pack_mux2_lut(ctx, ci, is_gw_mux2_lut5, '6', id_GW_MUX2_LUT6, x, z, packed_cells, delete_nets, new_cells); } // pack MUX2_LUT7 static void pack_mux2_lut7(Context *ctx, CellInfo *ci, pool<IdString> &packed_cells, pool<IdString> &delete_nets, std::vector<std::unique_ptr<CellInfo>> &new_cells) { static int x[] = {0, 0}; static int z[] = {+2, -2}; pack_mux2_lut(ctx, ci, is_gw_mux2_lut6, '7', id_GW_MUX2_LUT7, x, z, packed_cells, delete_nets, new_cells); } // pack MUX2_LUT8 static void pack_mux2_lut8(Context *ctx, CellInfo *ci, pool<IdString> &packed_cells, pool<IdString> &delete_nets, std::vector<std::unique_ptr<CellInfo>> &new_cells) { static int x[] = {1, 0}; static int z[] = {-4, -4}; pack_mux2_lut(ctx, ci, is_gw_mux2_lut7, '8', id_GW_MUX2_LUT8, x, z, packed_cells, delete_nets, 
new_cells); } // Pack wide LUTs static void pack_wideluts(Context *ctx) { log_info("Packing wide LUTs..\n"); pool<IdString> packed_cells; pool<IdString> delete_nets; std::vector<std::unique_ptr<CellInfo>> new_cells; pool<IdString> mux2lut6; pool<IdString> mux2lut7; pool<IdString> mux2lut8; // do MUX2_LUT5 and collect LUT6/7/8 log_info("Packing LUT5s..\n"); for (auto &cell : ctx->cells) { CellInfo *ci = cell.second.get(); if (ctx->verbose) { log_info("cell '%s' is of type '%s'\n", ctx->nameOf(ci), ci->type.c_str(ctx)); } if (is_widelut(ctx, ci)) { if (is_mux2_lut5(ctx, ci)) { pack_mux2_lut5(ctx, ci, packed_cells, delete_nets, new_cells); } else { if (is_mux2_lut6(ctx, ci)) { mux2lut6.insert(ci->name); } else { if (is_mux2_lut7(ctx, ci)) { mux2lut7.insert(ci->name); } else { if (is_mux2_lut8(ctx, ci)) { mux2lut8.insert(ci->name); } } } } } } // do MUX_LUT6 log_info("Packing LUT6s..\n"); for (auto &cell_name : mux2lut6) { pack_mux2_lut6(ctx, ctx->cells[cell_name].get(), packed_cells, delete_nets, new_cells); } // do MUX_LUT7 log_info("Packing LUT7s..\n"); for (auto &cell_name : mux2lut7) { pack_mux2_lut7(ctx, ctx->cells[cell_name].get(), packed_cells, delete_nets, new_cells); } // do MUX_LUT8 log_info("Packing LUT8s..\n"); for (auto &cell_name : mux2lut8) { pack_mux2_lut8(ctx, ctx->cells[cell_name].get(), packed_cells, delete_nets, new_cells); } // actual delete, erase and move cells/nets for (auto pcell : packed_cells) { ctx->cells.erase(pcell); } for (auto dnet : delete_nets) { ctx->nets.erase(dnet); } for (auto &ncell : new_cells) { ctx->cells[ncell->name] = std::move(ncell); } } // Pack LUTs and LUT-FF pairs static void pack_lut_lutffs(Context *ctx) { log_info("Packing LUT-FFs..\n"); pool<IdString> packed_cells; std::vector<std::unique_ptr<CellInfo>> new_cells; for (auto &cell : ctx->cells) { CellInfo *ci = cell.second.get(); if (ctx->verbose) log_info("cell '%s' is of type '%s'\n", ctx->nameOf(ci), ci->type.c_str(ctx)); if (is_lut(ctx, ci)) { 
std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, ctx->id("SLICE"), ci->name.str(ctx) + "_LC"); for (auto &attr : ci->attrs) packed->attrs[attr.first] = attr.second; packed_cells.insert(ci->name); if (ctx->verbose) log_info("packed cell %s into %s\n", ctx->nameOf(ci), ctx->nameOf(packed.get())); // See if we can pack into a DFF // TODO: LUT cascade NetInfo *o = ci->ports.at(ctx->id("F")).net; CellInfo *dff = net_only_drives(ctx, o, is_ff, ctx->id("D"), true); auto lut_bel = ci->attrs.find(ctx->id("BEL")); bool packed_dff = false; if (dff) { if (ctx->verbose) log_info("found attached dff %s\n", ctx->nameOf(dff)); auto dff_bel = dff->attrs.find(ctx->id("BEL")); if (lut_bel != ci->attrs.end() && dff_bel != dff->attrs.end() && lut_bel->second != dff_bel->second) { // Locations don't match, can't pack } else { lut_to_lc(ctx, ci, packed.get(), false); dff_to_lc(ctx, dff, packed.get(), false); ctx->nets.erase(o->name); if (dff_bel != dff->attrs.end()) packed->attrs[ctx->id("BEL")] = dff_bel->second; packed_cells.insert(dff->name); if (ctx->verbose) log_info("packed cell %s into %s\n", ctx->nameOf(dff), ctx->nameOf(packed.get())); packed_dff = true; } } if (!packed_dff) { lut_to_lc(ctx, ci, packed.get(), true); } new_cells.push_back(std::move(packed)); } } for (auto pcell : packed_cells) { ctx->cells.erase(pcell); } for (auto &ncell : new_cells) { ctx->cells[ncell->name] = std::move(ncell); } } // Pack FFs not packed as LUTFFs static void pack_nonlut_ffs(Context *ctx) { log_info("Packing non-LUT FFs..\n"); pool<IdString> packed_cells; std::vector<std::unique_ptr<CellInfo>> new_cells; for (auto &cell : ctx->cells) { CellInfo *ci = cell.second.get(); if (is_ff(ctx, ci)) { std::unique_ptr<CellInfo> packed = create_generic_cell(ctx, ctx->id("SLICE"), ci->name.str(ctx) + "_DFFLC"); for (auto &attr : ci->attrs) packed->attrs[attr.first] = attr.second; if (ctx->verbose) log_info("packed cell %s into %s\n", ctx->nameOf(ci), ctx->nameOf(packed.get())); 
packed_cells.insert(ci->name); dff_to_lc(ctx, ci, packed.get(), true); new_cells.push_back(std::move(packed)); } } for (auto pcell : packed_cells) { ctx->cells.erase(pcell); } for (auto &ncell : new_cells) { ctx->cells[ncell->name] = std::move(ncell); } } // Merge a net into a constant net static void set_net_constant(const Context *ctx, NetInfo *orig, NetInfo *constnet, bool constval) { orig->driver.cell = nullptr; for (auto user : orig->users) { if (user.cell != nullptr) { CellInfo *uc = user.cell; if (ctx->verbose) log_info("%s user %s\n", ctx->nameOf(orig), ctx->nameOf(uc)); if ((is_lut(ctx, uc) || is_lc(ctx, uc)) && (user.port.str(ctx).at(0) == 'I') && !constval) { uc->ports[user.port].net = nullptr; } else { uc->ports[user.port].net = constnet; constnet->users.push_back(user); } } } orig->users.clear(); } // Pack constants (simple implementation) static void pack_constants(Context *ctx) { log_info("Packing constants..\n"); std::unique_ptr<CellInfo> gnd_cell = create_generic_cell(ctx, ctx->id("SLICE"), "$PACKER_GND"); gnd_cell->params[ctx->id("INIT")] = Property(0, 1 << 4); std::unique_ptr<NetInfo> gnd_net = std::unique_ptr<NetInfo>(new NetInfo); gnd_net->name = ctx->id("$PACKER_GND_NET"); gnd_net->driver.cell = gnd_cell.get(); gnd_net->driver.port = ctx->id("F"); gnd_cell->ports.at(ctx->id("F")).net = gnd_net.get(); std::unique_ptr<CellInfo> vcc_cell = create_generic_cell(ctx, ctx->id("SLICE"), "$PACKER_VCC"); // Fill with 1s vcc_cell->params[ctx->id("INIT")] = Property(Property::S1).extract(0, (1 << 4), Property::S1); std::unique_ptr<NetInfo> vcc_net = std::unique_ptr<NetInfo>(new NetInfo); vcc_net->name = ctx->id("$PACKER_VCC_NET"); vcc_net->driver.cell = vcc_cell.get(); vcc_net->driver.port = ctx->id("F"); vcc_cell->ports.at(ctx->id("F")).net = vcc_net.get(); std::vector<IdString> dead_nets; bool gnd_used = false; for (auto &net : ctx->nets) { NetInfo *ni = net.second.get(); if (ni->driver.cell != nullptr && ni->driver.cell->type == ctx->id("GND")) { 
IdString drv_cell = ni->driver.cell->name; set_net_constant(ctx, ni, gnd_net.get(), false); gnd_used = true; dead_nets.push_back(net.first); ctx->cells.erase(drv_cell); } else if (ni->driver.cell != nullptr && ni->driver.cell->type == ctx->id("VCC")) { IdString drv_cell = ni->driver.cell->name; set_net_constant(ctx, ni, vcc_net.get(), true); dead_nets.push_back(net.first); ctx->cells.erase(drv_cell); } } if (gnd_used) { ctx->cells[gnd_cell->name] = std::move(gnd_cell); ctx->nets[gnd_net->name] = std::move(gnd_net); } // Vcc cell always inserted for now, as it may be needed during carry legalisation (TODO: trim later if actually // never used?) ctx->cells[vcc_cell->name] = std::move(vcc_cell); ctx->nets[vcc_net->name] = std::move(vcc_net); for (auto dn : dead_nets) { ctx->nets.erase(dn); } } static bool is_nextpnr_iob(const Context *ctx, CellInfo *cell) { return cell->type == ctx->id("$nextpnr_ibuf") || cell->type == ctx->id("$nextpnr_obuf") || cell->type == ctx->id("$nextpnr_iobuf"); } static bool is_gowin_iob(const Context *ctx, const CellInfo *cell) { switch (cell->type.index) { case ID_IBUF: case ID_OBUF: case ID_IOBUF: case ID_TBUF: return true; default: return false; } } // Pack IO buffers static void pack_io(Context *ctx) { pool<IdString> packed_cells; pool<IdString> delete_nets; std::vector<std::unique_ptr<CellInfo>> new_cells; log_info("Packing IOs..\n"); for (auto &cell : ctx->cells) { CellInfo *ci = cell.second.get(); if (is_gowin_iob(ctx, ci)) { CellInfo *iob = nullptr; switch (ci->type.index) { case ID_IBUF: iob = net_driven_by(ctx, ci->ports.at(id_I).net, is_nextpnr_iob, id_O); break; case ID_OBUF: iob = net_only_drives(ctx, ci->ports.at(id_O).net, is_nextpnr_iob, id_I); break; case ID_IOBUF: iob = net_driven_by(ctx, ci->ports.at(id_IO).net, is_nextpnr_iob, id_O); break; case ID_TBUF: iob = net_only_drives(ctx, ci->ports.at(id_O).net, is_nextpnr_iob, id_I); break; default: break; } if (iob != nullptr) { // delete the $nexpnr_[io]buf for (auto &p : 
iob->ports) { IdString netname = p.second.net->name; disconnect_port(ctx, iob, p.first); delete_nets.insert(netname); } packed_cells.insert(iob->name); } // Create a IOB buffer std::unique_ptr<CellInfo> ice_cell = create_generic_cell(ctx, id_IOB, ci->name.str(ctx) + "$iob"); gwio_to_iob(ctx, ci, ice_cell.get(), packed_cells); new_cells.push_back(std::move(ice_cell)); auto gwiob = new_cells.back().get(); packed_cells.insert(ci->name); if (iob != nullptr) { // in Gowin .CST port attributes take precedence over cell attributes. // first copy cell attrs related to IO for (auto &attr : ci->attrs) { if (attr.first == IdString(ID_BEL) || attr.first.str(ctx)[0] == '&') { gwiob->setAttr(attr.first, attr.second); } } // rewrite attributes from the port for (auto &attr : iob->attrs) { gwiob->setAttr(attr.first, attr.second); } } } } for (auto pcell : packed_cells) { ctx->cells.erase(pcell); } for (auto dnet : delete_nets) { ctx->nets.erase(dnet); } for (auto &ncell : new_cells) { ctx->cells[ncell->name] = std::move(ncell); } } // Main pack function bool Arch::pack() { Context *ctx = getCtx(); try { log_break(); pack_constants(ctx); pack_io(ctx); pack_wideluts(ctx); pack_alus(ctx); pack_lut_lutffs(ctx); pack_nonlut_ffs(ctx); ctx->settings[ctx->id("pack")] = 1; ctx->assignArchInfo(); log_info("Checksum: 0x%08x\n", ctx->checksum()); return true; } catch (log_execution_error_exception) { return false; } } NEXTPNR_NAMESPACE_END<|fim▁end|>
} else { // CIN from logic connect_port(ctx, ctx->nets[cin_netId].get(), packed_head.get(), id_B); connect_port(ctx, ctx->nets[cin_netId].get(), packed_head.get(), id_D);
<|file_name|>text-setting.test.tsx<|end_file_name|><|fim▁begin|>import { render } from '@testing-library/react'; import React from 'react'; import configureMockStore from 'redux-mock-store'; import thunk from 'redux-thunk'; import { globalBeforeEach } from '../../../../__jest__/before-each';<|fim▁hole|>import { RootState } from '../../../redux/modules'; import { TextSetting } from '../text-setting'; const middlewares = [thunk]; const mockStore = configureMockStore<RootState>(middlewares); describe('<TextSetting />', () => { const label = 'label text'; const placeholder = 'placeholder text'; const help = 'help text'; let container = {}; const textSetting = ( <TextSetting disabled help={help} label={label} placeholder={placeholder} setting="deviceId" /> ); beforeEach(async () => { await globalBeforeEach(); const store = mockStore(await reduxStateForTest()); container = { wrapper: withReduxStore(store) }; }); it('should render label text', async () => { const { getByLabelText } = render(textSetting, container); expect(getByLabelText(label)).toBeInTheDocument(); }); it('should render placeholder text', async () => { const { getByPlaceholderText } = render(textSetting, container); expect(getByPlaceholderText(placeholder)).toBeInTheDocument(); }); it('should render help text', async () => { const { getByText } = render(textSetting, container); expect(getByText(help)).toBeInTheDocument(); }); it('should be disabled when passed a disabled prop', async () => { const { getByLabelText } = render(textSetting, container); const input = getByLabelText(label).closest('input'); expect(input).toHaveAttribute('disabled'); }); });<|fim▁end|>
import { reduxStateForTest } from '../../../../__jest__/redux-state-for-test'; import { withReduxStore } from '../../../../__jest__/with-redux-store';
<|file_name|>Config.java<|end_file_name|><|fim▁begin|>package com.insane.levellingtools; import net.minecraftforge.common.config.Configuration;<|fim▁hole|> * Created by Michael on 11/08/2014. */ public class Config { public static int baseXP; public static int increasePerLevel; public static int maxLevel; public static void doConfig(File configFile) { Configuration config = new Configuration(configFile); config.load(); config.addCustomCategoryComment("XP Requirements","XP to level calculated as: RequiredXP = BaseXP + (Level-1)*increasePerLevel"); baseXP = config.get("XP Requirements", "BaseXP", 10, "[Default: 50]").getInt(10); increasePerLevel = config.get("XP Requirements", "increasePerLevel", 10, "[Default: 20]").getInt(10); maxLevel = config.get("Limits", "Maximum Level", 2, "[Default: 10]").getInt(2); config.save(); } }<|fim▁end|>
import java.io.File; /**
<|file_name|>0020_auto__add_field_video2013_created.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Video2013.created' db.add_column('videos_video2013', 'created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 2, 5, 0, 0), auto_now_add=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Video2013.created' db.delete_column('videos_video2013', 'created') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),<|fim▁hole|> 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'videos.award': { 'Meta': {'object_name': 'Award'}, 'award_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'category': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'preview': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'region': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}), 'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video2012']", 'null': 'True', 'blank': 'True'}) }, 'videos.video2012': { 'Meta': {'object_name': 'Video2012'}, 'bitly_link_db': ('django.db.models.fields.URLField', [], {'default': 
"''", 'max_length': '200', 'blank': 'True'}), 'category': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 28, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'judge_mark': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'region': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'shortlink': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}), 'state': ('django.db.models.fields.CharField', [], {'default': "'unsent'", 'max_length': '10'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'upload_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200'}), 'user_country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'user_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}), 'views': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'votes': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}) }, 'videos.video2013': { 'Meta': {'object_name': 'Video2013'}, 'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 5, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'user_notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'vimeo_id': ('django.db.models.fields.IntegerField', [], {}) } } complete_apps = ['videos']<|fim▁end|>
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
<|file_name|>test_flaskpypi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_flaskpypi ---------------------------------- <|fim▁hole|>""" import pytest from flaskpypi import flaskpypi # Code from https://wiki.python.org/moin/PyPISimple from xml.etree import ElementTree from urllib.request import urlopen def get_distributions(simple_index='https://pypi.python.org/simple/'): with urlopen(simple_index) as f: tree = ElementTree.parse(f) return [a.text for a in tree.iter('a')] def scrape_links(dist, simple_index='https://pypi.python.org/simple/'): with urlopen(simple_index + dist + '/') as f: tree = ElementTree.parse(f) return [a.attrib['href'] for a in tree.iter('a')] def test_this_is_a_test(): assert True<|fim▁end|>
Tests for `flaskpypi` module.
<|file_name|>TombRunaway.java<|end_file_name|><|fim▁begin|>import br.com.etyllica.EtyllicaFrame; import br.com.etyllica.core.context.Application; import br.com.runaway.menu.MainMenu; <|fim▁hole|>public class TombRunaway extends EtyllicaFrame { private static final long serialVersionUID = 1L; public TombRunaway() { super(800, 600); } public static void main(String[] args){ TombRunaway map = new TombRunaway(); map.init(); } public Application startApplication() { initialSetup("../"); /*JoystickLoader.getInstance().start(1); new Thread(JoystickLoader.getInstance()).start();*/ return new MainMenu(w, h); } }<|fim▁end|>
<|file_name|>js.jquery.js<|end_file_name|><|fim▁begin|>js.Offset = function(rawptr) { this.rawptr = rawptr; } js.Offset.prototype = new konoha.Object(); js.Offset.prototype._new = function(rawptr) { this.rawptr = rawptr; } js.Offset.prototype.getTop = function() { return this.rawptr.top; } js.Offset.prototype.getLeft = function() { return this.rawptr.left; } js.jquery = {}; var initJQuery = function() { var verifyArgs = function(args) { for (var i = 0; i < args.length; i++) { if (args[i].rawptr) { args[i] = args[i].rawptr; } } return args; } var jquery = function(rawptr) { this.rawptr = rawptr; } jquery.prototype = new konoha.Object(); jquery.konohaclass = "js.jquery.JQuery"; /* Selectors */ jquery.prototype.each_ = function(callback) { this.rawptr.each(callback.rawptr); } jquery.prototype.size = function() { return this.rawptr.size(); } jquery.prototype.getSelector = function() { return new konoha.String(this.rawptr.getSelector()); } jquery.prototype.getContext = function() { return new js.dom.Node(this.rawptr.getContext()); } jquery.prototype.getNodeList = function() { return new js.dom.NodeList(this.rawptr.get()); } jquery.prototype.getNode = function(index) { return new js.dom.Node(this.rawptr.get(index)); } /* Attributes */ jquery.prototype.getAttr = function(arg) { return new konoha.String(this.rawptr.attr(arg.rawptr)); } jquery.prototype.attr = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.attr.apply(this.rawptr, args)); } jquery.prototype.removeAttr = function(name) { return new jquery(this.rawptr.removeAttr(name.rawptr)); } jquery.prototype.addClass = function(className) { return new jquery(this.rawptr.addClass(className.rawptr)); } jquery.prototype.removeClass = function(className) { return new jquery(this.rawptr.removeClass(className.rawptr)); } jquery.prototype.toggleClass = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new 
jquery(this.rawptr.toggleClass.apply(this.rawptr, args)); } jquery.prototype.getHTML = function() { return new konoha.String(this.rawptr.html()); } jquery.prototype.html = function(val) { return new jquery(this.rawptr.html(val.rawptr)); } jquery.prototype.getText = function() { return new konoha.String(this.rawptr.text()); } jquery.prototype.text = function(val) { return new jquery(this.rawptr.text(val.rawptr)); } jquery.prototype.getVal = function() { return new konoha.Array(this.rawptr.val()) } jquery.prototype.val = function(val) { return new jquery(this.rawptr.val(val.rawptr)); } /* Traversing */ jquery.prototype.eq = function(position) { return new jquery(this.rawptr.eq(position)); } jquery.prototype.filter = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.filter.apply(this.rawptr, args)); } jquery.prototype.is = function(expr) { return this.rawptr.is(expr.rawptr); } jquery.prototype.opnot = function(expr) { return this.rawptr.not(expr.rawptr); } jquery.prototype.slice = function() { return new jquery(this.rawptr.slice.apply(this.rawptr, Array.prototype.slice.call(arguments))); } jquery.prototype.add = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.add.apply(this.rawptr, args)); } jquery.prototype.children = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.children.apply(this.rawptr, args)); } jquery.prototype.closest = function() { var args =verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.closest.apply(this.rawptr, args)); } jquery.prototype.contents = function() { return new jquery(this.rawptr.contents()); } jquery.prototype.find = function(expr) { return new jquery(this.rawptr.find(expr.rawptr)); } jquery.prototype.next = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.next.apply(this.rawptr, args)); 
} jquery.prototype.nextAll = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.nextAll.apply(this.rawptr, args)); } jquery.prototype.parent = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.parent.apply(this.rawptr, args)); } jquery.prototype.parents = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.parents.apply(this.rawptr, args)); } jquery.prototype.prev = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.prev.apply(this.rawptr, args)); } jquery.prototype.prevAll = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.prevAll.apply(this.rawptr, args)); } jquery.prototype.siblings = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.siblings.apply(this.rawptr, args)); } jquery.prototype.andSelf = function() { return new jquery(this.rawptr.andSelf()); } jquery.prototype.end = function() { return new jquery(this.rawptr.end()); } /* Manipulation */ jquery.prototype.append = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.append.apply(this.rawptr, args)); } jquery.prototype.appendTo = function(content) { return new jquery(this.rawptr.appendTo(content.rawptr)); } jquery.prototype.prepend = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.prepend.apply(this.rawptr, args)); } jquery.prototype.prependTo = function(content) { return new jquery(this.rawptr.prependTo(content.rawptr)); } jquery.prototype.after = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.after.apply(this.rawptr, args)); } jquery.prototype.before = function() { var args = 
verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.before.apply(this.rawptr, args)); } jquery.prototype.insertAfter = function(content) { return new jquery(this.rawptr.insertAfter(content.rawptr)); } jquery.prototype.insertBefore = function(content) { return new jquery(this.rawptr.insertBefore(content.rawptr)); } jquery.prototype.wrap = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.wrap.apply(this.rawptr, args)); } jquery.prototype.wrapAll = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.wrapAll.apply(this.rawptr, args)); } jquery.prototype.wrapInner = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.wrapInner.apply(this.rawptr, args)); } jquery.prototype.replaceWith = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.replaceWith.apply(this.rawptr, args)); } jquery.prototype.replaceAll = function(selector) { return new jquery(this.rawptr.replaceAll(selector.rawptr)); } jquery.prototype.empty = function() { return new jquery(this.rawptr.empty()); } jquery.prototype.remove = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.remove.apply(this.rawptr, args)); } jquery.prototype.clone = function() { return new jquery(this.rawptr.clone.apply(this.rawptr, Array.prototype.slice.call(arguments))); } /* CSS */ jquery.prototype.getCss = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new konoha.String(this.rawptr.css.apply(this.rawptr, args)); } jquery.prototype.css = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.css.apply(this.rawptr, args)); } jquery.prototype.offset = function() { return new js.Offset(this.rawptr.offset()); } jquery.prototype.position = function() { return 
new js.Offset(this.rawptr.position()); } jquery.prototype.scrollTop = function() { return this.rawptr.scrollTop.apply(this.rawptr, Array.prototype.slice.call(arguments)); } jquery.prototype.scrollLeft = function() { return this.rawptr.scrollLeft.apply(this.rawptr, Array.prototype.slice.call(arguments)); } jquery.prototype.height = function() { return this.rawptr.height.apply(this.rawptr, Array.prototype.slice.call(arguments)); } jquery.prototype.width = function() { return this.rawptr.width.apply(this.rawptr, Array.prototype.slice.call(arguments)); } jquery.prototype.innerHeight = function() { return this.rawptr.innerHeight(); } jquery.prototype.innerWidth = function() { return this.rawptr.innerWidth(); } jquery.prototype.outerHeight = function() { return this.rawptr.outerHeight(); } jquery.prototype.outerWidth = function() { return this.rawptr.outerWidth(); } /* Events */ jquery.prototype.ready = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.ready.apply(this.rawptr, args)); } jquery.prototype.bind = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.bind.apply(this.rawptr, args)); } jquery.prototype.one = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.one.apply(this.rawptr, args)); } jquery.prototype.trigger = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.trigger.apply(this.rawptr, args)); } jquery.prototype.triggerHandler = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.triggerHandler.apply(this.rawptr, args)); } jquery.prototype.unbind = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.unbind.apply(this.rawptr, args)); } jquery.prototype.hover = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); 
return new jquery(this.rawptr.hover.apply(this.rawptr, args)); } jquery.prototype.toggleEvent = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); args = verifyArgs(args[0]); return new jquery(this.rawptr.toggle.apply(this.rawptr, args)); } jquery.prototype.live = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.live.apply(this.rawptr, args)); } jquery.prototype.die = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.die.apply(this.rawptr, args)); } jquery.prototype.blur = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.blur.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.blur(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.change = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.change.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.change(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.click = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.click.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.click(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.dblclick = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.dblclick.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.dblclick(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.error = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if 
(args.length == 0) { return new jquery(this.rawptr.error.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.error(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.focus = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.focus.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.focus(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.keydown = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.keydown.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.keydown(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); }<|fim▁hole|> } jquery.prototype.keypress = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.keypress.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.keypress(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.keyup = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.keyup.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.keyup(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.load = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.load.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.load(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.mousedown = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new 
jquery(this.rawptr.mousedown.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.mousedown(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.mousemove = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.mousemove.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.mousemove(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.mouseout = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.mouseout.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.mouseout(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.mouseover = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.mouseover.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.mouseover(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.mouseup = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.mouseup.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.mouseup(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.resize = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.resize.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.resize(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.scroll = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new 
jquery(this.rawptr.scroll.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.scroll(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.select = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.select.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.select(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.submit = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.submit.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.select(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } jquery.prototype.unload = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (args.length == 0) { return new jquery(this.rawptr.unload.apply(this.rawptr, args)); } else { return new jquery(this.rawptr.unload(function(e) { args[0].apply(new js.dom.Element(this), [new js.jquery.JEvent(e)]); })); } } /* Effects */ jquery.prototype.show = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.show.apply(this.rawptr, args)); } jquery.prototype.hide = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.hide.apply(this.rawptr, args)); } jquery.prototype.toggle = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.toggle.apply(this.rawptr, args)); } jquery.prototype.slideDown = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.slideDown.apply(this.rawptr, args)); } jquery.prototype.slideUp = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new 
jquery(this.rawptr.slideUp.apply(this.rawptr, args)); } jquery.prototype.slideToggle = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.slideToggle.apply(this.rawptr, args)); } jquery.prototype.fadeIn = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.fadeIn.apply(this.rawptr, args)); } jquery.prototype.fadeOut = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.fadeOut.apply(this.rawptr, args)); } jquery.prototype.fadeTo = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); return new jquery(this.rawptr.fadeTo.apply(this.rawptr, args)); } jquery.prototype._new = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (arguments.length == 1) { this.rawptr = new $(args[0]); } else if (arguments.length == 2) { this.rawptr = new $(args[0], args[1]); } else { throw ("Script !!"); } return this; } return jquery; } js.jquery.JQuery = new initJQuery(); js.jquery.JEvent = new function() { var jevent = function(rawptr) { this.rawptr = rawptr; } jevent.prototype = new konoha.Object(); jevent.konohaclass = "js.jquery.JEvent"; jevent.prototype.type = function() { return new konoha.String(this.rawptr.type); } jevent.prototype.target = function() { return new konoha.dom.Element(this.rawptr.target); } jevent.prototype.relatedTarget = function() { return new konoha.dom.Element(this.rawptr.relatedTarget); } jevent.prototype.currentTarget = function() { return new konoha.dom.Element(this.rawptr.currentTarget); } jevent.prototype.pageX = function() { return this.rawptr.pageX; } jevent.prototype.pageY = function() { return this.rawptr.pageY; } jevent.prototype.timeStamp = function() { return this.rawptr.timeStamp; } jevent.prototype.preventDefault = function() { return new jevent(this.rawptr.preventDefault()); } jevent.prototype.isDefaultPrevented = function() { return 
this.rawptr.isDefaultPrevented(); } jevent.prototype.stopPropagation = function() { return new jevent(this.rawptr.stopPropagation()); } jevent.prototype.isPropagationStopped = function() { return this.rawptr.isPropagationStopped(); } jevent.prototype.stopImmediatePropagation = function() { return new jevent(this.rawptr.stopImmediatePropagation()); } jevent.prototype.isImmediatePropagationStopped = function() { return this.rawptr.isImmediatePropagationStopped(); } jevent.prototype._new = function() { var args = verifyArgs(Array.prototype.slice.call(arguments)); if (arguments.length == 1) { this.rawptr = new $(args[0]); } else if (arguments.length == 2) { this.rawptr = new $(args[0], args[1]); } else { throw ("Script !!"); } return this; } return jevent; }();<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>""" Views for PubSite app. """ from django.conf import settings from django.contrib.auth.views import ( PasswordResetView, PasswordResetDoneView, PasswordResetConfirmView, PasswordResetCompleteView, ) from django.shortcuts import render import requests import logging logger = logging.getLogger(__name__) def _get_context(page_name): return { "pages": settings.PUBLIC_PAGES, "current_page_name": page_name, } # Regular index # def index(request): # """ # View for the static index page # """ # return render(request, 'public/home.html', _get_context('Home')) def index(request): """ View for the static index page """ return render(request, "public/home.html", _get_context("Home")) def about(request): """ View for the static chapter history page. """ return render(request, "public/about.html", _get_context("About")) def activities(request): """ View for the static chapter service page. """ return render( request, "public/activities.html", _get_context("Service & Activities"), ) def rush(request): """ View for the static chapter service page. """ return render( request, "public/rush.html", _get_context("Rush"), ) def campaign(request): """ View for the campaign service page. """ # Overrride requests Session authentication handling class NoRebuildAuthSession(requests.Session): def rebuild_auth(self, prepared_request, response): """ No code here means requests will always preserve the Authorization header when redirected. Be careful not to leak your credentials to untrusted hosts! 
""" url = "https://api.givebutter.com/v1/transactions/" headers = {"Authorization": f"Bearer {settings.GIVEBUTTER_API_KEY}"} response = None # Create custom requests session session = NoRebuildAuthSession() # Make GET request to server, timeout in seconds try: r = session.get(url, headers=headers, timeout=0.75) if r.status_code == 200: response = r.json() else: logger.error(f"ERROR in request: {r.status_code}") except requests.exceptions.Timeout: logger.warning("Connection to GiveButter API Timed out")<|fim▁hole|> logger.warning("Connection to GiveButter API could not be resolved") except requests.exceptions.RequestException: logger.error( "An unknown issue occurred while trying to retrieve GiveButter Donor List" ) # Grab context object to use later ctx = _get_context("Campaign") # Check for successful response, if so - filter, sort, and format data if response and "data" in response: response = response["data"] # Pull data from GET response object logger.debug(f"GiveButter API Response: {response}") # Filter by only successful transactions, then sort by amount descending successful_txs = [tx for tx in response if tx["status"] == "succeeded"] sorted_txs = sorted(successful_txs, key=lambda tx: tx["amount"], reverse=True) # Clean data to a list of dictionaries & remove unnecessary data transactions = [ { "name": tx["giving_space"]["name"], "amount": tx["giving_space"]["amount"], "message": tx["giving_space"]["message"], } for tx in sorted_txs[:20] ] # Attach transaction dictionary & length to context object ctx["transactions"] = transactions ctx["num_txs"] = len(successful_txs) return render( request, "public/campaign.html", ctx, ) def permission_denied(request): """ View for 403 (Permission Denied) error. 
""" return render( request, "common/403.html", _get_context("Permission Denied"), ) def handler404(request, exception): """ """ return render(request, "common/404.html", _get_context("Page Not Found")) class ResetPassword(PasswordResetView): template_name = "password_reset/password_reset_form.html" class ResetPasswordDone(PasswordResetDoneView): template_name = "password_reset/password_reset_done.html" class ResetPasswordConfirm(PasswordResetConfirmView): template_name = "password_reset/password_reset_confirm.html" class ResetPasswordComplete(PasswordResetCompleteView): template_name = "password_reset/password_reset_complete.html"<|fim▁end|>
except requests.ConnectionError:
<|file_name|>CFCCellToCellStencil.C<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\ ========= | \\ / F ield | foam-extend: Open Source CFD \\ / O peration | Version: 3.2 \\ / A nd | Web: http://www.foam-extend.org \\/ M anipulation | For copyright notice see file Copyright ------------------------------------------------------------------------------- License This file is part of foam-extend. foam-extend is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. foam-extend is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with foam-extend. If not, see <http://www.gnu.org/licenses/>. 
\*---------------------------------------------------------------------------*/ #include "CFCCellToCellStencil.H" #include "syncTools.H" #include "SortableList.H" #include "emptyPolyPatch.H" // * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * // // Calculates per face the neighbour data (= cell or boundary face) void Foam::CFCCellToCellStencil::calcFaceBoundaryData ( labelList& neiGlobal ) const { const polyBoundaryMesh& patches = mesh().boundaryMesh(); const label nBnd = mesh().nFaces()-mesh().nInternalFaces(); const labelList& own = mesh().faceOwner(); neiGlobal.setSize(nBnd); forAll(patches, patchI) { const polyPatch& pp = patches[patchI]; label faceI = pp.start(); if (pp.coupled()) { // For coupled faces get the cell on the other side forAll(pp, i) { label bFaceI = faceI-mesh().nInternalFaces(); neiGlobal[bFaceI] = globalNumbering().toGlobal(own[faceI]);<|fim▁hole|> else if (isA<emptyPolyPatch>(pp)) { forAll(pp, i) { label bFaceI = faceI-mesh().nInternalFaces(); neiGlobal[bFaceI] = -1; faceI++; } } else { // For noncoupled faces get the boundary face. forAll(pp, i) { label bFaceI = faceI-mesh().nInternalFaces(); neiGlobal[bFaceI] = globalNumbering().toGlobal(mesh().nCells()+bFaceI); faceI++; } } } syncTools::swapBoundaryFaceList(mesh(), neiGlobal, false); } // Calculates per cell the neighbour data (= cell or boundary in global // numbering). First element is always cell itself! 
void Foam::CFCCellToCellStencil::calcCellStencil(labelListList& globalCellCells) const { const label nBnd = mesh().nFaces()-mesh().nInternalFaces(); const labelList& own = mesh().faceOwner(); const labelList& nei = mesh().faceNeighbour(); // Calculate coupled neighbour (in global numbering) // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ labelList neiGlobal(nBnd); calcFaceBoundaryData(neiGlobal); // Determine cellCells in global numbering // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ globalCellCells.setSize(mesh().nCells()); forAll(globalCellCells, cellI) { const cell& cFaces = mesh().cells()[cellI]; labelList& cCells = globalCellCells[cellI]; cCells.setSize(cFaces.size()+1); label nNbr = 0; // Myself cCells[nNbr++] = globalNumbering().toGlobal(cellI); // Collect neighbouring cells/faces forAll(cFaces, i) { label faceI = cFaces[i]; if (mesh().isInternalFace(faceI)) { label nbrCellI = own[faceI]; if (nbrCellI == cellI) { nbrCellI = nei[faceI]; } cCells[nNbr++] = globalNumbering().toGlobal(nbrCellI); } else { label nbrCellI = neiGlobal[faceI-mesh().nInternalFaces()]; if (nbrCellI != -1) { cCells[nNbr++] = nbrCellI; } } } cCells.setSize(nNbr); } } // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * // Foam::CFCCellToCellStencil::CFCCellToCellStencil(const polyMesh& mesh) : cellToCellStencil(mesh) { // Calculate per cell the (face) connected cells (in global numbering) calcCellStencil(*this); } // ************************************************************************* //<|fim▁end|>
faceI++; } }
<|file_name|>GithubActivity.java<|end_file_name|><|fim▁begin|>package com.meituan.davy.myapplication.github; import android.support.v4.app.Fragment; import com.meituan.davy.myapplication.ContainerActivity;<|fim▁hole|> @Override protected Fragment getFragment() { return new GithubFragment(); } }<|fim▁end|>
public class GithubActivity extends ContainerActivity {
<|file_name|>searcher.py<|end_file_name|><|fim▁begin|>from scipy.spatial import distance as dist<|fim▁hole|> self.index = index def search(self, queryFeature): results = {} for (k, feature) in self.index.items(): d = dist.euclidean(queryFeature, feature) results[k] = d results = sorted([(v, k) for (k, v) in results.items()]) return results<|fim▁end|>
class Searcher: def __init__(self, index):
<|file_name|>uuid.py<|end_file_name|><|fim▁begin|># Copyright 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Keystone UUID Token Provider""" from __future__ import absolute_import import uuid from keystone.token.providers import common <|fim▁hole|> def _get_token_id(self, token_data): return uuid.uuid4().hex<|fim▁end|>
class Provider(common.BaseProvider): def __init__(self, *args, **kwargs): super(Provider, self).__init__(*args, **kwargs)
<|file_name|>item.rs<|end_file_name|><|fim▁begin|>use std::any::Any; use std::any::TypeId; use std::fmt; use std::str::from_utf8; use super::cell::{OptCell, PtrMapCell}; use header::{Header, MultilineFormatter, Raw}; #[derive(Clone)] pub struct Item { raw: OptCell<Raw>, typed: PtrMapCell<Header + Send + Sync> } impl Item { #[inline] pub fn new_raw(data: Raw) -> Item { Item { raw: OptCell::new(Some(data)), typed: PtrMapCell::new(), } } #[inline] pub fn new_typed(ty: Box<Header + Send + Sync>) -> Item { let map = PtrMapCell::new(); unsafe { map.insert((*ty).get_type(), ty); } Item { raw: OptCell::new(None), typed: map, } } #[inline] pub fn raw_mut(&mut self) -> &mut Raw { self.raw(); self.typed = PtrMapCell::new(); unsafe { self.raw.get_mut() } } pub fn raw(&self) -> &Raw { if let Some(ref raw) = *self.raw { return raw; } let raw = unsafe { self.typed.one() }.to_string().into_bytes().into(); self.raw.set(raw); self.raw.as_ref().unwrap() } pub fn typed<H: Header + Any>(&self) -> Option<&H> {<|fim▁hole|> match self.typed.get(tid) { Some(val) => Some(val), None => { match parse::<H>(self.raw.as_ref().expect("item.raw must exist")) { Ok(typed) => { unsafe { self.typed.insert(tid, typed); } self.typed.get(tid) }, Err(_) => None } } }.map(|typed| unsafe { typed.downcast_ref_unchecked() }) } pub fn typed_mut<H: Header>(&mut self) -> Option<&mut H> { let tid = TypeId::of::<H>(); if self.typed.get_mut(tid).is_none() { match parse::<H>(self.raw.as_ref().expect("item.raw must exist")) { Ok(typed) => { unsafe { self.typed.insert(tid, typed); } }, Err(_) => () } } if self.raw.is_some() && self.typed.get_mut(tid).is_some() { self.raw = OptCell::new(None); } self.typed.get_mut(tid).map(|typed| unsafe { typed.downcast_mut_unchecked() }) } pub fn into_typed<H: Header>(self) -> Option<H> { let tid = TypeId::of::<H>(); match self.typed.into_value(tid) { Some(val) => Some(val), None => parse::<H>(self.raw.as_ref().expect("item.raw must exist")).ok() }.map(|typed| unsafe { 
typed.downcast_unchecked() }) } pub fn write_h1(&self, f: &mut MultilineFormatter) -> fmt::Result { match *self.raw { Some(ref raw) => { for part in raw.iter() { match from_utf8(&part[..]) { Ok(s) => { try!(f.fmt_line(&s)); }, Err(_) => { error!("raw header value is not utf8, value={:?}", part); return Err(fmt::Error); } } } Ok(()) }, None => { let typed = unsafe { self.typed.one() }; typed.fmt_multi_header(f) } } } } #[inline] fn parse<H: Header>(raw: &Raw) -> ::Result<Box<Header + Send + Sync>> { H::parse_header(raw).map(|h| { let h: Box<Header + Send + Sync> = Box::new(h); h }) }<|fim▁end|>
let tid = TypeId::of::<H>();
<|file_name|>storage.py<|end_file_name|><|fim▁begin|># # Kickstart module for the storage. # # Copyright (C) 2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from pyanaconda.core.signal import Signal from pyanaconda.core.dbus import DBus from pyanaconda.modules.common.base import KickstartService from pyanaconda.modules.common.constants.services import STORAGE from pyanaconda.modules.common.containers import TaskContainer from pyanaconda.modules.common.errors.storage import InvalidStorageError from pyanaconda.modules.common.structures.requirement import Requirement from pyanaconda.modules.storage.bootloader import BootloaderModule from pyanaconda.modules.storage.checker import StorageCheckerModule from pyanaconda.modules.storage.dasd import DASDModule from pyanaconda.modules.storage.devicetree import DeviceTreeModule from pyanaconda.modules.storage.disk_initialization import DiskInitializationModule from pyanaconda.modules.storage.disk_selection import DiskSelectionModule from pyanaconda.modules.storage.fcoe import FCOEModule from pyanaconda.modules.storage.installation import MountFilesystemsTask, ActivateFilesystemsTask, \ WriteConfigurationTask from pyanaconda.modules.storage.iscsi import ISCSIModule from pyanaconda.modules.storage.kickstart import StorageKickstartSpecification from pyanaconda.modules.storage.nvdimm import NVDIMMModule from pyanaconda.modules.storage.partitioning.constants import PartitioningMethod from pyanaconda.modules.storage.partitioning.factory import PartitioningFactory from pyanaconda.modules.storage.partitioning.validate import StorageValidateTask from pyanaconda.modules.storage.reset import ScanDevicesTask from pyanaconda.modules.storage.snapshot import SnapshotModule from pyanaconda.modules.storage.storage_interface import StorageInterface from pyanaconda.modules.storage.teardown import UnmountFilesystemsTask, TeardownDiskImagesTask from pyanaconda.modules.storage.zfcp import ZFCPModule from pyanaconda.storage.initialization import enable_installer_mode, create_storage from pyanaconda.anaconda_loggers import get_module_logger log = get_module_logger(__name__) class 
StorageService(KickstartService): """The Storage service.""" def __init__(self): super().__init__() # Initialize Blivet. enable_installer_mode() # The storage model. self._current_storage = None self._storage_playground = None self.storage_changed = Signal() # The created partitioning modules. self._created_partitioning = [] self.created_partitioning_changed = Signal() # The applied partitioning module. self._applied_partitioning = None self.applied_partitioning_changed = Signal() self.partitioning_reset = Signal() # Initialize modules. self._modules = [] self._storage_checker_module = StorageCheckerModule() self._add_module(self._storage_checker_module) self._device_tree_module = DeviceTreeModule() self._add_module(self._device_tree_module) self._disk_init_module = DiskInitializationModule() self._add_module(self._disk_init_module) <|fim▁hole|> self._disk_selection_module = DiskSelectionModule() self._add_module(self._disk_selection_module) self._snapshot_module = SnapshotModule() self._add_module(self._snapshot_module) self._bootloader_module = BootloaderModule() self._add_module(self._bootloader_module) self._fcoe_module = FCOEModule() self._add_module(self._fcoe_module) self._iscsi_module = ISCSIModule() self._add_module(self._iscsi_module) self._nvdimm_module = NVDIMMModule() self._add_module(self._nvdimm_module) self._dasd_module = DASDModule() self._add_module(self._dasd_module) self._zfcp_module = ZFCPModule() self._add_module(self._zfcp_module) # Connect modules to signals. 
self.storage_changed.connect( self._device_tree_module.on_storage_changed ) self.storage_changed.connect( self._disk_init_module.on_storage_changed ) self.storage_changed.connect( self._disk_selection_module.on_storage_changed ) self.storage_changed.connect( self._snapshot_module.on_storage_changed ) self.storage_changed.connect( self._bootloader_module.on_storage_changed ) self.storage_changed.connect( self._dasd_module.on_storage_changed ) self._disk_init_module.format_unrecognized_enabled_changed.connect( self._dasd_module.on_format_unrecognized_enabled_changed ) self._disk_init_module.format_ldl_enabled_changed.connect( self._dasd_module.on_format_ldl_enabled_changed ) self._disk_selection_module.protected_devices_changed.connect( self.on_protected_devices_changed ) def _add_module(self, storage_module): """Add a base kickstart module.""" self._modules.append(storage_module) def publish(self): """Publish the module.""" TaskContainer.set_namespace(STORAGE.namespace) for kickstart_module in self._modules: kickstart_module.publish() DBus.publish_object(STORAGE.object_path, StorageInterface(self)) DBus.register_service(STORAGE.service_name) @property def kickstart_specification(self): """Return the kickstart specification.""" return StorageKickstartSpecification def process_kickstart(self, data): """Process the kickstart data.""" # Process the kickstart data in modules. for kickstart_module in self._modules: kickstart_module.process_kickstart(data) # Set the default filesystem type. if data.autopart.autopart and data.autopart.fstype: self.storage.set_default_fstype(data.autopart.fstype) # Create a new partitioning module. 
partitioning_method = PartitioningFactory.get_method_for_kickstart(data) if partitioning_method: partitioning_module = self.create_partitioning(partitioning_method) partitioning_module.process_kickstart(data) def setup_kickstart(self, data): """Set up the kickstart data.""" for kickstart_module in self._modules: kickstart_module.setup_kickstart(data) if self.applied_partitioning: self.applied_partitioning.setup_kickstart(data) @property def storage(self): """The storage model. :return: an instance of Blivet """ if self._storage_playground: return self._storage_playground if not self._current_storage: self._set_storage(create_storage()) return self._current_storage def _set_storage(self, storage): """Set the current storage model. The current storage is the latest model of the system’s storage configuration created by scanning all devices. :param storage: a storage """ self._current_storage = storage if self._storage_playground: return self.storage_changed.emit(storage) log.debug("The storage model has changed.") def _set_storage_playground(self, storage): """Set the storage playground. The storage playground is a model of a valid partitioned storage configuration, that can be used for an installation. :param storage: a storage or None """ self._storage_playground = storage if storage is None: storage = self.storage self.storage_changed.emit(storage) log.debug("The storage model has changed.") def on_protected_devices_changed(self, protected_devices): """Update the protected devices in the storage model.""" if not self._current_storage: return self.storage.protect_devices(protected_devices) def scan_devices_with_task(self): """Scan all devices with a task. We will reset a copy of the current storage model and switch the models if the reset is successful. :return: a task """ # Copy the storage. storage = self.storage.copy() # Set up the storage. 
storage.ignored_disks = self._disk_selection_module.ignored_disks storage.exclusive_disks = self._disk_selection_module.exclusive_disks storage.protected_devices = self._disk_selection_module.protected_devices storage.disk_images = self._disk_selection_module.disk_images # Create the task. task = ScanDevicesTask(storage) task.succeeded_signal.connect(lambda: self._set_storage(storage)) return task def create_partitioning(self, method: PartitioningMethod): """Create a new partitioning. Allowed values: AUTOMATIC CUSTOM MANUAL INTERACTIVE BLIVET :param PartitioningMethod method: a partitioning method :return: a partitioning module """ module = PartitioningFactory.create_partitioning(method) # Update the module. module.on_storage_changed( self._current_storage ) module.on_selected_disks_changed( self._disk_selection_module.selected_disks ) # Connect the callbacks to signals. self.storage_changed.connect( module.on_storage_changed ) self.partitioning_reset.connect( module.on_partitioning_reset ) self._disk_selection_module.selected_disks_changed.connect( module.on_selected_disks_changed ) # Update the list of modules. self._add_created_partitioning(module) return module @property def created_partitioning(self): """List of all created partitioning modules.""" return self._created_partitioning def _add_created_partitioning(self, module): """Add a created partitioning module.""" self._created_partitioning.append(module) self.created_partitioning_changed.emit(module) log.debug("Created the partitioning %s.", module) def apply_partitioning(self, module): """Apply a partitioning. :param module: a partitioning module :raise: InvalidStorageError of the partitioning is not valid """ # Validate the partitioning. storage = module.storage.copy() task = StorageValidateTask(storage) report = task.run() if not report.is_valid(): raise InvalidStorageError(" ".join(report.error_messages)) # Apply the partitioning. 
self._set_storage_playground(storage) self._set_applied_partitioning(module) @property def applied_partitioning(self): """The applied partitioning.""" return self._applied_partitioning def _set_applied_partitioning(self, module): """Set the applied partitioning. :param module: a partitioning module or None """ self._applied_partitioning = module self.applied_partitioning_changed.emit() if module is None: module = "NONE" log.debug("The partitioning %s is applied.", module) def reset_partitioning(self): """Reset the partitioning.""" self._set_storage_playground(None) self._set_applied_partitioning(None) self.partitioning_reset.emit() def collect_requirements(self): """Return installation requirements for this module. :return: a list of requirements """ requirements = [] # Add the storage requirements. for name in self.storage.packages: requirements.append(Requirement.for_package( name, reason="Required to manage storage devices." )) # Add other requirements, for example for bootloader. for kickstart_module in self._modules: requirements.extend(kickstart_module.collect_requirements()) return requirements def install_with_tasks(self): """Returns installation tasks of this module. :returns: list of installation tasks """ storage = self.storage return [ ActivateFilesystemsTask(storage), MountFilesystemsTask(storage) ] def write_configuration_with_task(self): """Write the storage configuration with a task. FIXME: This is a temporary workaround. :return: an installation task """ return WriteConfigurationTask(self.storage) def teardown_with_tasks(self): """Returns teardown tasks for this module. :return: a list installation tasks """ storage = self.storage return [ UnmountFilesystemsTask(storage), TeardownDiskImagesTask(storage) ]<|fim▁end|>
<|file_name|>fdFlow.cpp<|end_file_name|><|fim▁begin|>//---------------------------------------------------------------------------- // XC program; finite element analysis code // for structural analysis and design. // // Copyright (C) Luis Claudio Pérez Tato // // This program derives from OpenSees <http://opensees.berkeley.edu> // developed by the «Pacific earthquake engineering research center». // // Except for the restrictions that may arise from the copyright // of the original program (see copyright_opensees.txt) // XC is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This software is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // // You should have received a copy of the GNU General Public License // along with this program. // If not, see <http://www.gnu.org/licenses/>. //---------------------------------------------------------------------------- //=============================================================================== //# COPYRIGHT (C): Woody's license (by BJ): // ``This source code is Copyrighted in // U.S., for an indefinite period, and anybody // caught using it without our permission, will be // mighty good friends of ourn, cause we don't give // a darn. Hack it. Compile it. Debug it. Run it. // Yodel it. Enjoy it. We wrote it, that's all we // wanted to do.'' // //# PROJECT: Object Oriented Finite XC::Element Program //# PURPOSE: Finite Deformation Hyper-Elastic classes //# CLASS: //# //# VERSION: 0.6_(1803398874989) (golden section) //# LANGUAGE: C++ //# TARGET OS: all... 
//# DESIGN: Zhao Cheng, Boris Jeremic (jeremic@ucdavis.edu) //# PROGRAMMER(S): Zhao Cheng, Boris Jeremic //# //# //# DATE: July 2004 //# UPDATE HISTORY: //# //=============================================================================== #ifndef fdFlow_CPP #define fdFlow_CPP<|fim▁hole|> XC::fdFlow::fdFlow() { } double XC::fdFlow::dFodq(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { return 0.0; } XC::stresstensor XC::fdFlow::dFoda(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { XC::stresstensor Z2; return Z2; } XC::BJtensor XC::fdFlow::d2Fodsds(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { BJtensor Z4(4, def_dim_4, 0.0); return Z4; } XC::stresstensor XC::fdFlow::d2Fodsdq(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { XC::stresstensor Z2; return Z2; } XC::BJtensor XC::fdFlow::d2Fodsda(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { BJtensor Z4(4, def_dim_4, 0.0); return Z4; } double XC::fdFlow::d2Fodqdq(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { return 0.0; } XC::stresstensor XC::fdFlow::d2Fodqda(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { XC::stresstensor Z2; return Z2; } XC::BJtensor XC::fdFlow::d2Fodada(const XC::stresstensor &sts, const XC::FDEPState &fdepstate ) const { BJtensor Z4(4, def_dim_4, 0.0); return Z4; } std::ostream& operator<<(std::ostream &os, const XC::fdFlow &fdfl) { os << "fdFlow Parameters: " << "\n"; return os; } #endif<|fim▁end|>
#include "material/nD/FiniteDeformation/fdFlow/fdFlow.h" #include <utility/matrix/nDarray/stresst.h>
<|file_name|>ExecutionServlet.java<|end_file_name|><|fim▁begin|>/* * cron4j - A pure Java cron-like scheduler * * Copyright (C) 2007-2010 Carlo Pelliccia (www.sauronsoftware.it) * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License version * 2.1, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License 2.1 for more details. * * You should have received a copy of the GNU Lesser General Public * License version 2.1 along with this program. * If not, see <http://www.gnu.org/licenses/>. */ package example; import it.sauronsoftware.cron4j.Scheduler; import it.sauronsoftware.cron4j.TaskExecutor; import java.io.IOException; import javax.servlet.RequestDispatcher; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * This servlet enables the user to view and control any ongoing task execution. * The HTML layout is generated calling the /WEB-INF/ongoing.jsp page. */ public class ExecutionServlet extends HttpServlet { private static final long serialVersionUID = 1L; protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { // Retrieves the servlet context. ServletContext context = getServletContext(); // Retrieves the scheduler. Scheduler scheduler = (Scheduler) context .getAttribute(Constants.SCHEDULER); // Retrieves the executors. TaskExecutor[] executors = scheduler.getExecutingTasks(); // Registers the executors in the request. req.setAttribute("executors", executors); // Action requested? 
String action = req.getParameter("action"); if ("pause".equals(action)) { String id = req.getParameter("id"); TaskExecutor executor = find(executors, id); if (executor != null && executor.isAlive() && !executor.isStopped() && executor.canBePaused() && !executor.isPaused()) { executor.pause(); } } else if ("resume".equals(action)) { String id = req.getParameter("id"); TaskExecutor executor = find(executors, id); if (executor != null && executor.isAlive() && !executor.isStopped() && executor.canBePaused() && executor.isPaused()) { executor.resume(); } } else if ("stop".equals(action)) { String id = req.getParameter("id"); TaskExecutor executor = find(executors, id); if (executor != null && executor.isAlive() && executor.canBeStopped() && !executor.isStopped()) { executor.stop(); }<|fim▁hole|> } // Layout. String page = "/WEB-INF/ongoing.jsp"; RequestDispatcher dispatcher = req.getRequestDispatcher(page); dispatcher.include(req, resp); } private TaskExecutor find(TaskExecutor[] executors, String id) { if (id == null) { return null; } for (int i = 0; i < executors.length; i++) { String aux = executors[i].getGuid(); if (aux.equals(id)) { return executors[i]; } } return null; } }<|fim▁end|>
<|file_name|>printer.js<|end_file_name|><|fim▁begin|>/* @flow */ "use strict"; var _inherits = require("babel-runtime/helpers/inherits")["default"]; var _classCallCheck = require("babel-runtime/helpers/class-call-check")["default"]; var _getIterator = require("babel-runtime/core-js/get-iterator")["default"]; var _Object$assign = require("babel-runtime/core-js/object/assign")["default"]; var _interopRequireDefault = require("babel-runtime/helpers/interop-require-default")["default"]; var _interopRequireWildcard = require("babel-runtime/helpers/interop-require-wildcard")["default"]; exports.__esModule = true; var _repeating = require("repeating"); var _repeating2 = _interopRequireDefault(_repeating); var _buffer = require("./buffer"); var _buffer2 = _interopRequireDefault(_buffer); var _node = require("./node"); var _node2 = _interopRequireDefault(_node); var _babelTypes = require("babel-types"); var t = _interopRequireWildcard(_babelTypes); var Printer = (function (_Buffer) { _inherits(Printer, _Buffer); function Printer() { _classCallCheck(this, Printer); for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } _Buffer.call.apply(_Buffer, [this].concat(args)); this.insideAux = false; this.printAuxAfterOnNextUserNode = false; } Printer.prototype.print = function print(node, parent) { var opts = arguments.length <= 2 || arguments[2] === undefined ? 
{} : arguments[2]; if (!node) return; if (parent && parent._compact) { node._compact = true; } var oldInAux = this.insideAux; this.insideAux = !node.loc; var oldConcise = this.format.concise; if (node._compact) { this.format.concise = true; } var printMethod = this[node.type]; if (!printMethod) { throw new ReferenceError("unknown node of type " + JSON.stringify(node.type) + " with constructor " + JSON.stringify(node && node.constructor.name)); } if (node.loc) this.printAuxAfterComment(); this.printAuxBeforeComment(oldInAux); var needsParens = _node2["default"].needsParens(node, parent); if (needsParens) this.push("("); this.printLeadingComments(node, parent); this.catchUp(node); this._printNewline(true, node, parent, opts); if (opts.before) opts.before(); this.map.mark(node, "start"); this._print(node, parent); this.printTrailingComments(node, parent); if (needsParens) this.push(")"); // end this.map.mark(node, "end"); if (opts.after) opts.after(); this.format.concise = oldConcise; this.insideAux = oldInAux; this._printNewline(false, node, parent, opts); }; Printer.prototype.printAuxBeforeComment = function printAuxBeforeComment(wasInAux) { var comment = this.format.auxiliaryCommentBefore; if (!wasInAux && this.insideAux) { this.printAuxAfterOnNextUserNode = true; if (comment) this.printComment({ type: "CommentBlock", value: comment }); } }; Printer.prototype.printAuxAfterComment = function printAuxAfterComment() { if (this.printAuxAfterOnNextUserNode) { this.printAuxAfterOnNextUserNode = false; var comment = this.format.auxiliaryCommentAfter; if (comment) this.printComment({ type: "CommentBlock",<|fim▁hole|> }); } }; Printer.prototype.getPossibleRaw = function getPossibleRaw(node) { var extra = node.extra; if (extra && extra.raw != null && extra.rawValue != null && node.value === extra.rawValue) { return extra.raw; } }; Printer.prototype._print = function _print(node, parent) { var extra = this.getPossibleRaw(node); if (extra) { this.push(""); this._push(extra); } 
else { var printMethod = this[node.type]; printMethod.call(this, node, parent); } }; Printer.prototype.printJoin = function printJoin(nodes /*: ?Array*/, parent /*: Object*/) { // istanbul ignore next var _this = this; var opts = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2]; if (!nodes || !nodes.length) return; var len = nodes.length; var node = undefined, i = undefined; if (opts.indent) this.indent(); var printOpts = { statement: opts.statement, addNewlines: opts.addNewlines, after: function after() { if (opts.iterator) { opts.iterator(node, i); } if (opts.separator && i < len - 1) { _this.push(opts.separator); } } }; for (i = 0; i < nodes.length; i++) { node = nodes[i]; this.print(node, parent, printOpts); } if (opts.indent) this.dedent(); }; Printer.prototype.printAndIndentOnComments = function printAndIndentOnComments(node, parent) { var indent = !!node.leadingComments; if (indent) this.indent(); this.print(node, parent); if (indent) this.dedent(); }; Printer.prototype.printBlock = function printBlock(parent) { var node = parent.body; if (t.isEmptyStatement(node)) { this.semicolon(); } else { this.push(" "); this.print(node, parent); } }; Printer.prototype.generateComment = function generateComment(comment) { var val = comment.value; if (comment.type === "CommentLine") { val = "//" + val; } else { val = "/*" + val + "*/"; } return val; }; Printer.prototype.printTrailingComments = function printTrailingComments(node, parent) { this.printComments(this.getComments("trailingComments", node, parent)); }; Printer.prototype.printLeadingComments = function printLeadingComments(node, parent) { this.printComments(this.getComments("leadingComments", node, parent)); }; Printer.prototype.printInnerComments = function printInnerComments(node) { var indent = arguments.length <= 1 || arguments[1] === undefined ? 
true : arguments[1]; if (!node.innerComments) return; if (indent) this.indent(); this.printComments(node.innerComments); if (indent) this.dedent(); }; Printer.prototype.printSequence = function printSequence(nodes, parent) { var opts = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2]; opts.statement = true; return this.printJoin(nodes, parent, opts); }; Printer.prototype.printList = function printList(items, parent) { var opts = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2]; if (opts.separator == null) { opts.separator = ","; if (!this.format.compact) opts.separator += " "; } return this.printJoin(items, parent, opts); }; Printer.prototype._printNewline = function _printNewline(leading, node, parent, opts) { if (!opts.statement && !_node2["default"].isUserWhitespacable(node, parent)) { return; } var lines = 0; if (node.start != null && !node._ignoreUserWhitespace && this.tokens.length) { // user node if (leading) { lines = this.whitespace.getNewlinesBefore(node); } else { lines = this.whitespace.getNewlinesAfter(node); } } else { // generated node if (!leading) lines++; // always include at least a single line after if (opts.addNewlines) lines += opts.addNewlines(leading, node) || 0; var needs = _node2["default"].needsWhitespaceAfter; if (leading) needs = _node2["default"].needsWhitespaceBefore; if (needs(node, parent)) lines++; // generated nodes can't add starting file whitespace if (!this.buf) lines = 0; } this.newline(lines); }; Printer.prototype.getComments = function getComments(key, node) { return node && node[key] || []; }; Printer.prototype.shouldPrintComment = function shouldPrintComment(comment) { if (this.format.shouldPrintComment) { return this.format.shouldPrintComment(comment.value); } else { if (comment.value.indexOf("@license") >= 0 || comment.value.indexOf("@preserve") >= 0) { return true; } else { return this.format.comments; } } }; Printer.prototype.printComment = function printComment(comment) { 
if (!this.shouldPrintComment(comment)) return; if (comment.ignore) return; comment.ignore = true; if (comment.start != null) { if (this.printedCommentStarts[comment.start]) return; this.printedCommentStarts[comment.start] = true; } this.catchUp(comment); // whitespace before this.newline(this.whitespace.getNewlinesBefore(comment)); var column = this.position.column; var val = this.generateComment(comment); if (column && !this.isLast(["\n", " ", "[", "{"])) { this._push(" "); column++; } // if (comment.type === "CommentBlock" && this.format.indent.adjustMultilineComment) { var offset = comment.loc && comment.loc.start.column; if (offset) { var newlineRegex = new RegExp("\\n\\s{1," + offset + "}", "g"); val = val.replace(newlineRegex, "\n"); } var indent = Math.max(this.indentSize(), column); val = val.replace(/\n/g, "\n" + _repeating2["default"](" ", indent)); } if (column === 0) { val = this.getIndent() + val; } // force a newline for line comments when retainLines is set in case the next printed node // doesn't catch up if ((this.format.compact || this.format.retainLines) && comment.type === "CommentLine") { val += "\n"; } // this._push(val); // whitespace after this.newline(this.whitespace.getNewlinesAfter(comment)); }; Printer.prototype.printComments = function printComments(comments /*:: ?: Array<Object>*/) { if (!comments || !comments.length) return; for (var _iterator = comments, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? 
_iterator : _getIterator(_iterator);;) { var _ref; if (_isArray) { if (_i >= _iterator.length) break; _ref = _iterator[_i++]; } else { _i = _iterator.next(); if (_i.done) break; _ref = _i.value; } var comment = _ref; this.printComment(comment); } }; return Printer; })(_buffer2["default"]); exports["default"] = Printer; var _arr = [require("./generators/template-literals"), require("./generators/expressions"), require("./generators/statements"), require("./generators/classes"), require("./generators/methods"), require("./generators/modules"), require("./generators/types"), require("./generators/flow"), require("./generators/base"), require("./generators/jsx")]; for (var _i2 = 0; _i2 < _arr.length; _i2++) { var generator = _arr[_i2]; _Object$assign(Printer.prototype, generator); } module.exports = exports["default"];<|fim▁end|>
value: comment
<|file_name|>windows8_1StateMachine.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from statemachine import _Statemachine<|fim▁hole|> class Windows8_1StateMachine(_Statemachine): def __init__(self, params): _Statemachine.__init__(self, params) def _list_share(self): return super(Windows8_1StateMachine, self)._list_share() def _list_running(self): return super(Windows8_1StateMachine, self)._list_running() def _list_drives(self): return super(Windows8_1StateMachine, self)._list_drives() def _list_network_drives(self): return super(Windows8_1StateMachine, self)._list_network_drives() def _list_sessions(self): return super(Windows8_1StateMachine, self)._list_sessions() def _list_scheduled_jobs(self): return super(Windows8_1StateMachine, self)._list_scheduled_jobs() def _list_network_adapters(self): return super(Windows8_1StateMachine, self)._list_network_adapters() def _list_arp_table(self): return super(Windows8_1StateMachine, self)._list_arp_table() def _list_route_table(self): return super(Windows8_1StateMachine, self)._list_route_table() def _list_sockets_network(self): return super(Windows8_1StateMachine, self)._list_sockets_network() def _list_sockets_services(self): return super(Windows8_1StateMachine, self)._list_services() def _list_kb(self): return super(Windows8_1StateMachine, self)._list_kb() def csv_list_drives(self): super(Windows8_1StateMachine, self)._csv_list_drives(self._list_drives()) def csv_list_network_drives(self): super(Windows8_1StateMachine, self)._csv_list_network_drives(self._list_network_drives()) def csv_list_share(self): super(Windows8_1StateMachine, self)._csv_list_share(self._list_share()) def csv_list_running_proccess(self): super(Windows8_1StateMachine, self)._csv_list_running_process(self._list_running()) def csv_hash_running_proccess(self): super(Windows10StateMachine, self)._csv_hash_running_process(self._list_running()) def csv_list_sessions(self): super(Windows8_1StateMachine, 
self)._csv_list_sessions(self._list_sessions()) def csv_list_arp_table(self): super(Windows8_1StateMachine, self)._csv_list_arp_table(self._list_arp_table()) def csv_list_route_table(self): super(Windows8_1StateMachine, self)._csv_list_route_table(self._list_route_table()) def csv_list_sockets_networks(self): super(Windows8_1StateMachine, self)._csv_list_sockets_network(self._list_sockets_network()) def csv_list_services(self): super(Windows8_1StateMachine, self)._csv_list_services(self._list_services()) def csv_list_kb(self): super(Windows8_1StateMachine, self)._csv_list_kb(self._list_kb())<|fim▁end|>
<|file_name|>new_graphics3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python ############################################################################### # NAME: new_graphics3.py # VERSION: 2.0.0b15 (18SEPTEMBER2006) # AUTHOR: John B. Cole, PhD (jcole@aipl.arsusda.gov) # LICENSE: LGPL ############################################################################### from PyPedal import pyp_demog from PyPedal import pyp_graphics from PyPedal import pyp_jbc from PyPedal import pyp_newclasses from PyPedal import pyp_nrm from PyPedal import pyp_metrics from PyPedal.pyp_utils import pyp_nice_time if __name__ == '__main__':<|fim▁hole|> example = pyp_newclasses.loadPedigree(optionsfile='new_graphics3.ini') if example.kw['messages'] == 'verbose': print '[INFO]: Calling pyp_graphics.new_draw_pedigree() at %s' % (pyp_nice_time()) pyp_graphics.new_draw_pedigree(example, gfilename='graphics3', gtitle='graphics3 pedigree', gorient='p') pyp_jbc.color_pedigree(example,gfilename='graphics3', ghatch='0', \ metric='sons', gtitle='Nodes are colored by number of sons.', \ gprog='dot', gname=1)<|fim▁end|>
print 'Starting pypedal.py at %s' % (pyp_nice_time())
<|file_name|>0006_case_rule_refactor.py<|end_file_name|><|fim▁begin|># Generated by Django 1.10.6 on 2017-04-04 12:54 import django.db.models.deletion from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('data_interfaces', '0005_remove_match_type_choices'), ] operations = [ migrations.CreateModel( name='CaseRuleAction', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='CaseRuleCriteria', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='ClosedParentDefinition', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('identifier', models.CharField(default='parent', max_length=126)), ('relationship_id', models.PositiveSmallIntegerField(default=1)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='CustomActionDefinition', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=126)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='CustomMatchDefinition', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=126)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='MatchPropertyDefinition', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('property_name', models.CharField(max_length=126)), ('property_value', models.CharField(max_length=126, null=True)), ('match_type', models.CharField(max_length=15)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='UpdateCaseDefinition', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), ('properties_to_update', jsonfield.fields.JSONField(default=list)), ('close_case', models.BooleanField()), ], options={ 'abstract': False, }, ),<|fim▁hole|> field=models.BooleanField(default=False), ), migrations.AddField( model_name='caserulecriteria', name='closed_parent_definition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_interfaces.ClosedParentDefinition'), ), migrations.AddField( model_name='caserulecriteria', name='custom_match_definition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_interfaces.CustomMatchDefinition'), ), migrations.AddField( model_name='caserulecriteria', name='match_property_definition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_interfaces.MatchPropertyDefinition'), ), migrations.AddField( model_name='caserulecriteria', name='rule', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_interfaces.AutomaticUpdateRule'), ), migrations.AddField( model_name='caseruleaction', name='custom_action_definition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_interfaces.CustomActionDefinition'), ), migrations.AddField( model_name='caseruleaction', name='rule', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_interfaces.AutomaticUpdateRule'), ), migrations.AddField( model_name='caseruleaction', name='update_case_definition', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_interfaces.UpdateCaseDefinition'), ), ]<|fim▁end|>
migrations.AddField( model_name='automaticupdaterule', name='migrated',
<|file_name|>cuboid3d.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use na::Vector3; use ncollide3d::shape::Cuboid; fn main() { let cuboid = Cuboid::new(Vector3::new(2.0f32, 1.0, 3.0)); assert!(cuboid.half_extents.x == 2.0); assert!(cuboid.half_extents.y == 1.0); assert!(cuboid.half_extents.z == 3.0); }<|fim▁end|>
extern crate nalgebra as na;
<|file_name|>cache.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #-*- coding: utf8 -*- # Copyright 2009-2012 Kamil Winczek <kwinczek@gmail.com> # # This file is part of series.py. # # series.py is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) # any later version. # # series.py is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # # You should have received a copy of the GNU General Public License along # with series.py. If not, see http://www.gnu.org/licenses/. import contextlib import sys import lxml.etree as etree import shelve import subprocess try: import urllib2 except ImportError: import urllib as urllib2 import time import tvs.show # Spinner implementation. @contextlib.contextmanager def spinning_distraction(spin): if spin: global p p = subprocess.Popen(['tvs_spin.py']) yield p.terminate() sys.stdout.write("\r") sys.stdout.flush() else: yield <|fim▁hole|> # --------------------------------------------------------------------- # # # # Class Cache # # # # --------------------------------------------------------------------- # class Cache(object): """ Cache implementation. Cache is a wraper class for Show class. It is capable of retrieving and storing data from tvrage.com. ttl contains date upto when object is valid. 
""" def __init__(self, keyword, options): self.keyword = keyword self.show = None self.options = options self.now = time.time() if self.options.cache: self.c = shelve.open(self.options.cachefile) self.i = shelve.open(self.options.cacheids) self.url_search = "http://services.tvrage.com/feeds/search.php?show=%s" % self.keyword self.showid = self.__get_show_id() self.url_full_show = "http://services.tvrage.com/feeds/full_show_info.php?sid=%s" % self.showid self.show = self.__get_show() if self.options.debug: print("Search URL: %s" % self.url_search) print("Shows full URL: %s" % self.url_full_show) def __del__(self): """If cache was used all files need to be closed.""" if self.options.cache: self.c.close() self.i.close() def __save_id_to_cache(self, showid): """Saves retrieved show's id to cache""" self.i[self.keyword] = showid def __save_show_to_cache(self, show): if not show: return False # Set TTL, add 12h (43200secs) to current time (12h TTL) self.c[str(self.showid)] = (self.now+43200, show) return True def __get_id_from_cache(self): try: return self.i[self.keyword] except: return None def __get_id_from_tvrage(self): try: with spinning_distraction(spin=self.options.spinner): return etree.fromstring(urllib2.urlopen(self.url_search).read()).xpath('//Results/show/showid')[0].text except KeyboardInterrupt: raise except: return None def __get_show_from_cache(self): try: return self.c[str(self.showid)] except: return (None, None) def __get_show_from_tvrage(self): try: with spinning_distraction(spin=self.options.spinner): return tvs.show.Show(etree.fromstring(urllib2.urlopen(self.url_full_show).read()), self.options) except KeyboardInterrupt: raise except: return None def __get_show_id(self): """Returns first found id from search list. 
""" # Try to get id from ids cache file if self.options.cache and not self.options.refresh: showid = self.__get_id_from_cache() if not showid: showid = self.__get_id_from_tvrage() if showid: self.__save_id_to_cache(showid) return showid return showid else: return showid elif self.options.refresh: showid = self.__get_id_from_tvrage() if showid: self.__save_id_to_cache(showid) return showid elif not self.options.cache: return self.__get_id_from_tvrage() else: showid = self.__get_id_from_tvrage() if showid: self.__save_id_to_cache(showid) return showid return None def __get_show(self): """Returns show instance with data from tvrage.""" if self.showid == None: # Previously not found show id return None if self.options.cache and not self.options.refresh: ttl, show = self.__get_show_from_cache() if not ttl and not self.show or ttl < self.now: show = self.__get_show_from_tvrage() self.__save_show_to_cache(show) elif self.options.refresh: show = self.__get_show_from_tvrage() self.__save_show_to_cache(show) # If no cache to be used. else: show = self.__get_show_from_tvrage() return show def get_show(self): return self.show<|fim▁end|>
<|file_name|>node-rusage.cc<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2011 by Jakub Lekstan <kuebzky@gmail.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ #include <v8.h> #include <node.h> #include <sys/time.h> #include <sys/resource.h> int globalWho = RUSAGE_SELF; static v8::Handle<v8::Value> get_r_usage(const v8::Arguments& args){ v8::HandleScope scope; int localWho = globalWho; if(args.Length() != 0){ bool isError = false; if(args[0]->IsNumber()){ v8::Local<v8::Integer> iWho = v8::Local<v8::Integer>::Cast(args[0]); localWho = (int)(iWho->Int32Value()); if(localWho != RUSAGE_SELF && localWho != RUSAGE_CHILDREN){ isError = true; } }else{ isError = true; } if(isError){ return v8::ThrowException(v8::Exception::TypeError(v8::String::New("First argument must be either a RUSAGE_SELF or RUSAGE_CHILDREN"))); } } rusage rusagedata; int status = getrusage(localWho, &rusagedata); if(status != 0){ scope.Close(v8::Null()); } v8::Local<v8::Object> data = v8::Object::New(); data->Set(v8::String::New("ru_utime.tv_sec"), v8::Number::New(rusagedata.ru_utime.tv_sec)); data->Set(v8::String::New("ru_utime.tv_usec"), v8::Number::New(rusagedata.ru_utime.tv_usec)); data->Set(v8::String::New("ru_stime.tv_sec"), v8::Number::New(rusagedata.ru_stime.tv_sec)); data->Set(v8::String::New("ru_stime.tv_usec"), v8::Number::New(rusagedata.ru_stime.tv_usec)); data->Set(v8::String::New("ru_maxrss"), v8::Number::New(rusagedata.ru_maxrss)); data->Set(v8::String::New("ru_ixrss"), v8::Number::New(rusagedata.ru_ixrss)); data->Set(v8::String::New("ru_idrss"), v8::Number::New(rusagedata.ru_idrss)); data->Set(v8::String::New("ru_isrss"), v8::Number::New(rusagedata.ru_isrss)); data->Set(v8::String::New("ru_minflt"), v8::Number::New(rusagedata.ru_minflt)); data->Set(v8::String::New("ru_majflt"), v8::Number::New(rusagedata.ru_majflt)); data->Set(v8::String::New("ru_nswap"), v8::Number::New(rusagedata.ru_nswap)); data->Set(v8::String::New("ru_inblock"), v8::Number::New(rusagedata.ru_inblock)); data->Set(v8::String::New("ru_oublock"), v8::Number::New(rusagedata.ru_oublock)); data->Set(v8::String::New("ru_msgsnd"), v8::Number::New(rusagedata.ru_msgsnd)); 
data->Set(v8::String::New("ru_msgrcv"), v8::Number::New(rusagedata.ru_msgrcv)); data->Set(v8::String::New("ru_nsignals"), v8::Number::New(rusagedata.ru_nsignals)); data->Set(v8::String::New("ru_nvcsw"), v8::Number::New(rusagedata.ru_nvcsw)); data->Set(v8::String::New("ru_nivcsw"), v8::Number::New(rusagedata.ru_nivcsw)); return scope.Close(data); } static v8::Handle<v8::Value> usage_cycles(const v8::Arguments& args){ v8::HandleScope scope; rusage rusagedata; int status = getrusage(globalWho, &rusagedata); if(status != 0){ return scope.Close(v8::Null()); } return scope.Close(v8::Number::New(rusagedata.ru_utime.tv_sec * 1e6 + rusagedata.ru_utime.tv_usec)); } static v8::Handle<v8::Value> who(const v8::Arguments& args){ v8::HandleScope scope; if(args.Length() != 0 && args[0]->IsNumber()){ v8::Local<v8::Integer> iWho = v8::Local<v8::Integer>::Cast(args[0]); int localWho = (int)(iWho->Int32Value()); if(localWho != RUSAGE_SELF && localWho != RUSAGE_CHILDREN){ return v8::ThrowException(v8::Exception::TypeError(v8::String::New("First argument must be either a RUSAGE_SELF or RUSAGE_CHILDREN"))); } globalWho = localWho; return scope.Close(v8::True()); }else{ return scope.Close(v8::False()); } } extern "C" void init (v8::Handle<v8::Object> target){ v8::HandleScope scope; NODE_SET_METHOD(target, "get", get_r_usage); NODE_SET_METHOD(target, "cycles", usage_cycles); NODE_SET_METHOD(target, "who", who); target->Set(v8::String::New("RUSAGE_SELF"), v8::Number::New(RUSAGE_SELF));<|fim▁hole|><|fim▁end|>
target->Set(v8::String::New("RUSAGE_CHILDREN"), v8::Number::New(RUSAGE_CHILDREN)); }
<|file_name|>boss_selin_fireheart.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2008-2016 TrinityCore <http://www.trinitycore.org/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "ScriptMgr.h" #include "ScriptedCreature.h" #include "magisters_terrace.h" enum Says { SAY_AGGRO = 0, SAY_ENERGY = 1, SAY_EMPOWERED = 2, SAY_KILL = 3, SAY_DEATH = 4, EMOTE_CRYSTAL = 5 }; enum Spells { // Crystal effect spells SPELL_FEL_CRYSTAL_DUMMY = 44329, SPELL_MANA_RAGE = 44320, // This spell triggers 44321, which changes scale and regens mana Requires an entry in spell_script_target // Selin's spells SPELL_DRAIN_LIFE = 44294, SPELL_FEL_EXPLOSION = 44314, SPELL_DRAIN_MANA = 46153 // Heroic only }; enum Phases { PHASE_NORMAL = 1, PHASE_DRAIN = 2 }; enum Events { EVENT_FEL_EXPLOSION = 1, EVENT_DRAIN_CRYSTAL, EVENT_DRAIN_MANA, EVENT_DRAIN_LIFE, EVENT_EMPOWER }; enum Misc { ACTION_SWITCH_PHASE = 1 }; class boss_selin_fireheart : public CreatureScript { public: boss_selin_fireheart() : CreatureScript("boss_selin_fireheart") { } struct boss_selin_fireheartAI : public BossAI { boss_selin_fireheartAI(Creature* creature) : BossAI(creature, DATA_SELIN) { _scheduledEvents = false; } void Reset() override { Crystals.clear(); me->GetCreatureListWithEntryInGrid(Crystals, NPC_FEL_CRYSTAL, 250.0f); for (Creature* creature : Crystals) { if (!creature->IsAlive()) creature->Respawn(); 
creature->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE); } _Reset(); CrystalGUID.Clear(); _scheduledEvents = false; } void DoAction(int32 action) override { switch (action) { case ACTION_SWITCH_PHASE: events.SetPhase(PHASE_NORMAL); events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2000, 0, PHASE_NORMAL); AttackStart(me->GetVictim()); me->GetMotionMaster()->MoveChase(me->GetVictim()); break; default: break; } } void SelectNearestCrystal() { if (Crystals.empty()) return; Crystals.sort(Trinity::ObjectDistanceOrderPred(me)); if (Creature* CrystalChosen = Crystals.front()) { Talk(SAY_ENERGY); Talk(EMOTE_CRYSTAL); DoCast(CrystalChosen, SPELL_FEL_CRYSTAL_DUMMY); CrystalGUID = CrystalChosen->GetGUID(); Crystals.remove(CrystalChosen); float x, y, z; CrystalChosen->GetClosePoint(x, y, z, me->GetObjectSize(), CONTACT_DISTANCE); events.SetPhase(PHASE_DRAIN); me->SetWalk(false); me->GetMotionMaster()->MovePoint(1, x, y, z); } } void ShatterRemainingCrystals() { if (Crystals.empty()) return; for (Creature* crystal : Crystals) { if (crystal && crystal->IsAlive()) crystal->KillSelf(); } } void EnterCombat(Unit* /*who*/) override { Talk(SAY_AGGRO); _EnterCombat(); events.SetPhase(PHASE_NORMAL); events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2100, 0, PHASE_NORMAL); } void KilledUnit(Unit* victim) override { if (victim->GetTypeId() == TYPEID_PLAYER) Talk(SAY_KILL);<|fim▁hole|> void MovementInform(uint32 type, uint32 id) override { if (type == POINT_MOTION_TYPE && id == 1) { Unit* CrystalChosen = ObjectAccessor::GetUnit(*me, CrystalGUID); if (CrystalChosen && CrystalChosen->IsAlive()) { CrystalChosen->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE); CrystalChosen->CastSpell(me, SPELL_MANA_RAGE, true); events.ScheduleEvent(EVENT_EMPOWER, 10000, PHASE_DRAIN); } } } void JustDied(Unit* /*killer*/) override { Talk(SAY_DEATH); _JustDied(); ShatterRemainingCrystals(); } void UpdateAI(uint32 diff) override { if (!UpdateVictim()) return; events.Update(diff); if 
(me->HasUnitState(UNIT_STATE_CASTING)) return; while (uint32 eventId = events.ExecuteEvent()) { switch (eventId) { case EVENT_FEL_EXPLOSION: DoCastAOE(SPELL_FEL_EXPLOSION); events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2000, 0, PHASE_NORMAL); break; case EVENT_DRAIN_CRYSTAL: SelectNearestCrystal(); _scheduledEvents = false; break; case EVENT_DRAIN_MANA: if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 45.0f, true)) DoCast(target, SPELL_DRAIN_MANA); events.ScheduleEvent(EVENT_DRAIN_MANA, 10000, 0, PHASE_NORMAL); break; case EVENT_DRAIN_LIFE: if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 20.0f, true)) DoCast(target, SPELL_DRAIN_LIFE); events.ScheduleEvent(EVENT_DRAIN_LIFE, 10000, 0, PHASE_NORMAL); break; case EVENT_EMPOWER: { Talk(SAY_EMPOWERED); Creature* CrystalChosen = ObjectAccessor::GetCreature(*me, CrystalGUID); if (CrystalChosen && CrystalChosen->IsAlive()) CrystalChosen->KillSelf(); CrystalGUID.Clear(); me->GetMotionMaster()->Clear(); me->GetMotionMaster()->MoveChase(me->GetVictim()); break; } default: break; } } if (me->GetPowerPct(POWER_MANA) < 10.f) { if (events.IsInPhase(PHASE_NORMAL) && !_scheduledEvents) { _scheduledEvents = true; uint32 timer = urand(3000, 7000); events.ScheduleEvent(EVENT_DRAIN_LIFE, timer, 0, PHASE_NORMAL); if (IsHeroic()) { events.ScheduleEvent(EVENT_DRAIN_CRYSTAL, urand(10000, 15000), 0, PHASE_NORMAL); events.ScheduleEvent(EVENT_DRAIN_MANA, timer + 5000, 0, PHASE_NORMAL); } else events.ScheduleEvent(EVENT_DRAIN_CRYSTAL, urand(20000, 25000), 0, PHASE_NORMAL); } } DoMeleeAttackIfReady(); } private: std::list<Creature*> Crystals; ObjectGuid CrystalGUID; bool _scheduledEvents; }; CreatureAI* GetAI(Creature* creature) const override { return GetInstanceAI<boss_selin_fireheartAI>(creature); }; }; class npc_fel_crystal : public CreatureScript { public: npc_fel_crystal() : CreatureScript("npc_fel_crystal") { } struct npc_fel_crystalAI : public ScriptedAI { npc_fel_crystalAI(Creature* creature) : ScriptedAI(creature) { } void 
JustDied(Unit* /*killer*/) override { if (InstanceScript* instance = me->GetInstanceScript()) { Creature* Selin = ObjectAccessor::GetCreature(*me, instance->GetGuidData(DATA_SELIN)); if (Selin && Selin->IsAlive()) Selin->AI()->DoAction(ACTION_SWITCH_PHASE); } } }; CreatureAI* GetAI(Creature* creature) const override { return GetInstanceAI<npc_fel_crystalAI>(creature); }; }; void AddSC_boss_selin_fireheart() { new boss_selin_fireheart(); new npc_fel_crystal(); }<|fim▁end|>
}
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms from selectable.forms import AutoCompleteSelectField from selectable.forms import AutoCompleteSelectWidget from opendata.catalog.lookups import CityLookup, CountyLookup from .models import Request class SearchForm(forms.Form): text = forms.CharField(required=False) class RequestForm(forms.ModelForm): county = AutoCompleteSelectField( lookup_class=CountyLookup, required=False, widget=AutoCompleteSelectWidget( lookup_class=CountyLookup, attrs={"class": "suggestions-hidden suggestions-county"}, ) ) city = AutoCompleteSelectField( lookup_class=CityLookup, required=False, widget=AutoCompleteSelectWidget( lookup_class=CityLookup,<|fim▁hole|> attrs={"class": "suggestions-hidden suggestions-city"}, ) ) class Meta: model = Request exclude = ('suggested_by', 'resources', 'rating', 'status', ) class Media: js = ( "suggestions/js/form.js", )<|fim▁end|>
<|file_name|>County.java<|end_file_name|><|fim▁begin|>package com.kenshin.windystreet.db; import org.litepal.crud.DataSupport; /** * Created by Kenshin on 2017/4/3. */ public class County extends DataSupport { private int id; //编号 private String countyName; //县名 private String weatherId; //对应的天气id private int cityId; //所属市的id public void setId(int id) { this.id = id; } public void setCountyName(String countyName) { this.countyName = countyName; } public void setWeatherId(String weatherId) { this.weatherId = weatherId; } public void setCityId(int cityId) {<|fim▁hole|> public int getId() { return id; } public String getCountyName() { return countyName; } public String getWeatherId() { return weatherId; } public int getCityId() { return cityId; } }<|fim▁end|>
this.cityId = cityId; }
<|file_name|>Folder.java<|end_file_name|><|fim▁begin|>/* * This file is part of Track It!. * Copyright (C) 2013 Henrique Malheiro * Copyright (C) 2015 Pedro Gomes * * TrackIt! is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Track It! is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Track It!. If not, see <http://www.gnu.org/licenses/>. * */ package com.trackit.business.domain; import java.util.ArrayList;<|fim▁hole|> import com.trackit.business.exception.TrackItException; import com.trackit.presentation.event.Event; import com.trackit.presentation.event.EventManager; import com.trackit.presentation.event.EventPublisher; public class Folder extends TrackItBaseType implements DocumentItem { private String name; private List<GPSDocument> documents; public Folder(String name) { super(); this.name = name; documents = new ArrayList<GPSDocument>(); } public String getName() { return name; } public void setName(String name) { this.name = name; } public List<GPSDocument> getDocuments() { return documents; } public void add(GPSDocument document) { documents.add(document); } public void remove(GPSDocument document) { documents.remove(document); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((name == null) ? 
0 : name.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Folder other = (Folder) obj; if (name == null) { if (other.name != null) return false; } else if (!name.equals(other.name)) return false; return true; } @Override public String toString() { return String.format("Folder [name=%s]", name); } @Override public void publishSelectionEvent(EventPublisher publisher) { EventManager.getInstance().publish(publisher, Event.FOLDER_SELECTED, this); } @Override public void accept(Visitor visitor) throws TrackItException { visitor.visit(this); } }<|fim▁end|>
import java.util.List;
<|file_name|>PBSQuery.py<|end_file_name|><|fim▁begin|># # Authors: Roy Dragseth (roy.dragseth@cc.uit.no) # Bas van der Vlies (basv@sara.nl) # # SVN INFO: # $Id$ # """ Usage: from PBSQuery import PBSQuery This class gets the info from the pbs_server via the pbs.py module for the several batch objects. All get..() functions return an dictionary with id as key and batch object as value There are four batch objects: - server - queue - job - node Each object can be handled as an dictionary and has several member functions. The second parameter is an python list and can be used if you are only interested in certain resources, see example There are the following functions for PBSQuery: job - getjob(job_id, attributes=<default is all>) getjobs(attributes=<default is all>) node - getnode(node_id, attributes=<default is all>) getnodes(attributes=<default is all>) queue - getqueue(queue_id, attributes=<default is all>) getqueues(attributes=<default is all>) server - get_serverinfo(attributes=<default is all>) Here is an example how to use the module: from PBSQuery import PBSQuery p = PBSQuery() nodes = p.getnodes() for name,node in nodes.items(): print name if node.is_free(): print node, node['state'] l = [ 'state', 'np' ] nodes = p.getnodes(l) for name,node in nodes.items(): print node, node['state'] The parameter 'attributes' is an python list of resources that you are interested in, eg: only show state of nodes l = list() l.append('state') nodes = p.getnodes(l) """ import pbs import UserDict import string import sys import re import types class PBSError(Exception): def __init__(self, msg=''): self.msg = msg Exception.__init__(self, msg) def __repr__(self): return self.msg __str__ = __repr__ class PBSQuery: # a[key] = value, key and value are data type string # OLD_DATA_STRUCTURE = False def __init__(self, server=None): if not server: self.server = pbs.pbs_default() else: self.server = server self._connect() ## this is needed for getjob a jobid is made off: # 
sequence_number.server (is not self.server) # self.job_server_id = list(self.get_serverinfo())[0] self._disconnect() def _connect(self): """Connect to the PBS/Torque server""" self.con = pbs.pbs_connect(self.server) if self.con < 0: str = "Could not make a connection with %s\n" %(self.server) raise PBSError(str) def _disconnect(self): """Close the PBS/Torque connection""" pbs.pbs_disconnect(self.con) self.attribs = 'NULL' def _list_2_attrib(self, list): """Convert a python list to an attrib list suitable for pbs""" self.attribs = pbs.new_attrl( len(list) ) i = 0 for attrib in list: # So we can user Resource attrib = attrib.split('.') self.attribs[i].name = attrib[0] i = i + 1 def _pbsstr_2_list(self, str, delimiter): """Convert a string to a python list and use delimiter as spit char""" l = sting.splitfields(str, delimiter) if len(l) > 1: return l def _list_2_dict(self, l, class_func): """ Convert a pbsstat function list to a class dictionary, The data structure depends on the function new_data_structure(). Default data structure is: class[key] = value, Where key and value are of type string Future release, can be set by new_data_structure(): - class[key] = value where value can be: 1. a list of values of type string 2. a dictionary with as list of values of type string. 
If values contain a '=' character eg: print node['np'] >> [ '2' ] print node['status']['arch'] >> [ 'x86_64' ] """ self.d = {} for item in l: new = class_func() self.d[item.name] = new new.name = item.name for a in item.attribs: if self.OLD_DATA_STRUCTURE: if a.resource:<|fim▁hole|> new[key] = a.value else: values = string.split(a.value, ',') sub_dict = string.split(a.value, '=') # We must creat sub dicts, only for specified # key values # if a.name in ['status', 'Variable_List']: for v in values: tmp_l = v.split('=') ## Support for multiple EVENT mesages in format [key=value:]+ # format eg: message=EVENT:sample.time=1288864220.003,EVENT:kernel=upgrade,cputotals.user=0 # message=ERROR <text> # if tmp_l[0] in ['message']: if tmp_l[1].startswith('EVENT:'): tmp_d = dict() new['event'] = class_func(tmp_d) message_list = v.split(':') for event_type in message_list[1:]: tmp_l = event_type.split('=') new['event'][ tmp_l[0] ] = tmp_l[1:] else: ## ERROR message # new['error'] = tmp_l [1:] elif tmp_l[0].startswith('EVENT:'): message_list = v.split(':') for event_type in message_list[1:]: tmp_l = event_type.split('=') new['event'][ tmp_l[0] ] = tmp_l[1:] else: ## Check if we already added the key # if new.has_key(a.name): new[a.name][ tmp_l[0] ] = tmp_l[1:] else: tmp_d = dict() tmp_d[ tmp_l[0] ] = tmp_l[1:] new[a.name] = class_func(tmp_d) else: ## Check if it is a resource type variable, eg: # - Resource_List.(nodes, walltime, ..) 
# if a.resource: if new.has_key(a.name): new[a.name][a.resource] = values else: tmp_d = dict() tmp_d[a.resource] = values new[a.name] = class_func(tmp_d) else: # Simple value # new[a.name] = values self._free(l) def _free(self, memory): """ freeing up used memmory """ pbs.pbs_statfree(memory) def _statserver(self, attrib_list=None): """Get the server config from the pbs server""" if attrib_list: self._list_2_attrib(attrib_list) else: self.attribs = 'NULL' self._connect() serverinfo = pbs.pbs_statserver(self.con, self.attribs, 'NULL') self._disconnect() self._list_2_dict(serverinfo, server) def get_serverinfo(self, attrib_list=None): self._statserver(attrib_list) return self.d def _statqueue(self, queue_name='', attrib_list=None): """Get the queue config from the pbs server""" if attrib_list: self._list_2_attrib(attrib_list) else: self.attribs = 'NULL' self._connect() queues = pbs.pbs_statque(self.con, queue_name, self.attribs, 'NULL') self._disconnect() self._list_2_dict(queues, queue) def getqueue(self, name, attrib_list=None): self._statqueue(name, attrib_list) try: return self.d[name] except KeyError, detail: return self.d def getqueues(self, attrib_list=None): self._statqueue('', attrib_list) return self.d def _statnode(self, select='', attrib_list=None, property=None): """Get the node config from the pbs server""" if attrib_list: self._list_2_attrib(attrib_list) else: self.attribs = 'NULL' if property: select = ':%s' %(property) self._connect() nodes = pbs.pbs_statnode(self.con, select, self.attribs, 'NULL') self._disconnect() self._list_2_dict(nodes, node) def getnode(self, name, attrib_list=None): self._statnode(name, attrib_list) try: return self.d[name] except KeyError, detail: return self.d def getnodes(self, attrib_list=None): self._statnode('', attrib_list) return self.d def getnodes_with_property(self, property, attrib_list=None): self._statnode('', attrib_list, property) return self.d def _statjob(self, job_name='', attrib_list=None): """Get the job 
config from the pbs server""" if attrib_list: self._list_2_attrib(attrib_list) else: self.attribs = 'NULL' self._connect() jobs = pbs.pbs_statjob(self.con, job_name, self.attribs, 'NULL') self._disconnect() self._list_2_dict(jobs, job) def getjob(self, name, attrib_list=None): ## To make sure we use the full name of a job; Changes a name # like 1234567 into 1234567.job_server_id # if len(name.split('.')) == 1 : name = name.split('.')[0] + "." + self.job_server_id self._statjob(name, attrib_list) try: return self.d[name] except KeyError, detail: return self.d def getjobs(self, attrib_list=None): self._statjob('', attrib_list) return self.d def get_server_name(self): return self.server def new_data_structure(self): """ Use the new data structure. Is now the default """ self.OLD_DATA_STRUCTURE = False def old_data_structure(self): """ Use the old data structure. This function is obselete and will be removed in a future release """ self.OLD_DATA_STRUCTURE = True class _PBSobject(UserDict.UserDict): TRUE = 1 FALSE = 0 def __init__(self, dictin = None): UserDict.UserDict.__init__(self) self.name = None if dictin: if dictin.has_key('name'): self.name = dictin['name'] del dictin['name'] self.data = dictin def get_value(self, key): if self.has_key(key): return self[key] else: return None def __repr__(self): return repr(self.data) def __str__(self): return str(self.data) def __getattr__(self, name): """ override the class attribute get method. 
Return the value from the Userdict """ try: return self.data[name] except KeyError: error = 'Attribute key error: %s' %(name) raise PBSError(error) ## Disabled for this moment, BvdV 16 July 2010 # #def __setattr__(self, name, value): # """ # override the class attribute set method only when the UserDict # has set its class attribute # """ # if self.__dict__.has_key('data'): # self.data[name] = value # else: # self.__dict__[name] = value def __iter__(self): return iter(self.data.keys()) def uniq(self, list): """Filter out unique items of a list""" uniq_items = {} for item in list: uniq_items[item] = 1 return uniq_items.keys() def return_value(self, key): """Function that returns a value independent of new or old data structure""" if isinstance(self[key], types.ListType): return self[key][0] else: return self[key] class job(_PBSobject): """PBS job class""" def is_running(self): value = self.return_value('job_state') if value == 'Q': return self.TRUE else: return self.FALSE def get_nodes(self, unique=None): """ Returns a list of the nodes which run this job format: * exec_host: gb-r10n14/5+gb-r10n14/4+gb-r10n14/3+gb-r10n14/2+gb-r10n14/1+gb-r10n14/0 * split on '+' and if uniq is set split on '/' """ nodes = self.get_value('exec_host') if isinstance(nodes, str): if nodes: nodelist = string.split(nodes,'+') if not unique: return nodelist else: l = list() for n in nodelist: t = string.split(n,'/') if t[0] not in l: l.append(t[0]) return l else: return list() else: l = list() for n in nodes: nlist = string.split(n,'+') if unique: for entry in nlist: t = string.split(entry,'/') if t[0] not in l: l.append(t[0]) else: l += nlist return l class node(_PBSobject): """PBS node class""" def is_free(self): """Check if node is free""" value = self.return_value('state') if value == 'free': return self.TRUE else: return self.FALSE def has_job(self): """Does the node run a job""" try: a = self['jobs'] return self.TRUE except KeyError, detail: return self.FALSE def get_jobs(self, 
unique=None): """Returns a list of the currently running job-id('s) on the node""" jobs = self.get_value('jobs') if jobs: if isinstance(jobs, str): jlist = re.compile('[^\\ /]\\d+[^/.]').findall( jobs ) if not unique: return jlist else: return self.uniq(jlist) else: job_re = re.compile('^(?:\d+/)?(.+)') l = list() if unique: for j in jobs: jobstr = job_re.findall(j.strip())[0] if jobstr not in l: l.append(jobstr) return l else: return jobs return list() class queue(_PBSobject): """PBS queue class""" def is_enabled(self): value = self.return_value('enabled') if value == 'True': return self.TRUE else: return self.FALSE def is_execution(self): value = self.return_value('queue_type') if value == 'Execution': return self.TRUE else: return self.FALSE class server(_PBSobject): """PBS server class""" def get_version(self): return self.get_value('pbs_version') def main(): p = PBSQuery() serverinfo = p.get_serverinfo() for server in serverinfo.keys(): print server, ' version: ', serverinfo[server].get_version() for resource in serverinfo[server].keys(): print '\t ', resource, ' = ', serverinfo[server][resource] queues = p.getqueues() for queue in queues.keys(): print queue if queues[queue].is_execution(): print '\t ', queues[queue] if queues[queue].has_key('acl_groups'): print '\t acl_groups: yes' else: print '\t acl_groups: no' jobs = p.getjobs() for name,job in jobs.items(): if job.is_running(): print job l = ['state'] nodes = p.getnodes(l) for name,node in nodes.items(): if node.is_free(): print node if __name__ == "__main__": main()<|fim▁end|>
key = '%s.%s' %(a.name, a.resource) else: key = '%s' %(a.name)
<|file_name|>NettyWorkerPoolBuilder.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4; import io.netty.channel.EventLoopGroup; import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import org.apache.camel.util.concurrent.CamelThreadFactory; /** * A builder to create Netty {@link io.netty.channel.EventLoopGroup} which can be used for sharing worker pools * with multiple Netty {@link NettyServerBootstrapFactory} server bootstrap configurations. 
*/ public final class NettyWorkerPoolBuilder { private String name = "NettyWorker"; private String pattern; private int workerCount; private boolean nativeTransport; private volatile EventLoopGroup workerPool; public void setName(String name) { this.name = name; } public void setPattern(String pattern) { this.pattern = pattern; } public void setWorkerCount(int workerCount) { this.workerCount = workerCount; } public void setNativeTransport(boolean nativeTransport) { this.nativeTransport = nativeTransport; } public NettyWorkerPoolBuilder withName(String name) { setName(name); return this; } public NettyWorkerPoolBuilder withPattern(String pattern) { setPattern(pattern); return this;<|fim▁hole|> } public NettyWorkerPoolBuilder withWorkerCount(int workerCount) { setWorkerCount(workerCount); return this; } public NettyWorkerPoolBuilder withNativeTransport(boolean nativeTransport) { setNativeTransport(nativeTransport); return this; } /** * Creates a new worker pool. */ public EventLoopGroup build() { int count = workerCount > 0 ? workerCount : NettyHelper.DEFAULT_IO_THREADS; if (nativeTransport) { workerPool = new EpollEventLoopGroup(count, new CamelThreadFactory(pattern, name, false)); } else { workerPool = new NioEventLoopGroup(count, new CamelThreadFactory(pattern, name, false)); } return workerPool; } /** * Shutdown the created worker pool */ public void destroy() { if (workerPool != null) { workerPool.shutdownGracefully(); workerPool = null; } } }<|fim▁end|>
<|file_name|>diffu.py<|end_file_name|><|fim▁begin|>from collections import deque from sys import stdout import re contextLines = 3 class DiffLines: """A single span of lines from a chunk of diff, used to store either the original or the changed lines""" def __init__(self, start, lines): """Note: end is inclusive""" self.start = start # prepopulate end, which for empty line sets is one less than the start self.end = start + len(lines) self.lines = lines def take(self, n): if n > len(self.lines): raise ValueError('not enough lines remaining') piece = DiffLines(self.start, self.lines[:n]) self.start += n self.lines = self.lines[n:] return piece def bump(self, n): self.start += n self.end += n def count(self): return self.end - self.start def write(self, output, prefix=''): for line in self.lines: output.write(prefix) output.write(line) def isEmpty(self): return self.count() == 0 def setStart(self, start): # set end before count() is messed up self.end = start + self.count() self.start = start class DiffChunk: """A single piece of diff, original and changed line spans both""" def __init__(self, original, changed, preContext=None, postContext=None): self.original = original self.changed = changed if preContext is None: self.preContext = [] else: self.preContext = preContext[-contextLines:] if postContext is None: self.postContext = [] else: self.postContext = postContext[:contextLines] def take(self, n, m=None): if m is None: m = n retOrig = self.original.take(n) retPost = self.original.lines + self.postContext retPost = retPost[:contextLines] ret = DiffChunk(retOrig, self.changed.take(m), self.preContext, retPost) self.preContext += ret.original.lines self.preContext = self.preContext[-contextLines:] return ret def delta(self): """Determine how many lines this change adds""" return self.changed.count() - self.original.count() def update(self, other): """Takes the other patch chunk and assumes that it's been applied. 
Returns True if changes were made""" if other.original.start <= self.original.start: # overlap on the preContext part #self.original.bump(other.delta()) overlap = other.original.end - (self.original.start - len(self.preContext)) if overlap > 0: overlapstart = max(0, overlap - other.original.count()) self.preContext[overlapstart:overlap] = other.changed.lines self.preContext = self.preContext[-contextLines:] return True if other.original.end >= self.original.end: # overlap on the postContext part overlap = self.original.end + len(self.postContext) - other.original.start if overlap > 0: oend = len(self.postContext) - overlap + other.original.count() self.postContext[-overlap:oend] = other.changed.lines self.postContext = self.postContext[:contextLines] return True return False def resetChangedLineStart(self): """When taken on its own, both the original and changed lines start at the same line number. This makes it so.""" self.changed.setStart(self.original.start) def bumpOriginal(self, other): if other.changed.start <= self.original.start: self.original.bump(other.delta()) def bumpChanged(self, other): """Takes the other patch and assumes that it's in the same patch set. When patches are grouped together, the line counts on the changed end need to be incremented based on what has come before. """ if other.original.end < self.original.start: self.changed.bump(other.delta()) def contextOverlap(self, other): """If other follows this, return the amount of overlap in the context parts. If this is positive, the chunks will have to be merged for output. """ endOfSelf = self.original.end + len(self.postContext) startOfOther = other.original.start - len(other.preContext) return endOfSelf - startOfOther def saveContext(line, context, pendingChunks): """save a line of context. 
sometimes this gives a pending chunk enough trailing context to be complete, so return true when that happens so that the chunk can be emitted""" if context is not None: context.append(line) context = context[-contextLines:] for chunk in pendingChunks: if len(chunk.postContext) < contextLines: chunk.postContext.append(line) # only the first chunk will be finished, return true iff it is return len(pendingChunks) > 0 and len(pendingChunks[0].postContext) >= contextLines def parseDiff(input): line = input.readline() while line != '' and line[:3] != '---': line = input.readline() line = input.readline() if line[:3] == '+++': line = input.readline() headerRegex = re.compile(r'^@@ -(\d+),\d+ \+(\d+),\d+ @@') pendingChunks = deque() while line != '': operation, remainder = line[0], line[1:] if operation == '@': for chunk in pendingChunks: yield chunk pendingChunks.clear() context = [] original = [] changed = [] m = headerRegex.match(line) if m is None: raise RuntimeError('can\'t parse @@ line') originalLine, changedLine = map(int, (m.group(1), m.group(2))) elif operation == '-': original.append(remainder) # don't add to context, so that we don't get original # lines mixed up in there, we'll need to add these lines back later # though in case there a multiple chunks in the one section if saveContext(remainder, None, pendingChunks): yield pendingChunks.popleft() elif operation == '+': changed.append(remainder) elif operation == ' ': if len(original) > 0 or len(changed) > 0: pendingChunks.append( DiffChunk(DiffLines(originalLine, original), DiffLines(changedLine, changed), context))<|fim▁hole|> context += original originalLine += len(original) changedLine += len(changed) original = [] changed = [] originalLine += 1 changedLine += 1 if saveContext(remainder, context, pendingChunks): yield pendingChunks.popleft() else: raise RuntimeError('unknown diff character %s' % operation) line = input.readline() for chunk in pendingChunks: yield chunk def writeMergedChunks(chunks, 
output): prev = None totalOriginal = 0 totalChanged = 0 for c in chunks: contextSize = len(c.preContext) + len(c.postContext) if prev is not None: contextSize -= prev.contextOverlap(c) totalOriginal += c.original.count() + contextSize totalChanged += c.changed.count() + contextSize prev = c output.write("@@ -%d,%d +%d,%d @@\n" % (chunks[0].original.start - len(chunks[0].preContext), totalOriginal, chunks[0].changed.start - len(chunks[0].preContext), totalChanged)) prev = None for c in chunks: overlap = 0 if prev is not None: overlap = prev.contextOverlap(c) removed = min(len(prev.postContext), overlap) overlap -= removed context = prev.postContext[:-removed] else: context = [] context += c.preContext[overlap:] for cline in context: output.write(' ') output.write(cline) c.original.write(output, '-') c.changed.write(output, '+') prev = c for cline in prev.postContext: output.write(' ') output.write(cline)<|fim▁end|>
<|file_name|>debugging.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # coding=utf-8 """ This module, debugging.py, will contain code related to debugging (such as printing error messages). """ #import sys #sys.path.insert(0, '/home/dev_usr/urbtek') #from universal_code import system_operations as so class MyException(Exception): """ Just something useful to have to throw some of my own custom exception. """ pass class ParameterException(Exception): """ A custom exception for when a function receives bad parameter data. """ def __init__(self, message): super(ParameterException, self).__init__(message) class AbstractMethodNotImplementedException(Exception): """ A custom exception for when a function gets called that hasn't been set in a child class. """ def __init(self, message): super(AbstractMethodNotImplementedException, self).__init__(message) def raise_exception(exception, message): raise exception(message) TCP_LOCAL_HOST = 'tcp://127.0.0.1:' LOCAL_HOST = '127.0.0.1' NEXUS_DEV_RECEIVE_PORT = 40000 NEXUS_DEV_MANUAL_COMMUNICATION_PORT = 40001 NEXUS_DEV_AUTOMATED_COMMUNICATION_PORT = 40002 starting_port = NEXUS_DEV_AUTOMATED_COMMUNICATION_PORT + 1 def get_a_free_port(): global starting_port # We can assume ports are free because ports above 30000 have been sealed off. # TODO: THIS WILL BREAK WHEN MORE THAN DEV EXISTS. starting_port += 1 return starting_port - 1 # Terminal font coloring and styling. 
class TextColors: HEADER = '\033[95m' OK_BLUE = '\033[94m' OK_GREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def print_text_with_color(text, color, end=None): if end is None: print(color + text + TextColors.ENDC + '\n') else:<|fim▁hole|> print(color + text + TextColors.ENDC, end='') def terminate(termination_message=''): if termination_message is '': print_text_with_color('Program termination has been initiated, good bye!', TextColors.FAIL) else: print_text_with_color(termination_message, TextColors.WARNING, '') if not termination_message.endswith('.'): print_text_with_color('. The program will now terminate.', TextColors.FAIL) else: print_text_with_color(' The program will now terminate.', TextColors.FAIL) exit()<|fim▁end|>
<|file_name|>matrix-tween.js<|end_file_name|><|fim▁begin|>import { Tween } from '../core'; import { mat4 } from '../math'; export class MatrixTween extends Tween { action() { for (let i = 0; i < this.from.length; i++) { this.object[i] = this.from[i] + this.current_step * (this.to[i] - this.from[i]); } } pre_start() {<|fim▁hole|> this.from = mat4.clone(this.object); } }<|fim▁end|>
super.pre_start();
<|file_name|>kifu.py<|end_file_name|><|fim▁begin|>class Kifu: def __init__(self): self.kifu = []<|fim▁hole|> def add(self, from_x, from_y, to_x, to_y, promote, koma): self.kifu.append((from_x, from_y, to_x, to_y, promote, koma)) def pop(self): return self.kifu.pop()<|fim▁end|>
<|file_name|>act_twitter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # TODO prog_base.py - A starting template for Python scripts # # Copyright 2013 Robert B. Hawkins # """ SYNOPSIS TODO prog_base [-h,--help] [-v,--verbose] [--version] DESCRIPTION TODO This describes how to use this script. This docstring will be printed by the script if there is an error or if the user requests help (-h or --help). EXAMPLES TODO: Show some examples of how to use this script. EXIT STATUS TODO: List exit codes AUTHOR Rob Hawkins <webwords@txhawkins.net> LICENSE This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. VERSION 1.0.0 """ __author__ = "Rob Hawkins <webwords@txhawkins.net>" __version__ = "1.0.0" __date__ = "2013.12.01" # Version Date Notes # ------- ---------- ------------------------------------------------------- # 1.0.0 2013.12.01 Starting script template # import sys, os, traceback, argparse import time import re #from pexpect import run, spawn def test (): global options, args # TODO: Do something more interesting here... print 'Hello from the test() function!' def main (): global options, args # TODO: Do something more interesting here... print 'Hello world!' 
if __name__ == '__main__': try: start_time = time.time() #parser = argparse.ArgumentParser(description="This is the program description", usage=globals()['__doc__']) parser = argparse.ArgumentParser(description='This is the program description') parser.add_argument('--version', action='version', version='%(prog)s v'+__version__) parser.add_argument ('-v', '--verbose', action='store_true', help='produce verbose output') parser.add_argument ('-t', '--test', action='store_true', help='run test suite') args = parser.parse_args() #if len(args) < 1: # parser.error ('missing argument') if args.verbose: print time.asctime() if args.test: test() else: main()<|fim▁hole|> sys.exit(0) except KeyboardInterrupt, e: # Ctrl-C raise e except SystemExit, e: # sys.exit() raise e except Exception, e: print 'ERROR, UNEXPECTED EXCEPTION' print str(e) traceback.print_exc() os._exit(1)<|fim▁end|>
if args.verbose: print time.asctime() if args.verbose: print 'TOTAL TIME IN MINUTES:', if args.verbose: print (time.time() - start_time) / 60.0
<|file_name|>KMeans.java<|end_file_name|><|fim▁begin|>package pl.edu.uwm.wmii.visearch.clustering; /* * * Z linii poleceń * /usr/local/mahout/bin/mahout kmeans -i kmeans/data1/in -c kmeans/data1/cl -o kmeans/data1/out -x 10 -k 2 -ow -cl * opcja -ow nadpisuje katalog wyjściowy (nie trzeba ręcznie kasować) * opcja -cl generuje katalog clusteredPoints, zawierający informację o tym który punt do którego klastra * /usr/local/mahout/bin/mahout clusterdump -i kmeans/data1/out/clusters-*-final * * */ import java.io.File; import java.io.IOException; import java.security.InvalidKeyException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.sql.PreparedStatement; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import java.util.StringTokenizer; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.mahout.clustering.Cluster; import org.apache.mahout.clustering.conversion.InputDriver; import org.apache.mahout.clustering.iterator.ClusterWritable; import org.apache.mahout.clustering.kmeans.KMeansDriver; import org.apache.mahout.clustering.kmeans.RandomSeedGenerator; import org.apache.mahout.common.HadoopUtil; import org.apache.mahout.common.Pair; import org.apache.mahout.common.distance.DistanceMeasure; import org.apache.mahout.common.distance.EuclideanDistanceMeasure; import org.apache.mahout.common.iterator.sequencefile.PathFilters; import org.apache.mahout.common.iterator.sequencefile.PathType; import org.apache.mahout.common.iterator.sequencefile.SequenceFileDirIterable; import org.apache.mahout.common.iterator.sequencefile.SequenceFileDirIterator; import 
org.apache.mahout.common.iterator.sequencefile.SequenceFileDirValueIterable; import org.apache.mahout.math.DenseVector; import org.apache.mahout.math.NamedVector; import org.apache.mahout.math.RandomAccessSparseVector;<|fim▁hole|>import org.apache.mahout.math.VectorWritable; import org.apache.mahout.math.Vector; import org.apache.mahout.utils.clustering.ClusterDumper; import org.apache.mahout.vectorizer.SparseVectorsFromSequenceFiles; import org.apache.mahout.clustering.classify.ClusterClassificationDriver; import org.apache.mahout.clustering.classify.WeightedVectorWritable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.SequenceFile; import pl.edu.uwm.wmii.visearch.core.ConfigFile; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; public class KMeans { private static final Logger log = LoggerFactory.getLogger(KMeans.class); private static final Path BASE_DIR = new Path("visearch"); private static final Path DESCRIPTORS_DIR = new Path("visearch/descriptors"); private static final Path DICTIONARY_DIR = new Path("visearch/dictionary"); private static final Path VISUAL_WORDS_DIR = new Path( "visearch/visualwords"); private static final Path REPRESENTATIONS_DIR = new Path( "visearch/representations"); private static void createInput(Configuration conf, ConfigFile configFile, Path outputDir) throws Exception { String descriptorsDir = configFile.get("descriptorsDir") + "/SIFT"; File files = new File(descriptorsDir); 
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); // wyczysc katalog z deskryptorami FileSystem fs = outputDir.getFileSystem(conf); fs.delete(outputDir, true); fs.mkdirs(outputDir); Path outputFile = new Path(outputDir, "all-descriptors"); SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, outputFile, Text.class, VectorWritable.class); // key:value pair for output file Text key = new Text(); VectorWritable val = new VectorWritable(); // procedura tworzaca pojedynczy duzy plik z deskryptorami dla kazdego // obrazka // k: [hash obrazka]:[kolejny numer deskryptora] // v: [deskryptor, 128 elementow dla SIFT'a] int totalFiles = 0; int badFiles = 0; for (File f : files.listFiles()) { totalFiles++; String docId = f.getName().split("\\.")[0]; log.info(String.valueOf(totalFiles)); try { Document doc = dBuilder.parse(f); doc.getDocumentElement().normalize(); NodeList nList = doc.getElementsByTagName("desc"); for (int i = 0; i < nList.getLength(); i++) { String csv = nList.item(i).getTextContent(); String[] csvParts = csv.split(","); double[] data = new double[csvParts.length]; for (int j = 0; j < csvParts.length; j++) { data[j] = Integer.parseInt(csvParts[j].trim()); } StringBuilder sb = new StringBuilder(); sb.append(docId).append(":").append(i); key.set(sb.toString()); val.set(new DenseVector(data)); writer.append(key, val); } } catch (Exception e) { badFiles++; System.out.println(badFiles+"/"+totalFiles+" "+e); } } writer.close(); System.out.println("Done making a bigfile, #files: "+(totalFiles-badFiles)); } private static void runClustering(Configuration conf, ConfigFile configFile) throws IOException, ClassNotFoundException, InterruptedException { FileSystem fs = FileSystem.get(conf); Path clusters = new Path(BASE_DIR, new Path("initial-clusters")); fs.delete(DICTIONARY_DIR, true); fs.mkdirs(DICTIONARY_DIR); DistanceMeasure measure = new EuclideanDistanceMeasure(); int k = 
configFile.get("dictionarySize",100); double convergenceDelta = configFile.get("dictionaryConvergenceDelta",0.001); int maxIterations = configFile.get("dictionaryMaxIterations",10); // Random clusters clusters = RandomSeedGenerator.buildRandom(conf, DESCRIPTORS_DIR, clusters, k, measure); log.info("Random clusters generated, running K-Means, k="+k+" maxIter="+maxIterations); log.info("KMeansDriver.run(..."); log.info(DESCRIPTORS_DIR.toString()); log.info(clusters.toString()); log.info(DICTIONARY_DIR.toString()); log.info("....)"); KMeansDriver.run(conf, DESCRIPTORS_DIR, clusters, DICTIONARY_DIR, measure, convergenceDelta, maxIterations, true, 0.0, VM.RunSequential()); log.info("KMeans done"); } /** * @param args * @throws Exception */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); /* * TODO: musze jakos ustawic sciezki dla jar'a do /usr/local/hadoop/conf * bo KMeansDriver nie widzi ustawien hdfs i zapisuje wyniki klasteryzacji * do lokalnego katalogu * File files = new File("/usr/local/hadoop/conf"); for (File f : files.listFiles()) { System.out.println(f.getAbsolutePath()); conf.addResource(f.getAbsolutePath()); }*/ log.info("Configuration: "+conf.toString()); log.info("fs.default.name: "+conf.get("fs.default.name")); FileSystem fs = FileSystem.get(conf); ConfigFile configFile = new ConfigFile("settings.cfg"); boolean skipCreatingDictionary = false; try { List<String> largs = Arrays.asList(args); if (largs.contains("skipdict")) { skipCreatingDictionary = true; } } catch (Exception e) { } if (!skipCreatingDictionary) { if (VM.RunSequential()) { System.out.println("Running as SEQ"); } else { System.out.println("Running as MR"); } // stworz pliki z deskryptorami na podstawie xml'i // TODO: najlepiej zeby Anazyler zapisywal pliki od razu do hdfs (daily basis?) 
createInput(conf, configFile, DESCRIPTORS_DIR); // uruchom K-Means dla deskryptorow runClustering(conf, configFile); } else { log.info("Skipped creating dictionary"); } ImageToTextDriver.run(conf, DESCRIPTORS_DIR, DICTIONARY_DIR, VISUAL_WORDS_DIR, VM.RunSequential()); String dbUrl = configFile.get("dbUrl"); String dbUser = configFile.get("dbUser"); String dbPass = configFile.get("dbPass"); Connection dbConnection = DriverManager.getConnection("jdbc:" + dbUrl, dbUser, dbPass); log.info("Connected to {}", dbUrl); Statement statement = dbConnection.createStatement(); statement.executeUpdate("DELETE FROM ImageRepresentations"); statement.executeUpdate("DELETE FROM IFS"); for (Pair<Text, Text> entry : new SequenceFileDirIterable<Text, Text>( VISUAL_WORDS_DIR, PathType.LIST, conf)) { String docId = entry.getFirst().toString(); String line = entry.getSecond().toString(); StringTokenizer tokenizer = new StringTokenizer(line); Map<Integer, Integer> termFreq = new TreeMap<Integer, Integer>(); while (tokenizer.hasMoreTokens()) { int key = Integer.parseInt(tokenizer.nextToken()); if (termFreq.containsKey(key)) { termFreq.put(key, termFreq.get(key) + 1); } else { termFreq.put(key, 1); } } saveToDb(docId, termFreq, dbConnection); } dbConnection.close(); /* * MyClusterClassificationDriver .run(conf, DESCRIPTORS_DIR, * DICTIONARY_DIR, VISUAL_WORDS_DIR, 0.0, true, VM.RunSequential()); */ /* * Albo stworze wlasny map-reduce, ktory utworzy histogramy TF * * Albo zapisze obrazki jako tekst, i zapuszce na nich narzedzia * dostepne w Mahout org.apache.mahout.text.SequenceFilesFromDirectory * org.apache.mahout.vectorizer.SparseVectorsFromSequenceFiles */ // BagOfWordsDriver.run(conf, VISUAL_WORDS_DIR, REPRESENTATIONS_DIR, // VM.RunSequential()); /* * Path representationsFile = new Path(REPRESENTATIONS_DIR, "part-0"); * SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, * representationsFile, Text.class, VectorWritable.class); Text key = * new Text(); VectorWritable val = new 
VectorWritable(); * * RandomAccessSparseVector freq = new RandomAccessSparseVector(10); for * (Pair<IntWritable, Text> entry : new * SequenceFileDirIterable<IntWritable, Text>( VISUAL_WORDS_DIR, * PathType.LIST, conf)) { int idx = entry.getFirst().get(); * freq.incrementQuick(idx, 1); } */ /* * // stworzenie histogramu i zapisanie do pliku jako sparse vector for * (FileStatus f : fs.listStatus(VISUAL_WORDS_DIR)) { if (f.isDir()) { * RandomAccessSparseVector freq = new RandomAccessSparseVector(10); for * (Pair<IntWritable, WeightedVectorWritable> entry : new * SequenceFileDirIterable<IntWritable, WeightedVectorWritable>( new * Path(f.getPath(), "part-*"), PathType.GLOB, conf)) { int idx = * entry.getFirst().get(); freq.incrementQuick(idx, 1); } * key.set(f.getPath().getName()); val.set(freq); writer.append(key, * val); * * log.info("REP: {}",key); } } writer.close(); */ // saveToDb(conf, configFile); log.info("Done"); } private static void saveToDb(String docId, Map<Integer, Integer> termFreq, Connection dbConnection) throws InvalidKeyException, SQLException, IOException { String sql; PreparedStatement ps; // build json string and IFS Iterator<Entry<Integer, Integer>> it = termFreq.entrySet().iterator(); Map.Entry<Integer, Integer> e; String json = "{"; while (it.hasNext()) { e = it.next(); json += "\"" + e.getKey() + "\":\"" + e.getValue() + "\""; if (it.hasNext()) { json += ", "; } // save to IFS as well sql = "INSERT INTO IFS SELECT ?, ImageId FROM Images WHERE FileName LIKE ?"; ps = dbConnection.prepareStatement(sql); ps.setInt(1, e.getKey()); ps.setString(2, docId + "%"); ps.executeUpdate(); } json += "}"; System.out.println(termFreq); System.out.println(json); sql = "INSERT INTO ImageRepresentations SELECT ImageId, ? FROM Images WHERE FileName LIKE ?"; ps = dbConnection.prepareStatement(sql); ps.setString(1, json); ps.setString(2, docId.toString() + "%"); ps.executeUpdate(); } }<|fim▁end|>
import org.apache.mahout.math.SequentialAccessSparseVector;
<|file_name|>util.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # See: # https://github.com/codeforamerica/pittsburgh-purchasing-suite/blob/master/purchasing_test/unit/util.py import datetime from feedback.user.models import User def create_a_user(email='foo@foo.com'): return User(email=email, first_name='foo', last_name='foo') def insert_a_user(email='foo@foo.com'): user = create_a_user(email)<|fim▁hole|><|fim▁end|>
user.save() return user
<|file_name|>bridge.controller.js<|end_file_name|><|fim▁begin|>/** * Using Rails-like standard naming convention for endpoints. * GET /api/bridges -> index * POST /api/bridges -> create * GET /api/bridges/:id -> show * PUT /api/bridges/:id -> upsert * PATCH /api/bridges/:id -> patch * DELETE /api/bridges/:id -> destroy */ 'use strict'; import jsonpatch from 'fast-json-patch'; import Bridge from './bridge.model'; function respondWithResult(res, statusCode) { statusCode = statusCode || 200; return function(entity) { if (entity) { return res.status(statusCode).json(entity); } return null; }; } function patchUpdates(patches) { return function(entity) { try { // eslint-disable-next-line prefer-reflect jsonpatch.apply(entity, patches, /*validate*/ true);<|fim▁hole|> } catch(err) { return Promise.reject(err); } return entity.save(); }; } function removeEntity(res) { return function(entity) { if (entity) { return entity.remove() .then(() => { res.status(204).end(); }); } }; } function handleEntityNotFound(res) { return function(entity) { if (!entity) { res.status(404).end(); return null; } return entity; }; } function handleError(res, statusCode) { statusCode = statusCode || 500; return function(err) { res.status(statusCode).send(err); }; } // Gets a list of Bridges export function index(req, res) { return Bridge.find().exec() .then(respondWithResult(res)) .catch(handleError(res)); } // Gets a single Bridge from the DB export function show(req, res) { return Bridge.findById(req.params.id).exec() .then(handleEntityNotFound(res)) .then(respondWithResult(res)) .catch(handleError(res)); } // Creates a new Bridge in the DB export function create(req, res) { return Bridge.create(req.body) .then(respondWithResult(res, 201)) .catch(handleError(res)); } // Upserts the given Bridge in the DB at the specified ID export function upsert(req, res) { if (req.body._id) { Reflect.deleteProperty(req.body, '_id'); } return Bridge.findOneAndUpdate({_id: req.params.id}, req.body, {new: true, 
upsert: true, setDefaultsOnInsert: true, runValidators: true}).exec() .then(respondWithResult(res)) .catch(handleError(res)); } // Updates an existing Bridge in the DB export function patch(req, res) { if (req.body._id) { Reflect.deleteProperty(req.body, '_id'); } return Bridge.findById(req.params.id).exec() .then(handleEntityNotFound(res)) .then(patchUpdates(req.body)) .then(respondWithResult(res)) .catch(handleError(res)); } // Deletes a Bridge from the DB export function destroy(req, res) { return Bridge.findById(req.params.id).exec() .then(handleEntityNotFound(res)) .then(removeEntity(res)) .catch(handleError(res)); }<|fim▁end|>
<|file_name|>IInteractivity.d.ts<|end_file_name|><|fim▁begin|>import type { InteractivityDetect } from "../../../Enums";<|fim▁hole|> detect_on: InteractivityDetect | keyof typeof InteractivityDetect; detectsOn: InteractivityDetect | keyof typeof InteractivityDetect; events: IEvents; modes: IModes; }<|fim▁end|>
import type { IEvents } from "./Events/IEvents"; import type { IModes } from "./Modes/IModes"; export interface IInteractivity {
<|file_name|>network_test.py<|end_file_name|><|fim▁begin|># Copyright (C) 2008-2010 INRIA - EDF R&D # Author: Damien Garaud # # This file is part of the PuppetMaster project. It checks the module # 'network'. # # This script is free; you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. import sys import socket import unittest from puppetmaster import network test_method_name = ['testInit', 'testGetValue', 'testUsedMemory', 'testAvailableHost', 'testLaunchCommand'] class NetworkTestCase(unittest.TestCase): def __init__(self, methodName='runTest', host_file = None, forced_ssh_config = False): unittest.TestCase.__init__(self, methodName) self.host_file = host_file self.forced_ssh_config = forced_ssh_config # If there is file. if self.host_file == None: self.is_file = False else: self.is_file = True def setUp(self): import random if self.is_file: self.net = network.Network(self.host_file, self.forced_ssh_config) # Just local host. self.net_local = network.Network() # The command which will be launched. self.command = "echo 'Hello World!'" def tearDown(self): pass def testInit(self): # Checks the name and the number of cpu. # For the local host. self.assertTrue(self.net_local.hosts[0].name == socket.gethostname()) self.assertTrue(self.net_local.GetNhost() == 1) self.assertTrue(self.net_local.hosts[0].connection) # Is there a file? if self.is_file: self.assertTrue(self.net.GetNhost() > 0) self.assertTrue(self.net.GetConnectedHostNumber() > 0) # Wrong argument.<|fim▁hole|> # An 'network' instance takes a list 'host' instance, list of string # or a file. 
self.assertRaises(ValueError, network.Network, 1) self.assertRaises(ValueError, network.Network, []) self.assertRaises(ValueError, network.Network, [1,2]) self.assertRaises(ValueError, network.Network, 'no_file') def testGetValue(self): # For the local host. host_name = self.net_local.GetHostNames() proc_num = self.net_local.GetProcessorNumber() connected_num = self.net_local.GetConnectedHostNumber() # 'host_name' must be a list of string. self.assertTrue(isinstance(host_name, list)) self.assertTrue(isinstance(host_name[0], str)) # 'proc_num' must be a list of tuples (hostname, Nproc) self.assertTrue(isinstance(proc_num, list)) self.assertTrue(isinstance(proc_num[0], tuple)) self.assertTrue(isinstance(proc_num[0][0], str)) self.assertTrue(isinstance(proc_num[0][1], int)) # 'connected_num' must be an integer greater than 0. self.assertTrue(isinstance(connected_num, int)) # Checks size. self.assertTrue(len(host_name) > 0) self.assertTrue(len(proc_num[0]) == 2) self.assertTrue(connected_num > 0) # For a list of hosts. if self.is_file: host_name = self.net.GetHostNames() proc_num = self.net.GetProcessorNumber() connected_num = self.net.GetConnectedHostNumber() # 'host_name' must be a list of string. self.assertTrue(isinstance(host_name, list)) self.assertTrue(isinstance(host_name[0], str)) # 'proc_num' must be a list of tuples (hostname, Nproc) self.assertTrue(isinstance(proc_num, list)) self.assertTrue(isinstance(proc_num[0], tuple)) self.assertTrue(isinstance(proc_num[0][0], str)) self.assertTrue(isinstance(proc_num[0][1], int)) # 'connected_num' must be an integer greater than 0. self.assertTrue(isinstance(connected_num, int)) # Checks size. self.assertTrue(len(host_name) > 0) self.assertTrue(len(proc_num[0]) == 2) self.assertTrue(connected_num > 0) def testUsedMemory(self): # Gets used memory ('free' Unix command). # For the local host. used_mem = self.net_local.GetUsedMemory() # 'used_mem' must be a list of tuple (hostname, value). 
self.assertTrue(isinstance(used_mem, list)) self.assertTrue(isinstance(used_mem[0], tuple)) self.assertTrue(isinstance(used_mem[0][0], str)) # Checks size. self.assertTrue(len(used_mem) == 1) self.assertTrue(len(used_mem[0]) == 2) # For a list of hosts. if self.is_file: used_mem = self.net.GetUsedMemory() # 'used_mem' must be a list of tuple (hostname, value). self.assertTrue(isinstance(used_mem, list)) self.assertTrue(isinstance(used_mem[0], tuple)) self.assertTrue(isinstance(used_mem[0][0], str)) # Checks size. self.assertTrue(len(used_mem) >= 1) self.assertTrue(len(used_mem[0]) == 2) def testAvailableHost(self): # Gets available hosts (used 'uptime' Unix command). # For the local host. available_host = self.net_local.GetAvailableHosts() # 'available_host' must be a list of tuple (hostname, available_cpu). self.assertTrue(isinstance(available_host, list)) if len(available_host) > 0: self.assertTrue(isinstance(available_host[0], tuple)) self.assertTrue(isinstance(available_host[0][0], str)) self.assertTrue(isinstance(available_host[0][1], int)) # For a list of hosts. if self.is_file: available_host = self.net.GetAvailableHosts() # 'available_host' must be a list of tuple # (hostname, available_cpu). self.assertTrue(isinstance(available_host, list)) if len(available_host) > 0: self.assertTrue(isinstance(available_host[0], tuple)) self.assertTrue(isinstance(available_host[0][0], str)) self.assertTrue(isinstance(available_host[0][1], int)) def testLaunchCommand(self): import random # For the local host. status = self.net_local.LaunchInt(self.command) statusout = self.net_local.LaunchFG(self.command) popen4_instance = self.net_local.LaunchBG(self.command) subproc = self.net_local.LaunchSubProcess(self.command) wait_return = self.net_local.LaunchWait(self.command, 2., 0.2) # Checks type. 
self.assertTrue(isinstance(status, int)) self.assertTrue(isinstance(statusout, tuple)) self.assertTrue(isinstance(statusout[0], int)) self.assertTrue(isinstance(wait_return, tuple)) # The status must be '0'. self.assertTrue(status == 0) self.assertTrue(statusout[0] == 0) self.assertTrue(popen4_instance.wait() == 0) self.assertTrue(subproc.wait() == 0) self.assertTrue(wait_return[0] == 0) # For a random host. if self.is_file: index = random.randint(0, self.net.GetNhost() - 1) random_host = self.net.hosts[index] # Launches the command. status = self.net.LaunchInt(self.command + ' 2>/dev/null', random_host) statusout = self.net.LaunchFG(self.command, random_host) popen4_instance = self.net.LaunchBG(self.command, random_host) subproc = self.net.LaunchSubProcess(self.command, random_host) wait_return = self.net.LaunchWait(self.command, 2., 0.2, random_host) # Checks type. self.assertTrue(isinstance(status, int)) self.assertTrue(isinstance(statusout, tuple)) self.assertTrue(isinstance(statusout[0], int)) self.assertTrue(isinstance(wait_return, tuple)) # The status must be '0' if the connection dit not fail. if random_host.connection: self.assertTrue(status == 0) self.assertTrue(statusout[0] == 0) self.assertTrue(popen4_instance.wait() == 0) self.assertTrue(subproc.wait() == 0) self.assertTrue(wait_return[0] == 0) else: self.assertTrue(status != 0) self.assertTrue(statusout[0] != 0) self.assertTrue(popen4_instance.wait() != 0) self.assertTrue(subproc.wait() != 0) self.assertTrue(wait_return[0] != 0) if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>datasource-arrayschema.js<|end_file_name|><|fim▁begin|>/* YUI 3.13.0 (build 508226d) Copyright 2013 Yahoo! Inc. All rights reserved. Licensed under the BSD License. http://yuilibrary.com/license/ */ YUI.add('datasource-arrayschema', function (Y, NAME) { /** * Extends DataSource with schema-parsing on array data. * * @module datasource<|fim▁hole|> /** * Adds schema-parsing to the DataSource Utility. * @class DataSourceArraySchema * @extends Plugin.Base */ var DataSourceArraySchema = function() { DataSourceArraySchema.superclass.constructor.apply(this, arguments); }; Y.mix(DataSourceArraySchema, { /** * The namespace for the plugin. This will be the property on the host which * references the plugin instance. * * @property NS * @type String * @static * @final * @value "schema" */ NS: "schema", /** * Class name. * * @property NAME * @type String * @static * @final * @value "dataSourceArraySchema" */ NAME: "dataSourceArraySchema", ///////////////////////////////////////////////////////////////////////////// // // DataSourceArraySchema Attributes // ///////////////////////////////////////////////////////////////////////////// ATTRS: { schema: { //value: {} } } }); Y.extend(DataSourceArraySchema, Y.Plugin.Base, { /** * Internal init() handler. * * @method initializer * @param config {Object} Config object. * @private */ initializer: function(config) { this.doBefore("_defDataFn", this._beforeDefDataFn); }, /** * Parses raw data into a normalized response. * * @method _beforeDefDataFn * @param tId {Number} Unique transaction ID. * @param request {Object} The request. * @param callback {Object} The callback object with the following properties: * <dl> * <dt>success (Function)</dt> <dd>Success handler.</dd> * <dt>failure (Function)</dt> <dd>Failure handler.</dd> * </dl> * @param data {Object} Raw data. 
* @protected */ _beforeDefDataFn: function(e) { var data = (Y.DataSource.IO && (this.get("host") instanceof Y.DataSource.IO) && Y.Lang.isString(e.data.responseText)) ? e.data.responseText : e.data, response = Y.DataSchema.Array.apply.call(this, this.get("schema"), data), payload = e.details[0]; // Default if (!response) { response = { meta: {}, results: data }; } payload.response = response; this.get("host").fire("response", payload); return new Y.Do.Halt("DataSourceArraySchema plugin halted _defDataFn"); } }); Y.namespace('Plugin').DataSourceArraySchema = DataSourceArraySchema; }, '3.13.0', {"requires": ["datasource-local", "plugin", "dataschema-array"]});<|fim▁end|>
* @submodule datasource-arrayschema */
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>""" Configuration for bookmarks Django app """ <|fim▁hole|>from django.utils.translation import ugettext_lazy as _ from edx_django_utils.plugins import PluginSettings, PluginURLs from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType class BookmarksConfig(AppConfig): """ Configuration class for bookmarks Django app """ name = 'openedx.core.djangoapps.bookmarks' verbose_name = _("Bookmarks") plugin_app = { PluginURLs.CONFIG: { ProjectType.LMS: { PluginURLs.NAMESPACE: '', PluginURLs.REGEX: '^api/bookmarks/', PluginURLs.RELATIVE_PATH: 'urls', } }, PluginSettings.CONFIG: { ProjectType.LMS: { SettingsType.PRODUCTION: {PluginSettings.RELATIVE_PATH: 'settings.production'}, SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: 'settings.common'}, } } } def ready(self): # Register the signals handled by bookmarks. from . import signals # lint-amnesty, pylint: disable=unused-import<|fim▁end|>
from django.apps import AppConfig
<|file_name|>0005_auto__add_field_idea_color.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Idea.color' db.add_column(u'brainstorming_idea', 'color', self.gf('django.db.models.fields.CharField')(default='', max_length=100, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Idea.color' db.delete_column(u'brainstorming_idea', 'color') models = { u'brainstorming.brainstorming': { 'Meta': {'ordering': "['-created']", 'object_name': 'Brainstorming'}, 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}), 'creator_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'creator_ip': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}), 'question': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, u'brainstorming.brainstormingwatcher': { 'Meta': {'ordering': "['-created']", 'unique_together': "(('brainstorming', 'email'),)", 'object_name': 'BrainstormingWatcher'}, 'brainstorming': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brainstorming.Brainstorming']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 
'blank': 'True'}) }, u'brainstorming.emailverification': { 'Meta': {'ordering': "['-created']", 'object_name': 'EmailVerification'}, 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'id': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}) }, u'brainstorming.idea': { 'Meta': {'ordering': "['-created']", 'object_name': 'Idea'}, 'brainstorming': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brainstorming.Brainstorming']"}), 'color': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),<|fim▁hole|> 'creator_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}), 'ratings': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'text': ('django.db.models.fields.TextField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}) } } complete_apps = ['brainstorming']<|fim▁end|>
'creator_ip': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
<|file_name|>foo.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. <|fim▁hole|>#[test] fn test_ok() { assert_eq!(do_subtract(12, 8), 4) }<|fim▁end|>
pub extern "C" fn do_subtract(a: u32, b: u32) -> u32 { a - b }
<|file_name|>mnist_mlp.py<|end_file_name|><|fim▁begin|>import theano import numpy as np from sklearn.preprocessing import OneHotEncoder from sklearn import cross_validation, metrics, datasets from neupy import algorithms, layers, environment environment.reproducible() theano.config.floatX = 'float32' mnist = datasets.fetch_mldata('MNIST original') target_scaler = OneHotEncoder() target = mnist.target.reshape((-1, 1)) target = target_scaler.fit_transform(target).todense() data = mnist.data / 255. data = data - data.mean(axis=0) x_train, x_test, y_train, y_test = cross_validation.train_test_split( data.astype(np.float32), target.astype(np.float32), train_size=(6 / 7.) ) network = algorithms.Momentum( [ layers.Relu(784), layers.Relu(500), layers.Softmax(300), layers.ArgmaxOutput(10), ], error='categorical_crossentropy', step=0.01, verbose=True, shuffle_data=True, momentum=0.99, nesterov=True, ) network.train(x_train, y_train, x_test, y_test, epochs=20) y_predicted = network.predict(x_test)<|fim▁hole|>print("Validation accuracy: {:.2f}%".format(100 * score))<|fim▁end|>
y_test = np.asarray(y_test.argmax(axis=1)).reshape(len(y_test)) print(metrics.classification_report(y_test, y_predicted)) score = metrics.accuracy_score(y_test, y_predicted)
<|file_name|>student_cnn_xe.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- coding: utf-8 -*- """Student CNN encoder for XE training.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from models.encoders.core.cnn_util import conv_layer, max_pool, batch_normalization ############################################################ # Architecture: (feature map, kernel(f*t), stride(f,t)) # CNN1: (128, 9*9, (1,1)) * 1 layers # Batch normalization # ReLU # Max pool (3,1) # CNN2: (256, 3*4, (1,1)) * 1 layers # Batch normalization # ReLU # Max pool (1,1) # fc: 2048 (ReLU) * 4 layers ############################################################ class StudentCNNXEEncoder(object): """Student CNN encoder for XE training. Args: input_size (int): the dimensions of input vectors. This is expected to be num_channels * 3 (static + Δ + ΔΔ) splice (int): frames to splice num_stack (int): the number of frames to stack parameter_init (float, optional): the range of uniform distribution to initialize weight parameters (>= 0) name (string, optional): the name of encoder """ def __init__(self, input_size, splice, num_stack, parameter_init, name='cnn_student_xe_encoder'): assert input_size % 3 == 0 self.num_channels = (input_size // 3) // num_stack // splice self.splice = splice self.num_stack = num_stack self.parameter_init = parameter_init self.name = name def __call__(self, inputs, keep_prob, is_training): """Construct model graph. Args: inputs (placeholder): A tensor of size `[B, input_size (num_channels * splice * num_stack * 3)]` keep_prob (placeholder, float): A probability to keep nodes in the hidden-hidden connection is_training (bool): Returns: outputs: Encoder states. 
if time_major is True, a tensor of size `[T, B, output_dim]` otherwise, `[B, output_dim]` """ # inputs: 2D tensor `[B, input_dim]` batch_size = tf.shape(inputs)[0] input_dim = inputs.shape.as_list()[-1] # NOTE: input_dim: num_channels * splice * num_stack * 3 # for debug # print(input_dim) # 1200 # print(self.num_channels) # 40 # print(self.splice) # 5 # print(self.num_stack) # 2 assert input_dim == self.num_channels * self.splice * self.num_stack * 3 # Reshape to 4D tensor `[B, num_channels, splice * num_stack, 3]` inputs = tf.reshape( inputs, shape=[batch_size, self.num_channels, self.splice * self.num_stack, 3]) # NOTE: filter_size: `[H, W, C_in, C_out]` with tf.variable_scope('CNN1'): inputs = conv_layer(inputs, filter_size=[9, 9, 3, 128], stride=[1, 1], parameter_init=self.parameter_init, activation='relu') inputs = batch_normalization(inputs, is_training=is_training) inputs = max_pool(inputs, pooling_size=[3, 1], stride=[3, 1], name='max_pool') with tf.variable_scope('CNN2'): inputs = conv_layer(inputs, filter_size=[3, 4, 128, 256], stride=[1, 1],<|fim▁hole|> inputs = batch_normalization(inputs, is_training=is_training) inputs = max_pool(inputs, pooling_size=[1, 1], stride=[1, 1], name='max_pool') # Reshape to 2D tensor `[B, new_h * new_w * C_out]` outputs = tf.reshape( inputs, shape=[batch_size, np.prod(inputs.shape.as_list()[-3:])]) for i in range(1, 5, 1): with tf.variable_scope('fc%d' % (i)) as scope: outputs = tf.contrib.layers.fully_connected( inputs=outputs, num_outputs=2048, activation_fn=tf.nn.relu, weights_initializer=tf.truncated_normal_initializer( stddev=self.parameter_init), biases_initializer=tf.zeros_initializer(), scope=scope) return outputs<|fim▁end|>
parameter_init=self.parameter_init, activation='relu')
<|file_name|>CONFIG_FIELD.go<|end_file_name|><|fim▁begin|>package FP256BN // Modulus types const NOT_SPECIAL int = 0 const PSEUDO_MERSENNE int = 1<|fim▁hole|>// Modulus details const MODBITS uint = 256 /* Number of bits in Modulus */ const MOD8 uint = 3 /* Modulus mod 8 */ const MODTYPE int = NOT_SPECIAL //NOT_SPECIAL const FEXCESS int32=((int32(1)<<24)-1) // Modulus Masks const OMASK Chunk = ((Chunk(-1)) << (MODBITS % BASEBITS)) const TBITS uint = MODBITS % BASEBITS // Number of active bits in top word const TMASK Chunk = (Chunk(1) << TBITS) - 1<|fim▁end|>
const MONTGOMERY_FRIENDLY int = 2 const GENERALISED_MERSENNE int = 3
<|file_name|>user.routes.js<|end_file_name|><|fim▁begin|>angular.module('phonebook') .config(['$urlRouterProvider','$stateProvider',function($urlRouterProvider,$stateProvider){ $stateProvider // Greeting State .state('user.main',{ url : '/', templateUrl : 'user/view/welcome.html', controller : 'welcomeCtrl' }) // Phonebook Display State .state('user.PB',{ url : '/contacts', templateUrl : 'user/view/displayContacts.html', controller : 'cntctCtrl' <|fim▁hole|><|fim▁end|>
}) }]);
<|file_name|>FileListListAdapter.java<|end_file_name|><|fim▁begin|>/** * ownCloud Android client application * * @author Bartek Przybylski * @author Tobias Kaminsky * @author David A. Velasco * @author masensio * Copyright (C) 2011 Bartek Przybylski * Copyright (C) 2016 ownCloud Inc. * <p> * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * as published by the Free Software Foundation. * <p> <|fim▁hole|> * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * <p> * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.owncloud.android.ui.adapter; import android.accounts.Account; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Color; import android.os.Handler; import android.os.Looper; import android.text.TextUtils; import android.util.SparseBooleanArray; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.BaseAdapter; import android.widget.Filter; import android.widget.GridView; import android.widget.ImageView; import android.widget.TextView; import com.owncloud.android.R; import com.owncloud.android.authentication.AccountUtils; import com.owncloud.android.datamodel.FileDataStorageManager; import com.owncloud.android.datamodel.OCFile; import com.owncloud.android.datamodel.ThumbnailsCacheManager; import com.owncloud.android.db.PreferenceManager; import com.owncloud.android.files.services.FileDownloader.FileDownloaderBinder; import com.owncloud.android.files.services.FileUploader.FileUploaderBinder; import com.owncloud.android.lib.common.utils.Log_OC; import 
com.owncloud.android.lib.resources.files.RemoteFile; import com.owncloud.android.lib.resources.shares.OCShare; import com.owncloud.android.services.OperationsService.OperationsServiceBinder; import com.owncloud.android.ui.activity.ComponentsGetter; import com.owncloud.android.ui.fragment.ExtendedListFragment; import com.owncloud.android.ui.interfaces.OCFileListFragmentInterface; import com.owncloud.android.utils.DisplayUtils; import com.owncloud.android.utils.FileStorageUtils; import com.owncloud.android.utils.MimeTypeUtil; import java.io.File; import java.util.ArrayList; import java.util.Vector; /** * This Adapter populates a ListView with all files and folders in an ownCloud * instance. */ public class FileListListAdapter extends BaseAdapter { private Context mContext; private Vector<OCFile> mFilesAll = new Vector<OCFile>(); private Vector<OCFile> mFiles = null; private boolean mJustFolders; private boolean mShowHiddenFiles; private FileDataStorageManager mStorageManager; private Account mAccount; private ComponentsGetter mTransferServiceGetter; private OCFileListFragmentInterface OCFileListFragmentInterface; private FilesFilter mFilesFilter; private OCFile currentDirectory; private static final String TAG = FileListListAdapter.class.getSimpleName(); public FileListListAdapter( boolean justFolders, Context context, ComponentsGetter transferServiceGetter, OCFileListFragmentInterface OCFileListFragmentInterface ) { this.OCFileListFragmentInterface = OCFileListFragmentInterface; mJustFolders = justFolders; mContext = context; mAccount = AccountUtils.getCurrentOwnCloudAccount(mContext); mTransferServiceGetter = transferServiceGetter; // Read sorting order, default to sort by name ascending FileStorageUtils.mSortOrder = PreferenceManager.getSortOrder(mContext); FileStorageUtils.mSortAscending = PreferenceManager.getSortAscending(mContext); // Fetch preferences for showing hidden files mShowHiddenFiles = PreferenceManager.showHiddenFilesEnabled(mContext); // initialise 
thumbnails cache on background thread new ThumbnailsCacheManager.InitDiskCacheTask().execute(); } public FileListListAdapter( boolean justFolders, Context context, ComponentsGetter transferServiceGetter, OCFileListFragmentInterface OCFileListFragmentInterface, FileDataStorageManager fileDataStorageManager ) { this(justFolders, context, transferServiceGetter, OCFileListFragmentInterface); mStorageManager = fileDataStorageManager; } @Override public boolean areAllItemsEnabled() { return true; } @Override public boolean isEnabled(int position) { return true; } @Override public int getCount() { return mFiles != null ? mFiles.size() : 0; } @Override public Object getItem(int position) { if (mFiles == null || mFiles.size() <= position) { return null; } return mFiles.get(position); } public void setFavoriteAttributeForItemID(String fileId, boolean favorite) { for (int i = 0; i < mFiles.size(); i++) { if (mFiles.get(i).getRemoteId().equals(fileId)) { mFiles.get(i).setFavorite(favorite); break; } } for (int i = 0; i < mFilesAll.size(); i++) { if (mFilesAll.get(i).getRemoteId().equals(fileId)) { mFilesAll.get(i).setFavorite(favorite); break; } } new Handler(Looper.getMainLooper()).post(new Runnable() { @Override public void run() { notifyDataSetChanged(); } }); } @Override public long getItemId(int position) { if (mFiles == null || mFiles.size() <= position) { return 0; } return mFiles.get(position).getFileId(); } @Override public int getItemViewType(int position) { return 0; } @Override public View getView(int position, View convertView, ViewGroup parent) { View view = convertView; OCFile file = null; LayoutInflater inflator = (LayoutInflater) mContext .getSystemService(Context.LAYOUT_INFLATER_SERVICE); if (mFiles != null && mFiles.size() > position) { file = mFiles.get(position); } // Find out which layout should be displayed ViewType viewType; if (parent instanceof GridView) { if (file != null && (MimeTypeUtil.isImage(file) || MimeTypeUtil.isVideo(file))) { viewType = 
ViewType.GRID_IMAGE; } else { viewType = ViewType.GRID_ITEM; } } else { viewType = ViewType.LIST_ITEM; } // create view only if differs, otherwise reuse if (convertView == null || convertView.getTag() != viewType) { switch (viewType) { case GRID_IMAGE: view = inflator.inflate(R.layout.grid_image, parent, false); view.setTag(ViewType.GRID_IMAGE); break; case GRID_ITEM: view = inflator.inflate(R.layout.grid_item, parent, false); view.setTag(ViewType.GRID_ITEM); break; case LIST_ITEM: view = inflator.inflate(R.layout.list_item, parent, false); view.setTag(ViewType.LIST_ITEM); break; } } if (file != null) { ImageView fileIcon = (ImageView) view.findViewById(R.id.thumbnail); fileIcon.setTag(file.getFileId()); TextView fileName; String name = file.getFileName(); switch (viewType) { case LIST_ITEM: TextView fileSizeV = (TextView) view.findViewById(R.id.file_size); TextView fileSizeSeparatorV = (TextView) view.findViewById(R.id.file_separator); TextView lastModV = (TextView) view.findViewById(R.id.last_mod); lastModV.setVisibility(View.VISIBLE); lastModV.setText(DisplayUtils.getRelativeTimestamp(mContext, file.getModificationTimestamp())); fileSizeSeparatorV.setVisibility(View.VISIBLE); fileSizeV.setVisibility(View.VISIBLE); fileSizeV.setText(DisplayUtils.bytesToHumanReadable(file.getFileLength())); case GRID_ITEM: // filename fileName = (TextView) view.findViewById(R.id.Filename); name = file.getFileName(); fileName.setText(name); case GRID_IMAGE: // sharedIcon ImageView sharedIconV = (ImageView) view.findViewById(R.id.sharedIcon); if (file.isSharedViaLink()) { sharedIconV.setImageResource(R.drawable.shared_via_link); sharedIconV.setVisibility(View.VISIBLE); sharedIconV.bringToFront(); } else if (file.isSharedWithSharee() || file.isSharedWithMe()) { sharedIconV.setImageResource(R.drawable.shared_via_users); sharedIconV.setVisibility(View.VISIBLE); sharedIconV.bringToFront(); } else { sharedIconV.setVisibility(View.GONE); } // local state ImageView localStateView = 
(ImageView) view.findViewById(R.id.localFileIndicator); localStateView.bringToFront(); FileDownloaderBinder downloaderBinder = mTransferServiceGetter.getFileDownloaderBinder(); FileUploaderBinder uploaderBinder = mTransferServiceGetter.getFileUploaderBinder(); OperationsServiceBinder opsBinder = mTransferServiceGetter.getOperationsServiceBinder(); localStateView.setVisibility(View.INVISIBLE); // default first if ( //synchronizing opsBinder != null && opsBinder.isSynchronizing(mAccount, file) ) { localStateView.setImageResource(R.drawable.ic_synchronizing); localStateView.setVisibility(View.VISIBLE); } else if ( // downloading downloaderBinder != null && downloaderBinder.isDownloading(mAccount, file) ) { localStateView.setImageResource(R.drawable.ic_synchronizing); localStateView.setVisibility(View.VISIBLE); } else if ( //uploading uploaderBinder != null && uploaderBinder.isUploading(mAccount, file) ) { localStateView.setImageResource(R.drawable.ic_synchronizing); localStateView.setVisibility(View.VISIBLE); } else if (file.getEtagInConflict() != null) { // conflict localStateView.setImageResource(R.drawable.ic_synchronizing_error); localStateView.setVisibility(View.VISIBLE); } else if (file.isDown()) { localStateView.setImageResource(R.drawable.ic_synced); localStateView.setVisibility(View.VISIBLE); } break; } // For all Views if (file.getIsFavorite()) { view.findViewById(R.id.favorite_action).setVisibility(View.VISIBLE); } else { view.findViewById(R.id.favorite_action).setVisibility(View.GONE); } ImageView checkBoxV = (ImageView) view.findViewById(R.id.custom_checkbox); checkBoxV.setVisibility(View.GONE); view.setBackgroundColor(Color.WHITE); AbsListView parentList = (AbsListView) parent; if (parentList.getChoiceMode() != AbsListView.CHOICE_MODE_NONE && parentList.getCheckedItemCount() > 0 ) { if (parentList.isItemChecked(position)) { view.setBackgroundColor(mContext.getResources().getColor( R.color.selected_item_background)); checkBoxV.setImageResource( 
R.drawable.ic_checkbox_marked); } else { view.setBackgroundColor(Color.WHITE); checkBoxV.setImageResource( R.drawable.ic_checkbox_blank_outline); } checkBoxV.setVisibility(View.VISIBLE); } // this if-else is needed even though kept-in-sync icon is visible by default // because android reuses views in listview if (!file.isAvailableOffline()) { view.findViewById(R.id.keptOfflineIcon).setVisibility(View.GONE); } else { view.findViewById(R.id.keptOfflineIcon).setVisibility(View.VISIBLE); } // No Folder if (!file.isFolder()) { if ((MimeTypeUtil.isImage(file) || MimeTypeUtil.isVideo(file)) && file.getRemoteId() != null) { // Thumbnail in Cache? Bitmap thumbnail = ThumbnailsCacheManager.getBitmapFromDiskCache(file.getRemoteId()); if (thumbnail != null && !file.needsUpdateThumbnail()) { if (MimeTypeUtil.isVideo(file)) { Bitmap withOverlay = ThumbnailsCacheManager.addVideoOverlay(thumbnail); fileIcon.setImageBitmap(withOverlay); } else { fileIcon.setImageBitmap(thumbnail); } } else { // generate new Thumbnail if (ThumbnailsCacheManager.cancelPotentialThumbnailWork(file, fileIcon)) { try { final ThumbnailsCacheManager.ThumbnailGenerationTask task = new ThumbnailsCacheManager.ThumbnailGenerationTask( fileIcon, mStorageManager, mAccount ); if (thumbnail == null) { if (MimeTypeUtil.isVideo(file)) { thumbnail = ThumbnailsCacheManager.mDefaultVideo; } else { thumbnail = ThumbnailsCacheManager.mDefaultImg; } } final ThumbnailsCacheManager.AsyncThumbnailDrawable asyncDrawable = new ThumbnailsCacheManager.AsyncThumbnailDrawable( mContext.getResources(), thumbnail, task ); fileIcon.setImageDrawable(asyncDrawable); task.execute(file); } catch (IllegalArgumentException e) { Log_OC.d(TAG, "ThumbnailGenerationTask : " + e.getMessage()); } } } if (file.getMimetype().equalsIgnoreCase("image/png")) { fileIcon.setBackgroundColor(mContext.getResources() .getColor(R.color.background_color)); } } else { fileIcon.setImageResource(MimeTypeUtil.getFileTypeIconId(file.getMimetype(), 
file.getFileName())); } } else { // Folder fileIcon.setImageResource( MimeTypeUtil.getFolderTypeIconId( file.isSharedWithMe() || file.isSharedWithSharee(), file.isSharedViaLink() ) ); } } return view; } @Override public int getViewTypeCount() { return 1; } @Override public boolean hasStableIds() { return true; } @Override public boolean isEmpty() { return (mFiles == null || mFiles.isEmpty()); } /** * Change the adapted directory for a new one * * @param directory New folder to adapt. Can be NULL, meaning * "no content to adapt". * @param updatedStorageManager Optional updated storage manager; used to replace * mStorageManager if is different (and not NULL) */ public void swapDirectory(OCFile directory, FileDataStorageManager updatedStorageManager , boolean onlyOnDevice) { if (updatedStorageManager != null && !updatedStorageManager.equals(mStorageManager)) { mStorageManager = updatedStorageManager; mAccount = AccountUtils.getCurrentOwnCloudAccount(mContext); } if (mStorageManager != null) { mFiles = mStorageManager.getFolderContent(directory, onlyOnDevice); if (mJustFolders) { mFiles = getFolders(mFiles); } if (!mShowHiddenFiles) { mFiles = filterHiddenFiles(mFiles); } mFiles = FileStorageUtils.sortOcFolder(mFiles); mFilesAll.clear(); mFilesAll.addAll(mFiles); currentDirectory = directory; } else { mFiles = null; mFilesAll.clear(); } notifyDataSetChanged(); } private void searchForLocalFileInDefaultPath(OCFile file) { if (file.getStoragePath() == null && !file.isFolder()) { File f = new File(FileStorageUtils.getDefaultSavePathFor(mAccount.name, file)); if (f.exists()) { file.setStoragePath(f.getAbsolutePath()); file.setLastSyncDateForData(f.lastModified()); } } } public void setData(ArrayList<Object> objects, ExtendedListFragment.SearchType searchType) { mFiles = new Vector<>(); if (searchType.equals(ExtendedListFragment.SearchType.SHARED_FILTER)) { ArrayList<OCShare> shares = new ArrayList<>(); for (int i = 0; i < objects.size(); i++) { // check type before cast as 
of long running data fetch it is possible that old result is filled if (objects.get(i) instanceof OCShare) { OCShare ocShare = (OCShare) objects.get(i); shares.add(ocShare); OCFile ocFile = mStorageManager.getFileByPath(ocShare.getPath()); if (!mFiles.contains(ocFile)) { mFiles.add(ocFile); } } } mStorageManager.saveShares(shares); } else { for (int i = 0; i < objects.size(); i++) { OCFile ocFile = FileStorageUtils.fillOCFile((RemoteFile) objects.get(i)); searchForLocalFileInDefaultPath(ocFile); mFiles.add(ocFile); } } if (!searchType.equals(ExtendedListFragment.SearchType.PHOTO_SEARCH) && !searchType.equals(ExtendedListFragment.SearchType.PHOTOS_SEARCH_FILTER) && !searchType.equals(ExtendedListFragment.SearchType.RECENTLY_MODIFIED_SEARCH) && !searchType.equals(ExtendedListFragment.SearchType.RECENTLY_MODIFIED_SEARCH_FILTER)) { mFiles = FileStorageUtils.sortOcFolder(mFiles); } else { mFiles = FileStorageUtils.sortOcFolderDescDateModified(mFiles); } mFilesAll = new Vector<>(); mFilesAll.addAll(mFiles); new Handler(Looper.getMainLooper()).post(new Runnable() { @Override public void run() { notifyDataSetChanged(); OCFileListFragmentInterface.finishedFiltering(); } }); } /** * Filter for getting only the folders * * @param files Collection of files to filter * @return Folders in the input */ public Vector<OCFile> getFolders(Vector<OCFile> files) { Vector<OCFile> ret = new Vector<>(); OCFile current; for (int i = 0; i < files.size(); i++) { current = files.get(i); if (current.isFolder()) { ret.add(current); } } return ret; } public void setSortOrder(Integer order, boolean ascending) { PreferenceManager.setSortOrder(mContext, order); PreferenceManager.setSortAscending(mContext, ascending); FileStorageUtils.mSortOrder = order; FileStorageUtils.mSortAscending = ascending; mFiles = FileStorageUtils.sortOcFolder(mFiles); notifyDataSetChanged(); } public ArrayList<OCFile> getCheckedItems(AbsListView parentList) { SparseBooleanArray checkedPositions = 
parentList.getCheckedItemPositions(); ArrayList<OCFile> files = new ArrayList<>(); Object item; for (int i = 0; i < checkedPositions.size(); i++) { if (checkedPositions.valueAt(i)) { item = getItem(checkedPositions.keyAt(i)); if (item != null) { files.add((OCFile) item); } } } return files; } public Filter getFilter() { if (mFilesFilter == null) { mFilesFilter = new FilesFilter(); } return mFilesFilter; } private class FilesFilter extends Filter { @Override protected FilterResults performFiltering(CharSequence constraint) { FilterResults results = new FilterResults(); Vector<OCFile> filteredFiles = new Vector<>(); if (!TextUtils.isEmpty(constraint)) { for (int i = 0; i < mFilesAll.size(); i++) { OCFile currentFile = mFilesAll.get(i); if (currentFile.getParentRemotePath().equals(currentDirectory.getRemotePath()) && currentFile.getFileName().toLowerCase().contains(constraint.toString().toLowerCase()) && !filteredFiles.contains(currentFile)) { filteredFiles.add(currentFile); } } } results.values = filteredFiles; results.count = filteredFiles.size(); return results; } @Override protected void publishResults(CharSequence constraint, Filter.FilterResults results) { Vector<OCFile> ocFiles = (Vector<OCFile>) results.values; mFiles = new Vector<>(); if (ocFiles != null && ocFiles.size() > 0) { mFiles.addAll(ocFiles); if (!mShowHiddenFiles) { mFiles = filterHiddenFiles(mFiles); } mFiles = FileStorageUtils.sortOcFolder(mFiles); } notifyDataSetChanged(); OCFileListFragmentInterface.finishedFiltering(); } } /** * Filter for hidden files * * @param files Collection of files to filter * @return Non-hidden files */ public Vector<OCFile> filterHiddenFiles(Vector<OCFile> files) { Vector<OCFile> ret = new Vector<>(); OCFile current; for (int i = 0; i < files.size(); i++) { current = files.get(i); if (!current.isHidden() && !ret.contains(current)) { ret.add(current); } } return ret; } }<|fim▁end|>
<|file_name|>text_packet.py<|end_file_name|><|fim▁begin|>import time from socketio import packet def test(): p = packet.Packet(packet.EVENT, 'hello') start = time.time() count = 0 while True:<|fim▁hole|> break return count if __name__ == '__main__': count = test() print('text_packet:', count, 'packets processed.')<|fim▁end|>
p = packet.Packet(encoded_packet=p.encode()) count += 1 if time.time() - start >= 5:
<|file_name|>msg_wrapper.py<|end_file_name|><|fim▁begin|>__author__ = 'oglebrandon' import logging as logger import types from ib.ext.EWrapper import EWrapper def showmessage(message, mapping): try: del(mapping['self']) except (KeyError, ): pass items = mapping.items() items.sort() print '### %s' % (message, ) for k, v in items: print ' %s:%s' % (k, v) class Observable(object): """ Sender -> dispatches messages to interested callables """ def __init__(self): self.listeners = {} self.logger = logger.getLogger() def register(self,listener,events=None): """ register a listener function Parameters ----------- listener : external listener function events : tuple or list of relevant events (default=None) """ if events is not None and type(events) not in \ (types.TupleType,types.ListType): events = (events,) self.listeners[listener] = events def dispatch(self,event=None, msg=None): """notify listeners """ for listener,events in self.listeners.items(): if events is None or event is None or event in events: try: listener(self,event,msg) except (Exception,): self.unregister(listener) errmsg = "Exception in message dispatch: Handler '{0}' " \ "unregistered for event " \ "'{1}' ".format(listener.func_name,event) self.logger.exception(errmsg) def unregister(self,listener): """ unregister listener function """ del self.listeners[listener] class ReferenceWrapper(EWrapper,Observable): # contract = None # tickerId # field # price def __init__ (self,subs={}): super(ReferenceWrapper, self).__init__() self.orderID = None self.subscriptions = subs def setSubscriptions (self,subs): self.subscriptions = subs <|fim▁hole|> showmessage('tickPrice', vars()) def tickSize(self, tickerId, field, size): showmessage('tickSize', vars()) def tickString(self, tickerId, tickType, value): #showmessage('tickString', vars()) pass def tickOptionComputation(self, tickerId, field, impliedVolatility, delta, x, c, q, w, e, r): #showmessage('tickOptionComputation', vars()) pass def openOrderEnd(self): pass 
def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeId): if filled: self.dispatch(event='execution',msg=[1,2,3]) showmessage('orderStatus', vars()) def openOrder(self, orderId, contract, order, state): showmessage('openOrder', vars()) def connectionClosed(self): showmessage('connectionClosed', {}) def updateAccountValue(self, key, value, currency, accountName): showmessage('updateAccountValue', vars()) def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName): showmessage('updatePortfolio', vars()) def updateAccountTime(self, timeStamp): showmessage('updateAccountTime', vars()) def nextValidId(self, orderId): self.orderID = orderId showmessage('nextValidId', vars()) def contractDetails(self, reqId, contractDetails): showmessage('contractDetails', vars()) print contractDetails.__dict__ def bondContractDetails(self, reqId, contractDetails): showmessage('bondContractDetails', vars()) def execDetails(self, orderId, contract, execution): showmessage('execDetails', vars()) def error(self, id=None, errorCode=None, errorMsg=None): showmessage('error', vars()) def updateMktDepth(self, tickerId, position, operation, side, price, size): showmessage('updateMktDepth', vars()) def updateMktDepthL2(self, tickerId, position, marketMaker, operation, side, price, size): showmessage('updateMktDepthL2', vars()) def updateNewsBulletin(self, msgId, msgType, message, origExchange): showmessage('updateNewsBulletin', vars()) def managedAccounts(self, accountsList): showmessage('managedAccounts', vars()) def receiveFA(self, faDataType, xml): showmessage('receiveFA', vars()) def historicalData(self, reqId, date, open, high, low, close, volume, count, WAP, hasGaps): showmessage('historicalData', vars()) def scannerParameters(self, xml): showmessage('scannerParameters', vars()) def scannerData(self, reqId, rank, contractDetails, distance, benchmark, 
projection, legsStr): showmessage('scannerData', vars()) def accountDownloadEnd(self, accountName): showmessage('accountDownloadEnd', vars()) def contractDetailsEnd(self, reqId): showmessage('contractDetailsEnd', vars()) def currentTime(self): showmessage('currentTime', vars()) def deltaNeutralValidation(self): showmessage('deltaNeutralValidation', vars()) def error_0(self): showmessage('error_0', vars()) def error_1(self): showmessage('error_1', vars()) def execDetailsEnd(self): showmessage('execDetailsEnd', vars()) def fundamentalData(self): showmessage('fundamentalData', vars()) def realtimeBar(self): showmessage('realtimeBar', vars()) def scannerDataEnd(self): showmessage('scannerDataEnd', vars()) def tickEFP(self): showmessage('tickEFP', vars()) def tickSnapshotEnd(self): showmessage('tickSnapshotEnd', vars()) def marketDataType(self): showmessage('marketDataType', vars()) def commissionReport(self, commissionReport): showmessage('commissionReport', vars())<|fim▁end|>
def tickGeneric(self, tickerId, field, price): pass def tickPrice(self, tickerId, field, price, canAutoExecute):
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from .mixins import FileMixin class BaseCategory(models.Model): title = models.CharField(max_length=100, unique=True) class Meta: abstract = True ordering = ('title',) def __str__(self): return self.title class FileCategory(BaseCategory): class Meta(BaseCategory.Meta): verbose_name_plural = 'file categories' class File(FileMixin, models.Model): category = models.ForeignKey(FileCategory) title = models.CharField(max_length=100, db_index=True) file = models.FileField(upload_to='assets/file') file_size = models.PositiveIntegerField(default=0, editable=False) created_at = models.DateTimeField(auto_now_add=True) modified_at = models.DateTimeField(auto_now=True) class ImageCategory(BaseCategory): class Meta(BaseCategory.Meta): verbose_name_plural = 'image categories' class Image(FileMixin, models.Model): category = models.ForeignKey(ImageCategory, blank=True, null=True) title = models.CharField(max_length=100, db_index=True) file = models.ImageField( 'Image', upload_to='assets/image', height_field='image_height', width_field='image_width'<|fim▁hole|> image_width = models.PositiveIntegerField(editable=False) file_size = models.PositiveIntegerField(default=0, editable=False) created_at = models.DateTimeField(auto_now_add=True) modified_at = models.DateTimeField(auto_now=True)<|fim▁end|>
) image_height = models.PositiveIntegerField(editable=False)
<|file_name|>async_test.py<|end_file_name|><|fim▁begin|># import asyncio # # async def compute(x, y): # print("Compute %s + %s ..." % (x, y)) # await asyncio.sleep(1.0) # return x + y # # async def print_sum(x, y): # for i in range(10): # result = await compute(x, y) # print("%s + %s = %s" % (x, y, result)) # # loop = asyncio.get_event_loop() # loop.run_until_complete(print_sum(1,2)) # asyncio.ensure_future(print_sum(1, 2)) # asyncio.ensure_future(print_sum(3, 4)) # asyncio.ensure_future(print_sum(5, 6)) # loop.run_forever() import asyncio async def display_date(who, num): i = 0 while True: if i > num: return print('{}: Before loop {}'.format(who, i))<|fim▁hole|>loop = asyncio.get_event_loop() asyncio.ensure_future(display_date('AAA', 4)) asyncio.ensure_future(display_date('BBB', 6)) loop.run_forever()<|fim▁end|>
await asyncio.sleep(1) i += 1
<|file_name|>scripts.ts<|end_file_name|><|fim▁begin|>/** * Stadium mechanics inherit from gen 1 mechanics, but fixes some stuff. */ export const BattleScripts: ModdedBattleScriptsData = { inherit: 'gen1', gen: 1, // BattlePokemon scripts. Stadium shares gen 1 code but it fixes some problems with it. pokemon: { inherit: true, // Gen 1 function to apply a stat modification that is only active until the stat is recalculated or mon switched. // Modified stats are declared in the Pokemon object in sim/pokemon.js in about line 681. modifyStat(statName, modifier) { if (!(statName in this.storedStats)) throw new Error("Invalid `statName` passed to `modifyStat`"); this.modifiedStats![statName] = this.battle.clampIntRange(Math.floor(this.modifiedStats![statName] * modifier), 1); }, // This is run on Stadium after boosts and status changes. recalculateStats() { let statName: StatNameExceptHP; for (statName in this.storedStats) { let stat = this.species.baseStats[statName]; stat = Math.floor( Math.floor( 2 * stat + this.set.ivs[statName] + Math.floor(this.set.evs[statName] / 4) ) * this.level / 100 + 5 ); this.baseStoredStats[statName] = this.storedStats[statName] = Math.floor(stat); this.modifiedStats![statName] = Math.floor(stat); // Re-apply drops, if necessary. if (this.status === 'par') this.modifyStat!('spe', 0.25); if (this.status === 'brn') this.modifyStat!('atk', 0.5); if (this.boosts[statName] !== 0) { if (this.boosts[statName] >= 0) { this.modifyStat!(statName, [1, 1.5, 2, 2.5, 3, 3.5, 4][this.boosts[statName]]); } else { this.modifyStat!(statName, [100, 66, 50, 40, 33, 28, 25][-this.boosts[statName]] / 100); } } } }, // Stadium's fixed boosting function. 
boostBy(boost) { let changed = false; let i: BoostName; for (i in boost) { let delta = boost[i]; if (delta === undefined) continue; this.boosts[i] += delta; if (this.boosts[i] > 6) { delta -= this.boosts[i] - 6; this.boosts[i] = 6; } if (this.boosts[i] < -6) { delta -= this.boosts[i] - (-6); this.boosts[i] = -6; } if (delta) changed = true; } // @ts-ignore this.recalculateStats(); return changed; }, }, // Battle scripts. runMove(moveOrMoveName, pokemon, targetLoc, sourceEffect) { const move = this.dex.getActiveMove(moveOrMoveName); const target = this.getTarget(pokemon, move, targetLoc); if (target?.subFainted) target.subFainted = null; this.setActiveMove(move, pokemon, target); if (pokemon.moveThisTurn || !this.runEvent('BeforeMove', pokemon, target, move)) { this.debug('' + pokemon.fullname + ' move interrupted; movedThisTurn: ' + pokemon.moveThisTurn); this.clearActiveMove(true); // This is only run for sleep this.runEvent('AfterMoveSelf', pokemon, target, move); return; } if (move.beforeMoveCallback) { if (move.beforeMoveCallback.call(this, pokemon, target, move)) { this.clearActiveMove(true); return; } } pokemon.lastDamage = 0; let lockedMove = this.runEvent('LockMove', pokemon); if (lockedMove === true) lockedMove = false; if ( !lockedMove && (!pokemon.volatiles['partialtrappinglock'] || pokemon.volatiles['partialtrappinglock'].locked !== target) ) { pokemon.deductPP(move, null, target); pokemon.side.lastMove = move; pokemon.lastMove = move; } else { sourceEffect = move; } this.useMove(move, pokemon, target, sourceEffect); this.singleEvent('AfterMove', move, null, pokemon, target, move); // If target fainted if (target && target.hp <= 0) { // We remove screens target.side.removeSideCondition('reflect'); target.side.removeSideCondition('lightscreen'); } else { this.runEvent('AfterMoveSelf', pokemon, target, move); } if (pokemon.volatiles['mustrecharge']) this.add('-mustrecharge', pokemon); // For partial trapping moves, we are saving the target. 
if (move.volatileStatus === 'partiallytrapped' && target && target.hp > 0) { // It hit, so let's remove must recharge volatile. Yup, this happens on Stadium. target.removeVolatile('mustrecharge'); // Let's check if the lock exists if (pokemon.volatiles['partialtrappinglock'] && target.volatiles['partiallytrapped']) { // Here the partialtrappinglock volatile has been already applied if (!pokemon.volatiles['partialtrappinglock'].locked) { // If it's the first hit, we save the target pokemon.volatiles['partialtrappinglock'].locked = target; } } // If we move to here, the move failed and there's no partial trapping lock } }, tryMoveHit(target, pokemon, move) { let damage: number | false | undefined = 0; // First, check if the target is semi-invulnerable let hitResult = this.runEvent('Invulnerability', target, pokemon, move); if (hitResult === false) { if (!move.spreadHit) this.attrLastMove('[miss]'); this.add('-miss', pokemon); return false; } // Then, check if the Pokemon is immune to this move. if ( (!move.ignoreImmunity || (move.ignoreImmunity !== true && !move.ignoreImmunity[move.type])) && !target.runImmunity(move.type, true) ) { if (move.selfdestruct) { this.faint(pokemon, pokemon, move); } return false; } hitResult = this.singleEvent('TryImmunity', move, null, target, pokemon, move); if (hitResult === false) { this.add('-immune', target); return false; } // Now, let's calculate the accuracy. let accuracy = move.accuracy; // Partial trapping moves: true accuracy while it lasts if (pokemon.volatiles['partialtrappinglock']) { if (move.volatileStatus === 'partiallytrapped' && target === pokemon.volatiles['partialtrappinglock'].locked) { accuracy = true; } else if (pokemon.volatiles['partialtrappinglock'].locked !== target) { // The target switched, therefor, you fail using wrap. 
delete pokemon.volatiles['partialtrappinglock']; return false; } } // OHKO moves only have a chance to hit if the user is at least as fast as the target if (move.ohko) { if (target.speed > pokemon.speed) { this.add('-immune', target, '[ohko]'); return false; } } // Calculate true accuracy for gen 1, which uses 0-255. // Stadium uses the Gen 2 boost table for accuracy and evasiveness, except for 1/3 instead of 0.33 const boostTable = [1 / 3, 0.36, 0.43, 0.5, 0.66, 0.75, 1, 1.33, 1.66, 2, 2.33, 2.66, 3]; if (accuracy !== true) { accuracy = Math.floor(accuracy * 255 / 100); // Check also for accuracy modifiers. if (!move.ignoreAccuracy) { accuracy = Math.floor(accuracy * boostTable[pokemon.boosts.accuracy + 6]); } if (!move.ignoreEvasion) { accuracy = Math.floor(accuracy * boostTable[-target.boosts.evasion + 6]); } accuracy = Math.min(accuracy, 255); } accuracy = this.runEvent('Accuracy', target, pokemon, move, accuracy); // Stadium fixes the 1/256 accuracy bug. if (accuracy !== true && !this.randomChance(accuracy + 1, 256)) { this.attrLastMove('[miss]'); this.add('-miss', pokemon); damage = false; } // If damage is 0 and not false it means it didn't miss, let's calc. if (damage !== false) { pokemon.lastDamage = 0; if (move.multihit) { let hits = move.multihit; if (Array.isArray(hits)) { // Yes, it's hardcoded... 
meh if (hits[0] === 2 && hits[1] === 5) { hits = this.sample([2, 2, 3, 3, 4, 5]); } else { hits = this.random(hits[0], hits[1] + 1); } } hits = Math.floor(hits); // In gen 1, all the hits have the same damage for multihits move let moveDamage: number | false | undefined = 0; let i: number; for (i = 0; i < hits && target.hp && pokemon.hp; i++) { move.hit = i + 1; moveDamage = this.moveHit(target, pokemon, move); if (moveDamage === false) break; damage = (moveDamage || 0); // Move damage is fixed to be the first move's damage if (i === 0) move.damage = damage; if (target.subFainted) { i++; break; } } move.damage = null; if (i === 0) return 1; this.add('-hitcount', target, i); } else { damage = this.moveHit(target, pokemon, move); } } if (move.category !== 'Status') target.gotAttacked(move, damage, pokemon); if (move.selfdestruct) { this.faint(pokemon, pokemon, move); } // The move missed. if (damage === false) { // Delete the partial trap lock if necessary. delete pokemon.volatiles['partialtrappinglock']; return false; } if (move.ohko) this.add('-ohko'); if (!move.negateSecondary) { this.singleEvent('AfterMoveSecondary', move, null, target, pokemon, move); this.runEvent('AfterMoveSecondary', target, pokemon, move); } return damage; }, moveHit(target, pokemon, moveOrMoveName, moveData, isSecondary, isSelf) { let damage: number | false | null | undefined = 0; const move = this.dex.getActiveMove(moveOrMoveName); if (!isSecondary && !isSelf) this.setActiveMove(move, pokemon, target); let hitResult: number | boolean = true; if (!moveData) moveData = move; if (move.ignoreImmunity === undefined) { move.ignoreImmunity = (move.category === 'Status'); } if (target) { hitResult = this.singleEvent('TryHit', moveData, {}, target, pokemon, move); // Partial trapping moves still apply their volatile to Pokémon behind a Sub const targetHadSub = !!target.volatiles['substitute']; if (targetHadSub && moveData.volatileStatus && moveData.volatileStatus === 'partiallytrapped') { 
target.addVolatile(moveData.volatileStatus, pokemon, move); } if (!hitResult) { if (hitResult === false) this.add('-fail', target); return false; } // Only run the hit events for the hit itself, not the secondary or self hits if (!isSelf && !isSecondary) { hitResult = this.runEvent('TryHit', target, pokemon, move); if (!hitResult) { if (hitResult === false) this.add('-fail', target); // Special Substitute hit flag if (hitResult !== 0) { return false; } } if (!this.runEvent('TryFieldHit', target, pokemon, move)) { return false; } } else if (isSecondary && !moveData.self) { hitResult = this.runEvent('TrySecondaryHit', target, pokemon, moveData); } if (hitResult === 0) { target = null; } else if (!hitResult) { if (hitResult === false) this.add('-fail', target); return false; } } if (target) { let didSomething = false; damage = this.getDamage(pokemon, target, moveData); if ((damage || damage === 0) && !target.fainted) { if (move.noFaint && damage >= target.hp) { damage = target.hp - 1; } damage = this.damage(damage, target, pokemon, move); if (!(damage || damage === 0)) return false; didSomething = true; } else if (damage === false && typeof hitResult === 'undefined') { this.add('-fail', target); } if (damage === false || damage === null) { return false; } if (moveData.boosts && !target.fainted) { this.boost(moveData.boosts, target, pokemon, move); } if (moveData.heal && !target.fainted) { const d = target.heal(Math.floor(target.maxhp * moveData.heal[0] / moveData.heal[1])); if (!d) { this.add('-fail', target); return false; } this.add('-heal', target, target.getHealth); didSomething = true; } if (moveData.status) { if (!target.status) { target.setStatus(moveData.status, pokemon, move); // @ts-ignore target.recalculateStats(); } else if (!isSecondary) { if (target.status === moveData.status) { this.add('-fail', target, target.status); } else { this.add('-fail', target); } } didSomething = true; } if (moveData.forceStatus) { if (target.setStatus(moveData.forceStatus, 
pokemon, move)) { // @ts-ignore target.recalculateStats(); didSomething = true; } } if (moveData.volatileStatus) { if (target.addVolatile(moveData.volatileStatus, pokemon, move)) { didSomething = true; } } if (moveData.sideCondition) { if (target.side.addSideCondition(moveData.sideCondition, pokemon, move)) { didSomething = true; } } if (moveData.pseudoWeather) { if (this.field.addPseudoWeather(moveData.pseudoWeather, pokemon, move)) { didSomething = true; } } // Hit events hitResult = this.singleEvent('Hit', moveData, {}, target, pokemon, move); if (!isSelf && !isSecondary) { this.runEvent('Hit', target, pokemon, move); } if (!hitResult && !didSomething) { if (hitResult === false) this.add('-fail', target); return false; } } // Here's where self effects are applied. if (moveData.self) { this.moveHit(pokemon, pokemon, move, moveData.self, isSecondary, true);<|fim▁hole|> pokemon.volatiles['partialtrappinglock'].damage = pokemon.lastDamage; } // Apply move secondaries. if (moveData.secondaries) { for (const secondary of moveData.secondaries) { // We check here whether to negate the probable secondary status if it's para, burn, or freeze. // In the game, this is checked and if true, the random number generator is not called. // That means that a move that does not share the type of the target can status it. // If a move that was not fire-type would exist on Gen 1, it could burn a Pokémon. if (!(secondary.status && ['par', 'brn', 'frz'].includes(secondary.status) && target && target.hasType(move.type))) { const effectChance = Math.floor((secondary.chance || 100) * 255 / 100); if (typeof secondary.chance === 'undefined' || this.randomChance(effectChance + 1, 256)) { this.moveHit(target, pokemon, move, secondary, true, isSelf); } } } } if (move.selfSwitch && pokemon.hp) { pokemon.switchFlag = move.selfSwitch; } return damage; }, getDamage(pokemon, target, move, suppressMessages) { // First of all, we get the move. 
if (typeof move === 'string') { move = this.dex.getActiveMove(move); } else if (typeof move === 'number') { move = { basePower: move, type: '???', category: 'Physical', willCrit: false, flags: {}, } as ActiveMove; } // Let's see if the target is immune to the move. if (!move.ignoreImmunity || (move.ignoreImmunity !== true && !move.ignoreImmunity[move.type])) { if (!target.runImmunity(move.type, true)) { return false; } } // Is it an OHKO move? if (move.ohko) { return target.maxhp; } // We edit the damage through move's damage callback if necessary. if (move.damageCallback) { return move.damageCallback.call(this, pokemon, target); } // We take damage from damage=level moves (seismic toss). if (move.damage === 'level') { return pokemon.level; } // If there's a fix move damage, we return that. if (move.damage) { return move.damage; } // If it's the first hit on a Normal-type partially trap move, it hits Ghosts anyways but damage is 0. if (move.volatileStatus === 'partiallytrapped' && move.type === 'Normal' && target.hasType('Ghost')) { return 0; } // Let's check if we are in middle of a partial trap sequence to return the previous damage. if (pokemon.volatiles['partialtrappinglock'] && (target === pokemon.volatiles['partialtrappinglock'].locked)) { return pokemon.volatiles['partialtrappinglock'].damage; } // We check the category and typing to calculate later on the damage. if (!move.category) move.category = 'Physical'; if (!move.defensiveCategory) move.defensiveCategory = move.category; // '???' is typeless damage: used for Struggle and Confusion etc if (!move.type) move.type = '???'; const type = move.type; // We get the base power and apply basePowerCallback if necessary. let basePower: number | false | null = move.basePower; if (move.basePowerCallback) { basePower = move.basePowerCallback.call(this, pokemon, target, move); } if (!basePower) { return basePower === 0 ? 
undefined : basePower; } basePower = this.clampIntRange(basePower, 1); // Checking for the move's Critical Hit possibility. We check if it's a 100% crit move, otherwise we calculate the chance. let isCrit = move.willCrit || false; if (!isCrit) { // In Stadium, the critical chance is based on speed. // First, we get the base speed and store it. Then we add 76. This is our current crit chance. let critChance = pokemon.species.baseStats['spe'] + 76; // Now we right logical shift it two places, essentially dividing by 4 and flooring it. critChance = critChance >> 2; // Now we check for focus energy volatile. if (pokemon.volatiles['focusenergy']) { // If it exists, crit chance is multiplied by 4 and floored with a logical left shift. critChance = critChance << 2; // Then we add 160. critChance += 160; } else { // If it is not active, we left shift it by 1. critChance = critChance << 1; } // Now we check for the move's critical hit ratio. if (move.critRatio === 2) { // High crit ratio, we multiply the result so far by 4. critChance = critChance << 2; } else if (move.critRatio === 1) { // Normal hit ratio, we divide the crit chance by 2 and floor the result again. critChance = critChance >> 1; } // Now we make sure it's a number between 1 and 255. critChance = this.clampIntRange(critChance, 1, 255); // Last, we check deppending on ratio if the move critical hits or not. // We compare our critical hit chance against a random number between 0 and 255. // If the random number is lower, we get a critical hit. This means there is always a 1/255 chance of not hitting critically. if (critChance > 0) { isCrit = this.randomChance(critChance, 256); } } // There is a critical hit. if (isCrit && this.runEvent('CriticalHit', target, null, move)) { target.getMoveHitData(move).crit = true; } // Happens after crit calculation. 
if (basePower) { basePower = this.runEvent('BasePower', pokemon, target, move, basePower); if (basePower && move.basePowerModifier) { basePower *= move.basePowerModifier; } } if (!basePower) return 0; basePower = this.clampIntRange(basePower, 1); // We now check attacker's and defender's stats. let level = pokemon.level; let attacker = pokemon; const defender = target; if (move.useTargetOffensive) attacker = target; let atkType: StatNameExceptHP = (move.category === 'Physical') ? 'atk' : 'spa'; const defType: StatNameExceptHP = (move.defensiveCategory === 'Physical') ? 'def' : 'spd'; if (move.useSourceDefensiveAsOffensive) atkType = defType; let attack = attacker.getStat(atkType); let defense = defender.getStat(defType); // In gen 1, screen effect is applied here. if ((defType === 'def' && defender.volatiles['reflect']) || (defType === 'spd' && defender.volatiles['lightscreen'])) { this.debug('Screen doubling (Sp)Def'); defense *= 2; defense = this.clampIntRange(defense, 1, 1998); } // In the event of a critical hit, the offense and defense changes are ignored. // This includes both boosts and screens. // Also, level is doubled in damage calculation. if (isCrit) { move.ignoreOffensive = true; move.ignoreDefensive = true; level *= 2; if (!suppressMessages) this.add('-crit', target); } if (move.ignoreOffensive) { this.debug('Negating (sp)atk boost/penalty.'); attack = attacker.getStat(atkType, true); } if (move.ignoreDefensive) { this.debug('Negating (sp)def boost/penalty.'); defense = target.getStat(defType, true); } // When either attack or defense are higher than 256, they are both divided by 4 and moded by 256. // This is what cuases the roll over bugs. if (attack >= 256 || defense >= 256) { attack = this.clampIntRange(Math.floor(attack / 4) % 256, 1); // Defense isn't checked on the cartridge, but we don't want those / 0 bugs on the sim. defense = this.clampIntRange(Math.floor(defense / 4) % 256, 1); } // Self destruct moves halve defense at this point. 
if (move.selfdestruct && defType === 'def') { defense = this.clampIntRange(Math.floor(defense / 2), 1); } // Let's go with the calculation now that we have what we need. // We do it step by step just like the game does. let damage = level * 2; damage = Math.floor(damage / 5); damage += 2; damage *= basePower; damage *= attack; damage = Math.floor(damage / defense); damage = this.clampIntRange(Math.floor(damage / 50), 1, 997); damage += 2; // STAB damage bonus, the "???" type never gets STAB if (type !== '???' && pokemon.hasType(type)) { damage += Math.floor(damage / 2); } // Type effectiveness. // The order here is not correct, must change to check the move versus each type. const totalTypeMod = this.dex.getEffectiveness(type, target); // Super effective attack if (totalTypeMod > 0) { if (!suppressMessages) this.add('-supereffective', target); damage *= 20; damage = Math.floor(damage / 10); if (totalTypeMod >= 2) { damage *= 20; damage = Math.floor(damage / 10); } } if (totalTypeMod < 0) { if (!suppressMessages) this.add('-resisted', target); damage *= 5; damage = Math.floor(damage / 10); if (totalTypeMod <= -2) { damage *= 5; damage = Math.floor(damage / 10); } } // If damage becomes 0, the move is made to miss. // This occurs when damage was either 2 or 3 prior to applying STAB/Type matchup, and target is 4x resistant to the move. if (damage === 0) return damage; // Apply random factor is damage is greater than 1 if (damage > 1) { damage *= this.random(217, 256); damage = Math.floor(damage / 255); if (damage > target.hp && !target.volatiles['substitute']) damage = target.hp; if (target.volatiles['substitute'] && damage > target.volatiles['substitute'].hp) { damage = target.volatiles['substitute'].hp; } } // We are done, this is the final damage. return Math.floor(damage); }, };<|fim▁end|>
} // Now we can save the partial trapping damage. if (pokemon.volatiles['partialtrappinglock']) {
<|file_name|>unknown-language-item.rs<|end_file_name|><|fim▁begin|>#![allow(unused)] #![feature(lang_items)] #[lang = "foo"] fn bar() -> ! {<|fim▁hole|>//~^^ ERROR definition of an unknown language item: `foo` loop {} } fn main() {}<|fim▁end|>
<|file_name|>image.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.html or http://ckeditor.com/license */ (function() { var imageDialog = function( editor, dialogType ) { // Load image preview. var IMAGE = 1, LINK = 2, PREVIEW = 4, CLEANUP = 8, regexGetSize = /^\s*(\d+)((px)|\%)?\s*$/i, regexGetSizeOrEmpty = /(^\s*(\d+)((px)|\%)?\s*$)|^$/i, pxLengthRegex = /^\d+px$/; var onSizeChange = function() { var value = this.getValue(), // This = input element. dialog = this.getDialog(), aMatch = value.match( regexGetSize ); // Check value if ( aMatch ) { if ( aMatch[2] == '%' ) // % is allowed - > unlock ratio. switchLockRatio( dialog, false ); // Unlock. value = aMatch[1]; } // Only if ratio is locked if ( dialog.lockRatio ) { var oImageOriginal = dialog.originalElement; if ( oImageOriginal.getCustomData( 'isReady' ) == 'true' ) { if ( this.id == 'txtHeight' ) { if ( value && value != '0' ) value = Math.round( oImageOriginal.$.width * ( value / oImageOriginal.$.height ) ); if ( !isNaN( value ) ) dialog.setValueOf( 'info', 'txtWidth', value ); } else //this.id = txtWidth. { if ( value && value != '0' ) value = Math.round( oImageOriginal.$.height * ( value / oImageOriginal.$.width ) ); if ( !isNaN( value ) ) dialog.setValueOf( 'info', 'txtHeight', value ); } } } updatePreview( dialog ); }; var updatePreview = function( dialog ) { //Don't load before onShow. if ( !dialog.originalElement || !dialog.preview ) return 1; // Read attributes and update imagePreview; dialog.commitContent( PREVIEW, dialog.preview ); return 0; }; // Custom commit dialog logic, where we're intended to give inline style // field (txtdlgGenStyle) higher priority to avoid overwriting styles contribute // by other fields. 
function commitContent() { var args = arguments; var inlineStyleField = this.getContentElement( 'advanced', 'txtdlgGenStyle' ); inlineStyleField && inlineStyleField.commit.apply( inlineStyleField, args ); this.foreach( function( widget ) { if ( widget.commit && widget.id != 'txtdlgGenStyle' ) widget.commit.apply( widget, args ); }); } // Avoid recursions. var incommit; // Synchronous field values to other impacted fields is required, e.g. border // size change should alter inline-style text as well. function commitInternally( targetFields ) { if ( incommit ) return; incommit = 1; var dialog = this.getDialog(), element = dialog.imageElement; if ( element ) { // Commit this field and broadcast to target fields. this.commit( IMAGE, element ); targetFields = [].concat( targetFields ); var length = targetFields.length, field; for ( var i = 0; i < length; i++ ) { field = dialog.getContentElement.apply( dialog, targetFields[ i ].split( ':' ) ); // May cause recursion. field && field.setup( IMAGE, element ); } } incommit = 0; } var switchLockRatio = function( dialog, value ) { var oImageOriginal = dialog.originalElement; // Dialog may already closed. (#5505) if( !oImageOriginal ) return null; var ratioButton = CKEDITOR.document.getById( btnLockSizesId ); if ( oImageOriginal.getCustomData( 'isReady' ) == 'true' ) { if ( value == 'check' ) // Check image ratio and original image ratio. 
{ var width = dialog.getValueOf( 'info', 'txtWidth' ), height = dialog.getValueOf( 'info', 'txtHeight' ), originalRatio = oImageOriginal.$.width * 1000 / oImageOriginal.$.height, thisRatio = width * 1000 / height; dialog.lockRatio = false; // Default: unlock ratio if ( !width && !height ) dialog.lockRatio = true; else if ( !isNaN( originalRatio ) && !isNaN( thisRatio ) ) { if ( Math.round( originalRatio ) == Math.round( thisRatio ) ) dialog.lockRatio = true; } } else if ( value != undefined ) dialog.lockRatio = value; else dialog.lockRatio = !dialog.lockRatio; } else if ( value != 'check' ) // I can't lock ratio if ratio is unknown. dialog.lockRatio = false; if ( dialog.lockRatio ) ratioButton.removeClass( 'cke_btn_unlocked' ); else ratioButton.addClass( 'cke_btn_unlocked' ); var lang = dialog._.editor.lang.image, label = lang[ dialog.lockRatio ? 'unlockRatio' : 'lockRatio' ]; ratioButton.setAttribute( 'title', label ); ratioButton.getFirst().setText( label ); return dialog.lockRatio; }; var resetSize = function( dialog ) { var oImageOriginal = dialog.originalElement; if ( oImageOriginal.getCustomData( 'isReady' ) == 'true' ) { dialog.setValueOf( 'info', 'txtWidth', oImageOriginal.$.width ); dialog.setValueOf( 'info', 'txtHeight', oImageOriginal.$.height ); } updatePreview( dialog ); }; var setupDimension = function( type, element ) { if ( type != IMAGE ) return; function checkDimension( size, defaultValue ) { var aMatch = size.match( regexGetSize ); if ( aMatch ) { if ( aMatch[2] == '%' ) // % is allowed. { aMatch[1] += '%'; switchLockRatio( dialog, false ); // Unlock ratio } return aMatch[1]; } return defaultValue; } var dialog = this.getDialog(), value = '', dimension = (( this.id == 'txtWidth' )? 
'width' : 'height' ), size = element.getAttribute( dimension ); if ( size ) value = checkDimension( size, value ); value = checkDimension( element.getStyle( dimension ), value ); this.setValue( value ); }; var previewPreloader; var onImgLoadEvent = function() { // Image is ready. var original = this.originalElement; original.setCustomData( 'isReady', 'true' ); original.removeListener( 'load', onImgLoadEvent ); original.removeListener( 'error', onImgLoadErrorEvent ); original.removeListener( 'abort', onImgLoadErrorEvent ); // Hide loader CKEDITOR.document.getById( imagePreviewLoaderId ).setStyle( 'display', 'none' ); // New image -> new domensions if ( !this.dontResetSize ) resetSize( this ); if ( this.firstLoad ) CKEDITOR.tools.setTimeout( function(){ switchLockRatio( this, 'check' ); }, 0, this ); this.firstLoad = false; this.dontResetSize = false; }; var onImgLoadErrorEvent = function() { // Error. Image is not loaded. var original = this.originalElement; original.removeListener( 'load', onImgLoadEvent ); original.removeListener( 'error', onImgLoadErrorEvent ); original.removeListener( 'abort', onImgLoadErrorEvent ); // Set Error image. var noimage = CKEDITOR.getUrl( editor.skinPath + 'images/noimage.png' ); if ( this.preview ) this.preview.setAttribute( 'src', noimage ); // Hide loader CKEDITOR.document.getById( imagePreviewLoaderId ).setStyle( 'display', 'none' ); switchLockRatio( this, false ); // Unlock. }; var numbering = function( id ) { return CKEDITOR.tools.getNextId() + '_' + id; }, btnLockSizesId = numbering( 'btnLockSizes' ), btnResetSizeId = numbering( 'btnResetSize' ), imagePreviewLoaderId = numbering( 'ImagePreviewLoader' ), imagePreviewBoxId = numbering( 'ImagePreviewBox' ), previewLinkId = numbering( 'previewLink' ), previewImageId = numbering( 'previewImage' ); return { title : editor.lang.image[ dialogType == 'image' ? 
'title' : 'titleButton' ], minWidth : 420, minHeight : 360, onShow : function() { this.imageElement = false; this.linkElement = false; // Default: create a new element. this.imageEditMode = false; this.linkEditMode = false; this.lockRatio = true; this.dontResetSize = false; this.firstLoad = true; this.addLink = false; var editor = this.getParentEditor(), sel = this.getParentEditor().getSelection(), element = sel.getSelectedElement(), link = element && element.getAscendant( 'a' ); //Hide loader. CKEDITOR.document.getById( imagePreviewLoaderId ).setStyle( 'display', 'none' ); // Create the preview before setup the dialog contents. previewPreloader = new CKEDITOR.dom.element( 'img', editor.document ); this.preview = CKEDITOR.document.getById( previewImageId ); // Copy of the image this.originalElement = editor.document.createElement( 'img' ); this.originalElement.setAttribute( 'alt', '' ); this.originalElement.setCustomData( 'isReady', 'false' ); if ( link ) { this.linkElement = link; this.linkEditMode = true; // Look for Image element. var linkChildren = link.getChildren(); if ( linkChildren.count() == 1 ) // 1 child. { var childTagName = linkChildren.getItem( 0 ).getName(); if ( childTagName == 'img' || childTagName == 'input' ) { this.imageElement = linkChildren.getItem( 0 ); if ( this.imageElement.getName() == 'img' ) this.imageEditMode = 'img'; else if ( this.imageElement.getName() == 'input' ) this.imageEditMode = 'input'; } } // Fill out all fields. if ( dialogType == 'image' ) this.setupContent( LINK, link ); } if ( element && element.getName() == 'img' && !element.data( 'cke-realelement' ) || element && element.getName() == 'input' && element.getAttribute( 'type' ) == 'image' ) { this.imageEditMode = element.getName(); this.imageElement = element; } if ( this.imageEditMode ) { // Use the original element as a buffer from since we don't want // temporary changes to be committed, e.g. if the dialog is canceled. 
this.cleanImageElement = this.imageElement; this.imageElement = this.cleanImageElement.clone( true, true ); // Fill out all fields. this.setupContent( IMAGE, this.imageElement ); // Refresh LockRatio button switchLockRatio ( this, true ); } else this.imageElement = editor.document.createElement( 'img' ); // Dont show preview if no URL given. if ( !CKEDITOR.tools.trim( this.getValueOf( 'info', 'txtUrl' ) ) ) { this.preview.removeAttribute( 'src' ); this.preview.setStyle( 'display', 'none' ); } }, onOk : function() { // Edit existing Image. if ( this.imageEditMode ) { var imgTagName = this.imageEditMode; // Image dialog and Input element. if ( dialogType == 'image' && imgTagName == 'input' && confirm( editor.lang.image.button2Img ) ) { // Replace INPUT-> IMG imgTagName = 'img'; this.imageElement = editor.document.createElement( 'img' ); this.imageElement.setAttribute( 'alt', '' ); editor.insertElement( this.imageElement ); } // ImageButton dialog and Image element. else if ( dialogType != 'image' && imgTagName == 'img' && confirm( editor.lang.image.img2Button )) { // Replace IMG -> INPUT imgTagName = 'input'; this.imageElement = editor.document.createElement( 'input' ); this.imageElement.setAttributes( { type : 'image', alt : '' } ); editor.insertElement( this.imageElement ); } else { // Restore the original element before all commits. this.imageElement = this.cleanImageElement; delete this.cleanImageElement; } } else // Create a new image. { // Image dialog -> create IMG element. if ( dialogType == 'image' ) this.imageElement = editor.document.createElement( 'img' ); else { this.imageElement = editor.document.createElement( 'input' ); this.imageElement.setAttribute ( 'type' ,'image' ); } this.imageElement.setAttribute( 'alt', '' ); } // Create a new link. if ( !this.linkEditMode ) this.linkElement = editor.document.createElement( 'a' ); // Set attributes. 
this.commitContent( IMAGE, this.imageElement ); this.commitContent( LINK, this.linkElement ); // Remove empty style attribute. if ( !this.imageElement.getAttribute( 'style' ) ) this.imageElement.removeAttribute( 'style' ); // Insert a new Image. if ( !this.imageEditMode ) { if ( this.addLink ) { //Insert a new Link. if ( !this.linkEditMode ) { editor.insertElement(this.linkElement); this.linkElement.append(this.imageElement, false); } else //Link already exists, image not. editor.insertElement(this.imageElement ); } else editor.insertElement( this.imageElement ); } else // Image already exists. { //Add a new link element. if ( !this.linkEditMode && this.addLink ) { editor.insertElement( this.linkElement ); this.imageElement.appendTo( this.linkElement ); } //Remove Link, Image exists. else if ( this.linkEditMode && !this.addLink ) { editor.getSelection().selectElement( this.linkElement ); editor.insertElement( this.imageElement ); } } }, onLoad : function() { if ( dialogType != 'image' ) this.hidePage( 'Link' ); //Hide Link tab. var doc = this._.element.getDocument(); this.addFocusable( doc.getById( btnResetSizeId ), 5 ); this.addFocusable( doc.getById( btnLockSizesId ), 5 ); this.commitContent = commitContent; }, onHide : function() { if ( this.preview ) this.commitContent( CLEANUP, this.preview ); if ( this.originalElement ) { this.originalElement.removeListener( 'load', onImgLoadEvent ); this.originalElement.removeListener( 'error', onImgLoadErrorEvent ); this.originalElement.removeListener( 'abort', onImgLoadErrorEvent ); this.originalElement.remove(); this.originalElement = false; // Dialog is closed. 
} delete this.imageElement; }, contents : [ { id : 'info', label : editor.lang.image.infoTab, accessKey : 'I', elements : [ { type : 'vbox', padding : 0, children : [ { type : 'hbox', widths : [ '280px', '110px' ], align : 'right', children : [ { id : 'txtUrl', type : 'text', label : editor.lang.common.url, required: true, onChange : function() { var dialog = this.getDialog(), newUrl = this.getValue(); //Update original image if ( newUrl.length > 0 ) //Prevent from load before onShow { dialog = this.getDialog(); var original = dialog.originalElement; dialog.preview.removeStyle( 'display' ); original.setCustomData( 'isReady', 'false' ); // Show loader var loader = CKEDITOR.document.getById( imagePreviewLoaderId ); if ( loader ) loader.setStyle( 'display', '' ); original.on( 'load', onImgLoadEvent, dialog ); original.on( 'error', onImgLoadErrorEvent, dialog ); original.on( 'abort', onImgLoadErrorEvent, dialog ); original.setAttribute( 'src', newUrl ); // Query the preloader to figure out the url impacted by based href. previewPreloader.setAttribute( 'src', newUrl ); dialog.preview.setAttribute( 'src', previewPreloader.$.src ); updatePreview( dialog ); } // Dont show preview if no URL given. else if ( dialog.preview ) { dialog.preview.removeAttribute( 'src' ); dialog.preview.setStyle( 'display', 'none' ); } }, setup : function( type, element ) { if ( type == IMAGE ) { var url = element.data( 'cke-saved-src' ) || element.getAttribute( 'src' ); var field = this; this.getDialog().dontResetSize = true; field.setValue( url ); // And call this.onChange() // Manually set the initial value.(#4191) field.setInitValue(); } }, commit : function( type, element ) { if ( type == IMAGE && ( this.getValue() || this.isChanged() ) ) { element.data( 'cke-saved-src', this.getValue() ); element.setAttribute( 'src', this.getValue() ); } else if ( type == CLEANUP ) { element.setAttribute( 'src', '' ); // If removeAttribute doesn't work. 
element.removeAttribute( 'src' ); } }, validate : CKEDITOR.dialog.validate.notEmpty( editor.lang.image.urlMissing ) }, { type : 'button', id : 'browse', // v-align with the 'txtUrl' field. // TODO: We need something better than a fixed size here. style : 'display:inline-block;margin-top:10px;', align : 'center', label : editor.lang.common.browseServer, hidden : true, filebrowser : 'info:txtUrl' } ] } ] }, { id : 'txtAlt', type : 'text', label : editor.lang.image.alt, accessKey : 'T', 'default' : '', onChange : function() { updatePreview( this.getDialog() ); }, setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'alt' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'alt', this.getValue() ); } else if ( type == PREVIEW ) { element.setAttribute( 'alt', this.getValue() ); } else if ( type == CLEANUP ) { element.removeAttribute( 'alt' ); } } }, { type : 'hbox', children : [ { type : 'vbox', children : [ { type : 'hbox', widths : [ '50%', '50%' ], children : [ { type : 'vbox', padding : 1, children : [ { type : 'text', width: '40px', id : 'txtWidth', label : editor.lang.common.width, onKeyUp : onSizeChange, onChange : function() { commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, validate : function() { var aMatch = this.getValue().match( regexGetSizeOrEmpty ); if ( !aMatch ) alert( editor.lang.common.invalidWidth ); return !!aMatch; }, setup : setupDimension, commit : function( type, element, internalCommit ) { var value = this.getValue(); if ( type == IMAGE ) { if ( value ) element.setStyle( 'width', CKEDITOR.tools.cssLength( value ) ); else if ( !value && this.isChanged( ) ) element.removeStyle( 'width' ); !internalCommit && element.removeAttribute( 'width' ); } else if ( type == PREVIEW ) { var aMatch = value.match( regexGetSize ); if ( !aMatch ) { var oImageOriginal = this.getDialog().originalElement; if ( 
oImageOriginal.getCustomData( 'isReady' ) == 'true' ) element.setStyle( 'width', oImageOriginal.$.width + 'px'); } else element.setStyle( 'width', CKEDITOR.tools.cssLength( value ) ); } else if ( type == CLEANUP ) { element.removeAttribute( 'width' ); element.removeStyle( 'width' ); } } }, { type : 'text', id : 'txtHeight', width: '40px', label : editor.lang.common.height, onKeyUp : onSizeChange, onChange : function() { commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, validate : function() { var aMatch = this.getValue().match( regexGetSizeOrEmpty ); if ( !aMatch ) alert( editor.lang.common.invalidHeight ); return !!aMatch; }, setup : setupDimension, commit : function( type, element, internalCommit ) { var value = this.getValue(); if ( type == IMAGE ) { if ( value ) element.setStyle( 'height', CKEDITOR.tools.cssLength( value ) ); else if ( !value && this.isChanged( ) ) element.removeStyle( 'height' ); if ( !internalCommit && type == IMAGE ) element.removeAttribute( 'height' ); } else if ( type == PREVIEW ) { var aMatch = value.match( regexGetSize ); if ( !aMatch ) { var oImageOriginal = this.getDialog().originalElement; if ( oImageOriginal.getCustomData( 'isReady' ) == 'true' ) element.setStyle( 'height', oImageOriginal.$.height + 'px' ); } else element.setStyle( 'height', CKEDITOR.tools.cssLength( value ) ); } else if ( type == CLEANUP ) { element.removeAttribute( 'height' ); element.removeStyle( 'height' ); } } } ] }, { type : 'html', style : 'margin-top:30px;width:40px;height:40px;', onLoad : function() { // Activate Reset button var resetButton = CKEDITOR.document.getById( btnResetSizeId ), ratioButton = CKEDITOR.document.getById( btnLockSizesId ); if ( resetButton ) { resetButton.on( 'click', function(evt) { resetSize( this ); evt.data.preventDefault(); }, this.getDialog() ); resetButton.on( 'mouseover', function() { this.addClass( 'cke_btn_over' ); }, resetButton ); resetButton.on( 'mouseout', function() { this.removeClass( 'cke_btn_over' ); }, 
resetButton ); } // Activate (Un)LockRatio button if ( ratioButton ) { ratioButton.on( 'click', function(evt) { var locked = switchLockRatio( this ), oImageOriginal = this.originalElement, width = this.getValueOf( 'info', 'txtWidth' ); if ( oImageOriginal.getCustomData( 'isReady' ) == 'true' && width ) { var height = oImageOriginal.$.height / oImageOriginal.$.width * width; if ( !isNaN( height ) ) { this.setValueOf( 'info', 'txtHeight', Math.round( height ) ); updatePreview( this ); } } evt.data.preventDefault(); }, this.getDialog() ); ratioButton.on( 'mouseover', function() { this.addClass( 'cke_btn_over' ); }, ratioButton ); ratioButton.on( 'mouseout', function() { this.removeClass( 'cke_btn_over' ); }, ratioButton ); } }, html : '<div>'+ '<a href="javascript:void(0)" tabindex="-1" title="' + editor.lang.image.unlockRatio + '" class="cke_btn_locked" id="' + btnLockSizesId + '" role="button"><span class="cke_label">' + editor.lang.image.unlockRatio + '</span></a>' + '<a href="javascript:void(0)" tabindex="-1" title="' + editor.lang.image.resetSize + '" class="cke_btn_reset" id="' + btnResetSizeId + '" role="button"><span class="cke_label">' + editor.lang.image.resetSize + '</span></a>'+ '</div>' } ] }, { type : 'vbox', padding : 1, children : [ { type : 'text', id : 'txtBorder', width: '60px', label : editor.lang.image.border, 'default' : '', onKeyUp : function() { updatePreview( this.getDialog() ); }, onChange : function() { commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, validate : CKEDITOR.dialog.validate.integer( editor.lang.image.validateBorder ), setup : function( type, element ) { if ( type == IMAGE ) { var value, borderStyle = element.getStyle( 'border-width' ); borderStyle = borderStyle && borderStyle.match( /^(\d+px)(?: \1 \1 \1)?$/ ); value = borderStyle && parseInt( borderStyle[ 1 ], 10 ); isNaN ( parseInt( value, 10 ) ) && ( value = element.getAttribute( 'border' ) ); this.setValue( value ); } }, commit : function( type, element, 
internalCommit ) { var value = parseInt( this.getValue(), 10 ); if ( type == IMAGE || type == PREVIEW ) { if ( !isNaN( value ) ) { element.setStyle( 'border-width', CKEDITOR.tools.cssLength( value ) ); element.setStyle( 'border-style', 'solid' ); } else if ( !value && this.isChanged() ) { element.removeStyle( 'border-width' ); element.removeStyle( 'border-style' ); element.removeStyle( 'border-color' ); } if ( !internalCommit && type == IMAGE ) element.removeAttribute( 'border' ); } else if ( type == CLEANUP ) { element.removeAttribute( 'border' ); element.removeStyle( 'border-width' ); element.removeStyle( 'border-style' ); element.removeStyle( 'border-color' ); } } }, { type : 'text', id : 'txtHSpace', width: '60px', label : editor.lang.image.hSpace, 'default' : '', onKeyUp : function() { updatePreview( this.getDialog() ); }, onChange : function() { commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, validate : CKEDITOR.dialog.validate.integer( editor.lang.image.validateHSpace ), setup : function( type, element ) { if ( type == IMAGE ) { var value, marginLeftPx, marginRightPx, marginLeftStyle = element.getStyle( 'margin-left' ), marginRightStyle = element.getStyle( 'margin-right' ); marginLeftStyle = marginLeftStyle && marginLeftStyle.match( pxLengthRegex ); marginRightStyle = marginRightStyle && marginRightStyle.match( pxLengthRegex ); marginLeftPx = parseInt( marginLeftStyle, 10 ); marginRightPx = parseInt( marginRightStyle, 10 ); value = ( marginLeftPx == marginRightPx ) && marginLeftPx; isNaN( parseInt( value, 10 ) ) && ( value = element.getAttribute( 'hspace' ) ); this.setValue( value ); } }, commit : function( type, element, internalCommit ) { var value = parseInt( this.getValue(), 10 ); if ( type == IMAGE || type == PREVIEW ) { if ( !isNaN( value ) ) { element.setStyle( 'margin-left', CKEDITOR.tools.cssLength( value ) ); element.setStyle( 'margin-right', CKEDITOR.tools.cssLength( value ) ); } else if ( !value && this.isChanged( ) ) { 
element.removeStyle( 'margin-left' ); element.removeStyle( 'margin-right' ); } if ( !internalCommit && type == IMAGE ) element.removeAttribute( 'hspace' ); } else if ( type == CLEANUP ) { element.removeAttribute( 'hspace' ); element.removeStyle( 'margin-left' ); element.removeStyle( 'margin-right' ); } } }, { type : 'text', id : 'txtVSpace', width : '60px', label : editor.lang.image.vSpace, 'default' : '', onKeyUp : function() { updatePreview( this.getDialog() ); }, onChange : function() { commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, validate : CKEDITOR.dialog.validate.integer( editor.lang.image.validateVSpace ), setup : function( type, element ) { if ( type == IMAGE ) { var value, marginTopPx, marginBottomPx, marginTopStyle = element.getStyle( 'margin-top' ), marginBottomStyle = element.getStyle( 'margin-bottom' ); marginTopStyle = marginTopStyle && marginTopStyle.match( pxLengthRegex ); marginBottomStyle = marginBottomStyle && marginBottomStyle.match( pxLengthRegex ); marginTopPx = parseInt( marginTopStyle, 10 ); marginBottomPx = parseInt( marginBottomStyle, 10 ); value = ( marginTopPx == marginBottomPx ) && marginTopPx; isNaN ( parseInt( value, 10 ) ) && ( value = element.getAttribute( 'vspace' ) ); this.setValue( value ); } }, commit : function( type, element, internalCommit ) { var value = parseInt( this.getValue(), 10 ); if ( type == IMAGE || type == PREVIEW ) { if ( !isNaN( value ) ) { element.setStyle( 'margin-top', CKEDITOR.tools.cssLength( value ) ); element.setStyle( 'margin-bottom', CKEDITOR.tools.cssLength( value ) ); } else if ( !value && this.isChanged( ) ) { element.removeStyle( 'margin-top' ); element.removeStyle( 'margin-bottom' ); } if ( !internalCommit && type == IMAGE ) element.removeAttribute( 'vspace' ); } else if ( type == CLEANUP ) { element.removeAttribute( 'vspace' ); element.removeStyle( 'margin-top' ); element.removeStyle( 'margin-bottom' ); } } }, { id : 'cmbAlign', type : 'select', widths : [ '35%','65%' ], style : 
'width:90px', label : editor.lang.common.align, 'default' : '', items : [ [ editor.lang.common.notSet , ''], [ editor.lang.common.alignLeft , 'left'], [ editor.lang.common.alignRight , 'right'] // Backward compatible with v2 on setup when specified as attribute value, // while these values are no more available as select options. // [ editor.lang.image.alignAbsBottom , 'absBottom'], // [ editor.lang.image.alignAbsMiddle , 'absMiddle'], // [ editor.lang.image.alignBaseline , 'baseline'], // [ editor.lang.image.alignTextTop , 'text-top'], // [ editor.lang.image.alignBottom , 'bottom'], // [ editor.lang.image.alignMiddle , 'middle'], // [ editor.lang.image.alignTop , 'top'] ], onChange : function() { updatePreview( this.getDialog() ); commitInternally.call( this, 'advanced:txtdlgGenStyle' ); }, setup : function( type, element ) { if ( type == IMAGE ) { var value = element.getStyle( 'float' ); switch( value ) { // Ignore those unrelated values. case 'inherit': case 'none': value = ''; } !value && ( value = ( element.getAttribute( 'align' ) || '' ).toLowerCase() ); this.setValue( value ); } }, commit : function( type, element, internalCommit ) { var value = this.getValue(); if ( type == IMAGE || type == PREVIEW ) { if ( value ) element.setStyle( 'float', value ); else element.removeStyle( 'float' ); if ( !internalCommit && type == IMAGE ) { value = ( element.getAttribute( 'align' ) || '' ).toLowerCase(); switch( value ) { // we should remove it only if it matches "left" or "right", // otherwise leave it intact. 
case 'left': case 'right': element.removeAttribute( 'align' ); } } } else if ( type == CLEANUP ) element.removeStyle( 'float' ); } } ] } ] }, { type : 'vbox', height : '250px', children : [ { type : 'html', style : 'width:95%;', html : '<div>' + CKEDITOR.tools.htmlEncode( editor.lang.common.preview ) +'<br>'+ '<div id="' + imagePreviewLoaderId + '" class="ImagePreviewLoader" style="display:none"><div class="loading">&nbsp;</div></div>'+ '<div id="' + imagePreviewBoxId + '" class="ImagePreviewBox"><table><tr><td>'+ '<a href="javascript:void(0)" target="_blank" onclick="return false;" id="' + previewLinkId + '">'+ '<img id="' + previewImageId + '" alt="" /></a>' + ( editor.config.image_previewText || 'Lorem ipsum dolor sit amet, consectetuer adipiscing elit. '+ 'Maecenas feugiat consequat diam. Maecenas metus. Vivamus diam purus, cursus a, commodo non, facilisis vitae, '+ 'nulla. Aenean dictum lacinia tortor. Nunc iaculis, nibh non iaculis aliquam, orci felis euismod neque, sed ornare massa mauris sed velit. Nulla pretium mi et risus. Fusce mi pede, tempor id, cursus ac, ullamcorper nec, enim. Sed tortor. Curabitur molestie. Duis velit augue, condimentum at, ultrices a, luctus ut, orci. Donec pellentesque egestas eros. Integer cursus, augue in cursus faucibus, eros pede bibendum sem, in tempus tellus justo quis ligula. Etiam eget tortor. Vestibulum rutrum, est ut placerat elementum, lectus nisl aliquam velit, tempor aliquam eros nunc nonummy metus. In eros metus, gravida a, gravida sed, lobortis id, turpis. Ut ultrices, ipsum at venenatis fringilla, sem nulla lacinia tellus, eget aliquet turpis mauris non enim. Nam turpis. Suspendisse lacinia. Curabitur ac tortor ut ipsum egestas elementum. Nunc imperdiet gravida mauris.' 
) + '</td></tr></table></div></div>' } ] } ] } ] }, { id : 'Link', label : editor.lang.link.title, padding : 0, elements : [ { id : 'txtUrl', type : 'text', label : editor.lang.common.url, style : 'width: 100%', 'default' : '', setup : function( type, element ) { if ( type == LINK ) { var href = element.data( 'cke-saved-href' ); if ( !href ) href = element.getAttribute( 'href' ); this.setValue( href ); } }, commit : function( type, element ) { if ( type == LINK ) { if ( this.getValue() || this.isChanged() ) { var url = decodeURI( this.getValue() ); element.data( 'cke-saved-href', url ); element.setAttribute( 'href', url ); if ( this.getValue() || !editor.config.image_removeLinkByEmptyURL ) this.getDialog().addLink = true; } } } }, { type : 'button', id : 'browse', filebrowser : { action : 'Browse', target: 'Link:txtUrl', url: editor.config.filebrowserImageBrowseLinkUrl }, style : 'float:right', hidden : true, label : editor.lang.common.browseServer }, { id : 'cmbTarget', type : 'select', label : editor.lang.common.target, 'default' : '', items : [ [ editor.lang.common.notSet , ''], [ editor.lang.common.targetNew , '_blank'], [ editor.lang.common.targetTop , '_top'], [ editor.lang.common.targetSelf , '_self'], [ editor.lang.common.targetParent , '_parent'] ], setup : function( type, element ) { if ( type == LINK ) this.setValue( element.getAttribute( 'target' ) || '' ); }, commit : function( type, element ) { if ( type == LINK ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'target', this.getValue() ); } } } ] }, { id : 'Upload', hidden : true, filebrowser : 'uploadButton', label : editor.lang.image.upload, elements : [ { type : 'file', id : 'upload', label : editor.lang.image.btnUpload, style: 'height:40px', size : 38 }, { type : 'fileButton', id : 'uploadButton', filebrowser : 'info:txtUrl', label : editor.lang.image.btnUpload, 'for' : [ 'Upload', 'upload' ] <|fim▁hole|> ] }, { id : 'advanced', label : editor.lang.common.advancedTab, elements 
: [ { type : 'hbox', widths : [ '50%', '25%', '25%' ], children : [ { type : 'text', id : 'linkId', label : editor.lang.common.id, setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'id' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'id', this.getValue() ); } } }, { id : 'cmbLangDir', type : 'select', style : 'width : 100px;', label : editor.lang.common.langDir, 'default' : '', items : [ [ editor.lang.common.notSet, '' ], [ editor.lang.common.langDirLtr, 'ltr' ], [ editor.lang.common.langDirRtl, 'rtl' ] ], setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'dir' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'dir', this.getValue() ); } } }, { type : 'text', id : 'txtLangCode', label : editor.lang.common.langCode, 'default' : '', setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'lang' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'lang', this.getValue() ); } } } ] }, { type : 'text', id : 'txtGenLongDescr', label : editor.lang.common.longDescr, setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'longDesc' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'longDesc', this.getValue() ); } } }, { type : 'hbox', widths : [ '50%', '50%' ], children : [ { type : 'text', id : 'txtGenClass', label : editor.lang.common.cssClass, 'default' : '', setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'class' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) 
element.setAttribute( 'class', this.getValue() ); } } }, { type : 'text', id : 'txtGenTitle', label : editor.lang.common.advisoryTitle, 'default' : '', onChange : function() { updatePreview( this.getDialog() ); }, setup : function( type, element ) { if ( type == IMAGE ) this.setValue( element.getAttribute( 'title' ) ); }, commit : function( type, element ) { if ( type == IMAGE ) { if ( this.getValue() || this.isChanged() ) element.setAttribute( 'title', this.getValue() ); } else if ( type == PREVIEW ) { element.setAttribute( 'title', this.getValue() ); } else if ( type == CLEANUP ) { element.removeAttribute( 'title' ); } } } ] }, { type : 'text', id : 'txtdlgGenStyle', label : editor.lang.common.cssStyle, 'default' : '', setup : function( type, element ) { if ( type == IMAGE ) { var genStyle = element.getAttribute( 'style' ); if ( !genStyle && element.$.style.cssText ) genStyle = element.$.style.cssText; this.setValue( genStyle ); var height = element.$.style.height, width = element.$.style.width, aMatchH = ( height ? height : '' ).match( regexGetSize ), aMatchW = ( width ? width : '').match( regexGetSize ); this.attributesInStyle = { height : !!aMatchH, width : !!aMatchW }; } }, onChange : function () { commitInternally.call( this, [ 'info:cmbFloat', 'info:cmbAlign', 'info:txtVSpace', 'info:txtHSpace', 'info:txtBorder', 'info:txtWidth', 'info:txtHeight' ] ); updatePreview( this ); }, commit : function( type, element ) { if ( type == IMAGE && ( this.getValue() || this.isChanged() ) ) { element.setAttribute( 'style', this.getValue() ); } } } ] } ] }; }; CKEDITOR.dialog.add( 'image', function( editor ) { return imageDialog( editor, 'image' ); }); CKEDITOR.dialog.add( 'imagebutton', function( editor ) { return imageDialog( editor, 'imagebutton' ); }); })();<|fim▁end|>
}
<|file_name|>dumpPHOTO.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 import sys from gi.repository import GExiv2 phototags = { 'Exif.Photo.ExposureTime': "Belichtung:\t", 'Exif.Photo.FNumber': "Blende:\t\tF", # 'Exif.Photo.ExposureProgram', 'Exif.Photo.ISOSpeedRatings': "ISO:\t\t", # 'Exif.Photo.SensitivityType', # 'Exif.Photo.ExifVersion', # 'Exif.Photo.DateTimeOriginal', # 'Exif.Photo.DateTimeDigitized', # 'Exif.Photo.ComponentsConfiguration', # 'Exif.Photo.CompressedBitsPerPixel', # 'Exif.Photo.ExposureBiasValue', # 'Exif.Photo.MaxApertureValue', # 'Exif.Photo.MeteringMode', # 'Exif.Photo.LightSource',<|fim▁hole|>} for i in range(1, len(sys.argv)): metadata = GExiv2.Metadata(sys.argv[i]) print("file: {}".format(sys.argv[i])) for key in phototags: try: print("{}: {}".format(phototags[key], metadata[key])) except KeyError: continue<|fim▁end|>
# 'Exif.Photo.Flash', 'Exif.Photo.FocalLength': "Brennweite:\t" # 'Exif.Photo.MakerNote'
<|file_name|>Augur Installer.py<|end_file_name|><|fim▁begin|>import os import sys import shutil import binascii import traceback import subprocess from win32com.client import Dispatch LAUNCHER_PATH = "C:\\Program Files\\Augur" DATA_PATH = os.path.join(os.path.expanduser('~'), 'AppData', 'Roaming', "Augur") PASSFILE = os.path.join(DATA_PATH, "password.txt") if getattr(sys, 'frozen', False): # we are running in a |PyInstaller| bundle BASEDIR = sys._MEIPASS<|fim▁hole|> BASEDIR = os.path.dirname(os.path.abspath(__file__)) GETH_EXE = os.path.join(BASEDIR, 'geth.exe') LAUNCHER_EXE = os.path.join(BASEDIR, 'augurlauncher.exe') def main(): # first make all the appropriate directories print("Making directories...") for d in LAUNCHER_PATH, DATA_PATH: print("Creating", d, end=" ", flush=True) os.mkdir(d) print("Success!") print("Generating random password file...", end=" ", flush=True) # then generate the password password = binascii.b2a_hex(os.urandom(32)) passfile = open(PASSFILE, "w") passfile.write(password.decode('ascii')) passfile.close() print("Success!") # Then copy ".exe"s to the launcher path exes = GETH_EXE, LAUNCHER_EXE results = [] for exe in exes: print("Copying", os.path.basename(exe), "to", LAUNCHER_PATH, "...", end=" ", flush=True) results.append(shutil.copy(exe, LAUNCHER_PATH)) print("Sucess!") print("Creating node account...", end=" ", flush=True) # create account on node p = subprocess.Popen([results[0], "--password", PASSFILE, "account", "new"]) p.wait() print("Success!") print("Creating shortcut...", end=" ", flush=True) desktop = os.path.join(os.path.expanduser('~'), 'Desktop') shortcut_path = os.path.join(desktop, "Augur Launcher.lnk") wDir = LAUNCHER_PATH shell = Dispatch('WScript.Shell') shortcut = shell.CreateShortCut(shortcut_path) shortcut.Targetpath = results[1] shortcut.WorkingDirectory = wDir shortcut.IconLocation = results[1] shortcut.save() print("Success!") def uninstall(): paths = LAUNCHER_PATH, DATA_PATH for p in paths: print("Deleting", p, 
"...", end=" ", flush=True) shutil.rmtree(p) print("Success!") print("Removing desktop shortcut...", end=" ", flush=True) desktop = os.path.join(os.path.expanduser('~'), 'Desktop') shortcut_path = os.path.join(desktop, "Augur Launcher.lnk") os.remove(shortcut_path) print("Success!") if __name__ == '__main__': try: if len(sys.argv) == 2 and sys.argv[1] == 'uninstall': uninstall() elif len(sys.argv) == 1: main() else: assert len(sys.argv) <= 2, "wrong number of arguements!" except Exception as exc: traceback.print_exc() finally: os.system("pause") sys.exit(0)<|fim▁end|>
else: # we are running in a normal Python environment
<|file_name|>swagger.py<|end_file_name|><|fim▁begin|>""" Copyright (C) 2017-2021 Vanessa Sochat. This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.<|fim▁hole|> """ from django.conf.urls import url from rest_framework_swagger.views import get_swagger_view swagger_view = get_swagger_view(title="Singularity Registry API", url="") urlpatterns = [url(r"^$", swagger_view)]<|fim▁end|>
<|file_name|>LocalStorage.js<|end_file_name|><|fim▁begin|>exports.setProperty =function(_args){ Ti.App.Properties.setString(_args.name, _args.value) }; exports.getProperty = function(_args){<|fim▁hole|>};<|fim▁end|>
return Ti.App.Properties.getString(_args.name)
<|file_name|>GameshowResults.java<|end_file_name|><|fim▁begin|>/* Copyright 2015 Michelle Mabuyo Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.cmput301.mmabuyo.readysetpress; /** * Created by mmabuyo on 2015-10-01. * Purpose: * This class is responsible for the multiplayer mode of the game. It stores and updates * the results of two, three and four player games as persistent data. * Design Rationale: * Results are stored in integer arrays, the size of which depends on how many * players are playing that round. The first value in the array corresponds to Player One, and * so on and so forth. * Outstanding Issues: * None. 
*/ public class GameshowResults { protected int[] twoPlayerResults; protected int[] threePlayerResults; protected int[] fourPlayerResults; protected MemoryManager memoryManager = new MemoryManager(); public GameshowResults() { twoPlayerResults = new int[2]; threePlayerResults = new int[3]; fourPlayerResults = new int[4]; } public int[] getTwoPlayerResults() { return twoPlayerResults; } public void setTwoPlayerResults(int[] twoPlayerResults) { this.twoPlayerResults = twoPlayerResults; } public int[] getThreePlayerResults() { return threePlayerResults; } public void setThreePlayerResults(int[] threePlayerResults) { this.threePlayerResults = threePlayerResults; } public int[] getFourPlayerResults() { return fourPlayerResults; } public void setFourPlayerResults(int[] fourPlayerResults) { this.fourPlayerResults = fourPlayerResults; } protected void addClick(Player player, int numberOfPlayers) { switch(numberOfPlayers) { case 2: getTwoPlayerResults()[player.getPid()-1]++; break; case 3: getThreePlayerResults()[player.getPid()-1]++; break; case 4: getFourPlayerResults()[player.getPid()-1]++; break; } }<|fim▁hole|> this.setTwoPlayerResults(new int[2]); this.setThreePlayerResults(new int[3]); this.setFourPlayerResults(new int[4]); } }<|fim▁end|>
protected void clear() {
<|file_name|>ConfigHandler.hpp<|end_file_name|><|fim▁begin|>#pragma once // Description: // Notification interface for loading and saving configuration. // // Copyright (C) 2001 Frank Becker // // This program is free software; you can redistribute it and/or modify it under // the terms of the GNU General Public License as published by the Free Software // Foundation; either version 2 of the License, or (at your option) any later // version. // // This program is distributed in the hope that it will be useful, but WITHOUT // ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS // FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details<|fim▁hole|>// #include <string> #include <fstream> class ConfigHandler { public: //when reading the config file, registered ConfigHandlers //will be notified for each line read. virtual void handleLine( const std::string line) = 0; //when writing the config file, registered ConfigHandlers //will be notified with the out file stream virtual void save( std::ofstream &of) = 0; virtual ~ConfigHandler() {} };<|fim▁end|>
<|file_name|>19_TableStickyToolbarExample.js<|end_file_name|><|fim▁begin|>/* eslint-disable */ () => { const filterOptions = [ { id: 'In Stock, Out Of Stock', value: 'In Stock, Out Of Stock' }, { id: 'In Stock', value: 'In Stock' }, { id: 'Out Of Stock', value: 'Out Of Stock' },<|fim▁hole|> const [searchTerm, setSearchTerm] = React.useState(''); const [records, setRecords] = React.useState([ { name: 'Red Slippers', SKU: '0231664667', price: '$14.00', inventory: 'In Stock', }, { name: 'Velvet Hat', SKU: '0231664669', price: '$23.00', inventory: 'In Stock', }, { name: 'Silver Jeans', SKU: '0231664667', price: '$69.00', inventory: 'In Stock', }, { name: 'Orange Stocks', SKU: '0231664671', price: '$9.00', inventory: 'Out Of Stock', }, { name: 'Black T-shirts', SKU: '0231664672', price: '$19.00', inventory: 'In Stock', }, ]); const columns = [ { title: 'Name', render: row => <Highlighter match={searchTerm}>{row.name}</Highlighter>, width: '30%', }, { title: 'SKU', render: row => row.SKU, width: '20%', }, { title: 'Price', render: row => row.price, width: '20%', }, { title: 'Inventory', render: row => row.inventory, width: '20%', }, ]; const _getFilteredData = () => { let filteredData = records.concat(records); if (activeFilter !== 'In Stock, Out Of Stock') { filteredData = filteredData.filter(row => row.inventory === activeFilter); } if (searchTerm !== '') { filteredData = filteredData.filter(row => row.name.toUpperCase().includes(searchTerm.toUpperCase()), ); } return filteredData; }; const filteredData = _getFilteredData(); const _clearSearch = () => { setSearchTerm(''); }; const _renderSearch = expandable => { return ( <Search expandable={expandable} onChange={e => { setSearchTerm(e.target.value); }} value={searchTerm} onClear={_clearSearch} /> ); }; const _renderEmptyState = () => ( <Table.EmptyState title="You haven't added any items yet" subtitle="Add items to your website so people can buy them" image={ <Box height={120} width={120} backgroundColor="#dfe5eb" 
borderRadius="50%" /> } > <TextButton suffixIcon={<Icons.ExternalLink />}> Learn how to add items </TextButton> </Table.EmptyState> ); const _renderMainToolbar = () => { return ( <Card> <TableToolbar> <TableToolbar.ItemGroup position="start"> <TableToolbar.Item> <TableToolbar.Label> Status <span style={{ width: '150px' }}> <Dropdown options={filterOptions} selectedId={activeFilter} onSelect={selectedOption => { console.log(selectedOption); setActiveFilter(selectedOption.value); }} roundInput /> </span> </TableToolbar.Label> </TableToolbar.Item> <TableToolbar.Item> <TableToolbar.Label> Category <span style={{ width: '150px' }}> <Dropdown options={[{ id: 0, value: 'All categories' }]} selectedId={0} roundInput /> </span> </TableToolbar.Label> </TableToolbar.Item> </TableToolbar.ItemGroup> <TableToolbar.ItemGroup position="end"> <TableToolbar.Item>{_renderSearch(false)}</TableToolbar.Item> </TableToolbar.ItemGroup> </TableToolbar> </Card> ); }; return ( <Page height="600px"> <Page.Header title="Products" actionsBar={ <Box> <Box padding="SP1"> <IconButton skin="inverted"> <Icons.More /> </IconButton>{' '} </Box> <Box padding="SP1"> <Button prefixIcon={<Icons.Add />}>Add Product</Button>{' '} </Box> </Box> } /> <Page.Content> <Table data={filteredData} columns={columns} onSelectionChange={selectedIds => console.log('Table.onSelectionChange(): selectedIds=', selectedIds) } showSelection > <Page.Sticky> <Card> <Table.ToolbarContainer> {selectionContext => selectionContext.selectedCount === 0 ? _renderMainToolbar() : this._renderActionsToolbar({ ...selectionContext }) } </Table.ToolbarContainer> <Table.SubToolbar> <FormField label="Filtered by:" labelPlacement="left"> <TagList tags={[ { id: '1', children: 'In Stock' }, { id: '2', children: 'Out Of Stock' }, ]} maxVisibleTags={2} actionButton={{ label: 'Clear All', onClick: () => {} }} /> </FormField> </Table.SubToolbar> {filteredData.length ? 
<Table.Titlebar /> : _renderEmptyState()} </Card> </Page.Sticky> <Card> <Table.Content titleBarVisible={false} /> </Card> </Table> </Page.Content> </Page> ); const _renderActionsToolbar = ({ selectedCount, getSelectedIds }) => { return ( <TableToolbar> <TableToolbar.ItemGroup position="start"> <TableToolbar.Item> <TableToolbar.SelectedCount>{`${selectedCount} Selected`}</TableToolbar.SelectedCount> </TableToolbar.Item> </TableToolbar.ItemGroup> <TableToolbar.ItemGroup position="end"> <TableToolbar.Item layout="button"> <Button skin="light" priority="primary" prefixIcon={<Icons.Upload />} onClick={() => window.alert(`Exporting selectedIds=${getSelectedIds()}`) } > Export </Button> </TableToolbar.Item> <TableToolbar.Item layout="button"> <Button skin="light" priority="primary" prefixIcon={<Icons.Duplicate />} onClick={() => window.alert(`Duplicating selectedIds=${getSelectedIds()}`) } > Duplicate </Button> </TableToolbar.Item> <TableToolbar.Item layout="button"> <Button skin="light" priority="primary" prefixIcon={<Icons.Edit />} onClick={() => window.alert(`Editing selectedIds=${getSelectedIds()}`) } > Edit </Button> </TableToolbar.Item> <TableToolbar.Divider /> <TableToolbar.Item>{this._renderSearch(true)}</TableToolbar.Item> </TableToolbar.ItemGroup> </TableToolbar> ); }; };<|fim▁end|>
]; const [activeFilter, setActiveFilter] = React.useState(filterOptions[0].id);
<|file_name|>0013_auto__add_field_worker_ping_response_dts.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Worker.ping_response_dts' db.add_column('job_runner_worker', 'ping_response_dts', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Worker.ping_response_dts' db.delete_column('job_runner_worker', 'ping_response_dts') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 
'job_runner.job': { 'Meta': {'ordering': "('title',)", 'unique_together': "(('title', 'job_template'),)", 'object_name': 'Job'}, 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'disable_enqueue_after_fails': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'fail_times': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'job_template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.JobTemplate']"}), 'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['job_runner.Job']"}), 'reschedule_interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'reschedule_interval_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'blank': 'True'}), 'reschedule_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '18', 'blank': 'True'}), 'script_content': ('django.db.models.fields.TextField', [], {}), 'script_content_partial': ('django.db.models.fields.TextField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'job_runner.jobtemplate': { 'Meta': {'ordering': "('title',)", 'object_name': 'JobTemplate'}, 'auth_groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'body': ('django.db.models.fields.TextField', [], {}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 
'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Worker']"})<|fim▁hole|> 'job_runner.killrequest': { 'Meta': {'object_name': 'KillRequest'}, 'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'execute_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'run': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Run']"}), 'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}) }, 'job_runner.project': { 'Meta': {'ordering': "('title',)", 'object_name': 'Project'}, 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'job_runner.rescheduleexclude': { 'Meta': {'object_name': 'RescheduleExclude'}, 'end_time': ('django.db.models.fields.TimeField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}), 'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'start_time': ('django.db.models.fields.TimeField', 
[], {}) }, 'job_runner.run': { 'Meta': {'ordering': "('-return_dts', '-start_dts', '-enqueue_dts', 'schedule_dts')", 'object_name': 'Run'}, 'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_manual': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}), 'pid': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True'}), 'return_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'return_success': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'schedule_children': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), 'start_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}) }, 'job_runner.runlog': { 'Meta': {'ordering': "('-run',)", 'object_name': 'RunLog'}, 'content': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'run': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'run_log'", 'unique': 'True', 'to': "orm['job_runner.Run']"}) }, 'job_runner.worker': { 'Meta': {'ordering': "('title',)", 'object_name': 'Worker'}, 'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'ping_response_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Project']"}), 'secret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) } } complete_apps = ['job_runner']<|fim▁end|>
},
<|file_name|>domexception.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::DOMExceptionBinding; use crate::dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionConstants; use crate::dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods; use crate::dom::bindings::reflector::{reflect_dom_object, Reflector}; use crate::dom::bindings::root::DomRoot; use crate::dom::bindings::str::DOMString; use crate::dom::globalscope::GlobalScope; use dom_struct::dom_struct; #[repr(u16)] #[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf)] pub enum DOMErrorName { IndexSizeError = DOMExceptionConstants::INDEX_SIZE_ERR, HierarchyRequestError = DOMExceptionConstants::HIERARCHY_REQUEST_ERR, WrongDocumentError = DOMExceptionConstants::WRONG_DOCUMENT_ERR, InvalidCharacterError = DOMExceptionConstants::INVALID_CHARACTER_ERR, NoModificationAllowedError = DOMExceptionConstants::NO_MODIFICATION_ALLOWED_ERR, NotFoundError = DOMExceptionConstants::NOT_FOUND_ERR, NotSupportedError = DOMExceptionConstants::NOT_SUPPORTED_ERR, InUseAttributeError = DOMExceptionConstants::INUSE_ATTRIBUTE_ERR, InvalidStateError = DOMExceptionConstants::INVALID_STATE_ERR, SyntaxError = DOMExceptionConstants::SYNTAX_ERR, InvalidModificationError = DOMExceptionConstants::INVALID_MODIFICATION_ERR, NamespaceError = DOMExceptionConstants::NAMESPACE_ERR, InvalidAccessError = DOMExceptionConstants::INVALID_ACCESS_ERR, SecurityError = DOMExceptionConstants::SECURITY_ERR, NetworkError = DOMExceptionConstants::NETWORK_ERR, AbortError = DOMExceptionConstants::ABORT_ERR, TypeMismatchError = DOMExceptionConstants::TYPE_MISMATCH_ERR, QuotaExceededError = DOMExceptionConstants::QUOTA_EXCEEDED_ERR, TimeoutError = DOMExceptionConstants::TIMEOUT_ERR, InvalidNodeTypeError 
= DOMExceptionConstants::INVALID_NODE_TYPE_ERR, DataCloneError = DOMExceptionConstants::DATA_CLONE_ERR, NotReadableError = DOMExceptionConstants::NOT_READABLE_ERR, } #[dom_struct] pub struct DOMException { reflector_: Reflector, code: DOMErrorName, } impl DOMException { fn new_inherited(code: DOMErrorName) -> DOMException { DOMException { reflector_: Reflector::new(), code: code, } } pub fn new(global: &GlobalScope, code: DOMErrorName) -> DomRoot<DOMException> { reflect_dom_object( Box::new(DOMException::new_inherited(code)), global, DOMExceptionBinding::Wrap, ) } } impl DOMExceptionMethods for DOMException { // https://heycam.github.io/webidl/#dfn-DOMException fn Code(&self) -> u16 { self.code as u16 } // https://heycam.github.io/webidl/#idl-DOMException-error-names fn Name(&self) -> DOMString { DOMString::from(format!("{:?}", self.code)) } // https://heycam.github.io/webidl/#error-names fn Message(&self) -> DOMString { let message = match self.code { DOMErrorName::IndexSizeError => "The index is not in the allowed range.", DOMErrorName::HierarchyRequestError => { "The operation would yield an incorrect node tree." }, DOMErrorName::WrongDocumentError => "The object is in the wrong document.", DOMErrorName::InvalidCharacterError => "The string contains invalid characters.", DOMErrorName::NoModificationAllowedError => "The object can not be modified.", DOMErrorName::NotFoundError => "The object can not be found here.", DOMErrorName::NotSupportedError => "The operation is not supported.", DOMErrorName::InUseAttributeError => "The attribute already in use.",<|fim▁hole|> DOMErrorName::SyntaxError => "The string did not match the expected pattern.", DOMErrorName::InvalidModificationError => "The object can not be modified in this way.", DOMErrorName::NamespaceError => "The operation is not allowed by Namespaces in XML.", DOMErrorName::InvalidAccessError => { "The object does not support the operation or argument." 
}, DOMErrorName::SecurityError => "The operation is insecure.", DOMErrorName::NetworkError => "A network error occurred.", DOMErrorName::AbortError => "The operation was aborted.", DOMErrorName::TypeMismatchError => "The given type does not match any expected type.", DOMErrorName::QuotaExceededError => "The quota has been exceeded.", DOMErrorName::TimeoutError => "The operation timed out.", DOMErrorName::InvalidNodeTypeError => { "The supplied node is incorrect or has an incorrect ancestor for this operation." }, DOMErrorName::DataCloneError => "The object can not be cloned.", DOMErrorName::NotReadableError => "The I/O read operation failed.", }; DOMString::from(message) } // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-error.prototype.tostring fn Stringifier(&self) -> DOMString { DOMString::from(format!("{}: {}", self.Name(), self.Message())) } }<|fim▁end|>
DOMErrorName::InvalidStateError => "The object is in an invalid state.",
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate libc; use self::libc::c_int; static mut HASKELL:Option<extern fn(c_int)> = None; /* Register the callback */ #[no_mangle] pub fn rs_register(cb: extern fn(c_int)) { unsafe { HASKELL = Some(cb); println!("callback registered"); } } #[no_mangle] pub fn rs_function(val:c_int) { println!("triggered Rust function with: {}", val); <|fim▁hole|> } unsafe { match HASKELL { Some(ref callback) => { println!("registered callback found"); (*callback)(100); }, _ => { println!("no callback has been registered"); } } } }<|fim▁end|>
if val != 42 { println!("life, the universe, and everything");
<|file_name|>CopyingFileVisitor.java<|end_file_name|><|fim▁begin|>package cz.vhromada.utils.file.gui; import java.io.IOException; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Deque; import java.util.LinkedList; import java.util.List; import cz.vhromada.validators.Validators; import org.springframework.util.StringUtils; /** * A class represents file visitor for copying directories and files. */ public class CopyingFileVisitor extends SimpleFileVisitor<Path> { /** * Delimiter in replacing text */ private static final String REPLACING_TEXT_DELIMITER = ","; /** * Source directory */ private Path source; /** * Target directory */ private Path target; /** * Replacing patterns */ private List<ReplacePattern> replacePatterns; /** * Directories */ private Deque<Path> directories; /** * Creates a new instance of CopyingFileVisitor. 
* * @param source source directory * @param target target directory * @param replacingText replacing text * @param newText new text * @throws IllegalArgumentException if source directory is null * or target directory is null * or replacing text is null * or new text is null * or count of replacing texts is different from count of new texts */ public CopyingFileVisitor(final Path source, final Path target, final String replacingText, final String newText) { Validators.validateArgumentNotNull(source, "Source"); Validators.validateArgumentNotNull(target, "Target"); Validators.validateArgumentNotNull(replacingText, "Replacing text"); Validators.validateArgumentNotNull(newText, "New text"); this.source = source; this.target = target; this.directories = new LinkedList<>(); this.replacePatterns = createReplacePatterns(replacingText, newText); }<|fim▁hole|> final Path directory = getDirectoryName(dir); directories.addLast(directory); if (!Files.exists(directory)) { Files.createDirectory(directory); } return super.preVisitDirectory(dir, attrs); } @Override public FileVisitResult postVisitDirectory(final Path dir, final IOException exc) throws IOException { directories.removeLast(); return super.postVisitDirectory(dir, exc); } @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { FileCopy.copy(file, directories.getLast(), replacePatterns); return super.visitFile(file, attrs); } /** * Returns created replace patters. 
* * @param replacingText replacing text * @param newText new text * @return created replace patters * @throws IllegalArgumentException if count of replacing texts is different from count of new texts */ private static List<ReplacePattern> createReplacePatterns(final String replacingText, final String newText) { final String[] replacingTexts = StringUtils.tokenizeToStringArray(replacingText, REPLACING_TEXT_DELIMITER); final String[] newTexts = StringUtils.tokenizeToStringArray(newText, REPLACING_TEXT_DELIMITER); if (replacingTexts.length != newTexts.length) { throw new IllegalArgumentException("Count of replacing texts is different from count of new texts"); } final List<ReplacePattern> result = new ArrayList<>(); for (int i = 0; i < replacingTexts.length; i++) { final String source = replacingTexts[i]; final String target = newTexts[i]; result.add(new ReplacePattern(source, target)); result.add(new ReplacePattern(source.toLowerCase(), target.toLowerCase())); result.add(new ReplacePattern(StringUtils.capitalize(source.toLowerCase()), StringUtils.capitalize(target.toLowerCase()))); result.add(new ReplacePattern(source.toUpperCase(), target.toUpperCase())); } return result; } /** * Returns directory name. * * @param directory directory * @return directory name */ private Path getDirectoryName(final Path directory) { final String sourcePath = source.toAbsolutePath().toString(); final String targetPath = target.toAbsolutePath().toString(); final String directoryPath = directory.toAbsolutePath().toString(); final String result = directoryPath.replace(sourcePath, targetPath); final String replacedResult = FileCopy.replaceText(result, replacePatterns); return Paths.get(replacedResult); } }<|fim▁end|>
@Override public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException {
<|file_name|>TopologyRunner.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.opensoc.topology.runner; import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import oi.thekraken.grok.api.Grok; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.lang.StringUtils; import org.apache.storm.hdfs.bolt.HdfsBolt; import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat; import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat; import org.apache.storm.hdfs.bolt.format.FileNameFormat; import org.apache.storm.hdfs.bolt.format.RecordFormat; import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy; import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy; import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units; import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy; import org.apache.storm.hdfs.bolt.sync.SyncPolicy; import org.apache.storm.hdfs.common.rotation.MoveFileAction; import org.json.simple.JSONObject; import storm.kafka.BrokerHosts; import 
storm.kafka.KafkaSpout; import storm.kafka.SpoutConfig; import storm.kafka.ZkHosts; import storm.kafka.bolt.KafkaBolt; import backtype.storm.Config; import backtype.storm.LocalCluster; import backtype.storm.StormSubmitter; import backtype.storm.generated.Grouping; import backtype.storm.spout.RawScheme; import backtype.storm.spout.SchemeAsMultiScheme; import backtype.storm.topology.BoltDeclarer; import backtype.storm.topology.TopologyBuilder; import backtype.storm.tuple.Fields; import com.esotericsoftware.kryo.serializers.FieldSerializer; import com.esotericsoftware.kryo.serializers.MapSerializer; import com.opensoc.alerts.TelemetryAlertsBolt; import com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter; import com.opensoc.alerts.interfaces.AlertsAdapter; import com.opensoc.enrichment.adapters.cif.CIFHbaseAdapter; import com.opensoc.enrichment.adapters.geo.GeoMysqlAdapter; import com.opensoc.enrichment.adapters.host.HostFromPropertiesFileAdapter; import com.opensoc.enrichment.adapters.whois.WhoisHBaseAdapter; import com.opensoc.enrichment.adapters.threat.ThreatHbaseAdapter; import com.opensoc.enrichment.common.GenericEnrichmentBolt; import com.opensoc.enrichment.interfaces.EnrichmentAdapter; import com.opensoc.hbase.HBaseBolt; import com.opensoc.hbase.HBaseStreamPartitioner; import com.opensoc.hbase.TupleTableConfig; import com.opensoc.helpers.topology.Cli; import com.opensoc.helpers.topology.SettingsLoader; import com.opensoc.index.interfaces.IndexAdapter; import com.opensoc.indexing.TelemetryIndexingBolt; import com.opensoc.json.serialization.JSONKryoSerializer; public abstract class TopologyRunner { protected Configuration config; protected TopologyBuilder builder; protected Config conf; protected boolean local_mode = true; protected boolean debug = true; protected String config_path = null; protected String default_config_path = "OpenSOC_Configs"; protected boolean success = false; protected Stack<String> messageComponents = new Stack<String>(); protected 
Stack<String> errorComponents = new Stack<String>(); protected Stack<String> alertComponents = new Stack<String>(); protected Stack<String> dataComponents = new Stack<String>(); protected Stack<String> terminalComponents = new Stack<String>(); public void initTopology(String args[], String subdir) throws Exception { Cli command_line = new Cli(args); command_line.parse(); System.out.println("[OpenSOC] Starting topology deployment..."); debug = command_line.isDebug(); System.out.println("[OpenSOC] Debug mode set to: " + debug); local_mode = command_line.isLocal_mode(); System.out.println("[OpenSOC] Local mode set to: " + local_mode); if (command_line.getPath() != null) { config_path = command_line.getPath(); System.out .println("[OpenSOC] Setting config path to external config path: " + config_path); } else { config_path = default_config_path; System.out .println("[OpenSOC] Initializing from default internal config path: " + config_path); } String topology_conf_path = config_path + "/topologies/" + subdir + "/topology.conf"; String environment_identifier_path = config_path + "/topologies/environment_identifier.conf"; String topology_identifier_path = config_path + "/topologies/" + subdir + "/topology_identifier.conf"; System.out.println("[OpenSOC] Looking for environment identifier: " + environment_identifier_path); System.out.println("[OpenSOC] Looking for topology identifier: " + topology_identifier_path); System.out.println("[OpenSOC] Looking for topology config: " + topology_conf_path); config = new PropertiesConfiguration(topology_conf_path); JSONObject environment_identifier = SettingsLoader .loadEnvironmentIdnetifier(environment_identifier_path); JSONObject topology_identifier = SettingsLoader .loadTopologyIdnetifier(topology_identifier_path); String topology_name = SettingsLoader.generateTopologyName( environment_identifier, topology_identifier); System.out.println("[OpenSOC] Initializing Topology: " + topology_name); builder = new TopologyBuilder(); conf = 
new Config(); conf.registerSerialization(JSONObject.class, MapSerializer.class); conf.setDebug(debug); System.out.println("[OpenSOC] Initializing Spout: " + topology_name); if (command_line.isGenerator_spout()) { String component_name = config.getString("spout.test.name", "DefaultTopologySpout"); success = initializeTestingSpout(component_name); messageComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "spout.test"); } if (!command_line.isGenerator_spout()) { String component_name = config.getString("spout.kafka.name", "DefaultTopologyKafkaSpout"); success = initializeKafkaSpout(component_name); messageComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "spout.kafka"); } if (config.getBoolean("bolt.parser.enabled", true)) { String component_name = config.getString("bolt.parser.name", "DefaultTopologyParserBot"); success = initializeParsingBolt(topology_name, component_name); messageComponents.add(component_name); errorComponents.add(component_name); dataComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.parser"); } if (config.getBoolean("bolt.enrichment.geo.enabled", false)) { String component_name = config.getString( "bolt.enrichment.geo.name", "DefaultGeoEnrichmentBolt"); success = initializeGeoEnrichment(topology_name, component_name); messageComponents.add(component_name); errorComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) 
config, "bolt.enrichment.geo"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "mysql"); } if (config.getBoolean("bolt.enrichment.host.enabled", false)) { String component_name = config.getString( "bolt.enrichment.host.name", "DefaultHostEnrichmentBolt"); success = initializeHostsEnrichment(topology_name, component_name, "OpenSOC_Configs/etc/whitelists/known_hosts.conf"); messageComponents.add(component_name); errorComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.enrichment.host"); } if (config.getBoolean("bolt.enrichment.whois.enabled", false)) { String component_name = config.getString( "bolt.enrichment.whois.name", "DefaultWhoisEnrichmentBolt"); success = initializeWhoisEnrichment(topology_name, component_name); messageComponents.add(component_name); errorComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.enrichment.whois"); } if (config.getBoolean("bolt.enrichment.cif.enabled", false)) { String component_name = config.getString( "bolt.enrichment.cif.name", "DefaultCIFEnrichmentBolt"); success = initializeCIFEnrichment(topology_name, component_name); messageComponents.add(component_name); errorComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.enrichment.cif"); } if (config.getBoolean("bolt.enrichment.threat.enabled", false)) { String component_name = config.getString( "bolt.enrichment.threat.name", "DefaultThreatEnrichmentBolt"); success = initializeThreatEnrichment(topology_name, component_name); messageComponents.add(component_name); 
errorComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.enrichment.threat"); } if (config.getBoolean("bolt.alerts.enabled", false)) { String component_name = config.getString("bolt.alerts.name", "DefaultAlertsBolt"); success = initializeAlerts(topology_name, component_name, config_path + "/topologies/" + subdir + "/alerts.xml", environment_identifier, topology_identifier); messageComponents.add(component_name); errorComponents.add(component_name); alertComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.alerts"); } if (config.getBoolean("bolt.alerts.indexing.enabled") && config.getBoolean("bolt.alerts.enabled")) { String component_name = config.getString( "bolt.alerts.indexing.name", "DefaultAlertsBolt"); success = initializeAlertIndexing(component_name); terminalComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.alerts.indexing"); } if (config.getBoolean("bolt.kafka.enabled", false)) { String component_name = config.getString("bolt.kafka.name", "DefaultKafkaBolt"); success = initializeKafkaBolt(component_name); terminalComponents.add(component_name); System.out.println("[OpenSOC] Component " + component_name + " initialized"); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.kafka"); } if (config.getBoolean("bolt.indexing.enabled", true)) { String component_name = config.getString("bolt.indexing.name", "DefaultIndexingBolt"); success = 
initializeIndexingBolt(component_name); errorComponents.add(component_name); terminalComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.indexing"); } if (config.getBoolean("bolt.hdfs.enabled", false)) { String component_name = config.getString("bolt.hdfs.name", "DefaultHDFSBolt"); success = initializeHDFSBolt(topology_name, component_name); terminalComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.hdfs"); } if (config.getBoolean("bolt.error.indexing.enabled")) { String component_name = config.getString( "bolt.error.indexing.name", "DefaultErrorIndexingBolt"); success = initializeErrorIndexBolt(component_name); terminalComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.error"); } if (config.containsKey("bolt.hbase.enabled") && config.getBoolean("bolt.hbase.enabled")) { String component_name = config.getString("bolt.hbase.name", "DefaultHbaseBolt"); String shuffleType = config.getString("bolt.hbase.shuffle.type", "direct"); success = initializeHbaseBolt(component_name, shuffleType); terminalComponents.add(component_name); System.out.println("[OpenSOC] ------Component " + component_name + " initialized with the following settings:"); SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.hbase"); } System.out.println("[OpenSOC] Topology Summary: "); System.out.println("[OpenSOC] Message Stream: " + printComponentStream(messageComponents)); System.out.println("[OpenSOC] Alerts Stream: " + printComponentStream(alertComponents)); 
System.out.println("[OpenSOC] Error Stream: " + printComponentStream(errorComponents)); System.out.println("[OpenSOC] Data Stream: " + printComponentStream(dataComponents)); System.out.println("[OpenSOC] Terminal Components: " + printComponentStream(terminalComponents)); if (local_mode) { conf.setNumWorkers(config.getInt("num.workers")); conf.setMaxTaskParallelism(1); LocalCluster cluster = new LocalCluster(); cluster.submitTopology(topology_name, conf, builder.createTopology()); } else { conf.setNumWorkers(config.getInt("num.workers")); conf.setNumAckers(config.getInt("num.ackers")); StormSubmitter.submitTopology(topology_name, conf, builder.createTopology()); } } private String printComponentStream(List<String> messageComponents) { StringBuilder print_string = new StringBuilder(); for (String component : messageComponents) { print_string.append(component + " -> "); } print_string.append("[TERMINAL COMPONENT]"); return print_string.toString(); } public boolean initializeHbaseBolt(String name, String shuffleType) { try { String messageUpstreamComponent = dataComponents.get(dataComponents .size()-1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); String tableName = config.getString("bolt.hbase.table.name") .toString(); TupleTableConfig hbaseBoltConfig = new TupleTableConfig(tableName, config.getString("bolt.hbase.table.key.tuple.field.name") .toString(), config.getString( "bolt.hbase.table.timestamp.tuple.field.name") .toString()); String allColumnFamiliesColumnQualifiers = config.getString( "bolt.hbase.table.fields").toString(); // This is expected in the form // "<cf1>:<cq11>,<cq12>,<cq13>|<cf2>:<cq21>,<cq22>|......." 
String[] tokenizedColumnFamiliesWithColumnQualifiers = StringUtils .split(allColumnFamiliesColumnQualifiers, "\\|"); for (String tokenizedColumnFamilyWithColumnQualifiers : tokenizedColumnFamiliesWithColumnQualifiers) { String[] cfCqTokens = StringUtils.split( tokenizedColumnFamilyWithColumnQualifiers, ":"); String columnFamily = cfCqTokens[0]; String[] columnQualifiers = StringUtils.split(cfCqTokens[1], ","); for (String columnQualifier : columnQualifiers) { hbaseBoltConfig.addColumn(columnFamily, columnQualifier); } // hbaseBoltConfig.setDurability(Durability.valueOf(conf.get( // "storm.topology.pcap.bolt.hbase.durability").toString())); hbaseBoltConfig.setBatch(Boolean.valueOf(config.getString( "bolt.hbase.enable.batching").toString())); HBaseBolt hbase_bolt = new HBaseBolt(hbaseBoltConfig, config.getString("kafka.zk.list"), config.getString("kafka.zk.port")); hbase_bolt.setAutoAck(true); BoltDeclarer declarer = builder.setBolt(name, hbase_bolt, config.getInt("bolt.hbase.parallelism.hint")) .setNumTasks(config.getInt("bolt.hbase.num.tasks")); if (Grouping._Fields.CUSTOM_OBJECT.toString().equalsIgnoreCase( shuffleType)) { declarer.customGrouping( messageUpstreamComponent, "pcap_data_stream", new HBaseStreamPartitioner( hbaseBoltConfig.getTableName(), 0, Integer.parseInt(conf .get("bolt.hbase.partitioner.region.info.refresh.interval.mins") .toString()))); } else if (Grouping._Fields.DIRECT.toString().equalsIgnoreCase( shuffleType)) { declarer.fieldsGrouping(messageUpstreamComponent, "pcap_data_stream", new Fields("pcap_id")); } } } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeErrorIndexBolt(String component_name) { try { Class loaded_class = Class.forName(config.getString("bolt.error.indexing.adapter")); IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance(); String dateFormat = "yyyy.MM"; if (config.containsKey("bolt.alerts.indexing.timestamp")) { dateFormat = 
config.getString("bolt.alerts.indexing.timestamp"); } TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt() .withIndexIP(config.getString("es.ip")) .withIndexPort(config.getInt("es.port")) .withClusterName(config.getString("es.clustername")) .withIndexName( config.getString("bolt.error.indexing.indexname")) .withDocumentName( config.getString("bolt.error.indexing.documentname")) .withIndexTimestamp(dateFormat) .withBulk(config.getInt("bolt.error.indexing.bulk")) .withIndexAdapter(adapter) .withMetricConfiguration(config); BoltDeclarer declarer = builder .setBolt( component_name, indexing_bolt, config.getInt("bolt.error.indexing.parallelism.hint")) .setNumTasks(config.getInt("bolt.error.indexing.num.tasks")); for (String component : errorComponents) declarer.shuffleGrouping(component, "error"); return true; } catch (Exception e) { e.printStackTrace(); return false; } } private boolean initializeKafkaSpout(String name) { try { BrokerHosts zk = new ZkHosts(config.getString("kafka.zk")); String input_topic = config.getString("spout.kafka.topic"); SpoutConfig kafkaConfig = new SpoutConfig(zk, input_topic, "", input_topic); kafkaConfig.scheme = new SchemeAsMultiScheme(new RawScheme()); kafkaConfig.forceFromStart = Boolean.valueOf("True"); kafkaConfig.startOffsetTime = -1; builder.setSpout(name, new KafkaSpout(kafkaConfig), config.getInt("spout.kafka.parallelism.hint")).setNumTasks( config.getInt("spout.kafka.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } abstract boolean initializeParsingBolt(String topology_name, String name); abstract boolean initializeTestingSpout(String name); private boolean initializeGeoEnrichment(String topology_name, String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); String[] keys_from_settings = 
config.getStringArray("bolt.enrichment.geo.fields"); List<String> geo_keys = new ArrayList<String>(Arrays.asList(keys_from_settings)); GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter( config.getString("mysql.ip"), config.getInt("mysql.port"), config.getString("mysql.username"), config.getString("mysql.password"), config.getString("bolt.enrichment.geo.adapter.table")); GenericEnrichmentBolt geo_enrichment = new GenericEnrichmentBolt() .withEnrichmentTag( config.getString("bolt.enrichment.geo.enrichment_tag")) .withOutputFieldName(topology_name) .withAdapter(geo_adapter) .withMaxTimeRetain( config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES")) .withMaxCacheSize( config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM")) .withKeys(geo_keys).withMetricConfiguration(config); builder.setBolt(name, geo_enrichment, config.getInt("bolt.enrichment.geo.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks(config.getInt("bolt.enrichment.geo.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeHostsEnrichment(String topology_name, String name, String hosts_path) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); List<String> hosts_keys = new ArrayList<String>(); hosts_keys.add(config.getString("source.ip")); hosts_keys.add(config.getString("dest.ip")); Map<String, JSONObject> known_hosts = SettingsLoader .loadKnownHosts(hosts_path); HostFromPropertiesFileAdapter host_adapter = new HostFromPropertiesFileAdapter( known_hosts); GenericEnrichmentBolt host_enrichment = new GenericEnrichmentBolt() .withEnrichmentTag( config.getString("bolt.enrichment.host.enrichment_tag")) .withAdapter(host_adapter) .withMaxTimeRetain( config.getInt("bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES")) .withMaxCacheSize( 
config.getInt("bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM")) .withOutputFieldName(topology_name).withKeys(hosts_keys) .withMetricConfiguration(config); builder.setBolt(name, host_enrichment, config.getInt("bolt.enrichment.host.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks( config.getInt("bolt.enrichment.host.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } @SuppressWarnings("rawtypes") private boolean initializeAlerts(String topology_name, String name, String alerts_path, JSONObject environment_identifier, JSONObject topology_identifier) { try { Class loaded_class = Class.forName(config.getString("bolt.alerts.adapter")); Constructor constructor = loaded_class.getConstructor(new Class[] { Map.class}); Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, config.getString("bolt.alerts.adapter") + "."); System.out.println("Adapter Settings: "); SettingsLoader.printOptionalSettings(settings); AlertsAdapter alerts_adapter = (AlertsAdapter) constructor.newInstance(settings); String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); JSONObject alerts_identifier = SettingsLoader .generateAlertsIdentifier(environment_identifier, topology_identifier); TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt() .withIdentifier(alerts_identifier).withMaxCacheSize(1000) .withMaxTimeRetain(3600).withAlertsAdapter(alerts_adapter) .withOutputFieldName("message") .withMetricConfiguration(config); builder.setBolt(name, alerts_bolt, config.getInt("bolt.alerts.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks(config.getInt("bolt.alerts.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean 
initializeAlertIndexing(String name) { try{ String messageUpstreamComponent = alertComponents.get(alertComponents .size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); Class loaded_class = Class.forName(config.getString("bolt.alerts.indexing.adapter")); IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance(); String dateFormat = "yyyy.MM.dd"; if (config.containsKey("bolt.alerts.indexing.timestamp")) { dateFormat = config.getString("bolt.alerts.indexing.timestamp"); } TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt() .withIndexIP(config.getString("es.ip")) .withIndexPort(config.getInt("es.port")) .withClusterName(config.getString("es.clustername")) .withIndexName( config.getString("bolt.alerts.indexing.indexname")) .withDocumentName( config.getString("bolt.alerts.indexing.documentname")) .withIndexTimestamp(dateFormat) .withBulk(config.getInt("bolt.alerts.indexing.bulk")) .withIndexAdapter(adapter) .withMetricConfiguration(config); String alerts_name = config.getString("bolt.alerts.indexing.name"); builder.setBolt(alerts_name, indexing_bolt, config.getInt("bolt.indexing.parallelism.hint")) .shuffleGrouping(messageUpstreamComponent, "alert") .setNumTasks(config.getInt("bolt.indexing.num.tasks")); } catch(Exception e) { e.printStackTrace(); return false; } return true; } private boolean initializeKafkaBolt(String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); Map<String, String> kafka_broker_properties = new HashMap<String, String>(); kafka_broker_properties.put("zk.connect", config.getString("kafka.zk")); kafka_broker_properties.put("metadata.broker.list", config.getString("kafka.br")); kafka_broker_properties.put("serializer.class", "com.opensoc.json.serialization.JSONKafkaSerializer"); 
kafka_broker_properties.put("key.serializer.class", "kafka.serializer.StringEncoder"); String output_topic = config.getString("bolt.kafka.topic"); conf.put("kafka.broker.properties", kafka_broker_properties); conf.put("topic", output_topic); builder.setBolt(name, new KafkaBolt<String, JSONObject>(), config.getInt("bolt.kafka.parallelism.hint")) .shuffleGrouping(messageUpstreamComponent, "message") .setNumTasks(config.getInt("bolt.kafka.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeWhoisEnrichment(String topology_name, String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); String[] keys_from_settings = config.getString("bolt.enrichment.whois.fields").split(","); List<String> whois_keys = new ArrayList<String>(Arrays.asList(keys_from_settings)); EnrichmentAdapter whois_adapter = new WhoisHBaseAdapter( config.getString("bolt.enrichment.whois.hbase.table.name"), config.getString("kafka.zk.list"), config.getString("kafka.zk.port")); GenericEnrichmentBolt whois_enrichment = new GenericEnrichmentBolt() .withEnrichmentTag( config.getString("bolt.enrichment.whois.enrichment_tag")) .withOutputFieldName(topology_name) .withAdapter(whois_adapter) .withMaxTimeRetain( config.getInt("bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES")) .withMaxCacheSize( config.getInt("bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM")) .withKeys(whois_keys).withMetricConfiguration(config); builder.setBolt(name, whois_enrichment, config.getInt("bolt.enrichment.whois.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks( config.getInt("bolt.enrichment.whois.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeIndexingBolt(String name) { try { String 
messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); Class loaded_class = Class.forName(config.getString("bolt.indexing.adapter")); IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance(); Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, "optional.settings.bolt.index.search."); if(settings != null && settings.size() > 0) { adapter.setOptionalSettings(settings); System.out.println("[OpenSOC] Index Bolt picket up optional settings:"); SettingsLoader.printOptionalSettings(settings); } // dateFormat defaults to hourly if not specified String dateFormat = "yyyy.MM.dd.hh"; if (config.containsKey("bolt.indexing.timestamp")) { dateFormat = config.getString("bolt.indexing.timestamp"); } TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt() .withIndexIP(config.getString("es.ip")) .withIndexPort(config.getInt("es.port")) .withClusterName(config.getString("es.clustername")) .withIndexName(config.getString("bolt.indexing.indexname")) .withIndexTimestamp(dateFormat) .withDocumentName( config.getString("bolt.indexing.documentname")) .withBulk(config.getInt("bolt.indexing.bulk")) .withIndexAdapter(adapter) .withMetricConfiguration(config); builder.setBolt(name, indexing_bolt, config.getInt("bolt.indexing.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks(config.getInt("bolt.indexing.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeThreatEnrichment(String topology_name, String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); String[] fields = config.getStringArray("bolt.enrichment.threat.fields"); List<String> 
threat_keys = new ArrayList<String>(Arrays.asList(fields)); GenericEnrichmentBolt threat_enrichment = new GenericEnrichmentBolt() .withEnrichmentTag( config.getString("bolt.enrichment.threat.enrichment_tag")) .withAdapter( new ThreatHbaseAdapter(config .getString("kafka.zk.list"), config .getString("kafka.zk.port"), config .getString("bolt.enrichment.threat.tablename"))) .withOutputFieldName(topology_name) .withEnrichmentTag(config.getString("bolt.enrichment.threat.enrichment_tag")) .withKeys(threat_keys) .withMaxTimeRetain( config.getInt("bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES")) .withMaxCacheSize( config.getInt("bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM")) .withMetricConfiguration(config); builder.setBolt(name, threat_enrichment,<|fim▁hole|> config.getInt("bolt.enrichment.threat.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks(config.getInt("bolt.enrichment.threat.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeCIFEnrichment(String topology_name, String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); List<String> cif_keys = new ArrayList<String>(); String[] ipFields = config.getStringArray("bolt.enrichment.cif.fields.ip"); cif_keys.addAll(Arrays.asList(ipFields)); String[] hostFields = config.getStringArray("bolt.enrichment.cif.fields.host"); cif_keys.addAll(Arrays.asList(hostFields)); String[] emailFields = config.getStringArray("bolt.enrichment.cif.fields.email"); cif_keys.addAll(Arrays.asList(emailFields)); GenericEnrichmentBolt cif_enrichment = new GenericEnrichmentBolt() .withEnrichmentTag( config.getString("bolt.enrichment.cif.enrichment_tag")) .withAdapter( new CIFHbaseAdapter(config .getString("kafka.zk.list"), config .getString("kafka.zk.port"), config 
.getString("bolt.enrichment.cif.tablename"))) .withOutputFieldName(topology_name) .withKeys(cif_keys) .withMaxTimeRetain( config.getInt("bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES")) .withMaxCacheSize( config.getInt("bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM")) .withMetricConfiguration(config); builder.setBolt(name, cif_enrichment, config.getInt("bolt.enrichment.cif.parallelism.hint")) .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key")) .setNumTasks(config.getInt("bolt.enrichment.cif.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } private boolean initializeHDFSBolt(String topology_name, String name) { try { String messageUpstreamComponent = messageComponents .get(messageComponents.size() - 1); System.out.println("[OpenSOC] ------" + name + " is initializing from " + messageUpstreamComponent); RecordFormat format = new DelimitedRecordFormat() .withFieldDelimiter( config.getString("bolt.hdfs.field.delimiter") .toString()).withFields( new Fields("message")); // sync the file system after every x number of tuples SyncPolicy syncPolicy = new CountSyncPolicy(Integer.valueOf(config .getString("bolt.hdfs.batch.size").toString())); // rotate files when they reach certain size FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy( Float.valueOf(config.getString( "bolt.hdfs.file.rotation.size.in.mb").toString()), Units.MB); FileNameFormat fileNameFormat = new DefaultFileNameFormat() .withPath(config.getString("bolt.hdfs.wip.file.path") .toString()); // Post rotate action MoveFileAction moveFileAction = (new MoveFileAction()) .toDestination(config.getString( "bolt.hdfs.finished.file.path").toString()); HdfsBolt hdfsBolt = new HdfsBolt() .withFsUrl( config.getString("bolt.hdfs.file.system.url") .toString()) .withFileNameFormat(fileNameFormat) .withRecordFormat(format) .withRotationPolicy(rotationPolicy) .withSyncPolicy(syncPolicy) .addRotationAction(moveFileAction); if 
(config.getString("bolt.hdfs.compression.codec.class") != null) { hdfsBolt.withCompressionCodec(config.getString( "bolt.hdfs.compression.codec.class").toString()); } builder.setBolt(name, hdfsBolt, config.getInt("bolt.hdfs.parallelism.hint")) .shuffleGrouping(messageUpstreamComponent, "message") .setNumTasks(config.getInt("bolt.hdfs.num.tasks")); } catch (Exception e) { e.printStackTrace(); System.exit(0); } return true; } }<|fim▁end|>
<|file_name|>imp.py<|end_file_name|><|fim▁begin|>"""This module provides the components needed to build your own __import__ function. Undocumented functions are obsolete. In most cases it is preferred you consider using the importlib module's functionality over this module. """ # (Probably) need to stay in _imp from _imp import (lock_held, acquire_lock, release_lock, get_frozen_object, is_frozen_package, init_frozen, is_builtin, is_frozen, _fix_co_filename) try: from _imp import create_dynamic except ImportError: # Platform doesn't support dynamic loading. create_dynamic = None from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name from importlib._bootstrap_external import SourcelessFileLoader from importlib import machinery from importlib import util import importlib import os import sys import tokenize import types import warnings warnings.warn("the imp module is deprecated in favour of importlib; " "see the module's documentation for alternative uses", PendingDeprecationWarning, stacklevel=2) # DEPRECATED SEARCH_ERROR = 0 PY_SOURCE = 1 PY_COMPILED = 2 C_EXTENSION = 3 PY_RESOURCE = 4 PKG_DIRECTORY = 5 C_BUILTIN = 6 PY_FROZEN = 7 PY_CODERESOURCE = 8 IMP_HOOK = 9 def new_module(name): """**DEPRECATED** Create a new module. The module is not entered into sys.modules. """ return types.ModuleType(name) def get_magic(): """**DEPRECATED** Return the magic number for .pyc files. """ return util.MAGIC_NUMBER def get_tag(): """Return the magic tag for .pyc files.""" return sys.implementation.cache_tag def cache_from_source(path, debug_override=None): """**DEPRECATED** Given the path to a .py file, return the path to its .pyc file. The .py file does not need to exist; this simply returns the path to the .pyc file calculated as if the .py file were imported. If debug_override is not None, then it must be a boolean and is used in place of sys.flags.optimize. If sys.implementation.cache_tag is None then NotImplementedError is raised. 
""" with warnings.catch_warnings(): warnings.simplefilter('ignore') return util.cache_from_source(path, debug_override) def source_from_cache(path): """**DEPRECATED** Given the path to a .pyc. file, return the path to its .py file. The .pyc file does not need to exist; this simply returns the path to the .py file calculated to correspond to the .pyc file. If path does not conform to PEP 3147 format, ValueError will be raised. If sys.implementation.cache_tag is None then NotImplementedError is raised. """ return util.source_from_cache(path) def get_suffixes(): """**DEPRECATED**""" extensions = [(s, 'rb', C_EXTENSION) for s in machinery.EXTENSION_SUFFIXES] source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] return extensions + source + bytecode class NullImporter: """**DEPRECATED** Null import object. """ def __init__(self, path): if path == '': raise ImportError('empty pathname', path='') elif os.path.isdir(path): raise ImportError('existing directory', path=path) def find_module(self, fullname): """Always returns None.""" return None class _HackedGetData: """Compatibility support for 'file' arguments of various load_*() functions.""" def __init__(self, fullname, path, file=None): super().__init__(fullname, path) self.file = file def get_data(self, path): """Gross hack to contort loader to deal w/ load_*()'s bad API.""" if self.file and path == self.path: if not self.file.closed: file = self.file else: self.file = file = open(self.path, 'r') with file: # Technically should be returning bytes, but # SourceLoader.get_code() just passed what is returned to # compile() which can handle str. And converting to bytes would # require figuring out the encoding to decode to and # tokenize.detect_encoding() only accepts bytes. 
return file.read() else: return super().get_data(path) class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader): """Compatibility support for implementing load_source().""" def load_source(name, pathname, file=None): loader = _LoadSourceCompatibility(name, pathname, file) spec = util.spec_from_file_location(name, pathname, loader=loader) if name in sys.modules: module = _exec(spec, sys.modules[name]) else: module = _load(spec) # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. module.__loader__ = machinery.SourceFileLoader(name, pathname) module.__spec__.loader = module.__loader__ return module class _LoadCompiledCompatibility(_HackedGetData, SourcelessFileLoader): """Compatibility support for implementing load_compiled().""" def load_compiled(name, pathname, file=None): """**DEPRECATED**""" loader = _LoadCompiledCompatibility(name, pathname, file) spec = util.spec_from_file_location(name, pathname, loader=loader) if name in sys.modules: module = _exec(spec, sys.modules[name]) else: module = _load(spec) # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. module.__loader__ = SourcelessFileLoader(name, pathname) module.__spec__.loader = module.__loader__ return module def load_package(name, path): """**DEPRECATED**""" if os.path.isdir(path): extensions = (machinery.SOURCE_SUFFIXES[:] + machinery.BYTECODE_SUFFIXES[:]) for extension in extensions: path = os.path.join(path, '__init__'+extension) if os.path.exists(path): break else: raise ValueError('{!r} is not a package'.format(path)) spec = util.spec_from_file_location(name, path, submodule_search_locations=[]) if name in sys.modules: return _exec(spec, sys.modules[name]) else: return _load(spec) def load_module(name, file, filename, details): """**DEPRECATED** Load a module, given information returned by find_module(). 
The module name must include the full package name, if any. """ suffix, mode, type_ = details if mode and (not mode.startswith(('r', 'U')) or '+' in mode): raise ValueError('invalid file open mode {!r}'.format(mode)) elif file is None and type_ in {PY_SOURCE, PY_COMPILED}: msg = 'file object required for import (type code {})'.format(type_) raise ValueError(msg) elif type_ == PY_SOURCE: return load_source(name, filename, file) elif type_ == PY_COMPILED: return load_compiled(name, filename, file) elif type_ == C_EXTENSION and load_dynamic is not None: if file is None: with open(filename, 'rb') as opened_file: return load_dynamic(name, filename, opened_file) else: return load_dynamic(name, filename, file) elif type_ == PKG_DIRECTORY: return load_package(name, filename) elif type_ == C_BUILTIN: return init_builtin(name) elif type_ == PY_FROZEN: return init_frozen(name) else: msg = "Don't know how to import {} (type code {})".format(name, type_) raise ImportError(msg, name=name) def find_module(name, path=None): """**DEPRECATED** Search for a module. If path is omitted or None, search for a built-in, frozen or special module and continue search in sys.path. The module name cannot contain '.'; to search for a submodule of a package, pass the submodule name and the package's __path__. 
""" if not isinstance(name, str): raise TypeError("'name' must be a str, not {}".format(type(name))) elif not isinstance(path, (type(None), list)): # Backwards-compatibility raise RuntimeError("'list' must be None or a list, " "not {}".format(type(name))) if path is None: if is_builtin(name): return None, None, ('', '', C_BUILTIN) elif is_frozen(name): return None, None, ('', '', PY_FROZEN) else: path = sys.path for entry in path: package_directory = os.path.join(entry, name) for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: package_file_name = '__init__' + suffix file_path = os.path.join(package_directory, package_file_name) if os.path.isfile(file_path): return None, package_directory, ('', '', PKG_DIRECTORY) for suffix, mode, type_ in get_suffixes(): file_name = name + suffix file_path = os.path.join(entry, file_name) if os.path.isfile(file_path): break else: continue break # Break out of outer loop when breaking out of inner loop. else: raise ImportError(_ERR_MSG.format(name), name=name) encoding = None if 'b' not in mode: with open(file_path, 'rb') as file: encoding = tokenize.detect_encoding(file.readline)[0] file = open(file_path, mode, encoding=encoding) return file, file_path, (suffix, mode, type_) def reload(module): """**DEPRECATED** Reload the module and return it. The module must have been successfully imported before. """ return importlib.reload(module) def init_builtin(name): """**DEPRECATED** Load and return a built-in module by name, or None is such module doesn't exist """ try: return _builtin_from_name(name)<|fim▁hole|> if create_dynamic: def load_dynamic(name, path, file=None): """**DEPRECATED** Load an extension module. """ import importlib.machinery loader = importlib.machinery.ExtensionFileLoader(name, path) # Issue #24748: Skip the sys.modules check in _load_module_shim; # always load new extension spec = importlib.machinery.ModuleSpec( name=name, loader=loader, origin=path) return _load(spec) else: load_dynamic = None<|fim▁end|>
except ImportError: return None
<|file_name|>bipdersig-p2p.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Presidentielcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.mininode import CTransaction, NetworkThread from test_framework.blocktools import create_coinbase, create_block from test_framework.comptool import TestInstance, TestManager from test_framework.script import CScript from io import BytesIO import time # A canonical signature consists of: # <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype> def unDERify(tx): ''' Make the signature in vin 0 of a tx non-DER-compliant, by adding padding after the S-value. ''' scriptSig = CScript(tx.vin[0].scriptSig) newscript = [] for i in scriptSig: if (len(newscript) == 0): newscript.append(i[0:-1] + b'\0' + i[-1:]) else: newscript.append(i) tx.vin[0].scriptSig = CScript(newscript) ''' This test is meant to exercise BIP66 (DER SIG). Connect to a single node. Mine 2 (version 2) blocks (save the coinbases for later). Generate 98 more version 2 blocks, verify the node accepts. Mine 749 version 3 blocks, verify the node accepts. Check that the new DERSIG rules are not enforced on the 750th version 3 block. Check that the new DERSIG rules are enforced on the 751st version 3 block. Mine 199 new version blocks. Mine 1 old-version block. Mine 1 new version block. Mine 1 old version block, see that the node rejects. 
''' class BIP66Test(ComparisonTestFramework): def __init__(self): super().__init__() self.num_nodes = 1 def setup_network(self): # Must set the blockversion for this test self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']], binary=[self.options.testbinary]) def run_test(self): test = TestManager(self, self.options.tmpdir) test.add_all_connections(self.nodes) NetworkThread().start() # Start up network handling in another thread test.run() def create_transaction(self, node, coinbase, to_address, amount): from_txid = node.getblock(coinbase)['tx'][0] inputs = [{ "txid" : from_txid, "vout" : 0}] outputs = { to_address : amount } rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransaction(rawtx) tx = CTransaction() f = BytesIO(hex_str_to_bytes(signresult['hex'])) tx.deserialize(f) return tx def get_tests(self): self.coinbase_blocks = self.nodes[0].generate(2) height = 3 # height of the next block to build self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) self.nodeaddress = self.nodes[0].getnewaddress() self.last_block_time = int(time.time()) ''' 98 more version 2 blocks ''' test_blocks = [] for i in range(98): block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 2 block.rehash() block.solve() test_blocks.append([block, True]) self.last_block_time += 1 self.tip = block.sha256 height += 1 yield TestInstance(test_blocks, sync_every_block=False) ''' Mine 749 version 3 blocks ''' test_blocks = [] for i in range(749): block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 3 block.rehash() block.solve() test_blocks.append([block, True]) self.last_block_time += 1 self.tip = block.sha256<|fim▁hole|> height += 1 yield TestInstance(test_blocks, sync_every_block=False) ''' Check that the new DERSIG rules are not enforced in the 750th version 3 block. 
''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_blocks[0], self.nodeaddress, 1.0) unDERify(spendtx) spendtx.rehash() block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 3 block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 height += 1 yield TestInstance([[block, True]]) ''' Check that the new DERSIG rules are enforced in the 751st version 3 block. ''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_blocks[1], self.nodeaddress, 1.0) unDERify(spendtx) spendtx.rehash() block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 3 block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() self.last_block_time += 1 yield TestInstance([[block, False]]) ''' Mine 199 new version blocks on last valid tip ''' test_blocks = [] for i in range(199): block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 3 block.rehash() block.solve() test_blocks.append([block, True]) self.last_block_time += 1 self.tip = block.sha256 height += 1 yield TestInstance(test_blocks, sync_every_block=False) ''' Mine 1 old version block ''' block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 2 block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 height += 1 yield TestInstance([[block, True]]) ''' Mine 1 new version block ''' block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 3 block.rehash() block.solve() self.last_block_time += 1 self.tip = block.sha256 height += 1 yield TestInstance([[block, True]]) ''' Mine 1 old version block, should be invalid ''' block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1) block.nVersion = 2 block.rehash() block.solve() 
self.last_block_time += 1 yield TestInstance([[block, False]]) if __name__ == '__main__': BIP66Test().main()<|fim▁end|>
<|file_name|>identity.py<|end_file_name|><|fim▁begin|>from pyramid.view import view_config @view_config(name='sso', renderer='templates/login.pt') def sign_on(context, request): """ Perform the SAML2 SSO dance. - If the request already has valid credentials, process the 'SAMLRequest' query string value and return a POSTing redirect. - If processing the POSTed login form, authenticate. - If no authenticated user is known, display the login form.<|fim▁hole|> return {'hidden': request.GET.items()}<|fim▁end|>
"""
<|file_name|>pe030-digit-fifth-powers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding=utf-8 """30. Digit fifth powers https://projecteuler.net/problem=30 Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits: > 1634 = 14 \+ 64 \+ 34 \+ 44 > 8208 = 84 \+ 24 \+ 04 \+ 84 > 9474 = 94 \+ 44 \+ 74 \+ 44 As 1 = 14 is not a sum it is not included. The sum of these numbers is 1634 + 8208 + 9474 = 19316. Find the sum of all the numbers that can be written as the sum of fifth powers<|fim▁hole|><|fim▁end|>
of their digits. """
<|file_name|>RedLock_test.py<|end_file_name|><|fim▁begin|>import pytest from freezegun import freeze_time import demistomock as demisto integration_params = { 'url': 'http://test.com', 'credentials': {'identifier': 'test', 'password': 'pass'}, 'fetch_time': '3 days', 'proxy': 'false', 'unsecure': 'false', } @pytest.fixture(autouse=True) def set_mocks(mocker): mocker.patch.object(demisto, 'params', return_value=integration_params) @freeze_time("2021-07-10T16:34:14.758295 UTC+1") def test_fetch_incidents_first_time_fetch(mocker): """ Given - fetch incidents command - command args When - mock the integration parameters Then - Validate that the last_time is as the now time(not changed, not of the incident) """ mocker.patch.object(demisto, 'command', return_value='fetch-incidents') from RedLock import fetch_incidents mocker.patch('RedLock.req', return_value=[]) _, next_run = fetch_incidents() assert next_run == 1625938454758 def test_redlock_list_scans(mocker): """ Given - The response from the API call of redlock-list-scans command. 
When - calling redlock-list-scans Then - Validate that the readable output and the context entry of the command is as expected """ from RedLock import redlock_list_scans list_scans_response = { 'data': [{ 'id': '111111111', 'attributes': { 'name': ['test name'], 'type': ['test type'], 'user': ['test user'], 'scanTime': '2021-10-18T14:38:53.654174' } }] } expected_readable_output = '### Scans List:\n|ID|Name|Scan Time|Type|User|\n|---|---|---|---|---|\n| 111111111 |' \ ' test name | 2021-10-18T14:38:53.654174 | test type | test user |\n' expected_context_entry = {'Redlock.Scans(val.id == obj.id)': [{'id': '111111111', 'name': ['test name'], 'type': ['test type'], 'user': ['test user'], 'scanTime': '2021-10-18T14:38:53.654174'}]} mocker.patch('RedLock.req', return_value=list_scans_response) mocker.patch.object(demisto, 'results') redlock_list_scans() assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry def test_redlock_get_scan_status(mocker): """ Given - The response from the API call of redlock-get-scan-status command. 
When - calling redlock-get-scan-status Then - Validate that the readable output and the context entry of the command is as expected """ from RedLock import redlock_get_scan_status get_status_response = { 'data': { 'id': '111111111', 'attributes': { 'status': 'test' } } } expected_readable_output = '### Scan Status:\n|ID|Status|\n|---|---|\n| 111111111 | test |\n' expected_context_entry = {'Redlock.Scans(val.id == obj.id)': {'id': '111111111', 'status': 'test'}} mocker.patch('RedLock.req', return_value=get_status_response) mocker.patch.object(demisto, 'results') redlock_get_scan_status() assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry <|fim▁hole|> When - calling redlock-get-scan-result Then - Validate that the readable output and the context entry of the command is as expected """ from RedLock import redlock_get_scan_results get_result_response = { 'data': [{ 'id': '111111111', 'attributes': { 'name': 'test', 'policyId': '2222', 'desc': 'test', 'severity': 'high' }}] } expected_readable_output = '### Scan Results:\n|Description|ID|Name|Policy ID|Severity|\n|---|---|---|---|---|\n|' \ ' test | 111111111 | test | 2222 | high |\n' expected_context_entry = {'Redlock.Scans(val.id == obj.id)': {'id': None, 'results': [ {'id': '111111111', 'attributes': {'name': 'test', 'policyId': '2222', 'desc': 'test', 'severity': 'high'}}]}} mocker.patch('RedLock.req', return_value=get_result_response) mocker.patch.object(demisto, 'results') redlock_get_scan_results() assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry<|fim▁end|>
def test_redlock_get_scan_results(mocker): """ Given - The response from the API call of redlock-get-scan-result command.
<|file_name|>hilo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Mon Oct 10 06:32:29 2016 @author: hugo """ import threading <|fim▁hole|>def worker(count): for x in range(count): print "Programación matemática %s \n " % x return threads = list() t = threading.Thread(target=worker, args=(10,)) threads.append(t) t.start() print 'Hola mundo'<|fim▁end|>
<|file_name|>tracey.go<|end_file_name|><|fim▁begin|>// `Tracey` is a simple library which allows for much easier function enter / exit logging package tracey import ( "fmt" "log" "os" "regexp" "strconv" "strings" "reflect" "runtime" ) // Define a global regex for extracting function names var RE_stripFnPreamble = regexp.MustCompile(`^.*\.(.*)$`) var RE_detectFN = regexp.MustCompile(`\$FN`) // These options represent the various settings which tracey exposes. // A pointer to this structure is expected to be passed into the // `tracey.New(...)` function below. type Options struct { // Setting "DisableTracing" to "true" will cause tracey to return // no-op'd functions for both exit() and enter(). The default value // for this is "false" which enables tracing. DisableTracing bool // Setting the "CustomLogger" to nil will cause tracey to log to // os.Stdout. Otherwise, this is a pointer to an object as returned // from `log.New(...)`. CustomLogger *log.Logger // Setting "DisableDepthValue" to "true" will cause tracey to not // prepend the printed function's depth to enter() and exit() messages. // The default value is "false", which logs the depth value. DisableDepthValue bool // Setting "DisableNesting" to "true" will cause tracey to not indent // any messages from nested functions. The default value is "false" // which enables nesting by prepending "SpacesPerIndent" number of // spaces per level nested. DisableNesting bool SpacesPerIndent int `default:"2"` // Setting "EnterMessage" or "ExitMessage" will override the default // value of "Enter: " and "EXIT: " respectively. EnterMessage string `default:"ENTER: "` ExitMessage string `default:"EXIT: "` // Private member, used to keep track of how many levels of nesting // the current trace functions have navigated. currentDepth int } // Main entry-point for the tracey lib. Calling New with nil will // result in the default options being used. 
func New(opts *Options) (func(string), func(...interface{}) string) { var options Options if opts != nil { options = *opts } // If tracing is not enabled, just return no-op functions if options.DisableTracing { return func(string) {}, func(...interface{}) string { return "" } } // Revert to stdout if no logger is defined if options.CustomLogger == nil { options.CustomLogger = log.New(os.Stdout, "", 0) } // Use reflect to deduce "default" values for the // Enter and Exit messages (if they are not set) reflectedType := reflect.TypeOf(options) if options.EnterMessage == "" { field, _ := reflectedType.FieldByName("EnterMessage") options.EnterMessage = field.Tag.Get("default") } if options.ExitMessage == "" { field, _ := reflectedType.FieldByName("ExitMessage") options.ExitMessage = field.Tag.Get("default") }<|fim▁hole|> if options.DisableNesting { options.SpacesPerIndent = 0 } else if options.SpacesPerIndent == 0 { field, _ := reflectedType.FieldByName("SpacesPerIndent") options.SpacesPerIndent, _ = strconv.Atoi(field.Tag.Get("default")) } // // Define functions we will use and return to the caller // _spacify := func() string { spaces := strings.Repeat(" ", options.currentDepth*options.SpacesPerIndent) if !options.DisableDepthValue { return fmt.Sprintf("[%2d]%s", options.currentDepth, spaces) } return spaces } // Increment function to increase the current depth value _incrementDepth := func() { options.currentDepth += 1 } // Decrement function to decrement the current depth value // + panics if current depth value is < 0 _decrementDepth := func() { options.currentDepth -= 1 if options.currentDepth < 0 { panic("Depth is negative! 
Should never happen!") } } // Enter function, invoked on function entry _enter := func(args ...interface{}) string { defer _incrementDepth() // Figure out the name of the caller and use that fnName := "<unknown>" pc, _, _, ok := runtime.Caller(1) if ok { fnName = RE_stripFnPreamble.ReplaceAllString(runtime.FuncForPC(pc).Name(), "$1") } traceMessage := fnName if len(args) > 0 { if fmtStr, ok := args[0].(string); ok { // We have a string leading args, assume its to be formatted traceMessage = fmt.Sprintf(fmtStr, args[1:]...) } } // "$FN" will be replaced by the name of the function (if present) traceMessage = RE_detectFN.ReplaceAllString(traceMessage, fnName) options.CustomLogger.Printf("%s%s%s\n", _spacify(), options.EnterMessage, traceMessage) return traceMessage } // Exit function, invoked on function exit (usually deferred) _exit := func(s string) { _decrementDepth() options.CustomLogger.Printf("%s%s%s\n", _spacify(), options.ExitMessage, s) } return _exit, _enter }<|fim▁end|>
// If nesting is enabled, and the spaces are not specified, // use the "default" value
<|file_name|>UnknownSpellException.java<|end_file_name|><|fim▁begin|>package pl.mmorpg.prototype.client.exceptions; public class UnknownSpellException extends GameException<|fim▁hole|> public UnknownSpellException(String identifier) { super(identifier); } public UnknownSpellException(Class<?> type) { super(type.getName()); } }<|fim▁end|>
{
<|file_name|>_attributeToBoolean.js<|end_file_name|><|fim▁begin|>/** * * @description Convert attribute value to boolean value * @param attribute * @return {Boolean} * @private */ jDoc.engines.OXML.prototype._attributeToBoolean = function (attribute) { return (!!attribute && (attribute.value == 'true' || attribute.value == '1' || attribute.value == 'on')); <|fim▁hole|><|fim▁end|>
};
<|file_name|>numbers.js<|end_file_name|><|fim▁begin|><|fim▁hole|>// check number && integer && strictly positive module.exports.isNatural = function (d) { return (typeof d === "number") && (d % 1 === 0) && (d > 0); }; module.exports.isPositive = function (d) { return (typeof d === "number") && (d > 0); };<|fim▁end|>
"use strict";
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #<|fim▁hole|>############################################################################## import controllers # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod packet;<|fim▁hole|>mod connection; pub use self::packet::data_flags; pub use self::packet::message_type; pub use self::packet::Packet; pub use self::connection::Connection; pub use self::connection::ConnectionEvent;<|fim▁end|>
<|file_name|>three-effectcomposer.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for three.js (EffectComposer.js) // Project: https://github.com/mrdoob/three.js/blob/r68/examples/js/postprocessing/EffectComposer.js // Definitions by: Satoru Kimura <https://github.com/gyohk> // Definitions: https://github.com/borisyankov/DefinitelyTyped /// <reference path="./three.d.ts" /> /// <reference path="./three-shaderpass.d.ts" /> /// <reference path="./three-copyshader.d.ts" /> declare module THREE { export class EffectComposer { constructor( renderer: WebGLRenderer, renderTarget?: WebGLRenderTarget); renderTarget1: WebGLRenderTarget; renderTarget2: WebGLRenderTarget; writeBuffer: WebGLRenderTarget;<|fim▁hole|> copyPass: ShaderPass; swapBuffers(): void; addPass(pass: any): void; insertPass(pass: any, index: number): void; render(delta: number): void; reset(renderTarget?: WebGLRenderTarget): void; setSize( width: number, height: number ): void; } }<|fim▁end|>
readBuffer: WebGLRenderTarget; passes: any[];
<|file_name|>GetSliceTags.py<|end_file_name|><|fim▁begin|>from PLC.Faults import * from PLC.Method import Method from PLC.Parameter import Parameter, Mixed from PLC.Filter import Filter from PLC.SliceTags import SliceTag, SliceTags from PLC.Persons import Person, Persons from PLC.Sites import Site, Sites from PLC.Nodes import Nodes from PLC.Slices import Slice, Slices from PLC.Auth import Auth class GetSliceTags(Method): """ Returns an array of structs containing details about slice and sliver attributes. An attribute is a sliver attribute if the node_id field is set. If slice_tag_filter is specified and is an array of slice attribute identifiers, or a struct of slice attribute attributes, only slice attributes matching the filter will be returned. If return_fields is specified, only the specified details will be returned. Users may only query attributes of slices or slivers of which they are members. PIs may only query attributes of slices or slivers at their sites, or of which they are members. Admins may query attributes of any slice or sliver. """ roles = ['admin', 'pi', 'user', 'node'] accepts = [ Auth(), Mixed([SliceTag.fields['slice_tag_id']], Filter(SliceTag.fields)), Parameter([str], "List of fields to return", nullok = True) ] returns = [SliceTag.fields] def call(self, auth, slice_tag_filter = None, return_fields = None): # If we are not admin, make sure to only return our own slice # and sliver attributes. 
# if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # # Get slices that we are able to view # valid_slice_ids = self.caller['slice_ids'] # if 'pi' in self.caller['roles'] and self.caller['site_ids']: # sites = Sites(self.api, self.caller['site_ids']) # for site in sites: # valid_slice_ids += site['slice_ids'] # # techs can view all slices on the nodes at their site # if 'tech' in self.caller['roles'] and self.caller['site_ids']: # nodes = Nodes(self.api, {'site_id': self.caller['site_ids']}, ['site_id', 'slice_ids']) # for node in nodes: # valid_slice_ids.extend(node['slice_ids'])<|fim▁hole|># # # Get slice attributes that we are able to view # valid_slice_tag_ids = [] # slices = Slices(self.api, valid_slice_ids) # for slice in slices: # valid_slice_tag_ids += slice['slice_tag_ids'] # # if not valid_slice_tag_ids: # return [] # # if slice_tag_filter is None: # slice_tag_filter = valid_slice_tag_ids # Must query at least slice_tag_id (see below) if return_fields is not None and 'slice_tag_id' not in return_fields: return_fields.append('slice_tag_id') added_fields = True else: added_fields = False slice_tags = SliceTags(self.api, slice_tag_filter, return_fields) # Filter out slice attributes that are not viewable # if isinstance(self.caller, Person) and \ # 'admin' not in self.caller['roles']: # slice_tags = [slice_tag for slice_tag in slice_tags if slice_tag['slice_tag_id'] in valid_slice_tag_ids] # Remove slice_tag_id if not specified if added_fields: for slice_tag in slice_tags: if 'slice_tag_id' in slice_tag: del slice_tag['slice_tag_id'] return slice_tags<|fim▁end|>
# # if not valid_slice_ids: # return []
<|file_name|>test_auth.py<|end_file_name|><|fim▁begin|>from greencouriers.tests import * class TestAuthController(TestController): def test_index(self): response = self.app.get(url(controller='auth', action='index'))<|fim▁hole|><|fim▁end|>
# Test response...