repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
riteshshrv/trytond-amazon-mws_old
tests/test_views.py
3
1208
# -*- coding: utf-8 -*-
"""
test_views

Checks that the module's XML views and field dependencies are valid.
"""
import sys
import os

# Make a sibling ``trytond`` checkout importable when this test suite is
# run from a source tree (five directories above this file).
DIR = os.path.abspath(os.path.normpath(os.path.join(
    __file__, *(['..'] * 5 + ['trytond'])
)))
if os.path.isdir(DIR):
    sys.path.insert(0, os.path.dirname(DIR))

import unittest

import trytond.tests.test_tryton
from trytond.tests.test_tryton import test_view, test_depends


class TestViewDepend(unittest.TestCase):
    """Validates the views and the ``depends`` declarations of fields."""

    def setUp(self):
        """Install the module under test before each test method runs."""
        trytond.tests.test_tryton.install_module('amazon_mws')

    def test0005views(self):
        """The XML view definitions must be well formed."""
        test_view('amazon_mws')

    def test0006depends(self):
        """Every field ``depends`` declaration must be consistent."""
        test_depends()


def suite():
    """Assemble and return the test suite for this module."""
    loader = unittest.TestLoader()
    test_suite = trytond.tests.test_tryton.suite()
    test_suite.addTests(loader.loadTestsFromTestCase(TestViewDepend))
    return test_suite


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
bsd-3-clause
anarang/robottelo
tests/foreman/ui/test_variables.py
2
31998
# -*- encoding: utf-8 -*-
"""Test class for Puppet Smart Variables"""
from robottelo.decorators import (
    run_only_on,
    skip_if_bug_open,
    stubbed,
    tier1,
    tier3,
)
from robottelo.test import UITestCase


class SmartVariablesTestCase(UITestCase):
    """Implements Smart Variables tests in UI.

    Every test here is a manual stub (``@stubbed``): the docstring is the
    test plan and the body is intentionally empty.
    """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create(self):
        """Create a Smart Variable.

        @feature: Smart Variables - Create

        @steps:
        1. Create a smart Variable with Valid name and default value.

        @assert:
        1. The smart Variable is created successfully.
        2. In YAML output of associated host, the variable with name and
           its default value is displayed.
        3. In Host-> variables tab, the smart variable should be displayed
           with its respective puppet class.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create(self):
        """Smart Variable is not created with invalid data.

        @feature: Smart Variables - Create

        @steps:
        1. Create a smart Variable with Invalid name and valid default
           value.

        @assert:
        1. Error is displayed for invalid variable name.
        2. The smart Variable is not created.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_delete_smart_variables_menu(self):
        """Delete a Smart Variable from Smart Variables Menu.

        @feature: Smart Variables - Delete

        @steps:
        1. Delete a smart Variable from Configure - Smart Variables menu.

        @assert:
        1. The smart Variable is deleted successfully.
        2. In YAML output of associated Host, the variable should be
           removed.
        3. In Host-> variables tab, the smart variable should be removed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_update_variable_puppet_class(self):
        """Update Smart Variable's puppet class.

        @feature: Smart Variables - Puppet Class

        @steps:
        1. In Puppet class, Create a smart Variable with Valid name and
           default value.
        2. After successful creation, Update the puppet class of variable.

        @assert:
        1. The variable is updated with new puppet class.
        2. In Host/HostGroup -> variables tab, the smart variable is
           updated with its newly updated puppet class.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_globality(self):
        """Smart Variable with same names are not allowed.

        @feature: Smart Variables - Globality

        @steps:
        1. In Puppet class, Create a smart Variable with Valid name and
           default value.
        2. After successful creation, Attempt to create a variable with
           same name from same/other class.

        @assert:
        1. An Error is displayed in front of Variable Key field as
           'has already been taken'.
        2. The variable with same name are not allowed to create from any
           class.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_update_type(self):
        """Variable is Updated for variable types - Valid Value.

        Types - string, boolean, integer, real, array, hash, yaml, json

        @feature: Smart Variables - Variable Type

        @steps:
        1. Update the Key Type.
        2. Enter a 'valid' default Value.
        3. Submit the changes.

        @assert: Variable is Updated with a new type successfully.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_update_type(self):
        """Variable not updated for variable types - Invalid Value.

        Types - string, boolean, integer, real, array, hash, yaml, json

        @feature: Smart Variables - Variable Type

        @steps:
        1. Update the Key Type.
        2. Enter an 'Invalid' default Value.
        3. Submit the changes.

        @assert: Variable is not updated with new type for invalid value.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_default_value_required_checkbox(self):
        """Error is raised for blank default Value - Required checkbox.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a variable with empty value.
        2. Check Required checkbox in 'Optional Input Validator'.
        3. Submit the change.

        @assert: Error is raised for blank default value by 'Required'
        checkbox.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_default_value_required_checkbox(self):
        """Error is not raised for default Value - Required checkbox.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Provide some default value.
        2. Check Required checkbox in 'Optional Input Validator'.
        3. Submit the change.

        @assert: Error is not raised default value by 'Required' checkbox.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_value_required_checkbox(self):
        """Error is raised for blank matcher Value - Required checkbox.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher for variable for some attribute.
        2. Don't provide Value for matcher. Keep blank.
        3. Check Required checkbox in 'Optional Input Validator'.
        4. Submit the change.

        @assert: Error is raised for blank matcher value by 'Required'
        checkbox.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_matcher_value_required_checkbox(self):
        """Error is not raised for matcher Value - Required checkbox.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher for variable for some attribute.
        2. Provide some Value for matcher.
        3. Check Required checkbox in 'Optional Input Validator'.
        4. Submit the change.

        @assert: Error is not raised for matcher value by 'Required'
        checkbox.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_default_value_with_regex(self):
        """Error is raised for default value not matching with regex.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Provide default value that doesn't match the regex of step 2.
        2. Validate this value with regex validator type and rule.
        3. Submit the change.

        @assert: Error is raised for default value not matching with
        regex.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_default_value_with_regex(self):
        """Error is not raised for default value matching with regex.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Provide default value that matches the regex of step 2.
        2. Validate this value with regex validator type and rule.
        3. Submit the change.

        @assert: Error is not raised for default value matching with
        regex.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_value_with_regex(self):
        """Error is raised for matcher value not matching with regex.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher with value that doesn't match the regex of
           step 2.
        2. Validate this value with regex validator type and rule.
        3. Submit the change.

        @assert: Error is raised for matcher value not matching with
        regex.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_matcher_value_with_regex(self):
        """Error is not raised for matcher value matching with regex.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher with value that matches the regex of step 2.
        2. Validate this value with regex validator type and rule.
        3. Submit the change.

        @assert: Error is not raised for matcher value matching with
        regex.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_default_value_with_list(self):
        """Error is raised for default value not in list.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Provide default value that doesn't match the list of step 2.
        2. Validate this value with list validator type and rule.
        3. Submit the change.

        @assert: Error is raised for default value not in list.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_default_value_with_list(self):
        """Error is not raised for default value in list.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Provide default value that matches the list of step 2.
        2. Validate this value with list validator type and rule.
        3. Submit the change.

        @assert: Error is not raised for default value in list.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_value_with_list(self):
        """Error is raised for matcher value not in list.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher with value that doesn't match the list of
           step 2.
        2. Validate this value with list validator type and rule.
        3. Submit the change.

        @assert: Error is raised for matcher value not in list.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_matcher_value_with_list(self):
        """Error is not raised for matcher value in list.

        @feature: Smart Variables - Optional Validation

        @steps:
        1. Create a matcher with value that matches the list of step 2.
        2. Validate this value with list validator type and rule.
        3. Submit the change.

        @assert: Error is not raised for matcher value in list.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_value_with_default_type(self):
        """Error is raised for matcher value not of default type.

        @feature: Smart Variables - Validation

        @steps:
        1. Update variable default type with valid value.
        2. Create a matcher with value that doesn't match the default
           type.
        3. Submit the change.

        @assert: Error is raised for matcher value not of default type.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_matcher_value_with_default_type(self):
        """No error for matcher value of default type.

        @feature: Smart Variables - Validation

        @steps:
        1. Update variable default type with valid value.
        2. Create a matcher with value that matches the default type.
        3. Submit the change.

        @assert: Error is not raised for matcher value of default type.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_and_default_value(self):
        """Error for invalid default and matcher value both at a time.

        @feature: Smart Variables - Validation

        @steps:
        1. Update variable default type with Invalid value.
        2. Create a matcher with value that doesn't match the default
           type.
        3. Submit the change.

        @assert: Error is raised for invalid default and matcher value
        both.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_matcher_non_existing_attribute(self):
        """Error while creating matcher for Non Existing Attribute.

        @feature: Smart Variables - Validation

        @steps:
        1. Create a matcher with non existing attribute in org.
        2. Attempt to submit the change.

        @assert: Error is raised for non existing attribute.

        @status: Manual
        """

    @run_only_on('sat')
    @skip_if_bug_open('bugzilla', 1259174)
    @stubbed()
    @tier1
    def test_positive_create_matcher(self):
        """Create a Smart Variable with matcher.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create a smart Variable with Valid name and default value.
        2. Create a matcher for Host with valid value.

        @assert:
        1. The smart Variable with matcher is created successfully.
        2. In YAML output, the variable name with overridden value for
           host is displayed.
        3. In Host-> variables tab, the variable name with overridden
           value for host is displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create_matcher_attribute_priority(self):
        """Matcher Value set on Attribute Priority for Host.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Set fqdn as top priority attribute.
        3. Create first matcher for fqdn with valid details.
        4. Create second matcher for some attribute with valid details.
           Note - The fqdn/host should have this attribute.
        5. Submit the change.
        6. Go to YAML output of associated host.

        @assert: The YAML output has the value only for fqdn matcher.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_matcher_attribute_priority(self):
        """Matcher Value set on Attribute Priority for Host - alternate
        priority.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Set some attribute(other than fqdn) as top priority attribute.
           Note - The fqdn/host should have this attribute.
        3. Create first matcher for fqdn with valid details.
        4. Create second matcher for attribute of step 3 with valid
           details.
        5. Submit the change.
        6. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the value only for step 5 matcher.
        2. The YAML output doesn't have value for fqdn/host matcher.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create_matcher_merge_override(self):
        """Merge the values of all the associated matchers.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Create first matcher for attribute fqdn with valid details.
        3. Create second matcher for other attribute with valid details.
           Note - The fqdn/host should have this attribute.
        4. Create more matchers for some more attributes if any.
           Note - The fqdn/host should have this attributes.
        5. Select 'Merge overrides' checkbox.
        6. Submit the change.
        7. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values merged from all the associated
           matchers.
        2. The YAML output doesn't have the default value of variable.
        3. Duplicate values in YAML output if any are displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_matcher_merge_override(self):
        """Attempt to merge the values from non associated matchers.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Create first matcher for attribute fqdn with valid details.
        3. Create second matcher for other attribute with valid details.
           Note - The fqdn/host should not have this attribute.
        4. Create more matchers for some more attributes if any.
           Note - The fqdn/host should not have this attributes.
        5. Select 'Merge overrides' checkbox.
        6. Submit the change.
        7. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values only for fqdn.
        2. The YAML output doesn't have the values for attribute which
           are not associated to host.
        3. The YAML output doesn't have the default value of variable.
        4. Duplicate values in YAML output if any are displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create_matcher_merge_default(self):
        """Merge the values of all the associated matchers + default
        value.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Create first matcher for attribute fqdn with valid details.
        3. Create second matcher for other attribute with valid details.
           Note - The fqdn/host should have this attribute.
        4. Create more matchers for some more attributes if any.
           Note - The fqdn/host should have this attributes.
        5. Select 'Merge overrides' checkbox.
        6. Select 'Merge default' checkbox.
        7. Submit the change.
        8. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values merged from all the associated
           matchers.
        2. The YAML output has the default value of variable.
        3. Duplicate values in YAML output if any are displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_matcher_merge_default(self):
        """Empty default value is not shown in merged values.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with some default value.
        2. Create first matcher for attribute fqdn with valid details.
        3. Create second matcher for other attribute with valid details.
           Note - The fqdn/host should have this attribute.
        4. Create more matchers for some more attributes if any.
           Note - The fqdn/host should have this attributes.
        5. Select 'Merge overrides' checkbox.
        6. Select 'Merge default' checkbox.
        7. Submit the change.
        8. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values merged from all the associated
           matchers.
        2. The YAML output doesn't have the empty default value of
           variable.
        3. Duplicate values in YAML output if any are displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create_matcher_avoid_duplicate(self):
        """Merge the values of all the associated matchers, remove
        duplicates.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with type array and value.
        2. Create first matcher for attribute fqdn with some value.
        3. Create second matcher for other attribute with same value as
           fqdn matcher.
           Note - The fqdn/host should have this attribute.
        4. Select 'Merge overrides' checkbox.
        5. Select 'Merge default' checkbox.
        6. Select 'Avoid Duplicates' checkbox.
        7. Submit the change.
        8. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values merged from all the associated
           matchers.
        2. The YAML output has the default value of variable.
        3. Duplicate values in YAML output are removed / not displayed.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_matcher_avoid_duplicate(self):
        """Duplicates not removed as they were not really present.

        @feature: Smart Variables - Matcher

        @steps:
        1. Create variable with type array and value.
        2. Create first matcher for attribute fqdn with some value.
        3. Create second matcher for other attribute with other value
           than fqdn matcher and default value.
           Note - The fqdn/host should have this attribute.
        4. Select 'Merge overrides' checkbox.
        5. Select 'Merge default' checkbox.
        6. Select 'Avoid Duplicates' checkbox.
        7. Submit the change.
        8. Go to YAML output of associated host.

        @assert:
        1. The YAML output has the values merged from all matchers.
        2. The YAML output has the default value of variable.
        3. No value removed as duplicate value.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_enable_merge_overrides_default_checkboxes(self):
        """Enable Merge Overrides, Merge Default checkbox for supported
        types.

        @feature: Smart Variables - Matcher

        @steps:
        1. Set variable type to array/hash.

        @assert: The Merge Overrides, Merge Default checkbox are enabled
        to check.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_enable_merge_overrides_default_checkboxes(self):
        """Disable Merge Overrides, Merge Default checkboxes for non
        supported types.

        @feature: Smart Variables - Matcher

        @steps:
        1. Set variable type other than array/hash.

        @assert: The Merge Overrides, Merge Default checkboxes are not
        enabled to check.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_enable_avoid_duplicates_checkbox(self):
        """Enable Avoid duplicates checkbox for supported type- array.

        @feature: Smart Variables - Matcher

        @steps:
        1. Set variable type to array.
        2. Check Merge Overrides checkbox.

        @assert: The Avoid Duplicates checkbox is enabled to check.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_enable_avoid_duplicates_checkbox(self):
        # Renamed from test_negative_enable_avaoid_duplicates_checkbox
        # (typo); test runners discover by the test_ prefix, so the
        # rename is discovery-compatible.
        """Disable Avoid duplicates checkbox for non supported types.

        @feature: Smart Variables - Matcher

        @steps:
        1. Set variable type other than array.

        @assert:
        1. The Merge Overrides checkbox is only enabled to check for type
           hash.
        2. The Avoid duplicates checkbox not enabled to check for any
           type than array.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_impact_delete_attribute(self):
        """Impact on variable after deleting associated attribute.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable with matcher for some attribute.
        2. Delete the attribute.
        3. Recreate the attribute with same name as earlier.

        @assert:
        1. The matcher for deleted attribute removed from variable.
        2. On recreating attribute, the matcher should not reappear in
           variable.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_create_override_from_attribute(self):
        """Impact on variable on overriding the variable value from
        attribute.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable.
        2. Associate variable with fqdn/hostgroup.
        3. From host/hostgroup, override the variable value.
        4. Submit the changes.

        @assert:
        1. The host/hostgroup is saved with changes.
        2. New matcher for fqdn/hostgroup created inside variable.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_override_from_attribute(self):
        """No impact on variable on overriding the variable with invalid
        value from attribute.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable.
        2. Associate variable with fqdn/hostgroup.
        3. From host/hostgroup, Attempt to override the variable with
           some other key type of value.

        @assert:
        1. Error thrown for invalid type value.
        2. No matcher for fqdn/hostgroup is created inside variable.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_create_override_from_attribute_required_checked(self):
        """Error for empty value on overriding the variable value from
        attribute - Required checked.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable.
        2. Check 'Required' checkbox in variable.
        3. Associate variable with fqdn/hostgroup.
        4. From host/hostgroup, Attempt to override the variable with
           empty value.

        @assert:
        1. Error thrown for empty value as the value is required to pass.
        2. The info icon changed to warning icon for that variable.
        3. No matcher for fqdn/hostgroup created inside variable.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_update_matcher_from_attribute(self):
        """Impact on variable on editing the variable value from
        attribute.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable.
        2. Associate variable with fqdn/hostgroup.
        3. Create a matcher for fqdn/hostgroup with valid details.
        4. From host/hostgroup, edit the variable value.
        5. Submit the changes.

        @assert:
        1. The host/hostgroup is saved with changes.
        2. Matcher value in variable is updated from fqdn/hostgroup.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_negative_update_matcher_from_attribute(self):
        """No Impact on variable on editing the variable with invalid
        value from attribute.

        @feature: Smart Variables - Outside Impact

        @steps:
        1. Create a variable.
        2. Associate variable with fqdn/hostgroup.
        3. Create a matcher for fqdn/hostgroup with valid details.
        4. From host/hostgroup, attempt to edit the variable with invalid
           value.

        @assert:
        1. Error thrown for invalid value.
        2. Matcher value in variable is not updated from fqdn/hostgroup.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_hide_default_value(self):
        """Hide the default value of variable.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Check 'Hidden Value' checkbox.

        @assert:
        1. The default value shown in hidden state.
        2. Changes submitted successfully.
        3. Matcher values shown hidden if any.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_unhide_default_value(self):
        """Unhide the default value of variable.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Hide the value of variable.
        4. After hiding, uncheck the 'Hidden Value' checkbox.

        @assert:
        1. The default value shown in unhidden state.
        2. Changes submitted successfully.
        3. Matcher values shown unhidden if any.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_hide_default_value_in_attribute(self):
        """Hide the default value of variable in attribute.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Hide the default Value.
        4. Submit the changes.
        5. Associate variable on host/hostgroup.

        @assert:
        1. In host/hostgroup, the variable value shown in hidden state.
        2. The button for unhiding the value is displayed and accessible.
        3. The button for overriding the value is displayed and
           accessible.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_unhide_default_value_in_attribute(self):
        """Unhide the default value of variable in attribute.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Hide the default Value.
        4. Submit the changes.
        5. Associate variable on host/hostgroup.
        6. In host/hostgroup, Click Unhide button icon.

        @assert:
        1. In host/hostgroup, the variable value shown in unhidden state.
        2. The button for hiding the value is displayed and accessible.
        3. The button for overriding the value is displayed and
           accessible.
        4. In variable, the default value is still hidden.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_update_hidden_value(self):
        """Update the hidden default value of variable.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Hide the default Value.
        4. Again update the default value.
        5. Submit the changes.

        @assert:
        1. The variable default value is updated.
        2. The variable default value displayed as hidden.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier1
    def test_positive_update_hidden_value_in_attribute(self):
        """Update the hidden default value of variable in attribute.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Enter some valid default value.
        3. Hide the default Value.
        4. Submit the changes.
        5. Associate variable on host/hostgroup.
        6. In host/hostgroup, update the variable value.

        @assert:
        1. In host/hostgroup, the variable value is updated.
        2. The variable Value displayed as hidden.
        3. In variable, new matcher created for fqdn/hostgroup.
        4. And the value shown hidden.

        @status: Manual
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_hide_empty_default_value(self):
        """Hiding the empty default value.

        @feature: Smart Variables - Value Hiding

        @steps:
        1. Create a variable.
        2. Don't enter any value, keep blank.
        3. Check the 'Hidden Value' icon.
        4. Create a matcher with some value.

        @assert:
        1. The 'Hidden Value' checkbox is enabled to check.
        2. The default value shows empty on hide.
        3. Matcher Value shown as hidden.

        @status: Manual
        """
gpl-3.0
lightrabbit/PyBitmessage
src/pyelliptic/cipher.py
24
2675
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.

from pyelliptic.openssl import OpenSSL


class Cipher:
    """
    Symmetric encryption

        import pyelliptic
        iv = pyelliptic.Cipher.gen_IV('aes-256-cfb')
        ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb')
        ciphertext = ctx.update('test1')
        ciphertext += ctx.update('test2')
        ciphertext += ctx.final()

        ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb')
        print ctx2.ciphering(ciphertext)
    """

    def __init__(self, key, iv, do, ciphername='aes-256-cbc'):
        """Initialise an EVP cipher context.

        do == 1 => Encrypt; do == 0 => Decrypt

        Raises ValueError (a subclass of Exception, so existing callers
        catching Exception still work) when ``do`` is neither 0 nor 1.
        """
        # Validate before touching OpenSSL so an invalid ``do`` cannot
        # leave a half-initialised context behind.
        if do not in (0, 1):
            raise ValueError(
                "Invalid 'do' argument: expected 1 (encrypt) or 0 "
                "(decrypt), got %r" % (do,))
        self.cipher = OpenSSL.get_cipher(ciphername)
        self.ctx = OpenSSL.EVP_CIPHER_CTX_new()
        k = OpenSSL.malloc(key, len(key))
        IV = OpenSSL.malloc(iv, len(iv))
        OpenSSL.EVP_CipherInit_ex(
            self.ctx, self.cipher.get_pointer(), 0, k, IV, do)

    @staticmethod
    def get_all_cipher():
        """
        static method, returns all ciphers available
        """
        # list() keeps the historical list return type on Python 3,
        # where dict.keys() would otherwise be a view object.
        return list(OpenSSL.cipher_algo.keys())

    @staticmethod
    def get_blocksize(ciphername):
        """Return the block size (in bytes) of the named cipher."""
        cipher = OpenSSL.get_cipher(ciphername)
        return cipher.get_blocksize()

    @staticmethod
    def gen_IV(ciphername):
        """Generate a random IV sized to the named cipher's block size."""
        cipher = OpenSSL.get_cipher(ciphername)
        return OpenSSL.rand(cipher.get_blocksize())

    def update(self, input):
        """Feed ``input`` to the cipher and return the bytes produced."""
        i = OpenSSL.c_int(0)
        # Output may grow by up to one block beyond the input length.
        out_buf = OpenSSL.malloc(b"", len(input) + self.cipher.get_blocksize())
        inp = OpenSSL.malloc(input, len(input))
        if OpenSSL.EVP_CipherUpdate(self.ctx, OpenSSL.byref(out_buf),
                                    OpenSSL.byref(i), inp, len(input)) == 0:
            raise Exception("[OpenSSL] EVP_CipherUpdate FAIL ...")
        return out_buf.raw[0:i.value]

    def final(self):
        """Finalise the cipher and return any remaining bytes."""
        i = OpenSSL.c_int(0)
        out_buf = OpenSSL.malloc(b"", self.cipher.get_blocksize())
        if (OpenSSL.EVP_CipherFinal_ex(self.ctx, OpenSSL.byref(out_buf),
                                       OpenSSL.byref(i))) == 0:
            raise Exception("[OpenSSL] EVP_CipherFinal_ex FAIL ...")
        return out_buf.raw[0:i.value]

    def ciphering(self, input):
        """
        Do update and final in one method
        """
        buff = self.update(input)
        return buff + self.final()

    def __del__(self):
        # Guard: if __init__ raised before self.ctx was assigned,
        # __del__ would otherwise raise AttributeError during GC.
        ctx = getattr(self, 'ctx', None)
        if ctx is not None:
            OpenSSL.EVP_CIPHER_CTX_cleanup(ctx)
            OpenSSL.EVP_CIPHER_CTX_free(ctx)
mit
swtp1v07/Savu
savu/test/plugin_test.py
1
6514
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from savu.plugins.plugin import Plugin

"""
.. module:: plugins_test
   :platform: Unix
   :synopsis: unittest test classes for plugins

.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>

"""

import unittest

from savu.plugins import utils as pu
from savu.test import test_utils as tu
from savu.plugins.cpu_plugin import CpuPlugin

# Dotted path of the abstract base plugin; used as the default test target.
base_class_name = "savu.plugins.plugin"


class PluginTest(unittest.TestCase):
    """Loads a plugin by dotted name and runs it on generated test data.

    Subclasses only override setUp to point at a concrete plugin; tests
    degrade to a pass (with a console note) when optional libraries are
    missing.
    """

    def setUp(self):
        # Default target is the abstract base class.
        self.plugin_name = base_class_name

    def test_get_plugin(self):
        """The named plugin can be loaded at all."""
        try:
            loaded = pu.load_plugin(self.plugin_name)
            self.assertIsNotNone(loaded)
        except ImportError as e:
            print("Failed to run plugin test as libraries not available (%s), passing test" % (e))
            pass

    def test_process(self):
        """Run the plugin end-to-end on appropriate input data."""
        try:
            loaded = pu.load_plugin(self.plugin_name)
            if self.plugin_name == base_class_name:
                # The abstract base must refuse to process anything.
                self.assertRaises(NotImplementedError, loaded.process,
                                  "test", "test", 1, 1)
                return
            # Build matching input and output datasets for the plugin.
            data = tu.get_appropriate_input_data(loaded)
            self.assertGreater(len(data), 0,
                               "Cannot find appropriate test data")
            output = tu.get_appropriate_output_data(loaded, data)
            self.assertGreater(len(output), 0,
                               "Cannot create appropriate output data")
            loaded.set_parameters(None)
            for idx in range(len(data)):
                loaded.run_process(data[idx], output[idx], ["CPU0"], 0)
                print("Output from plugin under test ( %s ) is in %s" % (loaded.name, output[idx].backing_file.filename))
                data[idx].complete()
                output[idx].complete()
        except ImportError as e:
            print("Failed to run plugin test as libraries not available (%s), passing test" % (e))
            pass


class CpuPluginWrapper(Plugin, CpuPlugin):
    """Records the arguments that CpuPlugin.run_process forwards to it."""

    def __init__(self):
        super(CpuPluginWrapper, self).__init__()
        self.data = None
        self.output = None
        self.processes = None
        self.process_number = None

    def process(self, data, output, processes, process):
        # Capture everything for later inspection by the tests.
        self.data = data
        self.output = output
        self.processes = processes
        self.process_number = process


class CpuPluginTest(unittest.TestCase):
    """Checks how CpuPlugin maps mixed CPU/GPU process lists onto CPUs."""

    def setUp(self):
        self.plugin = None

    def _run(self, processes, process_number):
        """Run a fresh wrapper with the given process list and return it."""
        wrapper = CpuPluginWrapper()
        wrapper.run_process("data", "out", processes, process_number)
        self.plugin = wrapper
        return wrapper

    def test_run_process(self):
        """Individual process-list / index combinations."""
        cpus4 = ["CPU0", "CPU1", "CPU2", "CPU3"]
        mixed = ["CPU0", "GPU0", "CPU1", "GPU1"]

        w = self._run(["CPU0"], 0)
        self.assertEqual(w.processes, ["CPU0"])
        self.assertEqual(w.process_number, 0)

        # Pure-CPU lists pass straight through with the same index.
        for num in (0, 1, 3):
            w = self._run(cpus4, num)
            self.assertEqual(w.processes, cpus4)
            self.assertEqual(w.process_number, num)

        # Mixed lists: CPU slots are renumbered, GPU slots do not run.
        w = self._run(mixed, 0)
        self.assertEqual(w.processes, ["CPU0", "CPU1"])
        self.assertEqual(w.process_number, 0)

        w = self._run(mixed, 1)
        self.assertEqual(w.processes, None)
        self.assertEqual(w.process_number, None)

        w = self._run(mixed, 2)
        self.assertEqual(w.processes, ["CPU0", "CPU1"])
        self.assertEqual(w.process_number, 1)

        w = self._run(mixed, 3)
        self.assertEqual(w.processes, None)
        self.assertEqual(w.process_number, None)

    def test_run_cpu6_gpu2(self):
        """Six CPUs and two GPUs: only the first six indices execute."""
        all_procs = ["CPU0", "CPU1", "CPU2", "CPU3", "CPU4", "CPU5",
                     "GPU0", "GPU1"]
        cpu_procs = all_procs[:6]
        for idx in range(8):
            w = self._run(all_procs, idx)
            if idx < 6:
                self.assertEqual(w.processes, cpu_procs)
                self.assertEqual(w.process_number, idx)
            else:
                self.assertEqual(w.processes, None)
                self.assertEqual(w.process_number, None)


class TimeseriesFieldCorrectionsTest(PluginTest):
    """PluginTest against the timeseries field corrections plugin."""

    def setUp(self):
        self.plugin_name = "savu.plugins.timeseries_field_corrections"


class MedianFilterTest(PluginTest):
    """PluginTest against the median filter plugin."""

    def setUp(self):
        self.plugin_name = "savu.plugins.median_filter"


class SimpleReconTest(PluginTest):
    """PluginTest against the simple reconstruction plugin."""

    def setUp(self):
        self.plugin_name = "savu.plugins.simple_recon"


if __name__ == "__main__":
    unittest.main()
apache-2.0
pythonvietnam/scikit-learn
examples/ensemble/plot_adaboost_twoclass.py
347
3268
""" ================== Two-class AdaBoost ================== This example fits an AdaBoosted decision stump on a non-linearly separable classification dataset composed of two "Gaussian quantiles" clusters (see :func:`sklearn.datasets.make_gaussian_quantiles`) and plots the decision boundary and decision scores. The distributions of decision scores are shown separately for samples of class A and B. The predicted class label for each sample is determined by the sign of the decision score. Samples with decision scores greater than zero are classified as B, and are otherwise classified as A. The magnitude of a decision score determines the degree of likeness with the predicted class label. Additionally, a new dataset could be constructed containing a desired purity of class B, for example, by only selecting samples with a decision score above some value. """ print(__doc__) # Author: Noel Dawe <noel.dawe@gmail.com> # # License: BSD 3 clause import numpy as np import matplotlib.pyplot as plt from sklearn.ensemble import AdaBoostClassifier from sklearn.tree import DecisionTreeClassifier from sklearn.datasets import make_gaussian_quantiles # Construct dataset X1, y1 = make_gaussian_quantiles(cov=2., n_samples=200, n_features=2, n_classes=2, random_state=1) X2, y2 = make_gaussian_quantiles(mean=(3, 3), cov=1.5, n_samples=300, n_features=2, n_classes=2, random_state=1) X = np.concatenate((X1, X2)) y = np.concatenate((y1, - y2 + 1)) # Create and fit an AdaBoosted decision tree bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1), algorithm="SAMME", n_estimators=200) bdt.fit(X, y) plot_colors = "br" plot_step = 0.02 class_names = "AB" plt.figure(figsize=(10, 5)) # Plot the decision boundaries plt.subplot(121) x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1 y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step), np.arange(y_min, y_max, plot_step)) Z = bdt.predict(np.c_[xx.ravel(), yy.ravel()]) Z = 
Z.reshape(xx.shape) cs = plt.contourf(xx, yy, Z, cmap=plt.cm.Paired) plt.axis("tight") # Plot the training points for i, n, c in zip(range(2), class_names, plot_colors): idx = np.where(y == i) plt.scatter(X[idx, 0], X[idx, 1], c=c, cmap=plt.cm.Paired, label="Class %s" % n) plt.xlim(x_min, x_max) plt.ylim(y_min, y_max) plt.legend(loc='upper right') plt.xlabel('x') plt.ylabel('y') plt.title('Decision Boundary') # Plot the two-class decision scores twoclass_output = bdt.decision_function(X) plot_range = (twoclass_output.min(), twoclass_output.max()) plt.subplot(122) for i, n, c in zip(range(2), class_names, plot_colors): plt.hist(twoclass_output[y == i], bins=10, range=plot_range, facecolor=c, label='Class %s' % n, alpha=.5) x1, x2, y1, y2 = plt.axis() plt.axis((x1, x2, y1, y2 * 1.2)) plt.legend(loc='upper right') plt.ylabel('Samples') plt.xlabel('Score') plt.title('Decision Scores') plt.tight_layout() plt.subplots_adjust(wspace=0.35) plt.show()
bsd-3-clause
TheWardoctor/Wardoctors-repo
plugin.video.ukturk/pyxbmct/addonwindow.py
2
33753
# -*- coding: utf-8 -*- # PyXBMCt framework module # # PyXBMCt is a mini-framework for creating Kodi (XBMC) Python addons # with arbitrary UI made of Controls - decendants of xbmcgui.Control class. # The framework uses image textures from Kodi Confluence skin. # # Licence: GPL v.3 http://www.gnu.org/licenses/gpl.html """ ``pyxbmct.addonwindow`` module contains all classes and constants of PyXBMCt framework """ import os import xbmc import xbmcgui from xbmcaddon import Addon _images = os.path.join(Addon('plugin.video.ukturk').getAddonInfo('path'),'pyxbmct', 'textures', 'default') # Text alighnment constants. Mixed variants are obtained by bit OR (|) ALIGN_LEFT = 0 """Align left""" ALIGN_RIGHT = 1 """Align right""" ALIGN_CENTER_X = 2 """Align center horisontally""" ALIGN_CENTER_Y = 4 """Align center vertically""" ALIGN_CENTER = 6 """Align center by both axis""" ALIGN_TRUNCATED = 8 """Align truncated""" ALIGN_JUSTIFY = 10 """Align justify""" # Kodi key action codes. # More codes available in xbmcgui module ACTION_PREVIOUS_MENU = 10 """ESC action""" ACTION_NAV_BACK = 92 """Backspace action""" ACTION_MOVE_LEFT = 1 """Left arrow key""" ACTION_MOVE_RIGHT = 2 """Right arrow key""" ACTION_MOVE_UP = 3 """Up arrow key""" ACTION_MOVE_DOWN = 4 """Down arrow key""" ACTION_MOUSE_WHEEL_UP = 104 """Mouse wheel up""" ACTION_MOUSE_WHEEL_DOWN = 105 """Mouse wheel down""" ACTION_MOUSE_DRAG = 106 """Mouse drag""" ACTION_MOUSE_MOVE = 107 """Mouse move""" ACTION_MOUSE_LEFT_CLICK = 100 """Mouse click""" def _set_textures(textures, kwargs): """Set texture arguments for controls.""" for texture in textures.keys(): if kwargs.get(texture) is None: kwargs[texture] = textures[texture] class AddonWindowError(Exception): """Custom exception""" pass class Label(xbmcgui.ControlLabel): """ Label(label, font=None, textColor=None, disabledColor=None, alignment=0,hasPath=False, angle=0) ControlLabel class. Implements a simple text label. :param label: string or unicode - text string. 
:param font: string - font used for label text. (e.g. 'font13') :param textColor: hexstring - color of enabled label's label. (e.g. '0xFFFFFFFF') :param disabledColor: hexstring - color of disabled label's label. (e.g. '0xFFFF3300') :param alignment: integer - alignment of label - *Note, see xbfont.h :param hasPath: bool - True=stores a path / False=no path. :param angle: integer - angle of control. (+ rotates CCW, - rotates CW) .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.label = Label('Status', angle=45) """ def __new__(cls, *args, **kwargs): return super(Label, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class FadeLabel(xbmcgui.ControlFadeLabel): """ FadeLabel(font=None, textColor=None, _alignment=0) Control that scrolls label text. Implements a text label that can auto-scroll very long text. :param font: string - font used for label text. (e.g. 'font13') :param textColor: hexstring - color of fadelabel's labels. (e.g. '0xFFFFFFFF') :param _alignment: integer - alignment of label - *Note, see xbfont.h .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.fadelabel = FadeLabel(textColor='0xFFFFFFFF') """ def __new__(cls, *args, **kwargs): return super(FadeLabel, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class TextBox(xbmcgui.ControlTextBox): """ TextBox(font=None, textColor=None) ControlTextBox class Implements a box for displaying multi-line text. Long text is truncated from below. Also supports auto-scrolling. :param font: string - font used for text. (e.g. 'font13') :param textColor: hexstring - color of textbox's text. (e.g. '0xFFFFFFFF') .. note:: After you create the control, you need to add it to the window with placeControl(). 
Example:: self.textbox = TextBox(textColor='0xFFFFFFFF') """ def __new__(cls, *args, **kwargs): return super(TextBox, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class Image(xbmcgui.ControlImage): """ Image(filename, aspectRatio=0, colorDiffuse=None) ControlImage class. Implements a box for displaying .jpg, .png, and .gif images. :param filename: string - image filename. :param colorKey: hexString - (example, '0xFFFF3300') :param aspectRatio: integer - (values 0 = stretch (default), 1 = scale up (crops), 2 = scale down (black bars) :param colorDiffuse: hexString - (example, '0xC0FF0000' (red tint)). .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.image = Image('d:\images\picture.jpg', aspectRatio=2) """ def __new__(cls, *args, **kwargs): return super(Image, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class Button(xbmcgui.ControlButton): """ Button(label, focusTexture=None, noFocusTexture=None, textOffsetX=CONTROL_TEXT_OFFSET_X, textOffsetY=CONTROL_TEXT_OFFSET_Y, alignment=4, font=None, textColor=None, disabledColor=None, angle=0, shadowColor=None, focusedColor=None) ControlButton class. Implements a clickable button. :param label: string or unicode - text string. :param focusTexture: string - filename for focus texture. :param noFocusTexture: string - filename for no focus texture. :param textOffsetX: integer - x offset of label. :param textOffsetY: integer - y offset of label. :param alignment: integer - alignment of label - *Note, see xbfont.h :param font: string - font used for label text. (e.g. 'font13') :param textColor: hexstring - color of enabled button's label. (e.g. '0xFFFFFFFF') :param disabledColor: hexstring - color of disabled button's label. (e.g. '0xFFFF3300') :param angle: integer - angle of control. (+ rotates CCW, - rotates CW) :param shadowColor: hexstring - color of button's label's shadow. (e.g. 
'0xFF000000') :param focusedColor: hexstring - color of focused button's label. (e.g. '0xFF00FFFF') .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.button = Button('Status', font='font14') """ def __new__(cls, *args, **kwargs): textures = {'focusTexture': os.path.join(_images, 'Button', 'KeyboardKey.png'), 'noFocusTexture': os.path.join(_images, 'Button', 'KeyboardKeyNF.png')} _set_textures(textures, kwargs) if kwargs.get('alignment') is None: kwargs['alignment'] = ALIGN_CENTER return super(Button, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class RadioButton(xbmcgui.ControlRadioButton): """ RadioButton(label, focusTexture=None, noFocusTexture=None, textOffsetX=None, textOffsetY=None, _alignment=None, font=None, textColor=None, disabledColor=None, angle=None, shadowColor=None, focusedColor=None, focusOnTexture=None, noFocusOnTexture=None, focusOffTexture=None, noFocusOffTexture=None) ControlRadioButton class. Implements a 2-state switch. :param label: string or unicode - text string. :param focusTexture: string - filename for focus texture. :param noFocusTexture: string - filename for no focus texture. :param textOffsetX: integer - x offset of label. :param textOffsetY: integer - y offset of label. :param _alignment: integer - alignment of label - *Note, see xbfont.h :param font: string - font used for label text. (e.g. 'font13') :param textColor: hexstring - color of enabled radio button's label. (e.g. '0xFFFFFFFF') :param disabledColor: hexstring - color of disabled radio button's label. (e.g. '0xFFFF3300') :param angle: integer - angle of control. (+ rotates CCW, - rotates CW) :param shadowColor: hexstring - color of radio button's label's shadow. (e.g. '0xFF000000') :param focusedColor: hexstring - color of focused radio button's label. (e.g. '0xFF00FFFF') :param focusOnTexture: string - filename for radio focused/checked texture. 
:param noFocusOnTexture: string - filename for radio not focused/checked texture. :param focusOffTexture: string - filename for radio focused/unchecked texture. :param noFocusOffTexture: string - filename for radio not focused/unchecked texture. .. note:: To customize RadioButton all 4 abovementioned textures need to be provided. .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.radiobutton = RadioButton('Status', font='font14') """ def __new__(cls, *args, **kwargs): if int(xbmc.getInfoLabel('System.BuildVersion')[:2]) >= 13: textures = {'focusTexture': os.path.join(_images, 'RadioButton', 'MenuItemFO.png'), 'noFocusTexture': os.path.join(_images, 'RadioButton', 'MenuItemNF.png'), 'focusOnTexture': os.path.join(_images, 'RadioButton', 'radiobutton-focus.png'), 'noFocusOnTexture': os.path.join(_images, 'RadioButton', 'radiobutton-focus.png'), 'focusOffTexture': os.path.join(_images, 'RadioButton', 'radiobutton-nofocus.png'), 'noFocusOffTexture': os.path.join(_images, 'RadioButton', 'radiobutton-nofocus.png')} else: # This is for compatibility with Frodo and earlier versions. textures = {'focusTexture': os.path.join(_images, 'RadioButton', 'MenuItemFO.png'), 'noFocusTexture': os.path.join(_images, 'RadioButton', 'MenuItemNF.png'), 'TextureRadioFocus': os.path.join(_images, 'RadioButton', 'radiobutton-focus.png'), 'TextureRadioNoFocus': os.path.join(_images, 'RadioButton', 'radiobutton-nofocus.png')} _set_textures(textures, kwargs) return super(RadioButton, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class Edit(xbmcgui.ControlEdit): """ Edit(label, font=None, textColor=None, disabledColor=None, _alignment=0, focusTexture=None, noFocusTexture=None, isPassword=False) ControlEdit class. Implements a clickable text entry field with an on-screen keyboard. :param label: string or unicode - text string. :param font: [opt] string - font used for label text. (e.g. 
'font13') :param textColor: [opt] hexstring - color of enabled label's label. (e.g. '0xFFFFFFFF') :param disabledColor: [opt] hexstring - color of disabled label's label. (e.g. '0xFFFF3300') :param _alignment: [opt] integer - alignment of label - *Note, see xbfont.h :param focusTexture: [opt] string - filename for focus texture. :param noFocusTexture: [opt] string - filename for no focus texture. :param isPassword: [opt] bool - if true, mask text value. .. note:: You can use the above as keywords for arguments and skip certain optional arguments. Once you use a keyword, all following arguments require the keyword. After you create the control, you need to add it to the window with ``palceControl()``. Example:: self.edit = Edit('Status') """ def __new__(cls, *args, **kwargs): textures = {'focusTexture': os.path.join(_images, 'Edit', 'button-focus.png'), 'noFocusTexture': os.path.join(_images, 'Edit', 'black-back2.png')} _set_textures(textures, kwargs) return super(Edit, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class List(xbmcgui.ControlList): """ List(font=None, textColor=None, buttonTexture=None, buttonFocusTexture=None, selectedColor=None, _imageWidth=10, _imageHeight=10, _itemTextXOffset=10, _itemTextYOffset=2, _itemHeight=27, _space=2, _alignmentY=4) ControlList class. Implements a scrollable list of items. :param font: string - font used for items label. (e.g. 'font13') :param textColor: hexstring - color of items label. (e.g. '0xFFFFFFFF') :param buttonTexture: string - filename for no focus texture. :param buttonFocusTexture: string - filename for focus texture. :param selectedColor: integer - x offset of label. :param _imageWidth: integer - width of items icon or thumbnail. :param _imageHeight: integer - height of items icon or thumbnail. :param _itemTextXOffset: integer - x offset of items label. :param _itemTextYOffset: integer - y offset of items label. :param _itemHeight: integer - height of items. :param _space: integer - space between items. 
:param _alignmentY: integer - Y-axis alignment of items label - *Note, see xbfont.h .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.cList = List('font14', space=5) """ def __new__(cls, *args, **kwargs): textures = {'buttonTexture': os.path.join(_images, 'List', 'MenuItemNF.png'), 'buttonFocusTexture': os.path.join(_images, 'List', 'MenuItemFO.png')} _set_textures(textures, kwargs) return super(List, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class Slider(xbmcgui.ControlSlider): """ Slider(textureback=None, texture=None, texturefocus=None) ControlSlider class. Implements a movable slider for adjusting some value. :param textureback: string - image filename. :param texture: string - image filename. :param texturefocus: string - image filename. .. note:: After you create the control, you need to add it to the window with placeControl(). Example:: self.slider = Slider() """ def __new__(cls, *args, **kwargs): textures = {'textureback': os.path.join(_images, 'Slider', 'osd_slider_bg.png'), 'texture': os.path.join(_images, 'Slider', 'osd_slider_nibNF.png'), 'texturefocus': os.path.join(_images, 'Slider', 'osd_slider_nib.png')} _set_textures(textures, kwargs) return super(Slider, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs) class AbstractWindow(object): """ Top-level control window. The control windows serves as a parent widget for other XBMC UI controls much like Tkinter.Tk or PyQt QWidget class. This class is a basic "skeleton" for a control window. .. warning:: This is an abstract class and is not supposed to be instantiated directly! """ def __init__(self): self.actions_connected = [] self.controls_connected = [] def setGeometry(self, width_, height_, rows_, columns_, pos_x=-1, pos_y=-1): """ Set width, height, Grid layout, and coordinates (optional) for a new control window. :param width_: widgh of the created window. :param height_: height of the created window. 
:param rows_: # rows of the Grid layout to place controls on. :param columns_: # colums of the Grid layout to place controls on. :param pos_x: (opt) x coordinate of the top left corner of the window. :param pos_y: (opt) y coordinates of the top left corner of the window. If pos_x and pos_y are not privided, the window will be placed at the center of the screen. Example:: self.setGeometry(400, 500, 5, 4) """ self.width = width_ self.height = height_ self.rows = rows_ self.columns = columns_ if pos_x > 0 and pos_y > 0: self.x = pos_x self.y = pos_y else: self.x = 640 - self.width/2 self.y = 360 - self.height/2 self._setGrid() def _setGrid(self): """ Set window grid layout of rows x columns. This is a helper method not to be called directly. """ self.grid_x = self.x self.grid_y = self.y self.tile_width = self.width / self.columns self.tile_height = self.height / self.rows def placeControl(self, control, row, column, rowspan=1, columnspan=1, pad_x=5, pad_y=5): """ Place a control within the window grid layout. :param control: control instance to be placed in the grid. :param row: row number where to place the control (starts from 0). :param column: column number where to place the control (starts from 0). :param rowspan: set when the control needs to occupy several rows. :param columnspan: set when the control needs to occupy several columns. :param pad_x: horisontal padding. :param pad_y: vertical padding. :raises: :class:`AddonWindowError` if a grid has not yet been set. Use ``pad_x`` and ``pad_y`` to adjust control's aspect. Negative padding values can be used to make a control overlap with grid cells next to it, if necessary. 
Example:: self.placeControl(self.label, 0, 1) """ try: control_x = (self.grid_x + self.tile_width * column) + pad_x control_y = (self.grid_y + self.tile_height * row) + pad_y control_width = self.tile_width * columnspan - 2 * pad_x control_height = self.tile_height * rowspan - 2 * pad_y except AttributeError: raise AddonWindowError('Window geometry is not defined! Call setGeometry first.') control.setPosition(control_x, control_y) control.setWidth(control_width) control.setHeight(control_height) self.addControl(control) self.setAnimation(control) def getX(self): """Get X coordinate of the top-left corner of the window.""" try: return self.x except AttributeError: raise AddonWindowError('Window geometry is not defined! Call setGeometry first.') def getY(self): """Get Y coordinate of the top-left corner of the window.""" try: return self.y except AttributeError: raise AddonWindowError('Window geometry is not defined! Call setGeometry first.') def getWindowWidth(self): """Get window width.""" try: return self.width except AttributeError: raise AddonWindowError('Window geometry is not defined! Call setGeometry first.') def getWindowHeight(self): """Get window height.""" try: return self.height except AttributeError: raise AddonWindowError('Window geometry is not defined! Call setGeometry first.') def getRows(self): """ Get grid rows count. :raises: :class:`AddonWindowError` if a grid has not yet been set. """ try: return self.rows except AttributeError: raise AddonWindowError('Grid layot is not set! Call setGeometry first.') def getColumns(self): """ Get grid columns count. :raises: :class:`AddonWindowError` if a grid has not yet been set. """ try: return self.columns except AttributeError: raise AddonWindowError('Grid layout is not set! Call setGeometry first.') def connect(self, event, callable): """ Connect an event to a function. :param event: event to be connected. :param callable: callable object the event is connected to. 
An event can be an inctance of a Control object or an integer key action code. Several basic key action codes are provided by PyXBMCt. ``xbmcgui`` module provides more action codes. You can connect the following Controls: :class:`Button`, :class:`RadioButton` and :class:`List`. Other Controls do not generate any control events when activated so their connections won't work. To catch :class:`Slider` events you need to connect the following key actions: ``ACTION_MOVE_LEFT``, ``ACTION_MOVE_RIGHT`` and ``ACTION_MOUSE_DRAG``, and do a check whether the ``Slider`` instance is focused. ``callable`` parameter is a function or a method to be executed on when the event is fired. .. warning:: For connection you must provide a function object without brackets ``()``, not a function call! ``lambda`` can be used as to call another function or method with parameters known at runtime. Examples:: self.connect(self.exit_button, self.close) or:: self.connect(ACTION_NAV_BACK, self.close) """ try: self.disconnect(event) except AddonWindowError: if type(event) == int: self.actions_connected.append([event, callable]) else: self.controls_connected.append([event, callable]) def connectEventList(self, events, function): """ Connect a list of controls/action codes to a function. See :meth:`connect` docstring for more info. """ [self.connect(event, function) for event in events] def disconnect(self, event): """ Disconnect an event from a function. An event can be an inctance of a Control object or an integer key action code which has previously been connected to a function or a method. :param event: event to be disconnected. :raises: :class:`AddonWindowError` if an event is not connected to any function. 
Examples:: self.disconnect(self.exit_button) or:: self.disconnect(ACTION_NAV_BACK) """ if type(event) == int: event_list = self.actions_connected else: event_list = self.controls_connected for index in range(len(event_list)): if event == event_list[index][0]: event_list.pop(index) break else: raise AddonWindowError('The action or control %s is not connected!' % event) def disconnectEventList(self, events): """ Disconnect a list of controls/action codes from functions. See :func:`disconnect` docstring for more info. :param events: the list of events to be disconnected. :raises: :class:`AddonWindowError` if at least one event in the list is not connected to any function. """ [self.disconnect(event) for event in events] def _executeConnected(self, event, connected_list): """ Execute a connected event (an action or a control). This is a helper method not to be called directly. """ for item in connected_list: if event == item[0]: item[1]() break def setAnimation(self, control): """ Set animation for control :param control: control for which animation is set. This method is called automatically to set animation properties for all controls added to the current addon window instance -- both for built-in controls (window background, title bar etc.) and for controls added with :meth:`placeControl`. It receives a control instance as the 2nd positional argument (besides ``self``). By default the method does nothing, i.e. no animation is set for controls. To add animation you need to re-implement this method in your child class. E.g:: def setAnimation(self, control): control.setAnimations([('WindowOpen', 'effect=fade start=0 end=100 time=1000',), ('WindowClose', 'effect=fade start=100 end=0 time=1000',)]) """ pass class AddonWindow(AbstractWindow): """ Top-level control window. The control windows serves as a parent widget for other XBMC UI controls much like ``Tkinter.Tk`` or PyQt ``QWidget`` class. 
This is an abstract class which is not supposed to be instantiated directly and will raise exeptions. It is designed to be implemented in a grand-child class with the second inheritance from ``xbmcgui.Window`` or ``xbmcgui.WindowDialog`` in a direct child class. This class provides a control window with a background and a header similar to top-level widgets of desktop UI frameworks. .. warning:: This is an abstract class and is not supposed to be instantiated directly! """ def __init__(self, title=''): """Constructor method.""" super(AddonWindow, self).__init__() self._setFrame(title) def _setFrame(self, title): """ Set window frame Define paths to images for window background and title background textures, and set control position adjustment constants used in setGrid. This is a helper method not to be called directly. """ # Window background image self.background_img = os.path.join(_images, 'AddonWindow', 'ContentPanel.png') # Background for a window header self.title_background_img = os.path.join(_images, 'AddonWindow', 'dialogheader.png') # Horisontal adjustment for a header background if the main background has transparent edges. self.X_MARGIN = 5 # Vertical adjustment for a header background if the main background has transparent edges self.Y_MARGIN = 5 # Header position adjustment if the main backround has visible borders. self.Y_SHIFT = 4 # The height of a window header (for the title background and the title label). 
self.HEADER_HEIGHT = 35 self.background = xbmcgui.ControlImage(-10, -10, 1, 1, self.background_img) self.addControl(self.background) self.setAnimation(self.background) self.title_background = xbmcgui.ControlImage(-10, -10, 1, 1, self.title_background_img) self.addControl(self.title_background) self.setAnimation(self.title_background) self.title_bar = xbmcgui.ControlLabel(-10, -10, 1, 1, title, alignment=ALIGN_CENTER, textColor='0xFFFFA500', font='font13_title') self.addControl(self.title_bar) self.setAnimation(self.title_bar) self.window_close_button = xbmcgui.ControlButton(-100, -100, 60, 30, '', focusTexture=os.path.join(_images, 'AddonWindow', 'DialogCloseButton-focus.png'), noFocusTexture=os.path.join(_images, 'AddonWindow', 'DialogCloseButton.png')) self.addControl(self.window_close_button) self.setAnimation(self.window_close_button) def setGeometry(self, width_, height_, rows_, columns_, pos_x=-1, pos_y=-1, padding=5): """ Set width, height, Grid layout, and coordinates (optional) for a new control window. :param width_: new window width in pixels. :param height_: new window height in pixels. :param rows_: # of rows in the Grid layout to place controls on. :param columns_: # of colums in the Grid layout to place controls on. :param pos_x: (optional) x coordinate of the top left corner of the window. :param pos_y: (optional) y coordinate of the top left corner of the window. :param padding: (optional) padding between outer edges of the window and controls placed on it. If ``pos_x`` and ``pos_y`` are not privided, the window will be placed at the center of the screen. 
Example:: self.setGeometry(400, 500, 5, 4) """ self.win_padding = padding super(AddonWindow, self).setGeometry(width_, height_, rows_, columns_, pos_x, pos_y) self.background.setPosition(self.x, self.y) self.background.setWidth(self.width) self.background.setHeight(self.height) self.title_background.setPosition(self.x + self.X_MARGIN, self.y + self.Y_MARGIN + self.Y_SHIFT) self.title_background.setWidth(self.width - 2 * self.X_MARGIN) self.title_background.setHeight(self.HEADER_HEIGHT) self.title_bar.setPosition(self.x + self.X_MARGIN, self.y + self.Y_MARGIN + self.Y_SHIFT) self.title_bar.setWidth(self.width - 2 * self.X_MARGIN) self.title_bar.setHeight(self.HEADER_HEIGHT) self.window_close_button.setPosition(self.x + self.width - 70, self.y + self.Y_MARGIN + self.Y_SHIFT) def _setGrid(self): """ Set window grid layout of rows * columns. This is a helper method not to be called directly. """ self.grid_x = self.x + self.X_MARGIN + self.win_padding self.grid_y = self.y + self.Y_MARGIN + self.Y_SHIFT + self.HEADER_HEIGHT + self.win_padding self.tile_width = (self.width - 2 * (self.X_MARGIN + self.win_padding))/self.columns self.tile_height = ( self.height - self.HEADER_HEIGHT - self.Y_SHIFT - 2 * (self.Y_MARGIN + self.win_padding))/self.rows def setWindowTitle(self, title=''): """ Set window title. .. warning:: This method must be called **AFTER** (!!!) :meth:`setGeometry`, otherwise there is some werid bug with all skin text labels set to the ``title`` text. Example:: self.setWindowTitle('My Cool Addon') """ self.title_bar.setLabel(title) def getWindowTitle(self): """Get window title.""" return self.title_bar.getLabel() class FullWindowMixin(xbmcgui.Window): """An abstract class to define window event processing.""" def onAction(self, action): """ Catch button actions. ``action`` is an instance of :class:`xbmcgui.Action` class. 
""" if action == ACTION_PREVIOUS_MENU: self.close() else: self._executeConnected(action, self.actions_connected) def onControl(self, control): """ Catch activated controls. ``control`` is an instance of :class:`xbmcgui.Control` class. """ if control == self.window_close_button: self.close() else: self._executeConnected(control, self.controls_connected) class DialogWindowMixin(xbmcgui.WindowDialog): """An abstract class to define window event processing.""" def onAction(self, action): """ Catch button actions. ``action`` is an instance of class:`xbmcgui.Action` class. """ if action == ACTION_PREVIOUS_MENU: self.close() else: self._executeConnected(action, self.actions_connected) def onControl(self, control): """ Catch activated controls. ``control`` is an instance of :class:`xbmcgui.Control` class. """ if control == self.window_close_button: self.close() else: self._executeConnected(control, self.controls_connected) class BlankFullWindow(FullWindowMixin, AbstractWindow): """ BlankFullWindow() Addon UI container with a solid background. This is a blank window with a black background and without any elements whatsoever. The decoration and layout are completely up to an addon developer. The window controls can hide under video or music visualization. """ pass class BlankDialogWindow(DialogWindowMixin, AbstractWindow): """ BlankDialogWindow() Addon UI container with a transparent background. This is a blank window with a transparent background and without any elements whatsoever. The decoration and layout are completely up to an addon developer. The window controls are always displayed over video or music visualization. """ pass class AddonFullWindow(FullWindowMixin, AddonWindow): """ AddonFullWindow(title='') Addon UI container with a solid background. ``AddonFullWindow`` instance is displayed on top of the main background image -- ``self.main_bg`` -- and can hide behind a fullscreen video or music viaualisation. 
Minimal example:: addon = AddonFullWindow('My Cool Addon') addon.setGeometry(400, 300, 4, 3) addon.doModal() """ def __new__(cls, title='', *args, **kwargs): return super(AddonFullWindow, cls).__new__(cls, *args, **kwargs) def _setFrame(self, title): """ Set the image for for the fullscreen background. """ # Image for the fullscreen background. self.main_bg_img = os.path.join(_images, 'AddonWindow', 'SKINDEFAULT.jpg') # Fullscreen background image control. self.main_bg = xbmcgui.ControlImage(1, 1, 1280, 720, self.main_bg_img) self.addControl(self.main_bg) super(AddonFullWindow, self)._setFrame(title) def setBackground(self, image=''): """ Set the main bacground to an image file. :param image: path to an image file as str. Example:: self.setBackground('/images/bacground.png') """ self.main_bg.setImage(image) class AddonDialogWindow(DialogWindowMixin, AddonWindow): """ AddonDialogWindow(title='') Addon UI container with a transparent background. .. note:: ``AddonDialogWindow`` instance is displayed on top of XBMC UI, including fullscreen video and music visualization. Minimal example:: addon = AddonDialogWindow('My Cool Addon') addon.setGeometry(400, 300, 4, 3) addon.doModal() """ pass
apache-2.0
atplanet/ansible-modules-extras
network/snmp_facts.py
64
13253
#!/usr/bin/python

# This file is part of Networklore's snmp library for Ansible
#
# The module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
---
module: snmp_facts
version_added: "1.9"
author: "Patrick Ogenstad (@ogenstad)"
short_description: Retrieve facts for a device using SNMP.
description:
    - Retrieve facts for a device using SNMP, the facts will be
      inserted to the ansible_facts key.
requirements:
    - pysnmp
options:
    host:
        description:
            - Set to target snmp server (normally {{inventory_hostname}})
        required: true
    version:
        description:
            - SNMP Version to use, v2/v2c or v3
        choices: [ 'v2', 'v2c', 'v3' ]
        required: true
    community:
        description:
            - The SNMP community string, required if version is v2/v2c
        required: false
    level:
        description:
            - Authentication level, required if version is v3
        choices: [ 'authPriv', 'authNoPriv' ]
        required: false
    username:
        description:
            - Username for SNMPv3, required if version is v3
        required: false
    integrity:
        description:
            - Hashing algorithm, required if version is v3
        choices: [ 'md5', 'sha' ]
        required: false
    authkey:
        description:
            - Authentication key, required if version is v3
        required: false
    privacy:
        description:
            - Encryption algorithm, required if level is authPriv
        choices: [ 'des', 'aes' ]
        required: false
    privkey:
        description:
            - Encryption key, required if version is authPriv
        required: false
'''

EXAMPLES = '''
# Gather facts with SNMP version 2
# NOTE: 'version' must be one of the declared choices; the previous example
# used the invalid value '2c'.
- snmp_facts: host={{ inventory_hostname }} version=v2c community=public
  connection: local

# Gather facts using SNMP version 3
- snmp_facts:
    host={{ inventory_hostname }}
    version=v3
    level=authPriv
    integrity=sha
    privacy=aes
    username=snmp-user
    authkey=abc12345
    privkey=def6789
  delegate_to: localhost
'''

from ansible.module_utils.basic import *
from collections import defaultdict

try:
    from pysnmp.entity.rfc3413.oneliner import cmdgen
    has_pysnmp = True
except:
    has_pysnmp = False


class DefineOid(object):
    """Container for the SNMP OIDs polled by this module.

    With ``dotprefix=True`` every OID carries a leading dot (the form used
    when issuing requests); without the dot the strings match the textual
    representation of OIDs in responses, so they can be compared against
    the values coming back from the device.
    """

    def __init__(self, dotprefix=False):
        if dotprefix:
            dp = "."
        else:
            dp = ""

        # From SNMPv2-MIB
        self.sysDescr = dp + "1.3.6.1.2.1.1.1.0"
        self.sysObjectId = dp + "1.3.6.1.2.1.1.2.0"
        self.sysUpTime = dp + "1.3.6.1.2.1.1.3.0"
        self.sysContact = dp + "1.3.6.1.2.1.1.4.0"
        self.sysName = dp + "1.3.6.1.2.1.1.5.0"
        self.sysLocation = dp + "1.3.6.1.2.1.1.6.0"

        # From IF-MIB
        self.ifIndex = dp + "1.3.6.1.2.1.2.2.1.1"
        self.ifDescr = dp + "1.3.6.1.2.1.2.2.1.2"
        self.ifMtu = dp + "1.3.6.1.2.1.2.2.1.4"
        self.ifSpeed = dp + "1.3.6.1.2.1.2.2.1.5"
        self.ifPhysAddress = dp + "1.3.6.1.2.1.2.2.1.6"
        self.ifAdminStatus = dp + "1.3.6.1.2.1.2.2.1.7"
        self.ifOperStatus = dp + "1.3.6.1.2.1.2.2.1.8"
        self.ifAlias = dp + "1.3.6.1.2.1.31.1.1.1.18"

        # From IP-MIB
        self.ipAdEntAddr = dp + "1.3.6.1.2.1.4.20.1.1"
        self.ipAdEntIfIndex = dp + "1.3.6.1.2.1.4.20.1.2"
        self.ipAdEntNetMask = dp + "1.3.6.1.2.1.4.20.1.3"


def decode_hex(hexstring):
    """Decode a pysnmp ``0x...`` hex string into raw text.

    Values that are too short or that lack the ``0x`` prefix are returned
    unchanged. Uses the Python 2 'hex' codec, matching the rest of this
    (Python 2 era) module.
    """
    if len(hexstring) < 3:
        return hexstring
    if hexstring[:2] == "0x":
        return hexstring[2:].decode("hex")
    return hexstring


def decode_mac(hexstring):
    """Strip the ``0x`` prefix from a 6-octet MAC address string.

    Anything that is not exactly 14 characters (``0x`` + 12 hex digits) is
    returned unchanged.
    """
    if len(hexstring) != 14:
        return hexstring
    if hexstring[:2] == "0x":
        return hexstring[2:]
    return hexstring


def lookup_adminstatus(int_adminstatus):
    """Map an IF-MIB ifAdminStatus integer to its textual name ('' if unknown)."""
    adminstatus_options = {
        1: 'up',
        2: 'down',
        3: 'testing'
    }
    if int_adminstatus in adminstatus_options:
        return adminstatus_options[int_adminstatus]
    return ""


def lookup_operstatus(int_operstatus):
    """Map an IF-MIB ifOperStatus integer to its textual name ('' if unknown)."""
    operstatus_options = {
        1: 'up',
        2: 'down',
        3: 'testing',
        4: 'unknown',
        5: 'dormant',
        6: 'notPresent',
        7: 'lowerLayerDown'
    }
    if int_operstatus in operstatus_options:
        return operstatus_options[int_operstatus]
    return ""


def main():
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(required=True),
            version=dict(required=True, choices=['v2', 'v2c', 'v3']),
            community=dict(required=False, default=False),
            username=dict(required=False),
            level=dict(required=False, choices=['authNoPriv', 'authPriv']),
            integrity=dict(required=False, choices=['md5', 'sha']),
            privacy=dict(required=False, choices=['des', 'aes']),
            authkey=dict(required=False),
            privkey=dict(required=False),
            removeplaceholder=dict(required=False)),
        required_together=(
            ['username', 'level', 'integrity', 'authkey'],
            ['privacy', 'privkey'],
        ),
        supports_check_mode=False)

    m_args = module.params

    if not has_pysnmp:
        module.fail_json(msg='Missing required pysnmp module (check docs)')

    cmdGen = cmdgen.CommandGenerator()

    # Verify that we receive a community when using snmp v2
    if m_args['version'] == "v2" or m_args['version'] == "v2c":
        if m_args['community'] is False:
            module.fail_json(msg='Community not set when using snmp version 2')

    if m_args['version'] == "v3":
        if m_args['username'] is None:
            module.fail_json(msg='Username not set when using snmp version 3')

        if m_args['level'] == "authPriv" and m_args['privacy'] is None:
            module.fail_json(msg='Privacy algorithm not set when using authPriv')

        if m_args['integrity'] == "sha":
            integrity_proto = cmdgen.usmHMACSHAAuthProtocol
        elif m_args['integrity'] == "md5":
            integrity_proto = cmdgen.usmHMACMD5AuthProtocol

        if m_args['privacy'] == "aes":
            privacy_proto = cmdgen.usmAesCfb128Protocol
        elif m_args['privacy'] == "des":
            privacy_proto = cmdgen.usmDESPrivProtocol

    # Use SNMP Version 2
    if m_args['version'] == "v2" or m_args['version'] == "v2c":
        snmp_auth = cmdgen.CommunityData(m_args['community'])

    # Use SNMP Version 3 with authNoPriv
    elif m_args['level'] == "authNoPriv":
        snmp_auth = cmdgen.UsmUserData(m_args['username'], authKey=m_args['authkey'],
                                       authProtocol=integrity_proto)

    # Use SNMP Version 3 with authPriv
    else:
        snmp_auth = cmdgen.UsmUserData(m_args['username'], authKey=m_args['authkey'],
                                       privKey=m_args['privkey'], authProtocol=integrity_proto,
                                       privProtocol=privacy_proto)

    # Use p to prefix OIDs with a dot for polling;
    # use v without a prefix to match returned values.
    p = DefineOid(dotprefix=True)
    v = DefineOid(dotprefix=False)

    # Infinitely-nesting dict, so results can be filled in without
    # pre-creating intermediate levels.
    Tree = lambda: defaultdict(Tree)

    results = Tree()

    # Scalar system facts (sysDescr, sysName, ...).
    errorIndication, errorStatus, errorIndex, varBinds = cmdGen.getCmd(
        snmp_auth,
        cmdgen.UdpTransportTarget((m_args['host'], 161)),
        cmdgen.MibVariable(p.sysDescr,),
        cmdgen.MibVariable(p.sysObjectId,),
        cmdgen.MibVariable(p.sysUpTime,),
        cmdgen.MibVariable(p.sysContact,),
        cmdgen.MibVariable(p.sysName,),
        cmdgen.MibVariable(p.sysLocation,),
        lookupMib=False
    )

    if errorIndication:
        module.fail_json(msg=str(errorIndication))

    for oid, val in varBinds:
        current_oid = oid.prettyPrint()
        current_val = val.prettyPrint()
        if current_oid == v.sysDescr:
            results['ansible_sysdescr'] = decode_hex(current_val)
        elif current_oid == v.sysObjectId:
            results['ansible_sysobjectid'] = current_val
        elif current_oid == v.sysUpTime:
            results['ansible_sysuptime'] = current_val
        elif current_oid == v.sysContact:
            results['ansible_syscontact'] = current_val
        elif current_oid == v.sysName:
            results['ansible_sysname'] = current_val
        elif current_oid == v.sysLocation:
            results['ansible_syslocation'] = current_val

    # Per-interface and per-address tables (IF-MIB / IP-MIB walks).
    errorIndication, errorStatus, errorIndex, varTable = cmdGen.nextCmd(
        snmp_auth,
        cmdgen.UdpTransportTarget((m_args['host'], 161)),
        cmdgen.MibVariable(p.ifIndex,),
        cmdgen.MibVariable(p.ifDescr,),
        cmdgen.MibVariable(p.ifMtu,),
        cmdgen.MibVariable(p.ifSpeed,),
        cmdgen.MibVariable(p.ifPhysAddress,),
        cmdgen.MibVariable(p.ifAdminStatus,),
        cmdgen.MibVariable(p.ifOperStatus,),
        cmdgen.MibVariable(p.ipAdEntAddr,),
        cmdgen.MibVariable(p.ipAdEntIfIndex,),
        cmdgen.MibVariable(p.ipAdEntNetMask,),
        cmdgen.MibVariable(p.ifAlias,),
        lookupMib=False
    )

    if errorIndication:
        module.fail_json(msg=str(errorIndication))

    interface_indexes = []

    all_ipv4_addresses = []
    ipv4_networks = Tree()

    for varBinds in varTable:
        for oid, val in varBinds:
            current_oid = oid.prettyPrint()
            current_val = val.prettyPrint()
            if v.ifIndex in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['ifindex'] = current_val
                interface_indexes.append(ifIndex)
            if v.ifDescr in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['name'] = current_val
            if v.ifMtu in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['mtu'] = current_val
            # Bug fix: 'speed' was previously populated from the ifMtu OID
            # (a copy/paste duplicate), so it always mirrored the MTU.
            if v.ifSpeed in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['speed'] = current_val
            if v.ifPhysAddress in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['mac'] = decode_mac(current_val)
            if v.ifAdminStatus in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['adminstatus'] = lookup_adminstatus(int(current_val))
            if v.ifOperStatus in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['operstatus'] = lookup_operstatus(int(current_val))
            if v.ipAdEntAddr in current_oid:
                # The last four OID components are the IP address itself.
                curIPList = current_oid.rsplit('.', 4)[-4:]
                curIP = ".".join(curIPList)
                ipv4_networks[curIP]['address'] = current_val
                all_ipv4_addresses.append(current_val)
            if v.ipAdEntIfIndex in current_oid:
                curIPList = current_oid.rsplit('.', 4)[-4:]
                curIP = ".".join(curIPList)
                ipv4_networks[curIP]['interface'] = current_val
            if v.ipAdEntNetMask in current_oid:
                curIPList = current_oid.rsplit('.', 4)[-4:]
                curIP = ".".join(curIPList)
                ipv4_networks[curIP]['netmask'] = current_val
            if v.ifAlias in current_oid:
                ifIndex = int(current_oid.rsplit('.', 1)[-1])
                results['ansible_interfaces'][ifIndex]['description'] = current_val

    # Group the discovered IPv4 networks by their owning interface index.
    interface_to_ipv4 = {}
    for ipv4_network in ipv4_networks:
        current_interface = ipv4_networks[ipv4_network]['interface']
        current_network = {
            'address': ipv4_networks[ipv4_network]['address'],
            'netmask': ipv4_networks[ipv4_network]['netmask']
        }
        # Previously both branches of an if/else appended; collapsed to one.
        if current_interface not in interface_to_ipv4:
            interface_to_ipv4[current_interface] = []
        interface_to_ipv4[current_interface].append(current_network)

    for interface in interface_to_ipv4:
        results['ansible_interfaces'][int(interface)]['ipv4'] = interface_to_ipv4[interface]

    results['ansible_all_ipv4_addresses'] = all_ipv4_addresses

    module.exit_json(ansible_facts=results)

main()
gpl-3.0
jkur/mmm
mmm/forms.py
1
1345
from flask_wtf import Form
from wtforms import validators
from wtforms import StringField, PasswordField, BooleanField
from mmm.validators import validate_active_user, validate_domain_name, validate_email_username, validate_combined_email_address
from mmm.models import Domain
from mmm.fields import DomainField


class Domain_Form(Form):
    """Form for creating or editing a mail domain."""
    # Domain name is mandatory and must pass DNS-name validation.
    name = StringField('domainname', [validators.Required(message="domainname missing"),
                                      validate_domain_name])
    # Free-text description; optional (no validators attached).
    description = StringField()


class Account_Form(Form):
    """Form for creating or editing a mail account.

    NOTE(review): the email-address validators for the username are
    currently commented out, so only presence is enforced — confirm
    whether that is intentional.
    """
    username = StringField('username', [validators.Required(message="username missing"),
                                        #validate_email_username,
                                        #validate_combined_email_address
                                        ])
    # Select field backed by the available Domain records (see mmm.fields).
    domain = DomainField('domain', [validators.Required(message="select domain")])
    password = PasswordField('password', [validators.Required(message="password missing")])
    # Presumably toggles whether the account is enabled — TODO confirm semantics.
    active = BooleanField()


class Alias_Form(Form):
    """Form for creating or editing a mail alias.

    Unlike Account_Form, the username here IS validated as an email
    local part and as a combined address.
    """
    username = StringField('username', [validators.Required(message="username missing"),
                                        validate_email_username,
                                        validate_combined_email_address])
    domain = DomainField('domain', [validators.Required(message="select domain")])
    # Destination address the alias forwards to; optional (no validators attached).
    target = StringField()
gpl-3.0
lukeroge/CloudBot
cloudbot/util/timeformat.py
16
6479
""" timeformat.py Contains functions to format time periods. Based on code from the Django project and CloudBot contributors. The licensing for this module isn't solid, because I started working on this module before I had a proper system for tracking code licences. If your code is in this file and you have any queries, contact me by email at <lukeroge@gmail.com>! Maintainer: - Luke Rogers <https://github.com/lukeroge> License: BSD license Copyright (c) Django Software Foundation and individual contributors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Django nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import datetime from cloudbot.util import formatting def time_since(d, now=None, count=2, accuracy=6, simple=False): """ Takes two datetime objects and returns the time between d and now as a nicely formatted string, e.g. "10 minutes". If d occurs after now, then "0 minutes" is returned. This function has a number of optional arguments that can be combined: SIMPLE: displays the time in a simple format >> timesince(SECONDS) 1 hour, 2 minutes and 34 seconds >> timesince(SECONDS, simple=True) 1h 2m 34s COUNT: how many periods should be shown (default 3) >> timesince(SECONDS) 147 years, 9 months and 8 weeks >> timesince(SECONDS, count=6) 147 years, 9 months, 7 weeks, 18 hours, 12 minutes and 34 seconds """ # Convert int or float (unix epoch) to datetime.datetime for comparison if isinstance(d, int) or isinstance(d, float): d = datetime.datetime.fromtimestamp(d) if isinstance(now, int) or isinstance(now, float): now = datetime.datetime.fromtimestamp(now) # Convert datetime.date to datetime.datetime for comparison. if not isinstance(d, datetime.datetime): d = datetime.datetime(d.year, d.month, d.day) if now and not isinstance(now, datetime.datetime): now = datetime.datetime(now.year, now.month, now.day) if not now: now = datetime.datetime.now() # ignore microsecond part of 'd' since we removed it from 'now' delta = now - (d - datetime.timedelta(0, 0, d.microsecond)) since = delta.days * 24 * 60 * 60 + delta.seconds if since <= 0: # d is in the future compared to now, stop processing. return '0 ' + 'minutes' # pass the number in seconds on to format_time to make the output string return format_time(since, count, accuracy, simple) # compatibility timesince = time_since def time_until(d, now=None, count=2, accuracy=6, simple=False): """ Like timesince, but returns a string measuring the time until the given time. 
""" if not now: now = datetime.datetime.now() return time_since(now, d, count, accuracy, simple) # compatibility timeuntil = time_until def format_time(seconds, count=3, accuracy=6, simple=False): """ Takes a length of time in seconds and returns a string describing that length of time. This function has a number of optional arguments that can be combined: SIMPLE: displays the time in a simple format >> format_time(SECONDS) 1 hour, 2 minutes and 34 seconds >> format_time(SECONDS, simple=True) 1h 2m 34s COUNT: how many periods should be shown (default 3) >> format_time(SECONDS) 147 years, 9 months and 8 weeks >> format_time(SECONDS, count=6) 147 years, 9 months, 7 weeks, 18 hours, 12 minutes and 34 seconds """ if simple: periods = [ ('c', 60 * 60 * 24 * 365 * 100), ('de', 60 * 60 * 24 * 365 * 10), ('y', 60 * 60 * 24 * 365), ('mo', 60 * 60 * 24 * 30), ('d', 60 * 60 * 24), ('h', 60 * 60), ('m', 60), ('s', 1) ] else: periods = [ (('century', 'centuries'), 60 * 60 * 24 * 365 * 100), (('decade', 'decades'), 60 * 60 * 24 * 365 * 10), (('year', 'years'), 60 * 60 * 24 * 365), (('month', 'months'), 60 * 60 * 24 * 30), (('day', 'days'), 60 * 60 * 24), (('hour', 'hours'), 60 * 60), (('minute', 'minutes'), 60), (('second', 'seconds'), 1) ] periods = periods[-accuracy:] strings = [] i = 0 for period_name, period_seconds in periods: if i < count: if seconds > period_seconds: period_value, seconds = divmod(seconds, period_seconds) i += 1 if simple: strings.append("{}{}".format(period_value, period_name)) else: if period_value == 1: strings.append("{} {}".format(period_value, period_name[0])) else: strings.append("{} {}".format(period_value, period_name[1])) else: break if simple: return " ".join(strings) else: return formatting.get_text_list(strings, "and")
gpl-3.0
hyperized/ansible
lib/ansible/modules/network/check_point/cp_mgmt_package_facts.py
20
3953
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = """
---
module: cp_mgmt_package_facts
short_description: Get package objects facts on Check Point over Web Services API
description:
  - Get package objects facts on Check Point devices.
  - All operations are performed over Web Services API.
  - This module handles both operations, get a specific object and get several objects,
    For getting a specific object use the parameter 'name'.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
  name:
    description:
      - Object name.
        This parameter is relevant only for getting a specific object.
    type: str
  details_level:
    description:
      - The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
        representation of the object.
    type: str
    choices: ['uid', 'standard', 'full']
  limit:
    description:
      - No more than that many results will be returned.
        This parameter is relevant only for getting few objects.
    type: int
  offset:
    description:
      - Skip that many results before beginning to return them.
        This parameter is relevant only for getting few objects.
    type: int
  order:
    description:
      - Sorts results by the given field. By default the results are sorted in the ascending order by name.
        This parameter is relevant only for getting few objects.
    type: list
    suboptions:
      ASC:
        description:
          - Sorts results by the given field in ascending order.
        type: str
        choices: ['name']
      DESC:
        description:
          - Sorts results by the given field in descending order.
        type: str
        choices: ['name']
extends_documentation_fragment: checkpoint_facts
"""

EXAMPLES = """
- name: show-package
  cp_mgmt_package_facts:
    name: New_Standard_Package_1

- name: show-packages
  cp_mgmt_package_facts:
    details_level: standard
    limit: 50
    offset: 0
"""

RETURN = """
ansible_facts:
  description: The checkpoint object facts.
  returned: always.
  type: dict
"""

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_facts, api_call_facts


def main():
    """Entry point: gather Check Point package facts and exit with results."""
    # Module-specific filtering/paging parameters; this is a facts module,
    # so it only reads configuration and never changes it.
    argument_spec = dict(
        name=dict(type='str'),
        details_level=dict(type='str', choices=['uid', 'standard', 'full']),
        limit=dict(type='int'),
        offset=dict(type='int'),
        order=dict(type='list', options=dict(
            ASC=dict(type='str', choices=['name']),
            DESC=dict(type='str', choices=['name'])
        ))
    )
    # Add the shared connection/facts parameters common to all
    # Check Point facts modules.
    argument_spec.update(checkpoint_argument_spec_for_facts)
    module = AnsibleModule(argument_spec=argument_spec)

    # The helper picks the singular endpoint ('show-package') when 'name'
    # is supplied and the plural one ('show-packages') otherwise.
    api_call_object = "package"
    api_call_object_plural_version = "packages"

    result = api_call_facts(module, api_call_object, api_call_object_plural_version)
    module.exit_json(ansible_facts=result)


if __name__ == '__main__':
    main()
gpl-3.0
alphafoobar/intellij-community
python/lib/Lib/encodings/cp860.py
593
34937
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp860', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA 0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS 0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE 0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX 0x0084: 0x00e3, # LATIN SMALL LETTER A WITH TILDE 0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE 0x0086: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE 0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA 0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX 0x0089: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX 0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE 0x008b: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE 0x008c: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX 0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE 0x008e: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE 0x008f: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX 0x0090: 0x00c9, # LATIN 
CAPITAL LETTER E WITH ACUTE 0x0091: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE 0x0092: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE 0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX 0x0094: 0x00f5, # LATIN SMALL LETTER O WITH TILDE 0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE 0x0096: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE 0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE 0x0098: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE 0x0099: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE 0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS 0x009b: 0x00a2, # CENT SIGN 0x009c: 0x00a3, # POUND SIGN 0x009d: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE 0x009e: 0x20a7, # PESETA SIGN 0x009f: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE 0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE 0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE 0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE 0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE 0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE 0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE 0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR 0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR 0x00a8: 0x00bf, # INVERTED QUESTION MARK 0x00a9: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE 0x00aa: 0x00ac, # NOT SIGN 0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF 0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER 0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK 0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00b0: 0x2591, # LIGHT SHADE 0x00b1: 0x2592, # MEDIUM SHADE 0x00b2: 0x2593, # DARK SHADE 0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL 0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL 0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT 0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT 0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL 0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL 0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT 0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x00db: 0x2588, # FULL BLOCK 0x00dc: 0x2584, # LOWER HALF BLOCK 0x00dd: 0x258c, # LEFT HALF BLOCK 0x00de: 0x2590, 
# RIGHT HALF BLOCK 0x00df: 0x2580, # UPPER HALF BLOCK 0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA 0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S 0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA 0x00e3: 0x03c0, # GREEK SMALL LETTER PI 0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA 0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA 0x00e6: 0x00b5, # MICRO SIGN 0x00e7: 0x03c4, # GREEK SMALL LETTER TAU 0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI 0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA 0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA 0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA 0x00ec: 0x221e, # INFINITY 0x00ed: 0x03c6, # GREEK SMALL LETTER PHI 0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON 0x00ef: 0x2229, # INTERSECTION 0x00f0: 0x2261, # IDENTICAL TO 0x00f1: 0x00b1, # PLUS-MINUS SIGN 0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO 0x00f3: 0x2264, # LESS-THAN OR EQUAL TO 0x00f4: 0x2320, # TOP HALF INTEGRAL 0x00f5: 0x2321, # BOTTOM HALF INTEGRAL 0x00f6: 0x00f7, # DIVISION SIGN 0x00f7: 0x2248, # ALMOST EQUAL TO 0x00f8: 0x00b0, # DEGREE SIGN 0x00f9: 0x2219, # BULLET OPERATOR 0x00fa: 0x00b7, # MIDDLE DOT 0x00fb: 0x221a, # SQUARE ROOT 0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N 0x00fd: 0x00b2, # SUPERSCRIPT TWO 0x00fe: 0x25a0, # BLACK SQUARE 0x00ff: 0x00a0, # NO-BREAK SPACE }) ### Decoding Table decoding_table = ( u'\x00' # 0x0000 -> NULL u'\x01' # 0x0001 -> START OF HEADING u'\x02' # 0x0002 -> START OF TEXT u'\x03' # 0x0003 -> END OF TEXT u'\x04' # 0x0004 -> END OF TRANSMISSION u'\x05' # 0x0005 -> ENQUIRY u'\x06' # 0x0006 -> ACKNOWLEDGE u'\x07' # 0x0007 -> BELL u'\x08' # 0x0008 -> BACKSPACE u'\t' # 0x0009 -> HORIZONTAL TABULATION u'\n' # 0x000a -> LINE FEED u'\x0b' # 0x000b -> VERTICAL TABULATION u'\x0c' # 0x000c -> FORM FEED u'\r' # 0x000d -> CARRIAGE RETURN u'\x0e' # 0x000e -> SHIFT OUT u'\x0f' # 0x000f -> SHIFT IN u'\x10' # 0x0010 -> DATA LINK ESCAPE u'\x11' # 0x0011 -> DEVICE CONTROL ONE u'\x12' # 0x0012 -> DEVICE CONTROL TWO u'\x13' # 0x0013 -> DEVICE CONTROL THREE u'\x14' # 0x0014 -> 
DEVICE CONTROL FOUR u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x0016 -> SYNCHRONOUS IDLE u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK u'\x18' # 0x0018 -> CANCEL u'\x19' # 0x0019 -> END OF MEDIUM u'\x1a' # 0x001a -> SUBSTITUTE u'\x1b' # 0x001b -> ESCAPE u'\x1c' # 0x001c -> FILE SEPARATOR u'\x1d' # 0x001d -> GROUP SEPARATOR u'\x1e' # 0x001e -> RECORD SEPARATOR u'\x1f' # 0x001f -> UNIT SEPARATOR u' ' # 0x0020 -> SPACE u'!' # 0x0021 -> EXCLAMATION MARK u'"' # 0x0022 -> QUOTATION MARK u'#' # 0x0023 -> NUMBER SIGN u'$' # 0x0024 -> DOLLAR SIGN u'%' # 0x0025 -> PERCENT SIGN u'&' # 0x0026 -> AMPERSAND u"'" # 0x0027 -> APOSTROPHE u'(' # 0x0028 -> LEFT PARENTHESIS u')' # 0x0029 -> RIGHT PARENTHESIS u'*' # 0x002a -> ASTERISK u'+' # 0x002b -> PLUS SIGN u',' # 0x002c -> COMMA u'-' # 0x002d -> HYPHEN-MINUS u'.' # 0x002e -> FULL STOP u'/' # 0x002f -> SOLIDUS u'0' # 0x0030 -> DIGIT ZERO u'1' # 0x0031 -> DIGIT ONE u'2' # 0x0032 -> DIGIT TWO u'3' # 0x0033 -> DIGIT THREE u'4' # 0x0034 -> DIGIT FOUR u'5' # 0x0035 -> DIGIT FIVE u'6' # 0x0036 -> DIGIT SIX u'7' # 0x0037 -> DIGIT SEVEN u'8' # 0x0038 -> DIGIT EIGHT u'9' # 0x0039 -> DIGIT NINE u':' # 0x003a -> COLON u';' # 0x003b -> SEMICOLON u'<' # 0x003c -> LESS-THAN SIGN u'=' # 0x003d -> EQUALS SIGN u'>' # 0x003e -> GREATER-THAN SIGN u'?' 
# 0x003f -> QUESTION MARK u'@' # 0x0040 -> COMMERCIAL AT u'A' # 0x0041 -> LATIN CAPITAL LETTER A u'B' # 0x0042 -> LATIN CAPITAL LETTER B u'C' # 0x0043 -> LATIN CAPITAL LETTER C u'D' # 0x0044 -> LATIN CAPITAL LETTER D u'E' # 0x0045 -> LATIN CAPITAL LETTER E u'F' # 0x0046 -> LATIN CAPITAL LETTER F u'G' # 0x0047 -> LATIN CAPITAL LETTER G u'H' # 0x0048 -> LATIN CAPITAL LETTER H u'I' # 0x0049 -> LATIN CAPITAL LETTER I u'J' # 0x004a -> LATIN CAPITAL LETTER J u'K' # 0x004b -> LATIN CAPITAL LETTER K u'L' # 0x004c -> LATIN CAPITAL LETTER L u'M' # 0x004d -> LATIN CAPITAL LETTER M u'N' # 0x004e -> LATIN CAPITAL LETTER N u'O' # 0x004f -> LATIN CAPITAL LETTER O u'P' # 0x0050 -> LATIN CAPITAL LETTER P u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q u'R' # 0x0052 -> LATIN CAPITAL LETTER R u'S' # 0x0053 -> LATIN CAPITAL LETTER S u'T' # 0x0054 -> LATIN CAPITAL LETTER T u'U' # 0x0055 -> LATIN CAPITAL LETTER U u'V' # 0x0056 -> LATIN CAPITAL LETTER V u'W' # 0x0057 -> LATIN CAPITAL LETTER W u'X' # 0x0058 -> LATIN CAPITAL LETTER X u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y u'Z' # 0x005a -> LATIN CAPITAL LETTER Z u'[' # 0x005b -> LEFT SQUARE BRACKET u'\\' # 0x005c -> REVERSE SOLIDUS u']' # 0x005d -> RIGHT SQUARE BRACKET u'^' # 0x005e -> CIRCUMFLEX ACCENT u'_' # 0x005f -> LOW LINE u'`' # 0x0060 -> GRAVE ACCENT u'a' # 0x0061 -> LATIN SMALL LETTER A u'b' # 0x0062 -> LATIN SMALL LETTER B u'c' # 0x0063 -> LATIN SMALL LETTER C u'd' # 0x0064 -> LATIN SMALL LETTER D u'e' # 0x0065 -> LATIN SMALL LETTER E u'f' # 0x0066 -> LATIN SMALL LETTER F u'g' # 0x0067 -> LATIN SMALL LETTER G u'h' # 0x0068 -> LATIN SMALL LETTER H u'i' # 0x0069 -> LATIN SMALL LETTER I u'j' # 0x006a -> LATIN SMALL LETTER J u'k' # 0x006b -> LATIN SMALL LETTER K u'l' # 0x006c -> LATIN SMALL LETTER L u'm' # 0x006d -> LATIN SMALL LETTER M u'n' # 0x006e -> LATIN SMALL LETTER N u'o' # 0x006f -> LATIN SMALL LETTER O u'p' # 0x0070 -> LATIN SMALL LETTER P u'q' # 0x0071 -> LATIN SMALL LETTER Q u'r' # 0x0072 -> LATIN SMALL LETTER R u's' # 0x0073 -> 
LATIN SMALL LETTER S u't' # 0x0074 -> LATIN SMALL LETTER T u'u' # 0x0075 -> LATIN SMALL LETTER U u'v' # 0x0076 -> LATIN SMALL LETTER V u'w' # 0x0077 -> LATIN SMALL LETTER W u'x' # 0x0078 -> LATIN SMALL LETTER X u'y' # 0x0079 -> LATIN SMALL LETTER Y u'z' # 0x007a -> LATIN SMALL LETTER Z u'{' # 0x007b -> LEFT CURLY BRACKET u'|' # 0x007c -> VERTICAL LINE u'}' # 0x007d -> RIGHT CURLY BRACKET u'~' # 0x007e -> TILDE u'\x7f' # 0x007f -> DELETE u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\xe3' # 0x0084 -> LATIN SMALL LETTER A WITH TILDE u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE u'\xc1' # 0x0086 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xca' # 0x0089 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE u'\xcd' # 0x008b -> LATIN CAPITAL LETTER I WITH ACUTE u'\xd4' # 0x008c -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE u'\xc3' # 0x008e -> LATIN CAPITAL LETTER A WITH TILDE u'\xc2' # 0x008f -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xc0' # 0x0091 -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc8' # 0x0092 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\xf5' # 0x0094 -> LATIN SMALL LETTER O WITH TILDE u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE u'\xda' # 0x0096 -> LATIN CAPITAL LETTER U WITH ACUTE u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE u'\xcc' # 0x0098 -> LATIN CAPITAL LETTER I WITH GRAVE u'\xd5' # 0x0099 -> LATIN CAPITAL LETTER O WITH TILDE u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\xa2' # 0x009b -> CENT SIGN u'\xa3' # 0x009c -> POUND SIGN u'\xd9' # 0x009d -> 
LATIN CAPITAL LETTER U WITH GRAVE u'\u20a7' # 0x009e -> PESETA SIGN u'\xd3' # 0x009f -> LATIN CAPITAL LETTER O WITH ACUTE u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE u'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR u'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK u'\xd2' # 0x00a9 -> LATIN CAPITAL LETTER O WITH GRAVE u'\xac' # 0x00aa -> NOT SIGN u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\u2591' # 0x00b0 -> LIGHT SHADE u'\u2592' # 0x00b1 -> MEDIUM SHADE u'\u2593' # 0x00b2 -> DARK SHADE u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL u'\u251c' # 0x00c3 -> BOX DRAWINGS 
LIGHT VERTICAL AND RIGHT u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT u'\u2588' # 0x00db -> FULL BLOCK u'\u2584' # 0x00dc -> LOWER HALF BLOCK u'\u258c' # 0x00dd -> LEFT HALF BLOCK u'\u2590' # 0x00de -> RIGHT HALF BLOCK u'\u2580' # 0x00df -> UPPER HALF BLOCK u'\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S u'\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA u'\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI u'\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA u'\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA u'\xb5' # 0x00e6 -> MICRO SIGN u'\u03c4' # 0x00e7 -> 
GREEK SMALL LETTER TAU u'\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI u'\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA u'\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA u'\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA u'\u221e' # 0x00ec -> INFINITY u'\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI u'\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON u'\u2229' # 0x00ef -> INTERSECTION u'\u2261' # 0x00f0 -> IDENTICAL TO u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO u'\u2320' # 0x00f4 -> TOP HALF INTEGRAL u'\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL u'\xf7' # 0x00f6 -> DIVISION SIGN u'\u2248' # 0x00f7 -> ALMOST EQUAL TO u'\xb0' # 0x00f8 -> DEGREE SIGN u'\u2219' # 0x00f9 -> BULLET OPERATOR u'\xb7' # 0x00fa -> MIDDLE DOT u'\u221a' # 0x00fb -> SQUARE ROOT u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N u'\xb2' # 0x00fd -> SUPERSCRIPT TWO u'\u25a0' # 0x00fe -> BLACK SQUARE u'\xa0' # 0x00ff -> NO-BREAK SPACE ) ### Encoding Map encoding_map = { 0x0000: 0x0000, # NULL 0x0001: 0x0001, # START OF HEADING 0x0002: 0x0002, # START OF TEXT 0x0003: 0x0003, # END OF TEXT 0x0004: 0x0004, # END OF TRANSMISSION 0x0005: 0x0005, # ENQUIRY 0x0006: 0x0006, # ACKNOWLEDGE 0x0007: 0x0007, # BELL 0x0008: 0x0008, # BACKSPACE 0x0009: 0x0009, # HORIZONTAL TABULATION 0x000a: 0x000a, # LINE FEED 0x000b: 0x000b, # VERTICAL TABULATION 0x000c: 0x000c, # FORM FEED 0x000d: 0x000d, # CARRIAGE RETURN 0x000e: 0x000e, # SHIFT OUT 0x000f: 0x000f, # SHIFT IN 0x0010: 0x0010, # DATA LINK ESCAPE 0x0011: 0x0011, # DEVICE CONTROL ONE 0x0012: 0x0012, # DEVICE CONTROL TWO 0x0013: 0x0013, # DEVICE CONTROL THREE 0x0014: 0x0014, # DEVICE CONTROL FOUR 0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE 0x0016: 0x0016, # SYNCHRONOUS IDLE 0x0017: 0x0017, # END OF TRANSMISSION BLOCK 0x0018: 0x0018, # CANCEL 0x0019: 0x0019, # END OF MEDIUM 0x001a: 0x001a, # SUBSTITUTE 0x001b: 0x001b, # ESCAPE 0x001c: 0x001c, # FILE SEPARATOR 0x001d: 0x001d, # GROUP SEPARATOR 
0x001e: 0x001e, # RECORD SEPARATOR 0x001f: 0x001f, # UNIT SEPARATOR 0x0020: 0x0020, # SPACE 0x0021: 0x0021, # EXCLAMATION MARK 0x0022: 0x0022, # QUOTATION MARK 0x0023: 0x0023, # NUMBER SIGN 0x0024: 0x0024, # DOLLAR SIGN 0x0025: 0x0025, # PERCENT SIGN 0x0026: 0x0026, # AMPERSAND 0x0027: 0x0027, # APOSTROPHE 0x0028: 0x0028, # LEFT PARENTHESIS 0x0029: 0x0029, # RIGHT PARENTHESIS 0x002a: 0x002a, # ASTERISK 0x002b: 0x002b, # PLUS SIGN 0x002c: 0x002c, # COMMA 0x002d: 0x002d, # HYPHEN-MINUS 0x002e: 0x002e, # FULL STOP 0x002f: 0x002f, # SOLIDUS 0x0030: 0x0030, # DIGIT ZERO 0x0031: 0x0031, # DIGIT ONE 0x0032: 0x0032, # DIGIT TWO 0x0033: 0x0033, # DIGIT THREE 0x0034: 0x0034, # DIGIT FOUR 0x0035: 0x0035, # DIGIT FIVE 0x0036: 0x0036, # DIGIT SIX 0x0037: 0x0037, # DIGIT SEVEN 0x0038: 0x0038, # DIGIT EIGHT 0x0039: 0x0039, # DIGIT NINE 0x003a: 0x003a, # COLON 0x003b: 0x003b, # SEMICOLON 0x003c: 0x003c, # LESS-THAN SIGN 0x003d: 0x003d, # EQUALS SIGN 0x003e: 0x003e, # GREATER-THAN SIGN 0x003f: 0x003f, # QUESTION MARK 0x0040: 0x0040, # COMMERCIAL AT 0x0041: 0x0041, # LATIN CAPITAL LETTER A 0x0042: 0x0042, # LATIN CAPITAL LETTER B 0x0043: 0x0043, # LATIN CAPITAL LETTER C 0x0044: 0x0044, # LATIN CAPITAL LETTER D 0x0045: 0x0045, # LATIN CAPITAL LETTER E 0x0046: 0x0046, # LATIN CAPITAL LETTER F 0x0047: 0x0047, # LATIN CAPITAL LETTER G 0x0048: 0x0048, # LATIN CAPITAL LETTER H 0x0049: 0x0049, # LATIN CAPITAL LETTER I 0x004a: 0x004a, # LATIN CAPITAL LETTER J 0x004b: 0x004b, # LATIN CAPITAL LETTER K 0x004c: 0x004c, # LATIN CAPITAL LETTER L 0x004d: 0x004d, # LATIN CAPITAL LETTER M 0x004e: 0x004e, # LATIN CAPITAL LETTER N 0x004f: 0x004f, # LATIN CAPITAL LETTER O 0x0050: 0x0050, # LATIN CAPITAL LETTER P 0x0051: 0x0051, # LATIN CAPITAL LETTER Q 0x0052: 0x0052, # LATIN CAPITAL LETTER R 0x0053: 0x0053, # LATIN CAPITAL LETTER S 0x0054: 0x0054, # LATIN CAPITAL LETTER T 0x0055: 0x0055, # LATIN CAPITAL LETTER U 0x0056: 0x0056, # LATIN CAPITAL LETTER V 0x0057: 0x0057, # LATIN CAPITAL LETTER W 0x0058: 
0x0058, # LATIN CAPITAL LETTER X 0x0059: 0x0059, # LATIN CAPITAL LETTER Y 0x005a: 0x005a, # LATIN CAPITAL LETTER Z 0x005b: 0x005b, # LEFT SQUARE BRACKET 0x005c: 0x005c, # REVERSE SOLIDUS 0x005d: 0x005d, # RIGHT SQUARE BRACKET 0x005e: 0x005e, # CIRCUMFLEX ACCENT 0x005f: 0x005f, # LOW LINE 0x0060: 0x0060, # GRAVE ACCENT 0x0061: 0x0061, # LATIN SMALL LETTER A 0x0062: 0x0062, # LATIN SMALL LETTER B 0x0063: 0x0063, # LATIN SMALL LETTER C 0x0064: 0x0064, # LATIN SMALL LETTER D 0x0065: 0x0065, # LATIN SMALL LETTER E 0x0066: 0x0066, # LATIN SMALL LETTER F 0x0067: 0x0067, # LATIN SMALL LETTER G 0x0068: 0x0068, # LATIN SMALL LETTER H 0x0069: 0x0069, # LATIN SMALL LETTER I 0x006a: 0x006a, # LATIN SMALL LETTER J 0x006b: 0x006b, # LATIN SMALL LETTER K 0x006c: 0x006c, # LATIN SMALL LETTER L 0x006d: 0x006d, # LATIN SMALL LETTER M 0x006e: 0x006e, # LATIN SMALL LETTER N 0x006f: 0x006f, # LATIN SMALL LETTER O 0x0070: 0x0070, # LATIN SMALL LETTER P 0x0071: 0x0071, # LATIN SMALL LETTER Q 0x0072: 0x0072, # LATIN SMALL LETTER R 0x0073: 0x0073, # LATIN SMALL LETTER S 0x0074: 0x0074, # LATIN SMALL LETTER T 0x0075: 0x0075, # LATIN SMALL LETTER U 0x0076: 0x0076, # LATIN SMALL LETTER V 0x0077: 0x0077, # LATIN SMALL LETTER W 0x0078: 0x0078, # LATIN SMALL LETTER X 0x0079: 0x0079, # LATIN SMALL LETTER Y 0x007a: 0x007a, # LATIN SMALL LETTER Z 0x007b: 0x007b, # LEFT CURLY BRACKET 0x007c: 0x007c, # VERTICAL LINE 0x007d: 0x007d, # RIGHT CURLY BRACKET 0x007e: 0x007e, # TILDE 0x007f: 0x007f, # DELETE 0x00a0: 0x00ff, # NO-BREAK SPACE 0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK 0x00a2: 0x009b, # CENT SIGN 0x00a3: 0x009c, # POUND SIGN 0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR 0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00ac: 0x00aa, # NOT SIGN 0x00b0: 0x00f8, # DEGREE SIGN 0x00b1: 0x00f1, # PLUS-MINUS SIGN 0x00b2: 0x00fd, # SUPERSCRIPT TWO 0x00b5: 0x00e6, # MICRO SIGN 0x00b7: 0x00fa, # MIDDLE DOT 0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR 0x00bb: 0x00af, # RIGHT-POINTING DOUBLE 
ANGLE QUOTATION MARK 0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER 0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF 0x00bf: 0x00a8, # INVERTED QUESTION MARK 0x00c0: 0x0091, # LATIN CAPITAL LETTER A WITH GRAVE 0x00c1: 0x0086, # LATIN CAPITAL LETTER A WITH ACUTE 0x00c2: 0x008f, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX 0x00c3: 0x008e, # LATIN CAPITAL LETTER A WITH TILDE 0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA 0x00c8: 0x0092, # LATIN CAPITAL LETTER E WITH GRAVE 0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE 0x00ca: 0x0089, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX 0x00cc: 0x0098, # LATIN CAPITAL LETTER I WITH GRAVE 0x00cd: 0x008b, # LATIN CAPITAL LETTER I WITH ACUTE 0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE 0x00d2: 0x00a9, # LATIN CAPITAL LETTER O WITH GRAVE 0x00d3: 0x009f, # LATIN CAPITAL LETTER O WITH ACUTE 0x00d4: 0x008c, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX 0x00d5: 0x0099, # LATIN CAPITAL LETTER O WITH TILDE 0x00d9: 0x009d, # LATIN CAPITAL LETTER U WITH GRAVE 0x00da: 0x0096, # LATIN CAPITAL LETTER U WITH ACUTE 0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS 0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S 0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE 0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE 0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX 0x00e3: 0x0084, # LATIN SMALL LETTER A WITH TILDE 0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA 0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE 0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE 0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX 0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE 0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE 0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE 0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE 0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE 0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX 0x00f5: 0x0094, # LATIN SMALL LETTER O WITH TILDE 0x00f7: 0x00f6, # DIVISION SIGN 0x00f9: 0x0097, # LATIN SMALL LETTER U WITH 
GRAVE 0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE 0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS 0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA 0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA 0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA 0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI 0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA 0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA 0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA 0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON 0x03c0: 0x00e3, # GREEK SMALL LETTER PI 0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA 0x03c4: 0x00e7, # GREEK SMALL LETTER TAU 0x03c6: 0x00ed, # GREEK SMALL LETTER PHI 0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N 0x20a7: 0x009e, # PESETA SIGN 0x2219: 0x00f9, # BULLET OPERATOR 0x221a: 0x00fb, # SQUARE ROOT 0x221e: 0x00ec, # INFINITY 0x2229: 0x00ef, # INTERSECTION 0x2248: 0x00f7, # ALMOST EQUAL TO 0x2261: 0x00f0, # IDENTICAL TO 0x2264: 0x00f3, # LESS-THAN OR EQUAL TO 0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO 0x2320: 0x00f4, # TOP HALF INTEGRAL 0x2321: 0x00f5, # BOTTOM HALF INTEGRAL 0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL 0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL 0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT 0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT 0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL 0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL 0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x2556: 0x00b7, # BOX DRAWINGS DOWN 
DOUBLE AND LEFT SINGLE 0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT 0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x2580: 0x00df, # UPPER HALF BLOCK 0x2584: 0x00dc, # LOWER HALF BLOCK 0x2588: 0x00db, # FULL BLOCK 0x258c: 0x00dd, # LEFT HALF BLOCK 0x2590: 0x00de, # RIGHT HALF BLOCK 0x2591: 0x00b0, # LIGHT SHADE 0x2592: 0x00b1, # MEDIUM SHADE 0x2593: 0x00b2, # DARK SHADE 0x25a0: 0x00fe, # BLACK SQUARE }
apache-2.0
ClimateImpactLab/open-estimate
openest/models/curve.py
1
7352
import numpy as np
from .univariate_model import UnivariateModel
from scipy.interpolate import UnivariateSpline
from statsmodels.distributions.empirical_distribution import StepFunction


class UnivariateCurve(UnivariateModel):
    """Abstract base class for a deterministic single-variable response curve.

    Subclasses must implement ``__call__``.  Because a curve is
    deterministic, evaluation at a p-value (``eval_pval`` /
    ``eval_pvals``) ignores ``p`` and simply evaluates the curve.
    """

    def __init__(self, xx):
        super(UnivariateCurve, self).__init__(xx_is_categorical=False, xx=xx, scaled=True)

    def __call__(self, x):
        raise NotImplementedError("call not implemented")

    def get_xx(self):
        return self.xx

    def eval_pval(self, x, p, threshold=1e-3):
        # Deterministic: the p-value is irrelevant.
        return self(x)

    def eval_pvals(self, x, p, threshold=1e-3):
        return self(x)


class CurveCurve(UnivariateCurve):
    """A curve defined by an arbitrary callable ``curve``."""

    def __init__(self, xx, curve):
        super(CurveCurve, self).__init__(xx)
        self.curve = curve

    def __call__(self, x):
        return self.curve(x)

    @staticmethod
    def make_linear_spline_curve(xx, yy, limits):
        """Return a degree-1 spline through (xx, yy), held flat beyond ``limits``."""
        xx = np.concatenate(([limits[0]], xx, [limits[1]]))
        yy = np.concatenate(([yy[0]], yy, [yy[-1]]))
        return UnivariateSpline(xx, yy, s=0, k=1)


class FlatCurve(CurveCurve):
    """A constant curve: f(x) = yy for every x."""

    def __init__(self, yy):
        self.yy = yy
        super(FlatCurve, self).__init__([-np.inf, np.inf], lambda x: yy)


class LinearCurve(CurveCurve):
    """A line through the origin: f(x) = yy * x."""

    def __init__(self, yy):
        super(LinearCurve, self).__init__([-np.inf, np.inf], lambda x: yy * x)


class StepCurve(CurveCurve):
    """A piecewise-constant curve taking value ``yy[i]`` on the i-th interval of ``xxlimits``."""

    def __init__(self, xxlimits, yy, xtrans=None):
        step_function = StepFunction(xxlimits[1:-1], yy[1:], ival=yy[0])
        # Each interval is represented by its midpoint in self.xx.
        if xtrans is None:
            super(StepCurve, self).__init__((np.array(xxlimits[0:-1]) + np.array(xxlimits[1:])) / 2, lambda x: step_function(x))
        else:
            super(StepCurve, self).__init__((np.array(xxlimits[0:-1]) + np.array(xxlimits[1:])) / 2, lambda x: step_function(xtrans(x)))
        self.xxlimits = xxlimits
        self.yy = yy


class ZeroInterceptPolynomialCurve(UnivariateCurve):
    """A polynomial with no constant term: f(x) = ccs[0]*x + ccs[1]*x**2 + ..."""

    def __init__(self, xx, ccs):
        super(ZeroInterceptPolynomialCurve, self).__init__(xx)
        self.ccs = ccs
        # np.polyval wants highest order first; append the 0 constant term.
        self.pvcoeffs = list(ccs[::-1]) + [0]

    def __call__(self, x):
        return np.polyval(self.pvcoeffs, x)


def pos(x):
    """Return x where x > 0, else 0; works elementwise on arrays."""
    return x * (x > 0)


class CubicSplineCurve(UnivariateCurve):
    """A restricted cubic spline defined by ``knots`` and ``coeffs``.

    There are len(knots) - 1 basis terms (see ``get_terms``), one
    coefficient per term.
    """

    def __init__(self, knots, coeffs):
        super(CubicSplineCurve, self).__init__(knots)
        self.knots = knots
        self.coeffs = coeffs

    def get_terms(self, x):
        """Get the set of knots-1 terms representing temperature x."""
        terms = [x]
        for kk in range(len(self.knots) - 2):
            termx_k = pos(x - self.knots[kk])**3 - pos(x - self.knots[-2])**3 * (self.knots[-1] - self.knots[kk]) / (self.knots[-1] - self.knots[-2]) + pos(x - self.knots[-1])**3 * (self.knots[-2] - self.knots[kk]) / (self.knots[-1] - self.knots[-2])
            terms.append(termx_k)
        return terms

    def __call__(self, x):
        """Evaluate the spline: the basis terms dotted with the coefficients."""
        # Reuse get_terms instead of duplicating the basis formula here.
        terms = self.get_terms(np.array(x))  # make sure we have an array_like
        total = terms[0] * self.coeffs[0]
        for kk in range(len(self.knots) - 2):
            total += terms[kk + 1] * self.coeffs[kk + 1]
        return total


class CoefficientsCurve(UnivariateCurve):
    """A curve represented by the sum of multiple predictors, each multiplied by a coefficient."""

    def __init__(self, coeffs, curve, xtrans=None):
        super(CoefficientsCurve, self).__init__([-np.inf, np.inf])
        self.coeffs = coeffs
        self.curve = curve
        self.xtrans = xtrans

    def __call__(self, x):
        if np.isscalar(x):
            # A scalar cannot form a predictor matrix; delegate to the wrapped curve.
            return self.curve(x)
        elif self.xtrans is not None:
            return self.xtrans(x).dot(self.coeffs)
        else:
            x = np.array(x)
            return x.dot(self.coeffs)


class ShiftedCurve(UnivariateCurve):
    """Wraps ``curve``, adding a constant ``offset`` to every output."""

    def __init__(self, curve, offset):
        super(ShiftedCurve, self).__init__(curve.xx)
        self.curve = curve
        self.offset = offset

    def __call__(self, xs):
        return self.curve(xs) + self.offset


class ProductCurve(UnivariateCurve):
    """The pointwise product of two curves."""

    def __init__(self, curve1, curve2):
        super(ProductCurve, self).__init__(curve1.xx)
        self.curve1 = curve1
        self.curve2 = curve2

    def __call__(self, xs):
        return self.curve1(xs) * self.curve2(xs)


class ClippedCurve(UnivariateCurve):
    """Clips ``curve`` at zero: keeps positive values if ``cliplow``, else negative ones."""

    def __init__(self, curve, cliplow=True):
        super(ClippedCurve, self).__init__(curve.xx)
        self.curve = curve
        self.cliplow = cliplow

    def __call__(self, xs):
        ys = self.curve(xs)
        if self.cliplow:
            return ys * (ys > 0)
        else:
            return ys * (ys < 0)


class OtherClippedCurve(ClippedCurve):
    """Zeroes out ``value_curve`` wherever ``clipping_curve`` is at or below ``clipy``."""

    def __init__(self, clipping_curve, value_curve, clipy=0):
        super(OtherClippedCurve, self).__init__(value_curve)
        self.clipping_curve = clipping_curve
        self.clipy = clipy

    def __call__(self, xs):
        ys = self.curve(xs)
        clipping = self.clipping_curve(xs)
        # Replace missing values so the comparison below is well-defined.
        ys = [y if y is not None else 0 for y in ys]
        clipping = [y if not np.isnan(y) else 0 for y in clipping]
        return ys * (np.array(clipping) > self.clipy)


class MinimumCurve(UnivariateCurve):
    """The pointwise minimum of two curves."""

    def __init__(self, curve1, curve2):
        super(MinimumCurve, self).__init__(curve1.xx)
        self.curve1 = curve1
        self.curve2 = curve2

    def __call__(self, xs):
        return np.minimum(self.curve1(xs), self.curve2(xs))


class MaximumCurve(UnivariateCurve):
    """The pointwise maximum of two curves."""

    def __init__(self, curve1, curve2):
        super(MaximumCurve, self).__init__(curve1.xx)
        self.curve1 = curve1
        self.curve2 = curve2

    def __call__(self, xs):
        return np.maximum(self.curve1(xs), self.curve2(xs))


class SelectiveInputCurve(UnivariateCurve):
    """Assumes input is a matrix, and only pass selected input columns to child curve."""

    def __init__(self, curve, indices):
        super(SelectiveInputCurve, self).__init__(curve.xx)
        self.curve = curve
        self.indices = np.array(indices)

    def __call__(self, xs):
        return self.curve(xs[:, self.indices])


class PiecewiseCurve(UnivariateCurve):
    """Applies a different sub-curve on each interval between consecutive knots.

    ``xtrans`` maps a row of the input to the value compared against the
    knots (for example, to select the first column); it is only applied
    on the array path, since a scalar input is already a single value.
    """

    def __init__(self, curves, knots, xtrans=lambda x: x):
        super(PiecewiseCurve, self).__init__(knots)
        assert len(curves) == len(knots) - 1
        self.curves = curves
        self.knots = knots
        self.xtrans = xtrans  # for example, to select first column

    def __call__(self, xs):
        if np.isscalar(xs):
            for ii in range(len(self.knots) - 1):
                if xs >= self.knots[ii] and xs < self.knots[ii+1]:
                    return self.curves[ii](xs)
            return np.nan

        # Array path: rows outside every interval stay NaN.
        ys = np.ones(len(xs)) * np.nan
        for ii in range(len(self.knots) - 1):
            txs = self.xtrans(xs)
            within = (txs >= self.knots[ii]) & (txs < self.knots[ii+1])
            wixs = xs[within]
            if len(wixs) > 0:
                ys[within] = self.curves[ii](wixs)
        return ys
gpl-3.0
dmsurti/mayavi
examples/mayavi/interactive/subclassing_mayavi_application.py
8
2818
#!/usr/bin/env python
"""Example of scripting the Mayavi application by subclassing it.

A new scene is created and populated with a VTK data source and a few
simple modules.  Run it as::

    $ python test.py
"""
# Author: Prabhu Ramachandran <prabhu_r@users.sf.net>
# Copyright (c) 2005-2007, Enthought, Inc.
# License: BSD Style.

# Standard library imports
from os.path import join, abspath, dirname

# Enthought library imports
from mayavi.plugins.app import Mayavi
from mayavi.scripts.util import get_data_dir


class MyApp(Mayavi):

    def run(self):
        """Executed once the application GUI has started.

        All other Mayavi-specific imports must be made here, after the
        GUI is up.
        """
        from mayavi.sources.vtk_file_reader import VTKFileReader
        from mayavi.modules.outline import Outline
        from mayavi.modules.axes import Axes
        from mayavi.modules.grid_plane import GridPlane
        from mayavi.modules.image_plane_widget import ImagePlaneWidget
        from mayavi.modules.text import Text

        script = self.script

        # Fresh scene to hold everything created below.
        script.new_scene()

        # Load the sample VTK (old style) data file.
        reader = VTKFileReader()
        reader.initialize(join(get_data_dir(dirname(abspath(__file__))),
                               'heart.vtk'))
        script.add_source(reader)

        # A text banner, colored bright yellow.
        banner = Text(text='MayaVi rules!', x_position=0.2,
                      y_position=0.9, width=0.8)
        banner.property.color = 1, 1, 0
        script.add_module(banner)

        # Outline and axes around the data.
        script.add_module(Outline())
        script.add_module(Axes())

        # An orientation-axes marker for the scene.  This only works
        # with VTK 4.5 and above, hence the import guard.
        try:
            from mayavi.modules.orientation_axes import OrientationAxes
        except ImportError:
            pass
        else:
            marker = OrientationAxes()
            marker.marker.set_viewport(0.0, 0.8, 0.2, 1.0)
            script.add_module(marker)

        # Three simple grid planes.  The first is left at its default
        # axis ('x' per the original comments).
        script.add_module(GridPlane())

        plane_y = GridPlane()
        plane_y.grid_plane.axis = 'y'
        script.add_module(plane_y)

        plane_z = GridPlane()
        script.add_module(plane_z)
        plane_z.grid_plane.axis = 'z'

        # One image plane widget, sliced to the middle of the data.
        ipw = ImagePlaneWidget()
        script.add_module(ipw)
        ipw.ipw.slice_position = 16


if __name__ == '__main__':
    app = MyApp()
    app.main()
bsd-3-clause
duyetdev/openerp-6.1.1
openerp/addons/base_report_designer/plugin/openerp_report_designer/bin/script/ConvertFieldsToBraces.py
6
3248
########################################################################## # # Portions of this file are under the following copyright and license: # # # Copyright (c) 2003-2004 Danny Brewer # d29583@groovegarden.com # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # See: http://www.gnu.org/licenses/lgpl.html # # # and other portions are under the following copyright and license: # # # OpenERP, Open Source Management Solution>.. # Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
# #
##############################################################################

import uno
import unohelper
import string
import re

from com.sun.star.task import XJobExecutor

# When run as a standalone script (not packaged as an OpenOffice
# extension), pull in the GUI helpers and prompt for a login.
if __name__ != "package":
    from lib.gui import *
    from LoginTest import *
    database = "test"
    uid = 3


class ConvertFieldsToBraces(unohelper.Base, XJobExecutor):
    """OpenOffice job that replaces every DropDown text field in the
    current document with its literal text content (``Items[1]``)."""

    def __init__(self, ctx):
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"
        LoginTest()
        # Packaged extension with a failed login: abort immediately.
        if not loginstatus and __name__ == "package":
            exit(1)
        self.aReportSyntex = []
        self.getFields()

    def getFields(self):
        """Walk the document's text fields, inserting each DropDown
        field's second item at its anchor and disposing of the field."""
        desktop = getDesktop()
        doc = desktop.getCurrentComponent()
        oParEnum = doc.getTextFields().createEnumeration()
        while oParEnum.hasMoreElements():
            oPar = oParEnum.nextElement()
            if oPar.supportsService("com.sun.star.text.TextField.DropDown"):
                oPar.getAnchor().Text.insertString(oPar.getAnchor(), oPar.Items[1], False)
                oPar.dispose()


if __name__ != "package":
    ConvertFieldsToBraces(None)
else:
    g_ImplementationHelper.addImplementation(
        ConvertFieldsToBraces,
        "org.openoffice.openerp.report.convertFB",
        ("com.sun.star.task.Job",),
    )

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Pointedstick/ReplicatorG
skein_engines/skeinforge-31/skeinforge_application/skeinforge_plugins/profile_plugins/extrusion.py
2
2448
""" This page is in the table of contents. Extrusion is a script to set the extrusion profile for the skeinforge chain. The displayed craft sequence is the sequence in which the tools craft the model and export the output. On the extrusion dialog, clicking the 'Add Profile' button will duplicate the selected profile and give it the name in the input field. For example, if ABS is selected and the name ABS_black is in the input field, clicking the 'Add Profile' button will duplicate ABS and save it as ABS_black. The 'Delete Profile' button deletes the selected profile. The profile selection is the setting. If you hit 'Save and Close' the selection will be saved, if you hit 'Cancel' the selection will not be saved. However; adding and deleting a profile is a permanent action, for example 'Cancel' will not bring back any deleted profiles. To change the extrusion profile, in a shell in the profile_plugins folder type: > python extrusion.py An example of using extrusion from the python interpreter follows below. > python Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31) [GCC 4.2.1 (SUSE Linux)] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import extrusion >>> extrusion.main() This brings up the extrusion setting dialog. """ from __future__ import absolute_import import __init__ from fabmetheus_utilities import settings from skeinforge_application.skeinforge_utilities import skeinforge_profile import sys __author__ = 'Enrique Perez (perez_enrique@yahoo.com)' __date__ = '$Date: 2008/21/04 $' __license__ = 'GPL 3.0' def getCraftSequence(): "Get the extrusion craft sequence." return 'carve,preface,widen,inset,fill,multiply,speed,temperature,raft,chamber,tower,jitter,clip,stretch,comb,cool,hop,wipe,oozebane,splodge,home,lash,fillet,limit,dimension,unpause,export'.split(',') def getNewRepository(): "Get the repository constructor." return ExtrusionRepository() class ExtrusionRepository: "A class to handle the export settings." 
def __init__(self): "Set the default settings, execute title & settings fileName." skeinforge_profile.addListsSetCraftProfile( getCraftSequence(), 'ABS', self, 'skeinforge_plugins.profile_plugins.extrusion.html') def main(): "Display the export dialog." if len( sys.argv ) > 1: writeOutput(' '.join( sys.argv[1 :] ) ) else: settings.startMainLoopFromConstructor( getNewRepository() ) if __name__ == "__main__": main()
gpl-2.0
Work4Labs/lettuce
tests/integration/lib/Django-1.2.5/django/contrib/gis/gdal/prototypes/errcheck.py
404
4207
""" This module houses the error-checking routines used by the GDAL ctypes prototypes. """ from ctypes import c_void_p, string_at from django.contrib.gis.gdal.error import check_err, OGRException, SRSException from django.contrib.gis.gdal.libgdal import lgdal # Helper routines for retrieving pointers and/or values from # arguments passed in by reference. def arg_byref(args, offset=-1): "Returns the pointer argument's by-refernece value." return args[offset]._obj.value def ptr_byref(args, offset=-1): "Returns the pointer argument passed in by-reference." return args[offset]._obj def check_bool(result, func, cargs): "Returns the boolean evaluation of the value." if bool(result): return True else: return False ### String checking Routines ### def check_const_string(result, func, cargs, offset=None): """ Similar functionality to `check_string`, but does not free the pointer. """ if offset: check_err(result) ptr = ptr_byref(cargs, offset) return ptr.value else: return result def check_string(result, func, cargs, offset=-1, str_result=False): """ Checks the string output returned from the given function, and frees the string pointer allocated by OGR. The `str_result` keyword may be used when the result is the string pointer, otherwise the OGR error code is assumed. The `offset` keyword may be used to extract the string pointer passed in by-reference at the given slice offset in the function arguments. """ if str_result: # For routines that return a string. ptr = result if not ptr: s = None else: s = string_at(result) else: # Error-code return specified. check_err(result) ptr = ptr_byref(cargs, offset) # Getting the string value s = ptr.value # Correctly freeing the allocated memory beind GDAL pointer # w/the VSIFree routine. if ptr: lgdal.VSIFree(ptr) return s ### DataSource, Layer error-checking ### ### Envelope checking ### def check_envelope(result, func, cargs, offset=-1): "Checks a function that returns an OGR Envelope by reference." 
env = ptr_byref(cargs, offset) return env ### Geometry error-checking routines ### def check_geom(result, func, cargs): "Checks a function that returns a geometry." # OGR_G_Clone may return an integer, even though the # restype is set to c_void_p if isinstance(result, (int, long)): result = c_void_p(result) if not result: raise OGRException('Invalid geometry pointer returned from "%s".' % func.__name__) return result def check_geom_offset(result, func, cargs, offset=-1): "Chcks the geometry at the given offset in the C parameter list." check_err(result) geom = ptr_byref(cargs, offset=offset) return check_geom(geom, func, cargs) ### Spatial Reference error-checking routines ### def check_srs(result, func, cargs): if isinstance(result, (int, long)): result = c_void_p(result) if not result: raise SRSException('Invalid spatial reference pointer returned from "%s".' % func.__name__) return result ### Other error-checking routines ### def check_arg_errcode(result, func, cargs): """ The error code is returned in the last argument, by reference. Check its value with `check_err` before returning the result. """ check_err(arg_byref(cargs)) return result def check_errcode(result, func, cargs): """ Check the error code returned (c_int). """ check_err(result) return def check_pointer(result, func, cargs): "Makes sure the result pointer is valid." if isinstance(result, (int, long)): result = c_void_p(result) if bool(result): return result else: raise OGRException('Invalid pointer returned from "%s"' % func.__name__) def check_str_arg(result, func, cargs): """ This is for the OSRGet[Angular|Linear]Units functions, which require that the returned string pointer not be freed. This returns both the double and tring values. """ dbl = result ptr = cargs[-1]._obj return dbl, ptr.value
gpl-3.0
tangjonathan/HKQuiz
node_modules/pryjs/node_modules/pygmentize-bundled/vendor/pygments/pygments/styles/xcode.py
126
1501
# -*- coding: utf-8 -*-
"""
    pygments.styles.xcode
    ~~~~~~~~~~~~~~~~~~~~~

    Style similar to the `Xcode` default theme.

    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
    Number, Operator, Literal


class XcodeStyle(Style):
    """
    Style similar to the Xcode default colouring theme.
    """

    default_style = ''

    styles = {
        # Comments and preprocessor directives.
        Comment:                '#177500',
        Comment.Preproc:        '#633820',

        # String and character literals.
        String:                 '#C41A16',
        String.Char:            '#2300CE',

        Operator:               '#000000',
        Keyword:                '#A90D91',

        # Identifiers.
        Name:                   '#000000',
        Name.Attribute:         '#836C28',
        Name.Class:             '#3F6E75',
        Name.Function:          '#000000',
        Name.Builtin:           '#A90D91',
        # In Obj-C code this token is used to colour Cocoa types.
        Name.Builtin.Pseudo:    '#5B269A',
        Name.Variable:          '#000000',
        Name.Tag:               '#000000',
        Name.Decorator:         '#000000',
        # Workaround for a BUG here: the lexer treats multiline method
        # signatures as labels.
        Name.Label:             '#000000',

        # Numeric and other literals.
        Literal:                '#1C01CE',
        Number:                 '#1C01CE',

        Error:                  '#000000',
    }
mit
SebasSBM/django
tests/forms_tests/widget_tests/test_clearablefileinput.py
69
4360
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import ClearableFileInput
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible

from .base import WidgetTest


@python_2_unicode_compatible
class FakeFieldFile(object):
    """
    Quacks like a FieldFile (has a .url and unicode representation), but
    doesn't require us to care about storages etc.
    """
    url = 'something'

    def __str__(self):
        return self.url


class ClearableFileInputTest(WidgetTest):
    widget = ClearableFileInput()

    def test_clear_input_renders(self):
        """
        A ClearableFileInput with is_required False and rendered with an
        initial value that is a file renders a clear checkbox.
        """
        self.check_html(self.widget, 'myfile', FakeFieldFile(), html=(
            """
            Currently: <a href="something">something</a>
            <input type="checkbox" name="myfile-clear" id="myfile-clear_id" />
            <label for="myfile-clear_id">Clear</label><br />
            Change: <input type="file" name="myfile" />
            """
        ))

    def test_html_escaped(self):
        """
        A ClearableFileInput should escape name, filename, and URL
        when rendering HTML (#15182).
        """
        @python_2_unicode_compatible
        class StrangeFieldFile(object):
            url = "something?chapter=1&sect=2&copy=3&lang=en"

            def __str__(self):
                return '''something<div onclick="alert('oops')">.jpg'''

        widget = ClearableFileInput()
        field = StrangeFieldFile()
        output = widget.render('my<div>file', field)
        # The raw URL and filename must never appear unescaped.
        self.assertNotIn(field.url, output)
        self.assertIn('href="something?chapter=1&amp;sect=2&amp;copy=3&amp;lang=en"', output)
        self.assertNotIn(six.text_type(field), output)
        self.assertIn('something&lt;div onclick=&quot;alert(&#39;oops&#39;)&quot;&gt;.jpg', output)
        self.assertIn('my&lt;div&gt;file', output)
        self.assertNotIn('my<div>file', output)

    def test_html_does_not_mask_exceptions(self):
        """
        A ClearableFileInput should not mask exceptions produced while
        checking that it has a value.
        """
        @python_2_unicode_compatible
        class FailingURLFieldFile(object):
            @property
            def url(self):
                raise RuntimeError('Canary')

            def __str__(self):
                return 'value'

        widget = ClearableFileInput()
        field = FailingURLFieldFile()
        with self.assertRaisesMessage(RuntimeError, 'Canary'):
            widget.render('myfile', field)

    def test_clear_input_renders_only_if_not_required(self):
        """
        A ClearableFileInput with is_required=True does not render a clear
        checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        self.check_html(widget, 'myfile', FakeFieldFile(), html=(
            """
            Currently: <a href="something">something</a> <br />
            Change: <input type="file" name="myfile" />
            """
        ))

    def test_clear_input_renders_only_if_initial(self):
        """
        A ClearableFileInput instantiated with no initial value does not
        render a clear checkbox.
        """
        self.check_html(self.widget, 'myfile', None, html='<input type="file" name="myfile" />')

    def test_clear_input_checked_returns_false(self):
        """
        ClearableFileInput.value_from_datadict returns False if the clear
        checkbox is checked, if not required.
        """
        value = self.widget.value_from_datadict(
            data={'myfile-clear': True},
            files={},
            name='myfile',
        )
        self.assertEqual(value, False)

    def test_clear_input_checked_returns_false_only_if_not_required(self):
        """
        ClearableFileInput.value_from_datadict never returns False if the
        field is required.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        field = SimpleUploadedFile('something.txt', b'content')
        value = widget.value_from_datadict(
            data={'myfile-clear': True},
            files={'myfile': field},
            name='myfile',
        )
        self.assertEqual(value, field)
bsd-3-clause
ar7z1/ansible
test/units/parsing/yaml/test_dumper.py
49
2314
# coding: utf-8 # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import io from units.compat import unittest from ansible.parsing import vault from ansible.parsing.yaml import dumper, objects from ansible.parsing.yaml.loader import AnsibleLoader from units.mock.yaml_helper import YamlTestUtils from units.mock.vault_helper import TextVaultSecret class TestAnsibleDumper(unittest.TestCase, YamlTestUtils): def setUp(self): self.vault_password = "hunter42" vault_secret = TextVaultSecret(self.vault_password) self.vault_secrets = [('vault_secret', vault_secret)] self.good_vault = vault.VaultLib(self.vault_secrets) self.vault = self.good_vault self.stream = self._build_stream() self.dumper = dumper.AnsibleDumper def _build_stream(self, yaml_text=None): text = yaml_text or u'' stream = io.StringIO(text) return stream def _loader(self, stream): return AnsibleLoader(stream, vault_secrets=self.vault.secrets) def test(self): plaintext = 'This is a string we are going to encrypt.' 
avu = objects.AnsibleVaultEncryptedUnicode.from_plaintext(plaintext, vault=self.vault, secret=vault.match_secrets(self.vault_secrets, ['vault_secret'])[0][1]) yaml_out = self._dump_string(avu, dumper=self.dumper) stream = self._build_stream(yaml_out) loader = self._loader(stream) data_from_yaml = loader.get_single_data() self.assertEqual(plaintext, data_from_yaml.data)
gpl-3.0
wetek-enigma/enigma2
lib/python/Screens/ChoiceBox.py
14
7656
from Screens.Screen import Screen from Components.ActionMap import NumberActionMap from Components.Label import Label from Components.ChoiceList import ChoiceEntryComponent, ChoiceList from Components.Sources.StaticText import StaticText from Components.Pixmap import Pixmap import enigma class ChoiceBox(Screen): def __init__(self, session, title="", list=None, keys=None, selection=0, skin_name=None, text=""): self.setTitle(_("Choice Box")) if not list: list = [] if not skin_name: skin_name = [] Screen.__init__(self, session) if isinstance(skin_name, str): skin_name = [skin_name] self.skinName = skin_name + ["ChoiceBox"] self["text"] = Label() self.var = "" if skin_name and 'SoftwareUpdateChoices' in skin_name and var and var in ('unstable', 'updating', 'stable', 'unknown'): self.var = var self['feedStatusMSG'] = Label() self['tl_off'] = Pixmap() self['tl_red'] = Pixmap() self['tl_yellow'] = Pixmap() self['tl_green'] = Pixmap() if title: title = _(title) if len(title) < 55 and title.find('\n') == -1: Screen.setTitle(self, title) elif title.find('\n') != -1: temptext = title.split('\n') if len(temptext[0]) < 55: Screen.setTitle(self, temptext[0]) count = 2 labeltext = "" while len(temptext) >= count: if labeltext: labeltext += '\n' labeltext = labeltext + temptext[count-1] count += 1 print 'count',count self["text"].setText(labeltext) else: self["text"] = Label(title) else: self["text"] = Label(title) elif text: self["text"] = Label(_(text)) self.list = [] self.summarylist = [] if keys is None: self.__keys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ] + (len(list) - 10) * [""] else: self.__keys = keys + (len(list) - len(keys)) * [""] self.keymap = {} pos = 0 for x in list: strpos = str(self.__keys[pos]) self.list.append(ChoiceEntryComponent(key = strpos, text = x)) if self.__keys[pos] != "": self.keymap[self.__keys[pos]] = list[pos] self.summarylist.append((self.__keys[pos], x[0])) pos += 1 self["list"] = ChoiceList(list = 
self.list, selection = selection) self["summary_list"] = StaticText() self["summary_selection"] = StaticText() self.updateSummary(selection) self["actions"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions"], { "ok": self.go, "1": self.keyNumberGlobal, "2": self.keyNumberGlobal, "3": self.keyNumberGlobal, "4": self.keyNumberGlobal, "5": self.keyNumberGlobal, "6": self.keyNumberGlobal, "7": self.keyNumberGlobal, "8": self.keyNumberGlobal, "9": self.keyNumberGlobal, "0": self.keyNumberGlobal, "red": self.keyRed, "green": self.keyGreen, "yellow": self.keyYellow, "blue": self.keyBlue, "up": self.up, "down": self.down, "left": self.left, "right": self.right }, -1) self["cancelaction"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions"], { "back": self.cancel, }, -1) self.onShown.append(self.onshow) def onshow(self): if self.skinName and 'SoftwareUpdateChoices' in self.skinName and self.var and self.var in ('unstable', 'updating', 'stable', 'unknown'): status_msgs = {'stable': _('Feeds status: Stable'), 'unstable': _('Feeds status: Unstable'), 'updating': _('Feeds status: Updating'), 'unknown': _('No connection')} self['feedStatusMSG'].setText(status_msgs[self.var]) self['tl_off'].hide() self['tl_red'].hide() self['tl_yellow'].hide() self['tl_green'].hide() if self.var == 'unstable': self['tl_red'].show() elif self.var == 'updating': self['tl_yellow'].show() elif self.var == 'stable': self['tl_green'].show() else: self['tl_off'].show() def autoResize(self): desktop_w = enigma.getDesktop(0).size().width() desktop_h = enigma.getDesktop(0).size().height() count = len(self.list) itemheight = self["list"].getItemHeight() if count > 15: count = 15 if not self["text"].text: # move list textsize = (520, 0) listsize = (520, itemheight*count) self["list"].instance.move(enigma.ePoint(0, 0)) self["list"].instance.resize(enigma.eSize(*listsize)) else: textsize = self["text"].getSize() if textsize[0] < textsize[1]: textsize = 
(textsize[1],textsize[0]+10) if textsize[0] > 520: textsize = (textsize[0], textsize[1]+itemheight) else: textsize = (520, textsize[1]+itemheight) listsize = (textsize[0], itemheight*count) # resize label self["text"].instance.resize(enigma.eSize(*textsize)) self["text"].instance.move(enigma.ePoint(10, 10)) # move list self["list"].instance.move(enigma.ePoint(0, textsize[1])) self["list"].instance.resize(enigma.eSize(*listsize)) wsizex = textsize[0] wsizey = textsize[1]+listsize[1] wsize = (wsizex, wsizey) self.instance.resize(enigma.eSize(*wsize)) # center window self.instance.move(enigma.ePoint((desktop_w-wsizex)/2, (desktop_h-wsizey)/2)) def left(self): if len(self["list"].list) > 0: while 1: self["list"].instance.moveSelection(self["list"].instance.pageUp) self.updateSummary(self["list"].l.getCurrentSelectionIndex()) if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0: break def right(self): if len(self["list"].list) > 0: while 1: self["list"].instance.moveSelection(self["list"].instance.pageDown) self.updateSummary(self["list"].l.getCurrentSelectionIndex()) if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0: break def up(self): if len(self["list"].list) > 0: while 1: self["list"].instance.moveSelection(self["list"].instance.moveUp) self.updateSummary(self["list"].l.getCurrentSelectionIndex()) if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0: break def down(self): if len(self["list"].list) > 0: while 1: self["list"].instance.moveSelection(self["list"].instance.moveDown) self.updateSummary(self["list"].l.getCurrentSelectionIndex()) if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == len(self["list"].list) - 1: break # runs a number shortcut def keyNumberGlobal(self, number): self.goKey(str(number)) # runs the current selected entry def go(self): cursel = 
self["list"].l.getCurrentSelection() if cursel: self.goEntry(cursel[0]) else: self.cancel() # runs a specific entry def goEntry(self, entry): if entry and len(entry) > 3 and isinstance(entry[1], str) and entry[1] == "CALLFUNC": arg = entry[3] entry[2](arg) elif entry and len(entry) > 2 and isinstance(entry[1], str) and entry[1] == "CALLFUNC": entry[2](None) else: self.close(entry) # lookups a key in the keymap, then runs it def goKey(self, key): if self.keymap.has_key(key): entry = self.keymap[key] self.goEntry(entry) # runs a color shortcut def keyRed(self): self.goKey("red") def keyGreen(self): self.goKey("green") def keyYellow(self): self.goKey("yellow") def keyBlue(self): self.goKey("blue") def updateSummary(self, curpos=0): pos = 0 summarytext = "" for entry in self.summarylist: if curpos-2 < pos < curpos+5: if pos == curpos: summarytext += ">" self["summary_selection"].setText(entry[1]) else: summarytext += entry[0] summarytext += ' ' + entry[1] + '\n' pos += 1 self["summary_list"].setText(summarytext) def cancel(self): self.close(None)
gpl-2.0
winstonsee/huhamhire-hosts
gui/hostsutil.py
24
4139
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# hostsutil.py : Main entrance to GUI module of Hosts Setup Utility.
#
# Copyleft (C) 2014 - huhamhire hosts team <hosts@huhamhire.com>
# =====================================================================
# Licensed under the GNU General Public License, version 3. You should
# have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING
# THE WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE.
# =====================================================================

__author__ = "huhamhire <me@huhamhire.com>"

import sys
from zipfile import BadZipfile

from qdialog_slots import QDialogSlots

sys.path.append("..")
from util import RetrieveData, CommonUtil

# Path to store language files
LANG_DIR = "./gui/lang/"


class HostsUtil(QDialogSlots):
    """
    Main entrance to the Graphical User Interface (GUI) module of
    `Hosts Setup Utility`.  Launches and drives the main dialog.

    .. note:: This class is subclass of
        :class:`~gui.qdialog_slots.QDialogSlots` class.

    Typical usage to start a GUI session::

        import gui
        util = gui.HostsUtil()
        util.start()

    :ivar int init_flag: Times the main dialog has been initialized; the
        translator references this to set the dialog language.
    :ivar str filename: Hosts data file to build hosts files from.
        Default: "`hostslist.data`".
    :ivar str infofile: JSON metadata file for the hosts data file.
        Default: "`hostslist.json`".

    .. seealso:: :attr:`filename` and :attr:`infofile` in
        :class:`~tui.curses_ui.CursesUI` class.
    """
    init_flag = 0
    # Data file related configuration
    filename = "hostslist.data"
    infofile = "hostsinfo.json"

    def __init__(self):
        super(HostsUtil, self).__init__()

    def __del__(self):
        """
        Clear up the temporary data file while TUI session is finished.
        """
        # Best effort: the temp file may already be gone at interpreter
        # shutdown, so failures here are deliberately ignored.
        try:
            RetrieveData.clear()
        except:
            pass

    def start(self):
        """
        Start the GUI session.

        .. note:: This method is the trigger to launch a GUI session of
            `Hosts Setup Utility`.
        """
        if not self.init_flag:
            self.init_main()
        self.show()
        sys.exit(self.app.exec_())

    def init_main(self):
        """
        Set up the elements on the main dialog and check the environment
        of the current operating system and session.

        * Load server list from a configuration file under working
          directory.
        * Try to load the hosts data file under working directory if it
          exists.

        .. note:: If the hosts data file does not exist correctly in the
            current working directory, a warning message box pops up, and
            operations changing the hosts file on the current system are
            blocked until a new data file has been downloaded.

        .. seealso:: Method :meth:`~tui.hostsutil.HostsUtil.__init__` in
            :class:`~tui.hostsutil.HostsUtil` class.
        """
        self.ui.SelectMirror.clear()
        self.set_version()
        # Set mirrors
        self.mirrors = CommonUtil.set_network("network.conf")
        self.set_mirrors()
        # Read data file and set function list
        try:
            RetrieveData.unpack()
            RetrieveData.connect_db()
            self.set_func_list(1)
            self.refresh_func_list()
            self.set_info()
        except IOError:
            self.warning_no_datafile()
        except BadZipfile:
            self.warning_incorrect_datafile()
        # Check if current session have root privileges
        self.check_writable()
        self.init_flag += 1


if __name__ == "__main__":
    HostsUtlMain = HostsUtil()
    HostsUtlMain.start()
gpl-3.0
kjc88/sl4a
python/src/Lib/distutils/command/build.py
53
5598
"""distutils.command.build Implements the Distutils 'build' command.""" # This module should be kept compatible with Python 2.1. __revision__ = "$Id: build.py 62197 2008-04-07 01:53:39Z mark.hammond $" import sys, os from distutils.core import Command from distutils.errors import DistutilsOptionError from distutils.util import get_platform def show_compilers (): from distutils.ccompiler import show_compilers show_compilers() class build (Command): description = "build everything needed to install" user_options = [ ('build-base=', 'b', "base directory for build library"), ('build-purelib=', None, "build directory for platform-neutral distributions"), ('build-platlib=', None, "build directory for platform-specific distributions"), ('build-lib=', None, "build directory for all distribution (defaults to either " + "build-purelib or build-platlib"), ('build-scripts=', None, "build directory for scripts"), ('build-temp=', 't', "temporary build directory"), ('plat-name=', 'p', "platform name to build for, if supported " "(default: %s)" % get_platform()), ('compiler=', 'c', "specify the compiler type"), ('debug', 'g', "compile extensions and libraries with debugging information"), ('force', 'f', "forcibly build everything (ignore file timestamps)"), ('executable=', 'e', "specify final destination interpreter path (build.py)"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, "list available compilers", show_compilers), ] def initialize_options (self): self.build_base = 'build' # these are decided only after 'build_base' has its final value # (unless overridden by the user or client) self.build_purelib = None self.build_platlib = None self.build_lib = None self.build_temp = None self.build_scripts = None self.compiler = None self.plat_name = None self.debug = None self.force = 0 self.executable = None def finalize_options (self): if self.plat_name is None: self.plat_name = get_platform() else: # plat-name only supported for windows (other 
platforms are # supported via ./configure flags, if at all). Avoid misleading # other platforms. if os.name != 'nt': raise DistutilsOptionError( "--plat-name only supported on Windows (try " "using './configure --help' on your platform)") plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3]) # Make it so Python 2.x and Python 2.x with --with-pydebug don't # share the same build directories. Doing so confuses the build # process for C modules if hasattr(sys, 'gettotalrefcount'): plat_specifier += '-pydebug' # 'build_purelib' and 'build_platlib' just default to 'lib' and # 'lib.<plat>' under the base build directory. We only use one of # them for a given distribution, though -- if self.build_purelib is None: self.build_purelib = os.path.join(self.build_base, 'lib') if self.build_platlib is None: self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier) # 'build_lib' is the actual directory that we will use for this # particular module distribution -- if user didn't supply it, pick # one of 'build_purelib' or 'build_platlib'. if self.build_lib is None: if self.distribution.ext_modules: self.build_lib = self.build_platlib else: self.build_lib = self.build_purelib # 'build_temp' -- temporary directory for compiler turds, # "build/temp.<plat>" if self.build_temp is None: self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier) if self.build_scripts is None: self.build_scripts = os.path.join(self.build_base, 'scripts-' + sys.version[0:3]) if self.executable is None: self.executable = os.path.normpath(sys.executable) # finalize_options () def run (self): # Run all relevant sub-commands. 
This will be some subset of: # - build_py - pure Python modules # - build_clib - standalone C libraries # - build_ext - Python extensions # - build_scripts - (Python) scripts for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) # -- Predicates for the sub-command list --------------------------- def has_pure_modules (self): return self.distribution.has_pure_modules() def has_c_libraries (self): return self.distribution.has_c_libraries() def has_ext_modules (self): return self.distribution.has_ext_modules() def has_scripts (self): return self.distribution.has_scripts() sub_commands = [('build_py', has_pure_modules), ('build_clib', has_c_libraries), ('build_ext', has_ext_modules), ('build_scripts', has_scripts), ] # class build
apache-2.0
DalikarFT/CFVOP
venv/Lib/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
359
13092
from __future__ import absolute_import import collections import functools import logging try: # Python 3 from urllib.parse import urljoin except ImportError: from urlparse import urljoin from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown from .request import RequestMethods from .util.url import parse_url from .util.retry import Retry __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] log = logging.getLogger(__name__) SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', 'ssl_version', 'ca_cert_dir') # The base fields to use when determining what pool to get a connection from; # these do not rely on the ``connection_pool_kw`` and can be determined by the # URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary. # # All custom key schemes should include the fields in this key at a minimum. BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port')) # The fields to use when determining what pool to get a HTTP and HTTPS # connection from. All additional fields must be present in the PoolManager's # ``connection_pool_kw`` instance variable. HTTPPoolKey = collections.namedtuple( 'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict', 'block', 'source_address') ) HTTPSPoolKey = collections.namedtuple( 'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS ) def _default_key_normalizer(key_class, request_context): """ Create a pool key of type ``key_class`` for a request. According to RFC 3986, both the scheme and host are case-insensitive. Therefore, this function normalizes both before constructing the pool key for an HTTPS request. If you wish to change this behaviour, provide alternate callables to ``key_fn_by_scheme``. :param key_class: The class to use when constructing the key. 
This should be a namedtuple with the ``scheme`` and ``host`` keys at a minimum. :param request_context: A dictionary-like object that contain the context for a request. It should contain a key for each field in the :class:`HTTPPoolKey` """ context = {} for key in key_class._fields: context[key] = request_context.get(key) context['scheme'] = context['scheme'].lower() context['host'] = context['host'].lower() return key_class(**context) # A dictionary that maps a scheme to a callable that creates a pool key. # This can be used to alter the way pool keys are constructed, if desired. # Each PoolManager makes a copy of this dictionary so they can be configured # globally here, or individually on the instance. key_fn_by_scheme = { 'http': functools.partial(_default_key_normalizer, HTTPPoolKey), 'https': functools.partial(_default_key_normalizer, HTTPSPoolKey), } pool_classes_by_scheme = { 'http': HTTPConnectionPool, 'https': HTTPSConnectionPool, } class PoolManager(RequestMethods): """ Allows for arbitrary requests while transparently keeping track of necessary connection pools for you. :param num_pools: Number of connection pools to cache before discarding the least recently used pool. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param \**connection_pool_kw: Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. 
Example:: >>> manager = PoolManager(num_pools=2) >>> r = manager.request('GET', 'http://google.com/') >>> r = manager.request('GET', 'http://google.com/mail') >>> r = manager.request('GET', 'http://yahoo.com/') >>> len(manager.pools) 2 """ proxy = None def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) # Locally set the pool classes and keys so other PoolManagers can # override them. self.pool_classes_by_scheme = pool_classes_by_scheme self.key_fn_by_scheme = key_fn_by_scheme.copy() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.clear() # Return False to re-raise any potential exceptions return False def _new_pool(self, scheme, host, port): """ Create a new :class:`ConnectionPool` based on host, port and scheme. This method is used to actually create the connection pools handed out by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. """ pool_cls = self.pool_classes_by_scheme[scheme] kwargs = self.connection_pool_kw if scheme == 'http': kwargs = self.connection_pool_kw.copy() for kw in SSL_KEYWORDS: kwargs.pop(kw, None) return pool_cls(host, port, **kwargs) def clear(self): """ Empty our store of pools and direct them all to close. This will not affect in-flight connections, but they will not be re-used after completion. """ self.pools.clear() def connection_from_host(self, host, port=None, scheme='http'): """ Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. 
""" if not host: raise LocationValueError("No host specified.") request_context = self.connection_pool_kw.copy() request_context['scheme'] = scheme or 'http' if not port: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context) def connection_from_context(self, request_context): """ Get a :class:`ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. """ scheme = request_context['scheme'].lower() pool_key_constructor = self.key_fn_by_scheme[scheme] pool_key = pool_key_constructor(request_context) return self.connection_from_pool_key(pool_key) def connection_from_pool_key(self, pool_key): """ Get a :class:`ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and ``port`` fields. """ with self.pools.lock: # If the scheme, host, or port doesn't match existing open # connections, open a new ConnectionPool. pool = self.pools.get(pool_key) if pool: return pool # Make a fresh ConnectionPool of the desired type pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port) self.pools[pool_key] = pool return pool def connection_from_url(self, url): """ Similar to :func:`urllib3.connectionpool.connection_from_url` but doesn't pass any additional parameters to the :class:`urllib3.connectionpool.ConnectionPool` constructor. Additional parameters are taken from the :class:`.PoolManager` constructor. """ u = parse_url(url) return self.connection_from_host(u.host, port=u.port, scheme=u.scheme) def urlopen(self, method, url, redirect=True, **kw): """ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. 
The given ``url`` parameter must be absolute, such that an appropriate :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. """ u = parse_url(url) conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) kw['assert_same_host'] = False kw['redirect'] = False if 'headers' not in kw: kw['headers'] = self.headers if self.proxy is not None and u.scheme == "http": response = conn.urlopen(method, url, **kw) else: response = conn.urlopen(method, u.request_uri, **kw) redirect_location = redirect and response.get_redirect_location() if not redirect_location: return response # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) # RFC 7231, Section 6.4.4 if response.status == 303: method = 'GET' retries = kw.get('retries') if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: if retries.raise_on_redirect: raise return response kw['retries'] = retries kw['redirect'] = redirect log.info("Redirecting %s -> %s", url, redirect_location) return self.urlopen(method, redirect_location, **kw) class ProxyManager(PoolManager): """ Behaves just like :class:`PoolManager`, but sends all requests through the defined proxy, using the CONNECT method for HTTPS URLs. :param proxy_url: The URL of the proxy to be used. :param proxy_headers: A dictionary contaning headers that will be sent to the proxy. In case of HTTP they are being sent with each request, while in the HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. 
Example: >>> proxy = urllib3.ProxyManager('http://localhost:3128/') >>> r1 = proxy.request('GET', 'http://google.com/') >>> r2 = proxy.request('GET', 'http://httpbin.org/') >>> len(proxy.pools) 1 >>> r3 = proxy.request('GET', 'https://httpbin.org/') >>> r4 = proxy.request('GET', 'https://twitter.com/') >>> len(proxy.pools) 3 """ def __init__(self, proxy_url, num_pools=10, headers=None, proxy_headers=None, **connection_pool_kw): if isinstance(proxy_url, HTTPConnectionPool): proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, proxy_url.port) proxy = parse_url(proxy_url) if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) if proxy.scheme not in ("http", "https"): raise ProxySchemeUnknown(proxy.scheme) self.proxy = proxy self.proxy_headers = proxy_headers or {} connection_pool_kw['_proxy'] = self.proxy connection_pool_kw['_proxy_headers'] = self.proxy_headers super(ProxyManager, self).__init__( num_pools, headers, **connection_pool_kw) def connection_from_host(self, host, port=None, scheme='http'): if scheme == "https": return super(ProxyManager, self).connection_from_host( host, port, scheme) return super(ProxyManager, self).connection_from_host( self.proxy.host, self.proxy.port, self.proxy.scheme) def _set_proxy_headers(self, url, headers=None): """ Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. """ headers_ = {'Accept': '*/*'} netloc = parse_url(url).netloc if netloc: headers_['Host'] = netloc if headers: headers_.update(headers) return headers_ def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) if u.scheme == "http": # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. 
headers = kw.get('headers', self.headers) kw['headers'] = self._set_proxy_headers(url, headers) return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) def proxy_from_url(url, **kw): return ProxyManager(proxy_url=url, **kw)
gpl-3.0
jacknjzhou/zerorpc-python
tests/test_server.py
72
5945
# -*- coding: utf-8 -*- # Open Source Initiative OSI - The MIT License (MIT):Licensing # # The MIT License (MIT) # Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com) # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is furnished to do # so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from nose.tools import assert_raises import gevent import sys from zerorpc import zmq import zerorpc from testutils import teardown, random_ipc_endpoint def test_server_manual(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def lolita(self): return 42 def add(self, a, b): return a + b srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client_events = zerorpc.Events(zmq.DEALER) client_events.connect(endpoint) client = zerorpc.ChannelMultiplexer(client_events, ignore_broadcast=True) client_channel = client.channel() client_channel.emit('lolita', tuple()) event = client_channel.recv() assert list(event.args) == [42] client_channel.close() client_channel = client.channel() client_channel.emit('add', (1, 2)) event = client_channel.recv() assert list(event.args) == [3] client_channel.close() srv.stop() def test_client_server(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def lolita(self): return 42 def add(self, a, b): return a + b srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client = zerorpc.Client() client.connect(endpoint) print client.lolita() assert client.lolita() == 42 print client.add(1, 4) assert client.add(1, 4) == 5 def test_client_server_client_timeout(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def lolita(self): return 42 def add(self, a, b): gevent.sleep(10) return a + b srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client = zerorpc.Client(timeout=2) client.connect(endpoint) if sys.version_info < (2, 7): assert_raises(zerorpc.TimeoutExpired, client.add, 1, 4) else: with assert_raises(zerorpc.TimeoutExpired): print client.add(1, 4) client.close() srv.close() def test_client_server_exception(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def raise_something(self, a): return a[4] srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client = zerorpc.Client(timeout=2) client.connect(endpoint) if sys.version_info < (2, 7): def _do_with_assert_raises(): print 
client.raise_something(42) assert_raises(zerorpc.RemoteError, _do_with_assert_raises) else: with assert_raises(zerorpc.RemoteError): print client.raise_something(42) assert client.raise_something(range(5)) == 4 client.close() srv.close() def test_client_server_detailed_exception(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def raise_error(self): raise RuntimeError('oops!') srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client = zerorpc.Client(timeout=2) client.connect(endpoint) if sys.version_info < (2, 7): def _do_with_assert_raises(): print client.raise_error() assert_raises(zerorpc.RemoteError, _do_with_assert_raises) else: with assert_raises(zerorpc.RemoteError): print client.raise_error() try: client.raise_error() except zerorpc.RemoteError as e: print 'got that:', e print 'name', e.name print 'msg', e.msg assert e.name == 'RuntimeError' assert e.msg == 'oops!' client.close() srv.close() def test_exception_compat_v1(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): pass srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client_events = zerorpc.Events(zmq.DEALER) client_events.connect(endpoint) client = zerorpc.ChannelMultiplexer(client_events, ignore_broadcast=True) rpccall = client.channel() rpccall.emit('donotexist', tuple()) event = rpccall.recv() print event assert event.name == 'ERR' (name, msg, tb) = event.args print 'detailed error', name, msg, tb assert name == 'NameError' assert msg == 'donotexist' rpccall = client.channel() rpccall.emit('donotexist', tuple(), xheader=dict(v=1)) event = rpccall.recv() print event assert event.name == 'ERR' (msg,) = event.args print 'msg only', msg assert msg == "NameError('donotexist',)" client_events.close() srv.close() def test_removed_unscriptable_error_format_args_spec(): class MySrv(zerorpc.Server): pass srv = MySrv() return_value = srv._format_args_spec(None) assert return_value is None
mit
toastdriven/django-budget
budget/categories/models.py
1
1579
import datetime from decimal import Decimal from django.db import models from django.utils.translation import ugettext_lazy as _ class StandardMetadata(models.Model): """ A basic (abstract) model for metadata. """ created = models.DateTimeField(_('Created'), default=datetime.datetime.now) updated = models.DateTimeField(_('Updated'), default=datetime.datetime.now) is_deleted = models.BooleanField(_('Is deleted'), default=False, db_index=True) class Meta: abstract = True def save(self, *args, **kwargs): self.updated = datetime.datetime.now() super(StandardMetadata, self).save(*args, **kwargs) def delete(self): self.is_deleted = True self.save() class ActiveManager(models.Manager): def get_query_set(self): return super(ActiveManager, self).get_query_set().filter(is_deleted=False) class Category(StandardMetadata): """ Categories are the means to loosely tie together the transactions and estimates. They are used to aggregate transactions together and compare them to the appropriate budget estimate. For the reasoning behind this, the docstring on the Transaction object explains this. """ name = models.CharField(_('Name'), max_length=128) slug = models.SlugField(_('Slug'), unique=True) objects = models.Manager() active = ActiveManager() class Meta: verbose_name = _('Category') verbose_name_plural = _('Categories') def __unicode__(self): return self.name
mit
Jorge-Rodriguez/ansible
test/units/modules/network/f5/test_bigip_profile_tcp.py
21
3343
# -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_profile_tcp import ApiParameters from library.modules.bigip_profile_tcp import ModuleParameters from library.modules.bigip_profile_tcp import ModuleManager from library.modules.bigip_profile_tcp import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.compat.mock import patch from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_profile_tcp import ApiParameters from ansible.modules.network.f5.bigip_profile_tcp import ModuleParameters from ansible.modules.network.f5.bigip_profile_tcp import ModuleManager from ansible.modules.network.f5.bigip_profile_tcp import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.compat.mock import patch from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', parent='bar', idle_timeout='500' ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.parent == '/Common/bar' assert p.idle_timeout == 500 def 
test_api_parameters(self): args = load_fixture('load_ltm_profile_tcp_1.json') p = ApiParameters(params=args) assert p.name == 'foo' assert p.idle_timeout == 300 class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create(self, *args): # Configure the arguments that would be sent to the Ansible module set_module_args(dict( name='foo', parent='bar', idle_timeout=500, password='password', server='localhost', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) mm = ModuleManager(module=module) # Override methods to force specific logic in the module to happen mm.exists = Mock(return_value=False) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True assert results['idle_timeout'] == 500
gpl-3.0
yh453926638/shadowsocks
tests/nose_plugin.py
1072
1164
#!/usr/bin/env python # # Copyright 2015 clowwindy # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import nose from nose.plugins.base import Plugin class ExtensionPlugin(Plugin): name = "ExtensionPlugin" def options(self, parser, env): Plugin.options(self, parser, env) def configure(self, options, config): Plugin.configure(self, options, config) self.enabled = True def wantFile(self, file): return file.endswith('.py') def wantDirectory(self, directory): return True def wantModule(self, file): return True if __name__ == '__main__': nose.main(addplugins=[ExtensionPlugin()])
apache-2.0
eaplatanios/tensorflow
tensorflow/compiler/tests/xla_test.py
6
9859
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Definition of XLA test case.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import contextlib import os import random import re import numpy as np from tensorflow.contrib.compiler import jit from tensorflow.core.framework import types_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.python.client import session from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import random_seed from tensorflow.python.ops import array_ops from tensorflow.python.ops import variables from tensorflow.python.platform import flags from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging as logging FLAGS = flags.FLAGS flags.DEFINE_string('test_device', None, 'Tensorflow device on which to place operators under test') flags.DEFINE_string('types', None, 'Types to test. 
Comma-separated list.') flags.DEFINE_string('disabled_manifest', None, 'Path to a file with a list of tests that should not run.') flags.DEFINE_string('tf_xla_flags', None, 'Value to set the TF_XLA_FLAGS environment variable to') class XLATestCase(test.TestCase): """XLA test cases are parameterized test cases.""" def __init__(self, method_name='runTest'): super(XLATestCase, self).__init__(method_name) self.device = FLAGS.test_device self.has_custom_call = (self.device == 'XLA_CPU') self._all_tf_types = set([ dtypes.as_dtype(types_pb2.DataType.Value(name)) for name in FLAGS.types.split(',') ]) self.int_tf_types = set([ dtype for dtype in self._all_tf_types if dtype.is_integer ]) self._float_tf_types = set([ dtype for dtype in self._all_tf_types if dtype.is_floating ]) self.complex_tf_types = set([ dtype for dtype in self._all_tf_types if dtype.is_complex ]) self._numeric_tf_types = set( self.int_tf_types | self._float_tf_types | self.complex_tf_types) self._all_types = set( [dtype.as_numpy_dtype for dtype in self._all_tf_types]) self._int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types]) self._float_types = set( [dtype.as_numpy_dtype for dtype in self._float_tf_types]) self.complex_types = set([ dtype.as_numpy_dtype for dtype in self.complex_tf_types ]) self._numeric_types = set(self._int_types | self._float_types | self.complex_types) # Parse the manifest file, if any, into a regex identifying tests to # disable self.disabled_regex = None self._method_types_filter = dict() # TODO(xpan): Make it text proto if it doesn't scale. # Each line of the manifest file specifies an entry. The entry can be # 1) TestNameRegex // E.g. CumprodTest.* Or # 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16 # The 1) disables the entire test. While 2) only filter some numeric types # so that they are not used in those tests. 
if FLAGS.disabled_manifest is not None: comments_re = re.compile('#.*$') manifest_file = open(FLAGS.disabled_manifest, 'r') disabled_tests = [] disabled_method_types = [] for l in manifest_file.read().splitlines(): if not l: continue entry = comments_re.sub('', l).strip().split(' ') if len(entry) == 1: disabled_tests.append(entry[0]) elif len(entry) == 2: disabled_method_types.append( (entry[0], entry[1].strip().split(','))) else: raise ValueError('Bad entry in manifest file.') self.disabled_regex = re.compile('|'.join(disabled_tests)) for method, types in disabled_method_types: self._method_types_filter[method] = set([ dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype for name in types]) manifest_file.close() if FLAGS.tf_xla_flags is not None: os.environ['TF_XLA_FLAGS'] = FLAGS.tf_xla_flags @property def all_tf_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) tf_types = set([dtypes.as_dtype(t) for t in self._method_types_filter.get(name, set())]) return self._all_tf_types - tf_types @property def float_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) return self._float_types - self._method_types_filter.get(name, set()) @property def float_tf_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) return self._float_tf_types - self._method_types_filter.get(name, set()) @property def int_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) return self._int_types - self._method_types_filter.get(name, set()) @property def numeric_tf_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) tf_types = set([dtypes.as_dtype(t) for t in self._method_types_filter.get(name, set())]) return self._numeric_tf_types - tf_types @property def numeric_types(self): name = '{}.{}'.format(type(self).__name__, self._testMethodName) return self._numeric_types - self._method_types_filter.get(name, set()) @property def all_types(self): name = 
'{}.{}'.format(type(self).__name__, self._testMethodName) return self._all_types - self._method_types_filter.get(name, set()) def setUp(self): super(XLATestCase, self).setUp() name = '{}.{}'.format(type(self).__name__, self._testMethodName) if self.disabled_regex is not None and self.disabled_regex.match(name): logging.info('Disabled test case: %s', name) self.skipTest('{} is disabled by manifest.'.format(name)) return logging.info('Start test case: %s', name) random.seed(random_seed.DEFAULT_GRAPH_SEED) np.random.seed(random_seed.DEFAULT_GRAPH_SEED) def tearDown(self): super(XLATestCase, self).tearDown() logging.info('End test case: %s', self._testMethodName) @contextlib.contextmanager def test_session(self): """Custom implementation of test_session() for XLA tests. We override the standard Tensorflow test_session() since it is too specific to CPU and GPU tests. In particular, we want to disable soft placement and explicitly assign ops to devices under test. Yields: A session to use when running a test case. """ graph = ops.Graph() with session.Session(graph=graph) as sess, graph.as_default(): yield sess @contextlib.contextmanager def test_scope(self): """Test scope that runs tests on a Tensorflow/XLA device. Uses a compilation_scope() to mark operators to compile. Yields: A scope to apply to the operators under test. """ with ops.device('device:{}:0'.format(self.device)): yield def Benchmark(tf_bench, builder_fn, use_xla_jit, device, separate_compiled_gradients=False): """Build a graph and run benchmarks against it, with or without XLA. Args: tf_bench: An instance of tf.test.Benchmark, used to run the benchmark. builder_fn: A function that builds a graph when invoked, and returns (name, fetches), where name is the name of the test, and fetches is a list of tensors to fetch as output. use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF. device: The tensorflow device to run on, e.g. "cpu", "gpu". 
separate_compiled_gradients: If true put each gradient subgraph into a separate compilation scope. This gives fine-grained control over which portions of the graph will be compiled as a single unit. Compiling gradients separately may yield better performance for some graphs. The scope is named based on the scope of the forward computation as well as the name of the gradients. As a result, the gradients will be compiled in a scope that is separate from both the forward computation, and from other gradients. """ with ops.Graph().as_default(): name = None targets = [] with ops.device(device): fetches = [] jit_scope = jit.experimental_jit_scope with jit_scope( compile_ops=use_xla_jit, separate_compiled_gradients=separate_compiled_gradients): name, fetches = builder_fn() # We only want to benchmark the operations themselves, and not the data # transfer of the result(s). Non-compiled identity ops ensure XLA # doesn't know we're dropping the results, otherwise it might compile # away the entire computation. for fetch in fetches: targets.append(array_ops.identity(fetch).op) config = config_pb2.ConfigProto(allow_soft_placement=True) with session.Session(config=config) as sess: sess.run(variables.global_variables_initializer()) xla = 'xla_' if use_xla_jit else '' tf_bench.run_op_benchmark( sess, targets, name='%s_%s%s' % (name, xla, device))
apache-2.0
ganeshgore/myremolab
server/src/build/lib.linux-i686-2.7/weblab/core/db.py
2
32124
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2005 onwards University of Deusto # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # # This software consists of contributions made by many individuals, # listed below: # # Author: Jaime Irurzun <jaime.irurzun@gmail.com> # Pablo Orduña <pablo@ordunya.com> # from functools import wraps import numbers from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.sql.expression import desc from voodoo.log import logged from voodoo.typechecker import typecheck import weblab.db.model as model import weblab.db.gateway as dbGateway from weblab.data.command import Command import weblab.data.dto.experiments as ExperimentAllowed from weblab.data.experiments import ExperimentUsage, CommandSent, FileSent import weblab.db.exc as DbErrors import weblab.permissions as permissions def admin_panel_operation(func): """It checks if the requesting user has the admin_panel_access permission with full_privileges (temporal policy).""" @wraps(func) def wrapper(self, user_login, *args, **kargs): session = self.Session() try: user = self._get_user(session, user_login) admin_panel_access_permissions = self._gather_permissions(session, user, "admin_panel_access") if len(admin_panel_access_permissions) > 0: # The only requirement for now is to have full_privileges, this will be changed in further versions if self._get_bool_parameter_from_permission(session, admin_panel_access_permissions[0], "full_privileges"): return func(self, user_login, *args, **kargs) return () finally: session.close() return wrapper DEFAULT_VALUE = object() class DatabaseGateway(dbGateway.AbstractDatabaseGateway): forbidden_access = 'forbidden_access' def __init__(self, cfg_manager): super(DatabaseGateway, self).__init__(cfg_manager) @typecheck(basestring) @logged() def get_user_by_name(self, user_login): session = self.Session() try: return 
self._get_user(session, user_login).to_dto() finally: session.close() @typecheck(basestring) @logged() def list_experiments(self, user_login): session = self.Session() try: user = self._get_user(session, user_login) permissions = self._gather_permissions(session, user, 'experiment_allowed') grouped_experiments = {} for permission in permissions: p_permanent_id = self._get_parameter_from_permission(session, permission, 'experiment_permanent_id') p_category_id = self._get_parameter_from_permission(session, permission, 'experiment_category_id') p_time_allowed = self._get_float_parameter_from_permission(session, permission, 'time_allowed') p_priority = self._get_int_parameter_from_permission(session, permission, 'priority', ExperimentAllowed.DEFAULT_PRIORITY) p_initialization_in_accounting = self._get_bool_parameter_from_permission(session, permission, 'initialization_in_accounting', ExperimentAllowed.DEFAULT_INITIALIZATION_IN_ACCOUNTING) experiment = session.query(model.DbExperiment).filter_by(name=p_permanent_id).filter(model.DbExperimentCategory.name==p_category_id).first() if experiment is None: continue experiment_allowed = ExperimentAllowed.ExperimentAllowed(experiment.to_business(), p_time_allowed, p_priority, p_initialization_in_accounting, permission.permanent_id) experiment_unique_id = p_permanent_id+"@"+p_category_id if experiment_unique_id in grouped_experiments: grouped_experiments[experiment_unique_id].append(experiment_allowed) else: grouped_experiments[experiment_unique_id] = [experiment_allowed] # If any experiment is duplicated, only the less restrictive one is given experiments = [] for experiment_unique_id in grouped_experiments: less_restrictive_experiment_allowed = grouped_experiments[experiment_unique_id][0] for experiment_allowed in grouped_experiments[experiment_unique_id]: if experiment_allowed.time_allowed > less_restrictive_experiment_allowed.time_allowed: less_restrictive_experiment_allowed = experiment_allowed 
experiments.append(less_restrictive_experiment_allowed) experiments.sort(lambda x,y: cmp(x.experiment.category.name, y.experiment.category.name)) return tuple(experiments) finally: session.close() @typecheck(basestring) @logged() def is_access_forward(self, user_login): session = self.Session() try: user = self._get_user(session, user_login) permissions = self._gather_permissions(session, user, 'access_forward') return len(permissions) > 0 finally: session.close() @typecheck(basestring) @logged() def is_admin(self, user_login): session = self.Session() try: user = self._get_user(session, user_login) permissions = self._gather_permissions(session, user, 'admin_panel_access') return len(permissions) > 0 finally: session.close() @typecheck(basestring, ExperimentUsage) @logged() def store_experiment_usage(self, user_login, experiment_usage): session = self.Session() try: use = model.DbUserUsedExperiment( self._get_user(session, user_login), self._get_experiment(session, experiment_usage.experiment_id.exp_name, experiment_usage.experiment_id.cat_name), experiment_usage.start_date, experiment_usage.from_ip, experiment_usage.coord_address.address, experiment_usage.reservation_id, experiment_usage.end_date ) session.add(use) # TODO: The c.response of an standard command is an object with # a commandstring, whereas the response to an async command is # a simple string to identify the request. The way in which the logger # currently handles these cases is somewhat shady. for c in experiment_usage.commands: # If we have a response, the c.response will be an object and not # a string. Generally, we will, unless the command was asynchronous # and it didn't finish executing. if type(c.response) != type(""): session.add(model.DbUserCommand( use, c.command.commandstring, c.timestamp_before, c.response.commandstring, c.timestamp_after )) else: # In this other case, the response is a string, which means # that we have not updated it with the real response. 
Probably, # it was an asynchronous command which did not finish executing # by the time the experiment ended. session.add(model.DbUserCommand( use, c.command.commandstring, c.timestamp_before, "[RESPONSE NOT AVAILABLE]", c.timestamp_after )) for f in experiment_usage.sent_files: if f.is_loaded(): saved = f.save(self.cfg_manager, experiment_usage.reservation_id) else: saved = f session.add(model.DbUserFile( use, saved.file_path, saved.file_hash, saved.timestamp_before, saved.file_info, saved.response.commandstring, saved.timestamp_after )) for reservation_info_key in experiment_usage.request_info: db_key = session.query(model.DbUserUsedExperimentProperty).filter_by(name = reservation_info_key).first() if db_key is None: db_key = model.DbUserUsedExperimentProperty(reservation_info_key) session.add(db_key) value = experiment_usage.request_info[reservation_info_key] session.add(model.DbUserUsedExperimentPropertyValue( unicode(value), db_key, use )) session.commit() finally: session.close() @typecheck(basestring, float, CommandSent) @logged() def finish_experiment_usage(self, reservation_id, end_date, last_command ): session = self.Session() try: user_used_experiment = session.query(model.DbUserUsedExperiment).filter_by(reservation_id = reservation_id).first() if user_used_experiment is None: return False user_used_experiment.set_end_date(end_date) session.add(user_used_experiment) session.add(model.DbUserCommand( user_used_experiment, last_command.command.commandstring, last_command.timestamp_before, last_command.response.commandstring, last_command.timestamp_after )) session.commit() return True finally: session.close() @logged() def store_commands(self, complete_commands, command_requests, command_responses, complete_files, file_requests, file_responses): """ Stores all the commands in a single transaction; retrieving the ids of the file and command requests """ request_mappings = { # entry_id : command_id } session = self.Session() try: db_commands_and_files = [] 
for reservation_id, entry_id, command in complete_commands: db_command = self._append_command(session, reservation_id, command) if db_command == False: request_mappings[entry_id] = False for reservation_id, entry_id, command in complete_files: db_file = self._append_file(session, reservation_id, command) if db_file == False: request_mappings[entry_id] = False for entry_id in command_requests: reservation_id, command = command_requests[entry_id] db_command = self._append_command(session, reservation_id, command) if db_command == False: request_mappings[entry_id] = False else: db_commands_and_files.append((entry_id, db_command)) for entry_id in file_requests: reservation_id, file = file_requests[entry_id] db_file = self._append_file(session, reservation_id, file) if db_file == False: request_mappings[entry_id] = False else: db_commands_and_files.append((entry_id, db_file)) for entry_id, command_id, response, timestamp in command_responses: if not self._update_command(session, command_id, response, timestamp): request_mappings[entry_id] = False for entry_id, file_id, response, timestamp in file_responses: if not self._update_file(session, file_id, response, timestamp): request_mappings[entry_id] = False session.commit() for entry_id, db_command in db_commands_and_files: request_mappings[entry_id] = db_command.id finally: session.close() return request_mappings @typecheck(basestring, CommandSent) @logged() def append_command(self, reservation_id, command ): session = self.Session() try: db_command = self._append_command(session, reservation_id, command) session.commit() return db_command.id finally: session.close() def _append_command(self, session, reservation_id, command): user_used_experiment = session.query(model.DbUserUsedExperiment).filter_by(reservation_id = reservation_id).first() if user_used_experiment is None: return False db_command = model.DbUserCommand( user_used_experiment, command.command.commandstring, command.timestamp_before, 
command.response.commandstring if command.response is not None else None, command.timestamp_after ) session.add(db_command) return db_command @typecheck(numbers.Integral, Command, float) @logged() def update_command(self, command_id, response, end_timestamp ): session = self.Session() try: if self._update_command(session, command_id, response, end_timestamp): session.commit() return True return False finally: session.close() def _update_command(self, session, command_id, response, end_timestamp): db_command = session.query(model.DbUserCommand).filter_by(id = command_id).first() if db_command is None: return False db_command.response = response.commandstring if response is not None else None db_command.set_timestamp_after(end_timestamp) session.add(db_command) return True @typecheck(basestring, FileSent) @logged() def append_file(self, reservation_id, file_sent): session = self.Session() try: db_file_sent = self._append_file(session, reservation_id, file_sent) session.commit() return db_file_sent.id finally: session.close() def _append_file(self, session, reservation_id, file_sent): user_used_experiment = session.query(model.DbUserUsedExperiment).filter_by(reservation_id = reservation_id).first() if user_used_experiment is None: return False db_file_sent = model.DbUserFile( user_used_experiment, file_sent.file_path, file_sent.file_hash, file_sent.timestamp_before, file_sent.file_info, file_sent.response.commandstring if file_sent.response is not None else None, file_sent.timestamp_after ) session.add(db_file_sent) return db_file_sent @typecheck(numbers.Integral, Command, float) @logged() def update_file(self, file_id, response, end_timestamp ): session = self.Session() try: if self._update_file(session, file_id, response, end_timestamp): session.commit() return True return False finally: session.close() def _update_file(self, session, file_id, response, end_timestamp): db_file_sent = session.query(model.DbUserFile).filter_by(id = file_id).first() if db_file_sent is 
None: return False db_file_sent.response = response.commandstring if response is not None else None db_file_sent.set_timestamp_after(end_timestamp) session.add(db_file_sent) return True @logged() def list_usages_per_user(self, user_login, first=0, limit=20): session = self.Session() try: user = self._get_user(session, user_login) uses = session.query(model.DbUserUsedExperiment).filter_by(user=user).offset(first).limit(limit).all() return [ use.to_business_light() for use in uses ] finally: session.close() @logged() def retrieve_usage(self, usage_id): session = self.Session() try: use = session.query(model.DbUserUsedExperiment).filter_by(id=usage_id).one() return use.to_business() finally: session.close() @admin_panel_operation @logged() def get_groups(self, user_login, parent_id=None): """ The user's permissions are not checked at the moment """ def get_dto_children_recursively(groups): dto_groups = [] for group in groups: dto_group = group.to_dto() if len(group.children) > 0: dto_group.set_children(get_dto_children_recursively(group.children)) dto_groups.append(dto_group) return dto_groups session = self.Session() try: groups = session.query(model.DbGroup).filter_by(parent_id=parent_id).order_by(model.DbGroup.name).all() dto_groups = get_dto_children_recursively(groups) return tuple(dto_groups) finally: session.close() @admin_panel_operation @logged() def get_users(self, user_login): """ Retrieves every user from the database """ session = self.Session() try: users = session.query(model.DbUser).all() # TODO: Consider sorting users. 
dto_users = [ user.to_dto() for user in users ] return tuple(dto_users) finally: session.close() @admin_panel_operation @logged() def get_roles(self, user_login): """ Retrieves every role from the database """ session = self.Session() try: roles = session.query(model.DbRole).all() dto_roles = [role.to_dto() for role in roles] return tuple(dto_roles) finally: session.close() @admin_panel_operation @logged() def get_experiments(self, user_login): """ All the experiments are returned by the moment """ def sort_by_category_and_exp_name(exp1, exp2): if exp1.category.name != exp2.category.name: return cmp(exp1.category.name, exp2.category.name) else: return cmp(exp1.name, exp2.name) session = self.Session() try: experiments = session.query(model.DbExperiment).all() experiments.sort(cmp=sort_by_category_and_exp_name) dto_experiments = [ experiment.to_dto() for experiment in experiments ] return tuple(dto_experiments) finally: session.close() @logged() def get_experiment_uses_by_id(self, user_login, reservation_ids): """ Retrieve the full information of these reservation_ids, if the user has permissions to do so. By default a user can only access to those reservations that he made in the past.""" results = [] session = self.Session() try: user = session.query(model.DbUser).filter_by(login = user_login).first() if user is None: return [self.forbidden_access] * len(reservation_ids) for reservation_id in reservation_ids: experiment_use = session.query(model.DbUserUsedExperiment).filter_by(reservation_id = reservation_id.id).first() if experiment_use is None: results.append(None) else: if experiment_use.user == user: results.append(experiment_use.to_business()) else: results.append(self.forbidden_access) finally: session.close() return results @admin_panel_operation @logged() def get_experiment_uses(self, user_login, from_date, to_date, group_id, experiment_id, start_row, end_row, sort_by): """ All the experiment uses are returned by the moment. 
Filters are optional (they may be null), but if applied the results should chang.e The result is represented as (dto_objects, total_number_of_registers) """ session = self.Session() try: query_object = session.query(model.DbUserUsedExperiment) # Applying filters if from_date is not None: query_object = query_object.filter(model.DbUserUsedExperiment.end_date >= from_date) if to_date is not None: query_object = query_object.filter(model.DbUserUsedExperiment.start_date <= to_date) if experiment_id is not None: query_object = query_object.filter(model.DbUserUsedExperiment.experiment_id == experiment_id) if group_id is not None: def get_children_recursively(groups): new_groups = groups[:] for group in groups: new_groups.extend(get_children_recursively(group.children)) return [ group for group in new_groups ] parent_groups = session.query(model.DbGroup).filter(model.DbGroup.id == group_id).all() group_ids = [ group.id for group in get_children_recursively(parent_groups) ] groups = session.query(model.DbGroup).filter(model.DbGroup.id.in_(group_ids)).subquery() users = session.query(model.DbUser) users_in_group = users.join((groups, model.DbUser.groups)).subquery() query_object = query_object.join((users_in_group, model.DbUserUsedExperiment.user)) # Sorting if sort_by is not None and len(sort_by) > 0: # Lists instead of sets, since the order of elements inside matters (first add Experiment, only then Category) tables_to_join = [] sorters = [] for current_sort_by in sort_by: if current_sort_by in ('start_date','-start_date','end_date','-end_date','origin','-origin','id','-id'): if current_sort_by.startswith('-'): sorters.append(desc(getattr(model.DbUserUsedExperiment, current_sort_by[1:]))) else: sorters.append(getattr(model.DbUserUsedExperiment, current_sort_by)) elif current_sort_by in ('agent_login', '-agent_login', 'agent_name', '-agent_name', 'agent_email', '-agent_email'): tables_to_join.append((model.DbUser, model.DbUserUsedExperiment.user)) if 
current_sort_by.endswith('agent_login'): sorter = model.DbUser.login elif current_sort_by.endswith('agent_name'): sorter = model.DbUser.full_name else: # current_sort_by.endswith('agent_email') sorter = model.DbUser.email if current_sort_by.startswith('-'): sorters.append(desc(sorter)) else: sorters.append(sorter) elif current_sort_by in ('experiment_name', '-experiment_name'): tables_to_join.append((model.DbExperiment, model.DbUserUsedExperiment.experiment)) if current_sort_by.startswith('-'): sorters.append(desc(model.DbExperiment.name)) else: sorters.append(model.DbExperiment.name) elif current_sort_by in ('experiment_category', '-experiment_category'): tables_to_join.append((model.DbExperiment, model.DbUserUsedExperiment.experiment)) tables_to_join.append((model.DbExperimentCategory, model.DbExperiment.category)) if current_sort_by.startswith('-'): sorters.append(desc(model.DbExperimentCategory.name)) else: sorters.append(model.DbExperimentCategory.name) while len(tables_to_join) > 0: table, field = tables_to_join.pop(0) # Just in case it was added twice, for instance if sorting by experiment name *and* category name if (table,field) in tables_to_join: tables_to_join.remove((table,field)) query_object = query_object.join((table, field)) query_object = query_object.order_by(*sorters) # Apply all sorters in order # Counting total_number = query_object.count() if start_row is not None: starting = start_row else: starting = 0 if end_row is not None: ending = end_row else: ending = total_number experiment_uses = query_object[starting:ending] dto_experiment_uses = [ experiment_use.to_dto() for experiment_use in experiment_uses ] return tuple(dto_experiment_uses), total_number finally: session.close() @logged() def get_user_permissions(self, user_login): session = self.Session() try: user = self._get_user(session, user_login) user_permissions = [] for pt in permissions.permission_types: user_permissions.extend(self._gather_permissions(session, user, pt)) 
dto_permissions = [ permission.to_dto() for permission in user_permissions ] return tuple(dto_permissions) finally: session.close() def _get_user(self, session, user_login): try: return session.query(model.DbUser).filter_by(login=user_login).one() except NoResultFound: raise DbErrors.DbProvidedUserNotFoundError("Unable to find a User with the provided login: '%s'" % user_login) def _get_experiment(self, session, exp_name, cat_name): try: return session.query(model.DbExperiment) \ .filter(model.DbExperimentCategory.name == cat_name) \ .filter_by(name=exp_name).one() except NoResultFound: raise DbErrors.DbProvidedExperimentNotFoundError("Unable to find an Experiment with the provided unique id: '%s@%s'" % (exp_name, cat_name)) def _gather_groups_permissions(self, session, group, permission_type_name, permissions, remaining_list): if group.id in remaining_list: return remaining_list.append(group.id) self._add_or_replace_permissions(permissions, self._get_permissions(session, group, permission_type_name)) if group.parent is not None: self._gather_groups_permissions(session, group.parent, permission_type_name, permissions, remaining_list) def _gather_permissions(self, session, user, permission_type_name): permissions = [] self._add_or_replace_permissions(permissions, self._get_permissions(session, user.role, permission_type_name)) remaining_list = [] for group in user.groups: self._gather_groups_permissions(session, group, permission_type_name, permissions, remaining_list) self._add_or_replace_permissions(permissions, self._get_permissions(session, user, permission_type_name)) return permissions def _add_or_replace_permissions(self, permissions, permissions_to_add): permissions.extend(permissions_to_add) def _get_permissions(self, session, user_or_role_or_group_or_ee, permission_type_name): return [ pi for pi in user_or_role_or_group_or_ee.permissions if pi.get_permission_type() == permission_type_name ] def _get_parameter_from_permission(self, session, permission, 
parameter_name, default_value = DEFAULT_VALUE): try: param = [ p for p in permission.parameters if p.get_name() == parameter_name ][0] except IndexError: if default_value == DEFAULT_VALUE: raise DbErrors.DbIllegalStatusError( permission.get_permission_type() + " permission without " + parameter_name ) else: return default_value return param.value def _get_float_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE): value = self._get_parameter_from_permission(session, permission, parameter_name, default_value) try: return float(value) except ValueError: raise DbErrors.InvalidPermissionParameterFormatError( "Expected float as parameter '%s' of '%s', found: '%s'" % ( parameter_name, permission.get_permission_type(), value ) ) def _get_int_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE): value = self._get_parameter_from_permission(session, permission, parameter_name, default_value) try: return int(value) except ValueError: raise DbErrors.InvalidPermissionParameterFormatError( "Expected int as parameter '%s' of '%s', found: '%s'" % ( parameter_name, permission.get_permission_type(), value ) ) def _get_bool_parameter_from_permission(self, session, permission, parameter_name, default_value = DEFAULT_VALUE): return self._get_parameter_from_permission(session, permission, parameter_name, default_value) def _delete_all_uses(self): """ IMPORTANT: SHOULD NEVER BE USED IN PRODUCTION, IT'S HERE ONLY FOR TESTS """ session = self.Session() try: uu = session.query(model.DbUserUsedExperiment).all() for i in uu: session.delete(i) session.commit() finally: session.close() def _insert_user_used_experiment(self, user_login, experiment_name, experiment_category_name, start_time, origin, coord_address, reservation_id, end_date, commands = None, files = None): """ IMPORTANT: SHOULD NEVER BE USED IN PRODUCTION, IT'S HERE ONLY FOR TESTS """ if commands is None: commands = [] if files is None: 
files = [] session = self.Session() try: user = session.query(model.DbUser).filter_by(login=user_login).one() category = session.query(model.DbExperimentCategory).filter_by(name=experiment_category_name).one() experiment = session.query(model.DbExperiment). \ filter_by(name=experiment_name). \ filter_by(category=category).one() experiment_id = experiment.id exp_use = model.DbUserUsedExperiment(user, experiment, start_time, origin, coord_address, reservation_id, end_date) session.add(exp_use) session.commit() return experiment_id finally: session.close() def create_gateway(cfg_manager): return DatabaseGateway(cfg_manager)
bsd-2-clause
ATIX-AG/ansible
lib/ansible/modules/clustering/consul_acl.py
33
22133
#!/usr/bin/python # # (c) 2015, Steve Gargan <steve.gargan@gmail.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ module: consul_acl short_description: Manipulate Consul ACL keys and rules description: - Allows the addition, modification and deletion of ACL keys and associated rules in a consul cluster via the agent. For more details on using and configuring ACLs, see https://www.consul.io/docs/guides/acl.html. version_added: "2.0" author: - Steve Gargan (@sgargan) - Colin Nolan (@colin-nolan) options: mgmt_token: description: - a management token is required to manipulate the acl lists state: description: - whether the ACL pair should be present or absent required: false choices: ['present', 'absent'] default: present token_type: description: - the type of token that should be created, either management or client choices: ['client', 'management'] default: client name: description: - the name that should be associated with the acl key, this is opaque to Consul required: false token: description: - the token key indentifying an ACL rule set. 
If generated by consul this will be a UUID required: false rules: description: - a list of the rules that should be associated with a given token required: false host: description: - host of the consul agent defaults to localhost required: false default: localhost port: description: - the port on which the consul agent is running required: false default: 8500 scheme: description: - the protocol scheme on which the consul agent is running required: false default: http version_added: "2.1" validate_certs: description: - whether to verify the tls certificate of the consul agent required: false default: True version_added: "2.1" requirements: - "python >= 2.6" - python-consul - pyhcl - requests """ EXAMPLES = """ - name: create an ACL with rules consul_acl: host: consul1.example.com mgmt_token: some_management_acl name: Foo access rules: - key: "foo" policy: read - key: "private/foo" policy: deny - name: create an ACL with a specific token consul_acl: host: consul1.example.com mgmt_token: some_management_acl name: Foo access token: my-token rules: - key: "foo" policy: read - name: update the rules associated to an ACL token consul_acl: host: consul1.example.com mgmt_token: some_management_acl name: Foo access token: some_client_token rules: - event: "bbq" policy: write - key: "foo" policy: read - key: "private" policy: deny - keyring: write - node: "hgs4" policy: write - operator: read - query: "" policy: write - service: "consul" policy: write - session: "standup" policy: write - name: remove a token consul_acl: host: consul1.example.com mgmt_token: some_management_acl token: 172bd5c8-9fe9-11e4-b1b0-3c15c2c9fd5e state: absent """ RETURN = """ token: description: the token associated to the ACL (the ACL's ID) returned: success type: string sample: a2ec332f-04cf-6fba-e8b8-acf62444d3da rules: description: the HCL JSON representation of the rules associated to the ACL, in the format described in the Consul documentation 
(https://www.consul.io/docs/guides/acl.html#rule-specification). returned: I(status) == "present" type: string sample: { "key": { "foo": { "policy": "write" }, "bar": { "policy": "deny" } } } operation: description: the operation performed on the ACL returned: changed type: string sample: update """ try: import consul python_consul_installed = True except ImportError: python_consul_installed = False try: import hcl pyhcl_installed = True except ImportError: pyhcl_installed = False try: from requests.exceptions import ConnectionError has_requests = True except ImportError: has_requests = False from collections import defaultdict from ansible.module_utils.basic import to_text, AnsibleModule RULE_SCOPES = ["agent", "event", "key", "keyring", "node", "operator", "query", "service", "session"] MANAGEMENT_PARAMETER_NAME = "mgmt_token" HOST_PARAMETER_NAME = "host" SCHEME_PARAMETER_NAME = "scheme" VALIDATE_CERTS_PARAMETER_NAME = "validate_certs" NAME_PARAMETER_NAME = "name" PORT_PARAMETER_NAME = "port" RULES_PARAMETER_NAME = "rules" STATE_PARAMETER_NAME = "state" TOKEN_PARAMETER_NAME = "token" TOKEN_TYPE_PARAMETER_NAME = "token_type" PRESENT_STATE_VALUE = "present" ABSENT_STATE_VALUE = "absent" CLIENT_TOKEN_TYPE_VALUE = "client" MANAGEMENT_TOKEN_TYPE_VALUE = "management" REMOVE_OPERATION = "remove" UPDATE_OPERATION = "update" CREATE_OPERATION = "create" _POLICY_JSON_PROPERTY = "policy" _RULES_JSON_PROPERTY = "Rules" _TOKEN_JSON_PROPERTY = "ID" _TOKEN_TYPE_JSON_PROPERTY = "Type" _NAME_JSON_PROPERTY = "Name" _POLICY_YML_PROPERTY = "policy" _POLICY_HCL_PROPERTY = "policy" _ARGUMENT_SPEC = { MANAGEMENT_PARAMETER_NAME: dict(required=True, no_log=True), HOST_PARAMETER_NAME: dict(default='localhost'), SCHEME_PARAMETER_NAME: dict(required=False, default='http'), VALIDATE_CERTS_PARAMETER_NAME: dict(required=False, type='bool', default=True), NAME_PARAMETER_NAME: dict(required=False), PORT_PARAMETER_NAME: dict(default=8500, type='int'), RULES_PARAMETER_NAME: dict(default=None, 
required=False, type='list'), STATE_PARAMETER_NAME: dict(default=PRESENT_STATE_VALUE, choices=[PRESENT_STATE_VALUE, ABSENT_STATE_VALUE]), TOKEN_PARAMETER_NAME: dict(required=False), TOKEN_TYPE_PARAMETER_NAME: dict(required=False, choices=[CLIENT_TOKEN_TYPE_VALUE, MANAGEMENT_TOKEN_TYPE_VALUE], default=CLIENT_TOKEN_TYPE_VALUE) } def set_acl(consul_client, configuration): """ Sets an ACL based on the given configuration. :param consul_client: the consul client :param configuration: the run configuration :return: the output of setting the ACL """ acls_as_json = decode_acls_as_json(consul_client.acl.list()) existing_acls_mapped_by_name = dict((acl.name, acl) for acl in acls_as_json if acl.name is not None) existing_acls_mapped_by_token = dict((acl.token, acl) for acl in acls_as_json) if None in existing_acls_mapped_by_token: raise AssertionError("expecting ACL list to be associated to a token: %s" % existing_acls_mapped_by_token[None]) if configuration.token is None and configuration.name and configuration.name in existing_acls_mapped_by_name: # No token but name given so can get token from name configuration.token = existing_acls_mapped_by_name[configuration.name].token if configuration.token and configuration.token in existing_acls_mapped_by_token: return update_acl(consul_client, configuration) else: if configuration.token in existing_acls_mapped_by_token: raise AssertionError() if configuration.name in existing_acls_mapped_by_name: raise AssertionError() return create_acl(consul_client, configuration) def update_acl(consul_client, configuration): """ Updates an ACL. 
:param consul_client: the consul client :param configuration: the run configuration :return: the output of the update """ existing_acl = load_acl_with_token(consul_client, configuration.token) changed = existing_acl.rules != configuration.rules if changed: name = configuration.name if configuration.name is not None else existing_acl.name rules_as_hcl = encode_rules_as_hcl_string(configuration.rules) updated_token = consul_client.acl.update( configuration.token, name=name, type=configuration.token_type, rules=rules_as_hcl) if updated_token != configuration.token: raise AssertionError() return Output(changed=changed, token=configuration.token, rules=configuration.rules, operation=UPDATE_OPERATION) def create_acl(consul_client, configuration): """ Creates an ACL. :param consul_client: the consul client :param configuration: the run configuration :return: the output of the creation """ rules_as_hcl = encode_rules_as_hcl_string(configuration.rules) if len(configuration.rules) > 0 else None token = consul_client.acl.create( name=configuration.name, type=configuration.token_type, rules=rules_as_hcl, acl_id=configuration.token) rules = configuration.rules return Output(changed=True, token=token, rules=rules, operation=CREATE_OPERATION) def remove_acl(consul, configuration): """ Removes an ACL. :param consul: the consul client :param configuration: the run configuration :return: the output of the removal """ token = configuration.token changed = consul.acl.info(token) is not None if changed: consul.acl.destroy(token) return Output(changed=changed, token=token, operation=REMOVE_OPERATION) def load_acl_with_token(consul, token): """ Loads the ACL with the given token (token == rule ID). 
:param consul: the consul client :param token: the ACL "token"/ID (not name) :return: the ACL associated to the given token :exception ConsulACLTokenNotFoundException: raised if the given token does not exist """ acl_as_json = consul.acl.info(token) if acl_as_json is None: raise ConsulACLNotFoundException(token) return decode_acl_as_json(acl_as_json) def encode_rules_as_hcl_string(rules): """ Converts the given rules into the equivalent HCL (string) representation. :param rules: the rules :return: the equivalent HCL (string) representation of the rules. Will be None if there is no rules (see internal note for justification) """ if len(rules) == 0: # Note: empty string is not valid HCL according to `hcl.load` however, the ACL `Rule` property will be an empty # string if there is no rules... return None rules_as_hcl = "" for rule in rules: rules_as_hcl += encode_rule_as_hcl_string(rule) return rules_as_hcl def encode_rule_as_hcl_string(rule): """ Converts the given rule into the equivalent HCL (string) representation. :param rule: the rule :return: the equivalent HCL (string) representation of the rule """ if rule.pattern is not None: return '%s "%s" {\n %s = "%s"\n}\n' % (rule.scope, rule.pattern, _POLICY_HCL_PROPERTY, rule.policy) else: return '%s = "%s"\n' % (rule.scope, rule.policy) def decode_rules_as_hcl_string(rules_as_hcl): """ Converts the given HCL (string) representation of rules into a list of rule domain models. :param rules_as_hcl: the HCL (string) representation of a collection of rules :return: the equivalent domain model to the given rules """ rules_as_hcl = to_text(rules_as_hcl) rules_as_json = hcl.loads(rules_as_hcl) return decode_rules_as_json(rules_as_json) def decode_rules_as_json(rules_as_json): """ Converts the given JSON representation of rules into a list of rule domain models. 
:param rules_as_json: the JSON representation of a collection of rules :return: the equivalent domain model to the given rules """ rules = RuleCollection() for scope in rules_as_json: if not isinstance(rules_as_json[scope], dict): rules.add(Rule(scope, rules_as_json[scope])) else: for pattern, policy in rules_as_json[scope].items(): rules.add(Rule(scope, policy[_POLICY_JSON_PROPERTY], pattern)) return rules def encode_rules_as_json(rules): """ Converts the given rules into the equivalent JSON representation according to the documentation: https://www.consul.io/docs/guides/acl.html#rule-specification. :param rules: the rules :return: JSON representation of the given rules """ rules_as_json = defaultdict(dict) for rule in rules: if rule.pattern is not None: if rule.pattern in rules_as_json[rule.scope]: raise AssertionError() rules_as_json[rule.scope][rule.pattern] = { _POLICY_JSON_PROPERTY: rule.policy } else: if rule.scope in rules_as_json: raise AssertionError() rules_as_json[rule.scope] = rule.policy return rules_as_json def decode_rules_as_yml(rules_as_yml): """ Converts the given YAML representation of rules into a list of rule domain models. :param rules_as_yml: the YAML representation of a collection of rules :return: the equivalent domain model to the given rules """ rules = RuleCollection() if rules_as_yml: for rule_as_yml in rules_as_yml: rule_added = False for scope in RULE_SCOPES: if scope in rule_as_yml: if rule_as_yml[scope] is None: raise ValueError("Rule for '%s' does not have a value associated to the scope" % scope) policy = rule_as_yml[_POLICY_YML_PROPERTY] if _POLICY_YML_PROPERTY in rule_as_yml \ else rule_as_yml[scope] pattern = rule_as_yml[scope] if _POLICY_YML_PROPERTY in rule_as_yml else None rules.add(Rule(scope, policy, pattern)) rule_added = True break if not rule_added: raise ValueError("A rule requires one of %s and a policy." 
% ('/'.join(RULE_SCOPES))) return rules def decode_acl_as_json(acl_as_json): """ Converts the given JSON representation of an ACL into the equivalent domain model. :param acl_as_json: the JSON representation of an ACL :return: the equivalent domain model to the given ACL """ rules_as_hcl = acl_as_json[_RULES_JSON_PROPERTY] rules = decode_rules_as_hcl_string(acl_as_json[_RULES_JSON_PROPERTY]) if rules_as_hcl.strip() != "" \ else RuleCollection() return ACL( rules=rules, token_type=acl_as_json[_TOKEN_TYPE_JSON_PROPERTY], token=acl_as_json[_TOKEN_JSON_PROPERTY], name=acl_as_json[_NAME_JSON_PROPERTY] ) def decode_acls_as_json(acls_as_json): """ Converts the given JSON representation of ACLs into a list of ACL domain models. :param acls_as_json: the JSON representation of a collection of ACLs :return: list of equivalent domain models for the given ACLs (order not guaranteed to be the same) """ return [decode_acl_as_json(acl_as_json) for acl_as_json in acls_as_json] class ConsulACLNotFoundException(Exception): """ Exception raised if an ACL with is not found. """ class Configuration: """ Configuration for this module. """ def __init__(self, management_token=None, host=None, scheme=None, validate_certs=None, name=None, port=None, rules=None, state=None, token=None, token_type=None): self.management_token = management_token # type: str self.host = host # type: str self.scheme = scheme # type: str self.validate_certs = validate_certs # type: bool self.name = name # type: str self.port = port # type: bool self.rules = rules # type: RuleCollection self.state = state # type: str self.token = token # type: str self.token_type = token_type # type: str class Output: """ Output of an action of this module. """ def __init__(self, changed=None, token=None, rules=None, operation=None): self.changed = changed # type: bool self.token = token # type: str self.rules = rules # type: RuleCollection self.operation = operation # type: str class ACL: """ Consul ACL. 
See: https://www.consul.io/docs/guides/acl.html. """ def __init__(self, rules, token_type, token, name): self.rules = rules self.token_type = token_type self.token = token self.name = name def __eq__(self, other): return other \ and isinstance(other, self.__class__) \ and self.rules == other.rules \ and self.token_type == other.token_type \ and self.token == other.token \ and self.name == other.name def __hash__(self): return hash(self.rules) ^ hash(self.token_type) ^ hash(self.token) ^ hash(self.name) class Rule: """ ACL rule. See: https://www.consul.io/docs/guides/acl.html#acl-rules-and-scope. """ def __init__(self, scope, policy, pattern=None): self.scope = scope self.policy = policy self.pattern = pattern def __eq__(self, other): return isinstance(other, self.__class__) \ and self.scope == other.scope \ and self.policy == other.policy \ and self.pattern == other.pattern def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return (hash(self.scope) ^ hash(self.policy)) ^ hash(self.pattern) def __str__(self): return encode_rule_as_hcl_string(self) class RuleCollection: """ Collection of ACL rules, which are part of a Consul ACL. """ def __init__(self): self._rules = {} for scope in RULE_SCOPES: self._rules[scope] = {} def __iter__(self): all_rules = [] for scope, pattern_keyed_rules in self._rules.items(): for pattern, rule in pattern_keyed_rules.items(): all_rules.append(rule) return iter(all_rules) def __len__(self): count = 0 for scope in RULE_SCOPES: count += len(self._rules[scope]) return count def __eq__(self, other): return isinstance(other, self.__class__) \ and set(self) == set(other) def __ne__(self, other): return not self.__eq__(other) def __str__(self): return encode_rules_as_hcl_string(self) def add(self, rule): """ Adds the given rule to this collection. 
:param rule: model of a rule :raises ValueError: raised if there already exists a rule for a given scope and pattern """ if rule.pattern in self._rules[rule.scope]: patten_info = " and pattern '%s'" % rule.pattern if rule.pattern is not None else "" raise ValueError("Duplicate rule for scope '%s'%s" % (rule.scope, patten_info)) self._rules[rule.scope][rule.pattern] = rule def get_consul_client(configuration): """ Gets a Consul client for the given configuration. Does not check if the Consul client can connect. :param configuration: the run configuration :return: Consul client """ token = configuration.management_token if token is None: token = configuration.token if token is None: raise AssertionError("Expecting the management token to always be set") return consul.Consul(host=configuration.host, port=configuration.port, scheme=configuration.scheme, verify=configuration.validate_certs, token=token) def check_dependencies(): """ Checks that the required dependencies have been imported. :exception ImportError: if it is detected that any of the required dependencies have not been iported """ if not python_consul_installed: raise ImportError("python-consul required for this module. " "See: http://python-consul.readthedocs.org/en/latest/#installation") if not pyhcl_installed: raise ImportError("pyhcl required for this module. " "See: https://pypi.python.org/pypi/pyhcl") if not has_requests: raise ImportError("requests required for this module. See https://pypi.python.org/pypi/requests") def main(): """ Main method. 
""" module = AnsibleModule(_ARGUMENT_SPEC, supports_check_mode=False) try: check_dependencies() except ImportError as e: module.fail_json(msg=str(e)) configuration = Configuration( management_token=module.params.get(MANAGEMENT_PARAMETER_NAME), host=module.params.get(HOST_PARAMETER_NAME), scheme=module.params.get(SCHEME_PARAMETER_NAME), validate_certs=module.params.get(VALIDATE_CERTS_PARAMETER_NAME), name=module.params.get(NAME_PARAMETER_NAME), port=module.params.get(PORT_PARAMETER_NAME), rules=decode_rules_as_yml(module.params.get(RULES_PARAMETER_NAME)), state=module.params.get(STATE_PARAMETER_NAME), token=module.params.get(TOKEN_PARAMETER_NAME), token_type=module.params.get(TOKEN_TYPE_PARAMETER_NAME) ) consul_client = get_consul_client(configuration) try: if configuration.state == PRESENT_STATE_VALUE: output = set_acl(consul_client, configuration) else: output = remove_acl(consul_client, configuration) except ConnectionError as e: module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % ( configuration.host, configuration.port, str(e))) raise return_values = dict(changed=output.changed, token=output.token, operation=output.operation) if output.rules is not None: return_values["rules"] = encode_rules_as_json(output.rules) module.exit_json(**return_values) if __name__ == "__main__": main()
gpl-3.0
h3llrais3r/subliminal
subliminal/converters/thesubdb.py
45
1123
# -*- coding: utf-8 -*-
from babelfish import LanguageReverseConverter

from ..exceptions import ConfigurationError


class TheSubDBConverter(LanguageReverseConverter):
    """Language converter for TheSubDB's two-letter language codes.

    Maps TheSubDB codes (e.g. ``'en'``) to babelfish ``(alpha3,)`` or
    ``(alpha3, country)`` tuples and back.
    """

    def __init__(self):
        # TheSubDB code -> (alpha3,) or (alpha3, country) tuple.
        self.from_thesubdb = {'en': ('eng',), 'es': ('spa',), 'fr': ('fra',), 'it': ('ita',),
                              'nl': ('nld',), 'pl': ('pol',), 'pt': ('por', 'BR'), 'ro': ('ron',),
                              'sv': ('swe',), 'tr': ('tur',)}
        # Inverse mapping used by convert(); keys are the tuples above.
        self.to_thesubdb = {v: k for k, v in self.from_thesubdb.items()}
        self.codes = set(self.from_thesubdb.keys())

    def convert(self, alpha3, country=None, script=None):
        """Return the TheSubDB code for the given language.

        The (alpha3, country) pair is tried first, then alpha3 alone;
        raises ConfigurationError when neither is supported.
        """
        for candidate in ((alpha3, country), (alpha3,)):
            try:
                return self.to_thesubdb[candidate]
            except KeyError:
                continue
        raise ConfigurationError('Unsupported language for thesubdb: %s, %s, %s' % (alpha3, country, script))

    def reverse(self, thesubdb):
        """Return the (alpha3[, country]) tuple for a TheSubDB code.

        Raises ConfigurationError for unknown codes.
        """
        try:
            return self.from_thesubdb[thesubdb]
        except KeyError:
            raise ConfigurationError('Unsupported language code for thesubdb: %s' % thesubdb)
mit
NaturalGIS/naturalgis_qgis
python/PyQt/PyQt5/QtTest.py
45
1035
# -*- coding: utf-8 -*- """ *************************************************************************** QtTest.py --------------------- Date : March 2016 Copyright : (C) 2016 by Juergen E. Fischer Email : jef at norbit dot de *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Juergen E. Fischer' __date__ = 'March 2016' __copyright__ = '(C) 2016, Juergen E. Fischer' from PyQt5.QtTest import *
gpl-2.0
Jimmyhua94/MyoFlie
lib/cfclient/ui/dialogs/inputconfigdialogue.py
5
19062
#!/usr/bin/env python # -*- coding: utf-8 -*- # # || ____ _ __ # +------+ / __ )(_) /_______________ _____ ___ # | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \ # +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/ # || || /_____/_/\__/\___/_/ \__,_/ /___/\___/ # # Copyright (C) 2011-2013 Bitcraze AB # # Crazyflie Nano Quadcopter Client # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ Dialogue used to select and configure an inputdevice. This includes mapping buttons and axis to match controls for the Crazyflie. 
""" import sys import json import logging from cfclient.utils.config_manager import ConfigManager from cflib.crtp.exceptions import CommunicationException from PyQt4 import Qt, QtCore, QtGui, uic from PyQt4.QtCore import * from PyQt4.QtGui import * from PyQt4.Qt import * from cfclient.utils.input import JoystickReader __author__ = 'Bitcraze AB' __all__ = ['InputConfigDialogue'] logger = logging.getLogger(__name__) (inputconfig_widget_class, connect_widget_base_class) = ( uic.loadUiType(sys.path[0] + '/cfclient/ui/dialogs/inputconfigdialogue.ui') ) class InputConfigDialogue(QtGui.QWidget, inputconfig_widget_class): def __init__(self, joystickReader, *args): super(InputConfigDialogue, self).__init__(*args) self.setupUi(self) self._input = joystickReader self._input_device_reader = DeviceReader(self._input) self._input_device_reader.start() self._input_device_reader.raw_axis_data_signal.connect( self._detect_axis) self._input_device_reader.raw_button_data_signal.connect( self._detect_button) self._input_device_reader.mapped_values_signal.connect( self._update_mapped_values) self.cancelButton.clicked.connect(self.close) self.saveButton.clicked.connect(self._save_config) self.detectPitch.clicked.connect( lambda: self._axis_detect( "pitch", "Pitch axis", "Center the pitch axis then do max %s pitch", ["forward", "backward"])) self.detectRoll.clicked.connect( lambda: self._axis_detect( "roll", "Roll axis", "Center the roll axis and then do max %s roll", ["right", "left"])) self.detectYaw.clicked.connect( lambda: self._axis_detect( "yaw", "Yaw axis", "Center the yaw axis and then do max %s yaw", ["right", "left"])) self.detectThrust.clicked.connect( lambda: self._axis_detect( "thrust", "Thrust axis", "Center the thrust axis, and then do max thrust (also used to " "adjust target altitude in altitude hold mode)")) self.detectPitchPos.clicked.connect( lambda: self._button_detect( "pitchPos", "Pitch Cal Positive", "Press the button for Pitch postive calibration")) 
self.detectPitchNeg.clicked.connect( lambda: self._button_detect( "pitchNeg", "Pitch Cal Negative", "Press the button for Pitch negative calibration")) self.detectRollPos.clicked.connect( lambda: self._button_detect( "rollPos", "Roll Cal Positive", "Press the button for Roll positive calibration")) self.detectRollNeg.clicked.connect( lambda: self._button_detect( "rollNeg", "Roll Cal Negative", "Press the button for Roll negative calibration")) self.detectKillswitch.clicked.connect( lambda: self._button_detect( "killswitch", "Killswitch", "Press the button for the killswitch (will disable motors)")) self.detectAlt1.clicked.connect( lambda: self._button_detect( "alt1", "Alternative function 1", "The alternative function 1 that will do a callback")) self.detectAlt2.clicked.connect( lambda: self._button_detect( "alt2", "Alternative function 2", "The alternative function 2 that will do a callback")) self.detectExitapp.clicked.connect( lambda: self._button_detect( "exitapp", "Exit application", "Press the button for exiting the application")) self.detectAltHold.clicked.connect( lambda: self._button_detect( "althold", "Altitude hold", "Press the button for altitude hold mode activation " "(releasing returns to manual mode)")) self.detectMuxswitch.clicked.connect( lambda: self._button_detect( "muxswitch", "Mux Switch", "Press the button for mux switching")) self.configButton.clicked.connect(self._start_configuration) self.loadButton.clicked.connect(self._load_config_from_file) self.deleteButton.clicked.connect(self._delete_configuration) self._popup = None self._combined_button = None self._detection_buttons = [ self.detectPitch, self.detectRoll, self.detectYaw, self.detectThrust, self.detectPitchPos, self.detectPitchNeg, self.detectRollPos, self.detectRollNeg, self.detectKillswitch, self.detectExitapp, self.detectAltHold, self.detectAlt1, self.detectAlt2, self.detectMuxswitch] self._button_to_detect = "" self._axis_to_detect = "" self.combinedDetection = 0 
self._prev_combined_id = None self._maxed_axis = [] self._mined_axis = [] self._buttonindicators = {} self._axisindicators = {} self._reset_mapping() for d in self._input.available_devices(): if d.supports_mapping: self.inputDeviceSelector.addItem(d.name, d.id) if len(self._input.available_devices()) > 0: self.configButton.setEnabled(True) self._map = {} self._saved_open_device = None @staticmethod def _scale(max_value, value): return (value / max_value) * 100 def _reset_mapping(self): self._buttonindicators = { "pitchPos": self.pitchPos, "pitchNeg": self.pitchNeg, "rollPos": self.rollPos, "rollNeg": self.rollNeg, "killswitch": self.killswitch, "alt1": self.alt1, "alt2": self.alt2, "exitapp": self.exitapp, "althold": self.althold, "muxswitch": self.muxswitch, } self._axisindicators = { "pitch": self.pitchAxisValue, "roll": self.rollAxisValue, "yaw": self.yawAxisValue, "thrust": self.thrustAxisValue, } def _cancel_config_popup(self, button): self._axis_to_detect = "" self._button_to_detect = "" def _show_config_popup(self, caption, message, directions=[]): self._maxed_axis = [] self._mined_axis = [] self._popup = QMessageBox() self._popup.directions = directions self._combined_button = QtGui.QPushButton('Combined Axis Detection') self.cancelButton = QtGui.QPushButton('Cancel') self._popup.addButton(self.cancelButton, QMessageBox.DestructiveRole) self._popup.setWindowTitle(caption) self._popup.setWindowFlags(Qt.Dialog | Qt.MSWindowsFixedSizeDialogHint) if len(directions) > 1: self._popup.originalMessage = message message = self._popup.originalMessage % directions[0] self._combined_button.setCheckable(True) self._combined_button.blockSignals(True) self._popup.addButton(self._combined_button, QMessageBox.ActionRole) self._popup.setText(message) self._popup.show() def _start_configuration(self): self._input.enableRawReading( str(self.inputDeviceSelector.currentText())) self._input_device_reader.start_reading() self._populate_config_dropdown() 
self.profileCombo.setEnabled(True) for b in self._detection_buttons: b.setEnabled(True) def _detect_axis(self, data): if (len(self._axis_to_detect) > 0): if (self._combined_button and self._combined_button.isChecked() and self.combinedDetection == 0): self._combined_button.setDisabled(True) self.combinedDetection = 1 for a in data: # Axis must go low and high before it's accepted as selected # otherwise maxed out axis (like gyro/acc) in some controllers # will always be selected. Not enforcing negative values makes # it possible to detect split axis (like bumpers on PS3 # controller) if a not in self._maxed_axis and abs(data[a]) > 0.8: self._maxed_axis.append(a) if a not in self._mined_axis and abs(data[a]) < 0.1: self._mined_axis.append(a) if a in self._maxed_axis and a in self._mined_axis and len( self._axis_to_detect) > 0: if self.combinedDetection == 0: if data[a] >= 0: self._map_axis(self._axis_to_detect, a, 1.0) else: self._map_axis(self._axis_to_detect, a, -1.0) self._axis_to_detect = "" self._check_and_enable_saving() if self._popup is not None: self.cancelButton.click() elif self.combinedDetection == 2: # finished detection # not the same axis again ... 
if self._prev_combined_id != a: self._map_axis(self._axis_to_detect, a, -1.0) self._axis_to_detect = "" self._check_and_enable_saving() if (self._popup is not None): self.cancelButton.click() self.combinedDetection = 0 elif self.combinedDetection == 1: self._map_axis(self._axis_to_detect, a, 1.0) self._prev_combined_id = a self.combinedDetection = 2 message = (self._popup.originalMessage % self._popup.directions[1]) self._popup.setText(message) def _update_mapped_values(self, mapped_data): for v in mapped_data.get_all_indicators(): if v in self._buttonindicators: if mapped_data.get(v): self._buttonindicators[v].setChecked(True) else: self._buttonindicators[v].setChecked(False) if v in self._axisindicators: # The sliders used are set to 0-100 and the values from the # input-layer is scaled according to the max settings in # the input-layer. So scale the value and place 0 in the middle scaled_value = mapped_data.get(v) if v == "thrust": scaled_value = InputConfigDialogue._scale( self._input.max_thrust, scaled_value ) if v == "roll" or v == "pitch": scaled_value = InputConfigDialogue._scale( self._input.max_rp_angle, scaled_value ) if v == "yaw": scaled_value = InputConfigDialogue._scale( self._input.max_yaw_rate, scaled_value ) self._axisindicators[v].setValue(scaled_value) def _map_axis(self, function, key_id, scale): self._map["Input.AXIS-{}".format(key_id)] = {} self._map["Input.AXIS-{}".format(key_id)]["id"] = key_id self._map["Input.AXIS-{}".format(key_id)]["key"] = function self._map["Input.AXIS-{}".format(key_id)]["scale"] = scale self._map["Input.AXIS-{}".format(key_id)]["offset"] = 0.0 self._map["Input.AXIS-{}".format(key_id)]["type"] = "Input.AXIS" self._input.set_raw_input_map(self._map) def _map_button(self, function, key_id): # Duplicate buttons are not allowed, remove if there's already one # mapped prev_button = None for m in self._map: if "key" in self._map[m] and self._map[m]["key"] == function: prev_button = m if prev_button: del 
self._map[prev_button] self._map["Input.BUTTON-{}".format(key_id)] = {} self._map["Input.BUTTON-{}".format(key_id)]["id"] = key_id self._map["Input.BUTTON-{}".format(key_id)]["key"] = function self._map["Input.BUTTON-{}".format(key_id)]["scale"] = 1.0 self._map["Input.BUTTON-{}".format(key_id)]["type"] = "Input.BUTTON" self._input.set_raw_input_map(self._map) def _detect_button(self, data): if len(self._button_to_detect) > 0: for b in data: if data[b] > 0: self._map_button(self._button_to_detect, b) self._button_to_detect = "" self._check_and_enable_saving() if self._popup is not None: self._popup.close() def _check_and_enable_saving(self): needed_funcs = ["thrust", "yaw", "roll", "pitch"] for m in self._map: if self._map[m]["key"] in needed_funcs: needed_funcs.remove(self._map[m]["key"]) if len(needed_funcs) == 0: self.saveButton.setEnabled(True) def _populate_config_dropdown(self): configs = ConfigManager().get_list_of_configs() if len(configs): self.loadButton.setEnabled(True) for c in configs: self.profileCombo.addItem(c) def _axis_detect(self, varname, caption, message, directions=[]): self._axis_to_detect = varname self._show_config_popup(caption, message, directions) def _button_detect(self, varname, caption, message): self._button_to_detect = varname self._show_config_popup(caption, message) def _show_error(self, caption, message): QMessageBox.critical(self, caption, message) def _load_config_from_file(self): loaded_map = ConfigManager().get_config( self.profileCombo.currentText()) if loaded_map: self._input.set_raw_input_map(loaded_map) self._map = loaded_map else: logger.warning("Could not load configfile [%s]", self.profileCombo.currentText()) self._show_error("Could not load config", "Could not load config [%s]" % self.profileCombo.currentText()) self._check_and_enable_saving() def _delete_configuration(self): logger.warning("deleteConfig not implemented") def _save_config(self): configName = str(self.profileCombo.currentText()) mapping = 
{'inputconfig': {'inputdevice': {'axis': []}}} # Create intermediate structure for the configuration file funcs = {} for m in self._map: key = self._map[m]["key"] if key not in funcs: funcs[key] = [] funcs[key].append(self._map[m]) # Create a mapping for each axis, take care to handle # split axis configurations for a in funcs: func = funcs[a] axis = {} # Check for split axis if len(func) > 1: axis["ids"] = [func[0]["id"], func[1]["id"]] axis["scale"] = func[1]["scale"] else: axis["id"] = func[0]["id"] axis["scale"] = func[0]["scale"] axis["key"] = func[0]["key"] axis["name"] = func[0]["key"] # Name isn't used... axis["type"] = func[0]["type"] mapping["inputconfig"]["inputdevice"]["axis"].append(axis) mapping["inputconfig"]['inputdevice']['name'] = configName mapping["inputconfig"]['inputdevice']['updateperiod'] = 10 config_name = self.profileCombo.currentText() filename = ConfigManager().configs_dir + "/%s.json" % config_name logger.info("Saving config to [%s]", filename) json_data = open(filename, 'w') json_data.write(json.dumps(mapping, indent=2)) json_data.close() ConfigManager().conf_needs_reload.call(config_name) self.close() def showEvent(self, event): """Called when dialog is opened""" # self._saved_open_device = self._input.get_device_name() # self._input.stop_input() self._input.pause_input() def closeEvent(self, event): """Called when dialog is closed""" self._input.stop_raw_reading() self._input_device_reader.stop_reading() # self._input.start_input(self._saved_open_device) self._input.resume_input() class DeviceReader(QThread): """Used for polling data from the Input layer during configuration""" raw_axis_data_signal = pyqtSignal(object) raw_button_data_signal = pyqtSignal(object) mapped_values_signal = pyqtSignal(object) def __init__(self, input): QThread.__init__(self) self._input = input self._read_timer = QTimer() self._read_timer.setInterval(25) self.connect(self._read_timer, SIGNAL("timeout()"), self._read_input) def stop_reading(self): """Stop 
polling data""" self._read_timer.stop() def start_reading(self): """Start polling data""" self._read_timer.start() def _read_input(self): [rawaxis, rawbuttons, mapped_values] = self._input.read_raw_values() self.raw_axis_data_signal.emit(rawaxis) self.raw_button_data_signal.emit(rawbuttons) self.mapped_values_signal.emit(mapped_values)
gpl-2.0
wxgeo/geophar
wxgeometrie/sympy/printing/ccode.py
2
31443
""" C code printer The C89CodePrinter & C99CodePrinter converts single sympy expressions into single C expressions, using the functions defined in math.h where possible. A complete code generator, which uses ccode extensively, can be found in sympy.utilities.codegen. The codegen module can be used to generate complete source code files that are compilable without further modifications. """ from __future__ import print_function, division from functools import wraps from itertools import chain from sympy.core import S from sympy.core.compatibility import string_types, range from sympy.core.decorators import deprecated from sympy.codegen.ast import ( Assignment, Pointer, Type, Variable, Declaration, real, complex_, integer, bool_, float32, float64, float80, complex64, complex128, intc, value_const, pointer_const, int8, int16, int32, int64, uint8, uint16, uint32, uint64, untyped ) from sympy.printing.codeprinter import CodePrinter, requires from sympy.printing.precedence import precedence, PRECEDENCE from sympy.sets.fancysets import Range # dictionary mapping sympy function to (argument_conditions, C_function). 
# Used in C89CodePrinter._print_Function(self) known_functions_C89 = { "Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")], "Mod": [ ( lambda numer, denom: numer.is_integer and denom.is_integer, lambda printer, numer, denom, *args: "((%s) %% (%s))" % ( printer._print(numer), printer._print(denom)) ), ( lambda numer, denom: not numer.is_integer or not denom.is_integer, "fmod" ) ], "sin": "sin", "cos": "cos", "tan": "tan", "asin": "asin", "acos": "acos", "atan": "atan", "atan2": "atan2", "exp": "exp", "log": "log", "sinh": "sinh", "cosh": "cosh", "tanh": "tanh", "floor": "floor", "ceiling": "ceil", } # move to C99 once CCodePrinter is removed: _known_functions_C9X = dict(known_functions_C89, **{ "asinh": "asinh", "acosh": "acosh", "atanh": "atanh", "erf": "erf", "gamma": "tgamma", }) known_functions = _known_functions_C9X known_functions_C99 = dict(_known_functions_C9X, **{ 'exp2': 'exp2', 'expm1': 'expm1', 'expm1': 'expm1', 'log10': 'log10', 'log2': 'log2', 'log1p': 'log1p', 'Cbrt': 'cbrt', 'hypot': 'hypot', 'fma': 'fma', 'loggamma': 'lgamma', 'erfc': 'erfc', 'Max': 'fmax', 'Min': 'fmin' }) # These are the core reserved words in the C language. Taken from: # http://en.cppreference.com/w/c/keyword reserved_words = [ 'auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do', 'double', 'else', 'enum', 'extern', 'float', 'for', 'goto', 'if', 'int', 'long', 'register', 'return', 'short', 'signed', 'sizeof', 'static', 'struct', 'entry', # never standardized, we'll leave it here anyway 'switch', 'typedef', 'union', 'unsigned', 'void', 'volatile', 'while' ] reserved_words_c99 = ['inline', 'restrict'] def get_math_macros(): """ Returns a dictionary with math-related macros from math.h/cmath Note that these macros are not strictly required by the C/C++-standard. For MSVC they are enabled by defining "_USE_MATH_DEFINES" (preferably via a compilation flag). 
Returns ======= Dictionary mapping sympy expressions to strings (macro names) """ from sympy.codegen.cfunctions import log2, Sqrt from sympy.functions.elementary.exponential import log from sympy.functions.elementary.miscellaneous import sqrt return { S.Exp1: 'M_E', log2(S.Exp1): 'M_LOG2E', 1/log(2): 'M_LOG2E', log(2): 'M_LN2', log(10): 'M_LN10', S.Pi: 'M_PI', S.Pi/2: 'M_PI_2', S.Pi/4: 'M_PI_4', 1/S.Pi: 'M_1_PI', 2/S.Pi: 'M_2_PI', 2/sqrt(S.Pi): 'M_2_SQRTPI', 2/Sqrt(S.Pi): 'M_2_SQRTPI', sqrt(2): 'M_SQRT2', Sqrt(2): 'M_SQRT2', 1/sqrt(2): 'M_SQRT1_2', 1/Sqrt(2): 'M_SQRT1_2' } def _as_macro_if_defined(meth): """ Decorator for printer methods When a Printer's method is decorated using this decorator the expressions printed will first be looked for in the attribute ``math_macros``, and if present it will print the macro name in ``math_macros`` followed by a type suffix for the type ``real``. e.g. printing ``sympy.pi`` would print ``M_PIl`` if real is mapped to float80. """ @wraps(meth) def _meth_wrapper(self, expr, **kwargs): if expr in self.math_macros: return '%s%s' % (self.math_macros[expr], self._get_math_macro_suffix(real)) else: return meth(self, expr, **kwargs) return _meth_wrapper class C89CodePrinter(CodePrinter): """A printer to convert python expressions to strings of c code""" printmethod = "_ccode" language = "C" standard = "C89" reserved_words = set(reserved_words) _default_settings = { 'order': None, 'full_prec': 'auto', 'precision': 17, 'user_functions': {}, 'human': True, 'contract': True, 'dereference': set(), 'error_on_reserved': False, 'reserved_word_suffix': '_', } type_aliases = { real: float64, complex_: complex128, integer: intc } type_mappings = { real: 'double', intc: 'int', float32: 'float', float64: 'double', integer: 'int', bool_: 'bool', int8: 'int8_t', int16: 'int16_t', int32: 'int32_t', int64: 'int64_t', uint8: 'int8_t', uint16: 'int16_t', uint32: 'int32_t', uint64: 'int64_t', } type_headers = { bool_: {'stdbool.h'}, int8: {'stdint.h'}, 
int16: {'stdint.h'}, int32: {'stdint.h'}, int64: {'stdint.h'}, uint8: {'stdint.h'}, uint16: {'stdint.h'}, uint32: {'stdint.h'}, uint64: {'stdint.h'}, } type_macros = {} # Macros needed to be defined when using a Type type_func_suffixes = { float32: 'f', float64: '', float80: 'l' } type_literal_suffixes = { float32: 'F', float64: '', float80: 'L' } type_math_macro_suffixes = { float80: 'l' } math_macros = None _ns = '' # namespace, C++ uses 'std::' _kf = known_functions_C89 # known_functions-dict to copy def __init__(self, settings={}): if self.math_macros is None: self.math_macros = settings.pop('math_macros', get_math_macros()) self.type_aliases = dict(chain(self.type_aliases.items(), settings.pop('type_aliases', {}).items())) self.type_mappings = dict(chain(self.type_mappings.items(), settings.pop('type_mappings', {}).items())) self.type_headers = dict(chain(self.type_headers.items(), settings.pop('type_headers', {}).items())) self.type_macros = dict(chain(self.type_macros.items(), settings.pop('type_macros', {}).items())) self.type_func_suffixes = dict(chain(self.type_func_suffixes.items(), settings.pop('type_func_suffixes', {}).items())) self.type_literal_suffixes = dict(chain(self.type_literal_suffixes.items(), settings.pop('type_literal_suffixes', {}).items())) self.type_math_macro_suffixes = dict(chain(self.type_math_macro_suffixes.items(), settings.pop('type_math_macro_suffixes', {}).items())) super(C89CodePrinter, self).__init__(settings) self.known_functions = dict(self._kf, **settings.get('user_functions', {})) self._dereference = set(settings.get('dereference', [])) self.headers = set() self.libraries = set() self.macros = set() def _rate_index_position(self, p): return p*5 def _get_statement(self, codestring): """ Get code string as a statement - i.e. ending with a semicolon. 
""" return codestring if codestring.endswith(';') else codestring + ';' def _get_comment(self, text): return "// {0}".format(text) def _declare_number_const(self, name, value): type_ = self.type_aliases[real] var = Variable(name, type=type_, value=value.evalf(type_.decimal_dig), attrs={value_const}) decl = Declaration(var) return self._get_statement(self._print(decl)) def _format_code(self, lines): return self.indent_code(lines) def _traverse_matrix_indices(self, mat): rows, cols = mat.shape return ((i, j) for i in range(rows) for j in range(cols)) @_as_macro_if_defined def _print_Mul(self, expr, **kwargs): return super(C89CodePrinter, self)._print_Mul(expr, **kwargs) @_as_macro_if_defined def _print_Pow(self, expr): if "Pow" in self.known_functions: return self._print_Function(expr) PREC = precedence(expr) suffix = self._get_func_suffix(real) if expr.exp == -1: return '1.0%s/%s' % (suffix.upper(), self.parenthesize(expr.base, PREC)) elif expr.exp == 0.5: return '%ssqrt%s(%s)' % (self._ns, suffix, self._print(expr.base)) elif expr.exp == S.One/3 and self.standard != 'C89': return '%scbrt%s(%s)' % (self._ns, suffix, self._print(expr.base)) else: return '%spow%s(%s, %s)' % (self._ns, suffix, self._print(expr.base), self._print(expr.exp)) def _print_Rational(self, expr): p, q = int(expr.p), int(expr.q) suffix = self._get_literal_suffix(real) return '%d.0%s/%d.0%s' % (p, suffix, q, suffix) def _print_Indexed(self, expr): # calculate index for 1d array offset = getattr(expr.base, 'offset', S.Zero) strides = getattr(expr.base, 'strides', None) indices = expr.indices if strides is None or isinstance(strides, string_types): dims = expr.shape shift = S.One temp = tuple() if strides == 'C' or strides is None: traversal = reversed(range(expr.rank)) indices = indices[::-1] elif strides == 'F': traversal = range(expr.rank) for i in traversal: temp += (shift,) shift *= dims[i] strides = temp flat_index = sum([x[0]*x[1] for x in zip(indices, strides)]) + offset return "%s[%s]" % 
(self._print(expr.base.label), self._print(flat_index)) def _print_Idx(self, expr): return self._print(expr.label) @_as_macro_if_defined def _print_NumberSymbol(self, expr): return super(C89CodePrinter, self)._print_NumberSymbol(expr) def _print_Infinity(self, expr): return 'HUGE_VAL' def _print_NegativeInfinity(self, expr): return '-HUGE_VAL' def _print_Piecewise(self, expr): if expr.args[-1].cond != True: # We need the last conditional to be a True, otherwise the resulting # function may not return a result. raise ValueError("All Piecewise expressions must contain an " "(expr, True) statement to be used as a default " "condition. Without one, the generated " "expression may not evaluate to anything under " "some condition.") lines = [] if expr.has(Assignment): for i, (e, c) in enumerate(expr.args): if i == 0: lines.append("if (%s) {" % self._print(c)) elif i == len(expr.args) - 1 and c == True: lines.append("else {") else: lines.append("else if (%s) {" % self._print(c)) code0 = self._print(e) lines.append(code0) lines.append("}") return "\n".join(lines) else: # The piecewise was used in an expression, need to do inline # operators. This has the downside that inline operators will # not work for statements that span multiple lines (Matrix or # Indexed expressions). ecpairs = ["((%s) ? 
(\n%s\n)\n" % (self._print(c), self._print(e)) for e, c in expr.args[:-1]] last_line = ": (\n%s\n)" % self._print(expr.args[-1].expr) return ": ".join(ecpairs) + last_line + " ".join([")"*len(ecpairs)]) def _print_ITE(self, expr): from sympy.functions import Piecewise _piecewise = Piecewise((expr.args[1], expr.args[0]), (expr.args[2], True)) return self._print(_piecewise) def _print_MatrixElement(self, expr): return "{0}[{1}]".format(self.parenthesize(expr.parent, PRECEDENCE["Atom"], strict=True), expr.j + expr.i*expr.parent.shape[1]) def _print_Symbol(self, expr): name = super(C89CodePrinter, self)._print_Symbol(expr) if expr in self._settings['dereference']: return '(*{0})'.format(name) else: return name def _print_Relational(self, expr): lhs_code = self._print(expr.lhs) rhs_code = self._print(expr.rhs) op = expr.rel_op return ("{0} {1} {2}").format(lhs_code, op, rhs_code) def _print_sinc(self, expr): from sympy.functions.elementary.trigonometric import sin from sympy.core.relational import Ne from sympy.functions import Piecewise _piecewise = Piecewise( (sin(expr.args[0]) / expr.args[0], Ne(expr.args[0], 0)), (1, True)) return self._print(_piecewise) def _print_For(self, expr): target = self._print(expr.target) if isinstance(expr.iterable, Range): start, stop, step = expr.iterable.args else: raise NotImplementedError("Only iterable currently supported is Range") body = self._print(expr.body) return ('for ({target} = {start}; {target} < {stop}; {target} += ' '{step}) {{\n{body}\n}}').format(target=target, start=start, stop=stop, step=step, body=body) def _print_sign(self, func): return '((({0}) > 0) - (({0}) < 0))'.format(self._print(func.args[0])) def _print_Max(self, expr): if "Max" in self.known_functions: return self._print_Function(expr) from sympy import Max if len(expr.args) == 1: return self._print(expr.args[0]) return "((%(a)s > %(b)s) ? 
%(a)s : %(b)s)" % { 'a': expr.args[0], 'b': self._print(Max(*expr.args[1:]))} def _print_Min(self, expr): if "Min" in self.known_functions: return self._print_Function(expr) from sympy import Min if len(expr.args) == 1: return self._print(expr.args[0]) return "((%(a)s < %(b)s) ? %(a)s : %(b)s)" % { 'a': expr.args[0], 'b': self._print(Min(*expr.args[1:]))} def indent_code(self, code): """Accepts a string of code or a list of code lines""" if isinstance(code, string_types): code_lines = self.indent_code(code.splitlines(True)) return ''.join(code_lines) tab = " " inc_token = ('{', '(', '{\n', '(\n') dec_token = ('}', ')') code = [line.lstrip(' \t') for line in code] increase = [int(any(map(line.endswith, inc_token))) for line in code] decrease = [int(any(map(line.startswith, dec_token))) for line in code] pretty = [] level = 0 for n, line in enumerate(code): if line == '' or line == '\n': pretty.append(line) continue level -= decrease[n] pretty.append("%s%s" % (tab*level, line)) level += increase[n] return pretty def _get_func_suffix(self, type_): return self.type_func_suffixes[self.type_aliases.get(type_, type_)] def _get_literal_suffix(self, type_): return self.type_literal_suffixes[self.type_aliases.get(type_, type_)] def _get_math_macro_suffix(self, type_): alias = self.type_aliases.get(type_, type_) dflt = self.type_math_macro_suffixes.get(alias, '') return self.type_math_macro_suffixes.get(type_, dflt) def _print_Type(self, type_): self.headers.update(self.type_headers.get(type_, set())) self.macros.update(self.type_macros.get(type_, set())) return self._print(self.type_mappings.get(type_, type_.name)) def _print_Declaration(self, decl): from sympy.codegen.cnodes import restrict var = decl.variable val = var.value if var.type == untyped: raise ValueError("C does not support untyped variables") if isinstance(var, Pointer): result = '{vc}{t} *{pc} {r}{s}'.format( vc='const ' if value_const in var.attrs else '', t=self._print(var.type), pc=' const' if pointer_const 
in var.attrs else '', r='restrict ' if restrict in var.attrs else '', s=self._print(var.symbol) ) elif isinstance(var, Variable): result = '{vc}{t} {s}'.format( vc='const ' if value_const in var.attrs else '', t=self._print(var.type), s=self._print(var.symbol) ) else: raise NotImplementedError("Unknown type of var: %s" % type(var)) if val != None: result += ' = %s' % self._print(val) return result def _print_Float(self, flt): type_ = self.type_aliases.get(real, real) self.macros.update(self.type_macros.get(type_, set())) suffix = self._get_literal_suffix(type_) num = str(flt.evalf(type_.decimal_dig)) if 'e' not in num and '.' not in num: num += '.0' num_parts = num.split('e') num_parts[0] = num_parts[0].rstrip('0') if num_parts[0].endswith('.'): num_parts[0] += '0' return 'e'.join(num_parts) + suffix @requires(headers={'stdbool.h'}) def _print_BooleanTrue(self, expr): return 'true' @requires(headers={'stdbool.h'}) def _print_BooleanFalse(self, expr): return 'false' def _print_Element(self, elem): if elem.strides == None: if elem.offset != None: raise ValueError("Expected strides when offset is given") idxs = ']['.join(map(lambda arg: self._print(arg), elem.indices)) else: global_idx = sum([i*s for i, s in zip(elem.indices, elem.strides)]) if elem.offset != None: global_idx += elem.offset idxs = self._print(global_idx) return "{symb}[{idxs}]".format( symb=self._print(elem.symbol), idxs=idxs ) def _print_CodeBlock(self, expr): """ Elements of code blocks printed as statements. 
""" return '\n'.join([self._get_statement(self._print(i)) for i in expr.args]) def _print_While(self, expr): return 'while ({condition}) {{\n{body}\n}}'.format(**expr.kwargs( apply=lambda arg: self._print(arg))) def _print_Scope(self, expr): return '{\n%s\n}' % self._print_CodeBlock(expr.body) @requires(headers={'stdio.h'}) def _print_Print(self, expr): return 'printf({fmt}, {pargs})'.format( fmt=self._print(expr.format_string), pargs=', '.join(map(lambda arg: self._print(arg), expr.print_args)) ) def _print_FunctionPrototype(self, expr): pars = ', '.join(map(lambda arg: self._print(Declaration(arg)), expr.parameters)) return "%s %s(%s)" % ( tuple(map(lambda arg: self._print(arg), (expr.return_type, expr.name))) + (pars,) ) def _print_FunctionDefinition(self, expr): return "%s%s" % (self._print_FunctionPrototype(expr), self._print_Scope(expr)) def _print_Return(self, expr): arg, = expr.args return 'return %s' % self._print(arg) def _print_CommaOperator(self, expr): return '(%s)' % ', '.join(map(lambda arg: self._print(arg), expr.args)) def _print_Label(self, expr): return '%s:' % str(expr) def _print_goto(self, expr): return 'goto %s' % expr.label def _print_PreIncrement(self, expr): arg, = expr.args return '++(%s)' % self._print(arg) def _print_PostIncrement(self, expr): arg, = expr.args return '(%s)++' % self._print(arg) def _print_PreDecrement(self, expr): arg, = expr.args return '--(%s)' % self._print(arg) def _print_PostDecrement(self, expr): arg, = expr.args return '(%s)--' % self._print(arg) def _print_struct(self, expr): return "%(keyword)s %(name)s {\n%(lines)s}" % dict( keyword=expr.__class__.__name__, name=expr.name, lines=';\n'.join( [self._print(decl) for decl in expr.declarations] + ['']) ) def _print_BreakToken(self, _): return 'break' def _print_ContinueToken(self, _): return 'continue' _print_union = _print_struct class _C9XCodePrinter(object): # Move these methods to C99CodePrinter when removing CCodePrinter def _get_loop_opening_ending(self, 
indices): open_lines = [] close_lines = [] loopstart = "for (int %(var)s=%(start)s; %(var)s<%(end)s; %(var)s++){" # C99 for i in indices: # C arrays start at 0 and end at dimension-1 open_lines.append(loopstart % { 'var': self._print(i.label), 'start': self._print(i.lower), 'end': self._print(i.upper + 1)}) close_lines.append("}") return open_lines, close_lines @deprecated( last_supported_version='1.0', useinstead="C89CodePrinter or C99CodePrinter, e.g. ccode(..., standard='C99')", issue=12220, deprecated_since_version='1.1') class CCodePrinter(_C9XCodePrinter, C89CodePrinter): """ Deprecated. Alias for C89CodePrinter, for backwards compatibility. """ _kf = _known_functions_C9X # known_functions-dict to copy class C99CodePrinter(_C9XCodePrinter, C89CodePrinter): standard = 'C99' reserved_words = set(reserved_words + reserved_words_c99) type_mappings=dict(chain(C89CodePrinter.type_mappings.items(), { complex64: 'float complex', complex128: 'double complex', }.items())) type_headers = dict(chain(C89CodePrinter.type_headers.items(), { complex64: {'complex.h'}, complex128: {'complex.h'} }.items())) _kf = known_functions_C99 # known_functions-dict to copy # functions with versions with 'f' and 'l' suffixes: _prec_funcs = ('fabs fmod remainder remquo fma fmax fmin fdim nan exp exp2' ' expm1 log log10 log2 log1p pow sqrt cbrt hypot sin cos tan' ' asin acos atan atan2 sinh cosh tanh asinh acosh atanh erf' ' erfc tgamma lgamma ceil floor trunc round nearbyint rint' ' frexp ldexp modf scalbn ilogb logb nextafter copysign').split() def _print_Infinity(self, expr): return 'INFINITY' def _print_NegativeInfinity(self, expr): return '-INFINITY' def _print_NaN(self, expr): return 'NAN' # tgamma was already covered by 'known_functions' dict @requires(headers={'math.h'}, libraries={'m'}) @_as_macro_if_defined def _print_math_func(self, expr, nest=False): known = self.known_functions[expr.__class__.__name__] if not isinstance(known, string_types): for cb, name in known: if 
cb(*expr.args): known = name break else: raise ValueError("No matching printer") try: return known(self, *expr.args) except TypeError: suffix = self._get_func_suffix(real) if self._ns + known in self._prec_funcs else '' if nest: args = self._print(expr.args[0]) if len(expr.args) > 1: args += ', %s' % self._print(expr.func(*expr.args[1:])) else: args = ', '.join(map(lambda arg: self._print(arg), expr.args)) return '{ns}{name}{suffix}({args})'.format( ns=self._ns, name=known, suffix=suffix, args=args ) def _print_Max(self, expr): return self._print_math_func(expr, nest=True) def _print_Min(self, expr): return self._print_math_func(expr, nest=True) for k in ('Abs Sqrt exp exp2 expm1 log log10 log2 log1p Cbrt hypot fma Mod' ' loggamma sin cos tan asin acos atan atan2 sinh cosh tanh asinh acosh ' 'atanh erf erfc loggamma gamma ceiling floor').split(): setattr(C99CodePrinter, '_print_%s' % k, C99CodePrinter._print_math_func) class C11CodePrinter(C99CodePrinter): @requires(headers={'stdalign.h'}) def _print_alignof(self, expr): arg, = expr.args return 'alignof(%s)' % self._print(arg) c_code_printers = { 'c89': C89CodePrinter, 'c99': C99CodePrinter, 'c11': C11CodePrinter } def ccode(expr, assign_to=None, standard='c99', **settings): """Converts an expr to a string of c code Parameters ========== expr : Expr A sympy expression to be converted. assign_to : optional When given, the argument is used as the name of the variable to which the expression is assigned. Can be a string, ``Symbol``, ``MatrixSymbol``, or ``Indexed`` type. This is helpful in case of line-wrapping, or for expressions that generate multi-line statements. standard : str, optional String specifying the standard. If your compiler supports a more modern standard you may set this to 'c99' to allow the printer to use more math functions. [default='c89']. precision : integer, optional The precision for numbers such as pi [default=17]. 
user_functions : dict, optional A dictionary where the keys are string representations of either ``FunctionClass`` or ``UndefinedFunction`` instances and the values are their desired C string representations. Alternatively, the dictionary value can be a list of tuples i.e. [(argument_test, cfunction_string)] or [(argument_test, cfunction_formater)]. See below for examples. dereference : iterable, optional An iterable of symbols that should be dereferenced in the printed code expression. These would be values passed by address to the function. For example, if ``dereference=[a]``, the resulting code would print ``(*a)`` instead of ``a``. human : bool, optional If True, the result is a single string that may contain some constant declarations for the number symbols. If False, the same information is returned in a tuple of (symbols_to_declare, not_supported_functions, code_text). [default=True]. contract: bool, optional If True, ``Indexed`` instances are assumed to obey tensor contraction rules and the corresponding nested loops over indices are generated. Setting contract=False will not generate loops, instead the user is responsible to provide values for the indices in the code. [default=True]. Examples ======== >>> from sympy import ccode, symbols, Rational, sin, ceiling, Abs, Function >>> x, tau = symbols("x, tau") >>> expr = (2*tau)**Rational(7, 2) >>> ccode(expr) '8*M_SQRT2*pow(tau, 7.0/2.0)' >>> ccode(expr, math_macros={}) '8*sqrt(2)*pow(tau, 7.0/2.0)' >>> ccode(sin(x), assign_to="s") 's = sin(x);' >>> from sympy.codegen.ast import real, float80 >>> ccode(expr, type_aliases={real: float80}) '8*M_SQRT2l*powl(tau, 7.0L/2.0L)' Simple custom printing can be defined for certain types by passing a dictionary of {"type" : "function"} to the ``user_functions`` kwarg. Alternatively, the dictionary value can be a list of tuples i.e. [(argument_test, cfunction_string)]. >>> custom_functions = { ... "ceiling": "CEIL", ... "Abs": [(lambda x: not x.is_integer, "fabs"), ... 
(lambda x: x.is_integer, "ABS")], ... "func": "f" ... } >>> func = Function('func') >>> ccode(func(Abs(x) + ceiling(x)), standard='C89', user_functions=custom_functions) 'f(fabs(x) + CEIL(x))' or if the C-function takes a subset of the original arguments: >>> ccode(2**x + 3**x, standard='C99', user_functions={'Pow': [ ... (lambda b, e: b == 2, lambda b, e: 'exp2(%s)' % e), ... (lambda b, e: b != 2, 'pow')]}) 'exp2(x) + pow(3, x)' ``Piecewise`` expressions are converted into conditionals. If an ``assign_to`` variable is provided an if statement is created, otherwise the ternary operator is used. Note that if the ``Piecewise`` lacks a default term, represented by ``(expr, True)`` then an error will be thrown. This is to prevent generating an expression that may not evaluate to anything. >>> from sympy import Piecewise >>> expr = Piecewise((x + 1, x > 0), (x, True)) >>> print(ccode(expr, tau, standard='C89')) if (x > 0) { tau = x + 1; } else { tau = x; } Support for loops is provided through ``Indexed`` types. With ``contract=True`` these expressions will be turned into loops, whereas ``contract=False`` will just print the assignment expression that should be looped over: >>> from sympy import Eq, IndexedBase, Idx >>> len_y = 5 >>> y = IndexedBase('y', shape=(len_y,)) >>> t = IndexedBase('t', shape=(len_y,)) >>> Dy = IndexedBase('Dy', shape=(len_y-1,)) >>> i = Idx('i', len_y-1) >>> e=Eq(Dy[i], (y[i+1]-y[i])/(t[i+1]-t[i])) >>> ccode(e.rhs, assign_to=e.lhs, contract=False, standard='C89') 'Dy[i] = (y[i + 1] - y[i])/(t[i + 1] - t[i]);' Matrices are also supported, but a ``MatrixSymbol`` of the same dimensions must be provided to ``assign_to``. 
Note that any expression that can be generated normally can also exist inside a Matrix: >>> from sympy import Matrix, MatrixSymbol >>> mat = Matrix([x**2, Piecewise((x + 1, x > 0), (x, True)), sin(x)]) >>> A = MatrixSymbol('A', 3, 1) >>> print(ccode(mat, A, standard='C89')) A[0] = pow(x, 2); if (x > 0) { A[1] = x + 1; } else { A[1] = x; } A[2] = sin(x); """ return c_code_printers[standard.lower()](settings).doprint(expr, assign_to) def print_ccode(expr, **settings): """Prints C representation of the given expression.""" print(ccode(expr, **settings))
gpl-2.0
toanalien/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checker_unittest.py
121
35416
# -*- coding: utf-8; -*- # # Copyright (C) 2009 Google Inc. All rights reserved. # Copyright (C) 2009 Torch Mobile Inc. # Copyright (C) 2009 Apple Inc. All rights reserved. # Copyright (C) 2010 Chris Jerdonek (chris.jerdonek@gmail.com) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Unit tests for style.py.""" import logging import os import unittest2 as unittest import checker as style from webkitpy.common.system.logtesting import LogTesting, TestLogStream from checker import _BASE_FILTER_RULES from checker import _MAX_REPORTS_PER_CATEGORY from checker import _PATH_RULES_SPECIFIER as PATH_RULES_SPECIFIER from checker import _all_categories from checker import check_webkit_style_configuration from checker import check_webkit_style_parser from checker import configure_logging from checker import CheckerDispatcher from checker import ProcessorBase from checker import StyleProcessor from checker import StyleProcessorConfiguration from checkers.changelog import ChangeLogChecker from checkers.cpp import CppChecker from checkers.jsonchecker import JSONChecker from checkers.python import PythonChecker from checkers.text import TextChecker from checkers.xml import XMLChecker from error_handlers import DefaultStyleErrorHandler from filter import validate_filter_rules from filter import FilterConfiguration from optparser import ArgumentParser from optparser import CommandOptionValues from webkitpy.common.system.logtesting import LoggingTestCase from webkitpy.style.filereader import TextFileReader class ConfigureLoggingTestBase(unittest.TestCase): """Base class for testing configure_logging(). Sub-classes should implement: is_verbose: The is_verbose value to pass to configure_logging(). """ def setUp(self): is_verbose = self.is_verbose log_stream = TestLogStream(self) # Use a logger other than the root logger or one prefixed with # webkit so as not to conflict with test-webkitpy logging. logger = logging.getLogger("unittest") # Configure the test logger not to pass messages along to the # root logger. This prevents test messages from being # propagated to loggers used by test-webkitpy logging (e.g. # the root logger). 
logger.propagate = False self._handlers = configure_logging(stream=log_stream, logger=logger, is_verbose=is_verbose) self._log = logger self._log_stream = log_stream def tearDown(self): """Reset logging to its original state. This method ensures that the logging configuration set up for a unit test does not affect logging in other unit tests. """ logger = self._log for handler in self._handlers: logger.removeHandler(handler) def assert_log_messages(self, messages): """Assert that the logged messages equal the given messages.""" self._log_stream.assertMessages(messages) class ConfigureLoggingTest(ConfigureLoggingTestBase): """Tests the configure_logging() function.""" is_verbose = False def test_warning_message(self): self._log.warn("test message") self.assert_log_messages(["WARNING: test message\n"]) def test_below_warning_message(self): # We test the boundary case of a logging level equal to 29. # In practice, we will probably only be calling log.info(), # which corresponds to a logging level of 20. level = logging.WARNING - 1 # Equals 29. 
self._log.log(level, "test message") self.assert_log_messages(["test message\n"]) def test_debug_message(self): self._log.debug("test message") self.assert_log_messages([]) def test_two_messages(self): self._log.info("message1") self._log.info("message2") self.assert_log_messages(["message1\n", "message2\n"]) class ConfigureLoggingVerboseTest(ConfigureLoggingTestBase): """Tests the configure_logging() function with is_verbose True.""" is_verbose = True def test_debug_message(self): self._log.debug("test message") self.assert_log_messages(["unittest: DEBUG test message\n"]) class GlobalVariablesTest(unittest.TestCase): """Tests validity of the global variables.""" def _all_categories(self): return _all_categories() def defaults(self): return style._check_webkit_style_defaults() def test_webkit_base_filter_rules(self): base_filter_rules = _BASE_FILTER_RULES defaults = self.defaults() already_seen = [] validate_filter_rules(base_filter_rules, self._all_categories()) # Also do some additional checks. for rule in base_filter_rules: # Check no leading or trailing white space. self.assertEqual(rule, rule.strip()) # All categories are on by default, so defaults should # begin with -. self.assertTrue(rule.startswith('-')) # Check no rule occurs twice. self.assertNotIn(rule, already_seen) already_seen.append(rule) def test_defaults(self): """Check that default arguments are valid.""" default_options = self.defaults() # FIXME: We should not need to call parse() to determine # whether the default arguments are valid. parser = ArgumentParser(all_categories=self._all_categories(), base_filter_rules=[], default_options=default_options) # No need to test the return value here since we test parse() # on valid arguments elsewhere. # # The default options are valid: no error or SystemExit. 
parser.parse(args=[]) def test_path_rules_specifier(self): all_categories = self._all_categories() for (sub_paths, path_rules) in PATH_RULES_SPECIFIER: validate_filter_rules(path_rules, self._all_categories()) config = FilterConfiguration(path_specific=PATH_RULES_SPECIFIER) def assertCheck(path, category): """Assert that the given category should be checked.""" message = ('Should check category "%s" for path "%s".' % (category, path)) self.assertTrue(config.should_check(category, path)) def assertNoCheck(path, category): """Assert that the given category should not be checked.""" message = ('Should not check category "%s" for path "%s".' % (category, path)) self.assertFalse(config.should_check(category, path), message) assertCheck("random_path.cpp", "build/include") assertNoCheck("Tools/WebKitAPITest/main.cpp", "build/include") assertCheck("random_path.cpp", "readability/naming") assertNoCheck("Source/WebKit/gtk/webkit/webkit.h", "readability/naming") assertNoCheck("Tools/DumpRenderTree/gtk/DumpRenderTree.cpp", "readability/null") assertNoCheck("Source/WebKit/efl/ewk/ewk_view.h", "readability/naming") assertNoCheck("Source/WebCore/css/CSSParser.cpp", "readability/naming") # Test if Qt exceptions are indeed working assertCheck("Source/WebKit/qt/WidgetApi/qwebpage.cpp", "readability/braces") assertCheck("Source/WebKit/qt/tests/qwebelement/tst_qwebelement.cpp", "readability/braces") assertCheck("Source/WebKit/qt/declarative/platformplugin/WebPlugin.cpp", "readability/braces") assertCheck("Source/WebKit/qt/examples/platformplugin/WebPlugin.cpp", "readability/braces") assertNoCheck("Source/WebKit/qt/WidgetApi/qwebpage.cpp", "readability/naming") assertNoCheck("Source/WebKit/qt/tests/qwebelement/tst_qwebelement.cpp", "readability/naming") assertNoCheck("Source/WebKit/qt/declarative/platformplugin/WebPlugin.cpp", "readability/naming") assertNoCheck("Source/WebKit/qt/examples/platformplugin/WebPlugin.cpp", "readability/naming") 
assertNoCheck("Tools/MiniBrowser/qt/UrlLoader.cpp", "build/include") assertNoCheck("Source/WebKit2/UIProcess/API/qt", "readability/parameter_name") assertNoCheck("Source/WebCore/ForwardingHeaders/debugger/Debugger.h", "build/header_guard") assertNoCheck("Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp", "readability/naming") # Third-party Python code: webkitpy/thirdparty path = "Tools/Scripts/webkitpy/thirdparty/mock.py" assertNoCheck(path, "build/include") assertNoCheck(path, "pep8/E401") # A random pep8 category. assertCheck(path, "pep8/W191") assertCheck(path, "pep8/W291") assertCheck(path, "whitespace/carriage_return") # Test if the exception for GDBInterface.cpp is in place. assertNoCheck("Source/JavaScriptCore/jit/GDBInterface.cpp", "readability/naming") # Javascript keywords. assertCheck("Source/JavaScriptCore/parser/Keywords.table", "whitespace/carriage_return") def test_max_reports_per_category(self): """Check that _MAX_REPORTS_PER_CATEGORY is valid.""" all_categories = self._all_categories() for category in _MAX_REPORTS_PER_CATEGORY.iterkeys(): self.assertIn(category, all_categories, 'Key "%s" is not a category' % category) class CheckWebKitStyleFunctionTest(unittest.TestCase): """Tests the functions with names of the form check_webkit_style_*.""" def test_check_webkit_style_configuration(self): # Exercise the code path to make sure the function does not error out. option_values = CommandOptionValues() configuration = check_webkit_style_configuration(option_values) def test_check_webkit_style_parser(self): # Exercise the code path to make sure the function does not error out. parser = check_webkit_style_parser() class CheckerDispatcherSkipTest(unittest.TestCase): """Tests the "should skip" methods of the CheckerDispatcher class.""" def setUp(self): self._dispatcher = CheckerDispatcher() def test_should_skip_with_warning(self): """Test should_skip_with_warning().""" # Check skipped files. 
paths_to_skip = [ "Source/WebKit/gtk/tests/testatk.c", "Source/WebKit2/UIProcess/API/gtk/webkit2.h", "Source/WebKit2/UIProcess/API/gtk/WebKitWebView.h", "Source/WebKit2/UIProcess/API/gtk/WebKitLoader.h", ] for path in paths_to_skip: self.assertTrue(self._dispatcher.should_skip_with_warning(path), "Checking: " + path) # Verify that some files are not skipped. paths_not_to_skip = [ "foo.txt", "Source/WebKit2/UIProcess/API/gtk/HelperClass.cpp", "Source/WebKit2/UIProcess/API/gtk/HelperClass.h", "Source/WebKit2/UIProcess/API/gtk/WebKitWebView.cpp", "Source/WebKit2/UIProcess/API/gtk/WebKitWebViewPrivate.h", "Source/WebKit2/UIProcess/API/gtk/tests/WebViewTest.cpp", "Source/WebKit2/UIProcess/API/gtk/tests/WebViewTest.h", ] for path in paths_not_to_skip: self.assertFalse(self._dispatcher.should_skip_with_warning(path)) def _assert_should_skip_without_warning(self, path, is_checker_none, expected): # Check the file type before asserting the return value. checker = self._dispatcher.dispatch(file_path=path, handle_style_error=None, min_confidence=3) message = 'while checking: %s' % path self.assertEqual(checker is None, is_checker_none, message) self.assertEqual(self._dispatcher.should_skip_without_warning(path), expected, message) def test_should_skip_without_warning__true(self): """Test should_skip_without_warning() for True return values.""" # Check a file with NONE file type. path = 'foo.asdf' # Non-sensical file extension. self._assert_should_skip_without_warning(path, is_checker_none=True, expected=True) # Check files with non-NONE file type. These examples must be # drawn from the _SKIPPED_FILES_WITHOUT_WARNING configuration # variable. 
path = os.path.join('LayoutTests', 'foo.txt') self._assert_should_skip_without_warning(path, is_checker_none=False, expected=True) def test_should_skip_without_warning__false(self): """Test should_skip_without_warning() for False return values.""" paths = ['foo.txt', os.path.join('LayoutTests', 'ChangeLog'), ] for path in paths: self._assert_should_skip_without_warning(path, is_checker_none=False, expected=False) class CheckerDispatcherCarriageReturnTest(unittest.TestCase): def test_should_check_and_strip_carriage_returns(self): files = { 'foo.txt': True, 'foo.cpp': True, 'foo.vcproj': False, 'foo.vsprops': False, } dispatcher = CheckerDispatcher() for file_path, expected_result in files.items(): self.assertEqual(dispatcher.should_check_and_strip_carriage_returns(file_path), expected_result, 'Checking: %s' % file_path) class CheckerDispatcherDispatchTest(unittest.TestCase): """Tests dispatch() method of CheckerDispatcher class.""" def dispatch(self, file_path): """Call dispatch() with the given file path.""" dispatcher = CheckerDispatcher() self.mock_handle_style_error = DefaultStyleErrorHandler('', None, None, []) checker = dispatcher.dispatch(file_path, self.mock_handle_style_error, min_confidence=3) return checker def assert_checker_none(self, file_path): """Assert that the dispatched checker is None.""" checker = self.dispatch(file_path) self.assertIsNone(checker, 'Checking: "%s"' % file_path) def assert_checker(self, file_path, expected_class): """Assert the type of the dispatched checker.""" checker = self.dispatch(file_path) got_class = checker.__class__ self.assertEqual(got_class, expected_class, 'For path "%(file_path)s" got %(got_class)s when ' "expecting %(expected_class)s." 
% {"file_path": file_path, "got_class": got_class, "expected_class": expected_class}) def assert_checker_changelog(self, file_path): """Assert that the dispatched checker is a ChangeLogChecker.""" self.assert_checker(file_path, ChangeLogChecker) def assert_checker_cpp(self, file_path): """Assert that the dispatched checker is a CppChecker.""" self.assert_checker(file_path, CppChecker) def assert_checker_json(self, file_path): """Assert that the dispatched checker is a JSONChecker.""" self.assert_checker(file_path, JSONChecker) def assert_checker_python(self, file_path): """Assert that the dispatched checker is a PythonChecker.""" self.assert_checker(file_path, PythonChecker) def assert_checker_text(self, file_path): """Assert that the dispatched checker is a TextChecker.""" self.assert_checker(file_path, TextChecker) def assert_checker_xml(self, file_path): """Assert that the dispatched checker is a XMLChecker.""" self.assert_checker(file_path, XMLChecker) def test_changelog_paths(self): """Test paths that should be checked as ChangeLog.""" paths = [ "ChangeLog", "ChangeLog-2009-06-16", os.path.join("Source", "WebCore", "ChangeLog"), ] for path in paths: self.assert_checker_changelog(path) # Check checker attributes on a typical input. file_path = "ChangeLog" self.assert_checker_changelog(file_path) checker = self.dispatch(file_path) self.assertEqual(checker.file_path, file_path) self.assertEqual(checker.handle_style_error, self.mock_handle_style_error) def test_cpp_paths(self): """Test paths that should be checked as C++.""" paths = [ "-", "foo.c", "foo.cpp", "foo.h", ] for path in paths: self.assert_checker_cpp(path) # Check checker attributes on a typical input. file_base = "foo" file_extension = "c" file_path = file_base + "." 
+ file_extension self.assert_checker_cpp(file_path) checker = self.dispatch(file_path) self.assertEqual(checker.file_extension, file_extension) self.assertEqual(checker.file_path, file_path) self.assertEqual(checker.handle_style_error, self.mock_handle_style_error) self.assertEqual(checker.min_confidence, 3) # Check "-" for good measure. file_base = "-" file_extension = "" file_path = file_base self.assert_checker_cpp(file_path) checker = self.dispatch(file_path) self.assertEqual(checker.file_extension, file_extension) self.assertEqual(checker.file_path, file_path) def test_json_paths(self): """Test paths that should be checked as JSON.""" paths = [ "Source/WebCore/inspector/Inspector.json", "Tools/BuildSlaveSupport/build.webkit.org-config/config.json", ] for path in paths: self.assert_checker_json(path) # Check checker attributes on a typical input. file_base = "foo" file_extension = "json" file_path = file_base + "." + file_extension self.assert_checker_json(file_path) checker = self.dispatch(file_path) self.assertEqual(checker._handle_style_error, self.mock_handle_style_error) def test_python_paths(self): """Test paths that should be checked as Python.""" paths = [ "foo.py", "Tools/Scripts/modules/text_style.py", ] for path in paths: self.assert_checker_python(path) # Check checker attributes on a typical input. file_base = "foo" file_extension = "css" file_path = file_base + "." 
+ file_extension self.assert_checker_text(file_path) checker = self.dispatch(file_path) self.assertEqual(checker.file_path, file_path) self.assertEqual(checker.handle_style_error, self.mock_handle_style_error) def test_text_paths(self): """Test paths that should be checked as text.""" paths = [ "foo.ac", "foo.cc", "foo.cgi", "foo.css", "foo.exp", "foo.flex", "foo.gyp", "foo.gypi", "foo.html", "foo.idl", "foo.in", "foo.js", "foo.mm", "foo.php", "foo.pl", "foo.pm", "foo.pri", "foo.pro", "foo.rb", "foo.sh", "foo.txt", "foo.wm", "foo.xhtml", "foo.y", os.path.join("Source", "WebCore", "inspector", "front-end", "inspector.js"), os.path.join("Tools", "Scripts", "check-webkit-style"), ] for path in paths: self.assert_checker_text(path) # Check checker attributes on a typical input. file_base = "foo" file_extension = "css" file_path = file_base + "." + file_extension self.assert_checker_text(file_path) checker = self.dispatch(file_path) self.assertEqual(checker.file_path, file_path) self.assertEqual(checker.handle_style_error, self.mock_handle_style_error) def test_xml_paths(self): """Test paths that should be checked as XML.""" paths = [ "Source/WebCore/WebCore.vcproj/WebCore.vcproj", "WebKitLibraries/win/tools/vsprops/common.vsprops", ] for path in paths: self.assert_checker_xml(path) # Check checker attributes on a typical input. file_base = "foo" file_extension = "vcproj" file_path = file_base + "." + file_extension self.assert_checker_xml(file_path) checker = self.dispatch(file_path) self.assertEqual(checker._handle_style_error, self.mock_handle_style_error) def test_none_paths(self): """Test paths that have no file type..""" paths = [ "Makefile", "foo.asdf", # Non-sensical file extension. 
"foo.exe", ] for path in paths: self.assert_checker_none(path) class StyleProcessorConfigurationTest(unittest.TestCase): """Tests the StyleProcessorConfiguration class.""" def setUp(self): self._error_messages = [] """The messages written to _mock_stderr_write() of this class.""" def _mock_stderr_write(self, message): self._error_messages.append(message) def _style_checker_configuration(self, output_format="vs7"): """Return a StyleProcessorConfiguration instance for testing.""" base_rules = ["-whitespace", "+whitespace/tab"] filter_configuration = FilterConfiguration(base_rules=base_rules) return StyleProcessorConfiguration( filter_configuration=filter_configuration, max_reports_per_category={"whitespace/newline": 1}, min_confidence=3, output_format=output_format, stderr_write=self._mock_stderr_write) def test_init(self): """Test the __init__() method.""" configuration = self._style_checker_configuration() # Check that __init__ sets the "public" data attributes correctly. self.assertEqual(configuration.max_reports_per_category, {"whitespace/newline": 1}) self.assertEqual(configuration.stderr_write, self._mock_stderr_write) self.assertEqual(configuration.min_confidence, 3) def test_is_reportable(self): """Test the is_reportable() method.""" config = self._style_checker_configuration() self.assertTrue(config.is_reportable("whitespace/tab", 3, "foo.txt")) # Test the confidence check code path by varying the confidence. self.assertFalse(config.is_reportable("whitespace/tab", 2, "foo.txt")) # Test the category check code path by varying the category. 
self.assertFalse(config.is_reportable("whitespace/line", 4, "foo.txt")) def _call_write_style_error(self, output_format): config = self._style_checker_configuration(output_format=output_format) config.write_style_error(category="whitespace/tab", confidence_in_error=5, file_path="foo.h", line_number=100, message="message") def test_write_style_error_emacs(self): """Test the write_style_error() method.""" self._call_write_style_error("emacs") self.assertEqual(self._error_messages, ["foo.h:100: message [whitespace/tab] [5]\n"]) def test_write_style_error_vs7(self): """Test the write_style_error() method.""" self._call_write_style_error("vs7") self.assertEqual(self._error_messages, ["foo.h(100): message [whitespace/tab] [5]\n"]) class StyleProcessor_EndToEndTest(LoggingTestCase): """Test the StyleProcessor class with an emphasis on end-to-end tests.""" def setUp(self): LoggingTestCase.setUp(self) self._messages = [] def _mock_stderr_write(self, message): """Save a message so it can later be asserted.""" self._messages.append(message) def test_init(self): """Test __init__ constructor.""" configuration = StyleProcessorConfiguration( filter_configuration=FilterConfiguration(), max_reports_per_category={}, min_confidence=3, output_format="vs7", stderr_write=self._mock_stderr_write) processor = StyleProcessor(configuration) self.assertEqual(processor.error_count, 0) self.assertEqual(self._messages, []) def test_process(self): configuration = StyleProcessorConfiguration( filter_configuration=FilterConfiguration(), max_reports_per_category={}, min_confidence=3, output_format="vs7", stderr_write=self._mock_stderr_write) processor = StyleProcessor(configuration) processor.process(lines=['line1', 'Line with tab:\t'], file_path='foo.txt') self.assertEqual(processor.error_count, 1) expected_messages = ['foo.txt(2): Line contains tab character. 
' '[whitespace/tab] [5]\n'] self.assertEqual(self._messages, expected_messages) class StyleProcessor_CodeCoverageTest(LoggingTestCase): """Test the StyleProcessor class with an emphasis on code coverage. This class makes heavy use of mock objects. """ class MockDispatchedChecker(object): """A mock checker dispatched by the MockDispatcher.""" def __init__(self, file_path, min_confidence, style_error_handler): self.file_path = file_path self.min_confidence = min_confidence self.style_error_handler = style_error_handler def check(self, lines): self.lines = lines class MockDispatcher(object): """A mock CheckerDispatcher class.""" def __init__(self): self.dispatched_checker = None def should_skip_with_warning(self, file_path): return file_path.endswith('skip_with_warning.txt') def should_skip_without_warning(self, file_path): return file_path.endswith('skip_without_warning.txt') def should_check_and_strip_carriage_returns(self, file_path): return not file_path.endswith('carriage_returns_allowed.txt') def dispatch(self, file_path, style_error_handler, min_confidence): if file_path.endswith('do_not_process.txt'): return None checker = StyleProcessor_CodeCoverageTest.MockDispatchedChecker( file_path, min_confidence, style_error_handler) # Save the dispatched checker so the current test case has a # way to access and check it. self.dispatched_checker = checker return checker def setUp(self): LoggingTestCase.setUp(self) # We can pass an error-message swallower here because error message # output is tested instead in the end-to-end test case above. 
configuration = StyleProcessorConfiguration( filter_configuration=FilterConfiguration(), max_reports_per_category={"whitespace/newline": 1}, min_confidence=3, output_format="vs7", stderr_write=self._swallow_stderr_message) mock_carriage_checker_class = self._create_carriage_checker_class() mock_dispatcher = self.MockDispatcher() # We do not need to use a real incrementer here because error-count # incrementing is tested instead in the end-to-end test case above. mock_increment_error_count = self._do_nothing processor = StyleProcessor(configuration=configuration, mock_carriage_checker_class=mock_carriage_checker_class, mock_dispatcher=mock_dispatcher, mock_increment_error_count=mock_increment_error_count) self._configuration = configuration self._mock_dispatcher = mock_dispatcher self._processor = processor def _do_nothing(self): # We provide this function so the caller can pass it to the # StyleProcessor constructor. This lets us assert the equality of # the DefaultStyleErrorHandler instance generated by the process() # method with an expected instance. pass def _swallow_stderr_message(self, message): """Swallow a message passed to stderr.write().""" # This is a mock stderr.write() for passing to the constructor # of the StyleProcessorConfiguration class. pass def _create_carriage_checker_class(self): # Create a reference to self with a new name so its name does not # conflict with the self introduced below. test_case = self class MockCarriageChecker(object): """A mock carriage-return checker.""" def __init__(self, style_error_handler): self.style_error_handler = style_error_handler # This gives the current test case access to the # instantiated carriage checker. test_case.carriage_checker = self def check(self, lines): # Save the lines so the current test case has a way to access # and check them. 
self.lines = lines return lines return MockCarriageChecker def test_should_process__skip_without_warning(self): """Test should_process() for a skip-without-warning file.""" file_path = "foo/skip_without_warning.txt" self.assertFalse(self._processor.should_process(file_path)) def test_should_process__skip_with_warning(self): """Test should_process() for a skip-with-warning file.""" file_path = "foo/skip_with_warning.txt" self.assertFalse(self._processor.should_process(file_path)) self.assertLog(['WARNING: File exempt from style guide. ' 'Skipping: "foo/skip_with_warning.txt"\n']) def test_should_process__true_result(self): """Test should_process() for a file that should be processed.""" file_path = "foo/skip_process.txt" self.assertTrue(self._processor.should_process(file_path)) def test_process__checker_dispatched(self): """Test the process() method for a path with a dispatched checker.""" file_path = 'foo.txt' lines = ['line1', 'line2'] line_numbers = [100] expected_error_handler = DefaultStyleErrorHandler( configuration=self._configuration, file_path=file_path, increment_error_count=self._do_nothing, line_numbers=line_numbers) self._processor.process(lines=lines, file_path=file_path, line_numbers=line_numbers) # Check that the carriage-return checker was instantiated correctly # and was passed lines correctly. carriage_checker = self.carriage_checker self.assertEqual(carriage_checker.style_error_handler, expected_error_handler) self.assertEqual(carriage_checker.lines, ['line1', 'line2']) # Check that the style checker was dispatched correctly and was # passed lines correctly. 
checker = self._mock_dispatcher.dispatched_checker self.assertEqual(checker.file_path, 'foo.txt') self.assertEqual(checker.min_confidence, 3) self.assertEqual(checker.style_error_handler, expected_error_handler) self.assertEqual(checker.lines, ['line1', 'line2']) def test_process__no_checker_dispatched(self): """Test the process() method for a path with no dispatched checker.""" path = os.path.join('foo', 'do_not_process.txt') self.assertRaises(AssertionError, self._processor.process, lines=['line1', 'line2'], file_path=path, line_numbers=[100]) def test_process__carriage_returns_not_stripped(self): """Test that carriage returns aren't stripped from files that are allowed to contain them.""" file_path = 'carriage_returns_allowed.txt' lines = ['line1\r', 'line2\r'] line_numbers = [100] self._processor.process(lines=lines, file_path=file_path, line_numbers=line_numbers) # The carriage return checker should never have been invoked, and so # should not have saved off any lines. self.assertFalse(hasattr(self.carriage_checker, 'lines'))
bsd-3-clause
ShefaliGups11/Implementation-of-SFB-in-ns-3
src/lr-wpan/bindings/callbacks_list.py
9
3325
callback_classes = [ ['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'unsigned int', 'ns3::Ptr<ns3::Packet>', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPibAttributeIdentifier', 'ns3::LrWpanPhyPibAttributes *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPibAttributeIdentifier', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Time', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPhyEnumeration', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPhyEnumeration', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::McpsDataConfirmParams', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::McpsDataIndicationParams', 'ns3::Ptr<ns3::Packet>', 'ns3::empty', 
'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned char', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanMacState', 'ns3::LrWpanMacState', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanMacState', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'std::basic_string<char>', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ]
gpl-2.0
CSC-ORG/Dynamic-Dashboard-2015
engine/lib/python2.7/site-packages/django/contrib/gis/db/backends/oracle/schema.py
608
4050
from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.db.backends.utils import truncate_name


class OracleGISSchemaEditor(DatabaseSchemaEditor):
    """Schema editor that keeps Oracle Spatial metadata in sync with the schema.

    Oracle requires every geometry column to be registered in the
    USER_SDO_GEOM_METADATA dictionary view (and optionally indexed with a
    MDSYS.SPATIAL_INDEX), so geometry DDL is queued while columns are being
    generated and flushed after the table itself exists.
    """

    sql_add_geometry_metadata = ("""
        INSERT INTO USER_SDO_GEOM_METADATA
            ("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID")
        VALUES (
            %(table)s,
            %(column)s,
            MDSYS.SDO_DIM_ARRAY(
                MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s),
                MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s)
            ),
            %(srid)s
        )""")
    sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX'
    sql_drop_spatial_index = 'DROP INDEX %(index)s'
    sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s'
    sql_clear_geometry_field_metadata = (
        'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s '
        'AND COLUMN_NAME = %(column)s'
    )

    def __init__(self, *args, **kwargs):
        super(OracleGISSchemaEditor, self).__init__(*args, **kwargs)
        # Geometry DDL queued during column generation; executed by
        # run_geometry_sql() once the owning table exists.
        self.geometry_sql = []

    def geo_quote_name(self, name):
        # Geometry metadata values are quoted by the spatial backend, not by
        # the generic identifier quoting.
        return self.connection.ops.geo_quote_name(name)

    def column_sql(self, model, field, include_default=False):
        """Return the column SQL, queueing spatial metadata/index statements
        for geometry fields as a side effect."""
        sql = super(OracleGISSchemaEditor, self).column_sql(model, field, include_default)
        if not isinstance(field, GeometryField):
            return sql

        db_table = model._meta.db_table
        extent = field._extent
        self.geometry_sql.append(
            self.sql_add_geometry_metadata % {
                'table': self.geo_quote_name(db_table),
                'column': self.geo_quote_name(field.column),
                'dim0': extent[0],
                'dim1': extent[1],
                'dim2': extent[2],
                'dim3': extent[3],
                'tolerance': field._tolerance,
                'srid': field.srid,
            }
        )
        if field.spatial_index:
            self.geometry_sql.append(
                self.sql_add_spatial_index % {
                    'index': self.quote_name(self._create_spatial_index_name(model, field)),
                    'table': self.quote_name(db_table),
                    'column': self.quote_name(field.column),
                }
            )
        return sql

    def create_model(self, model):
        super(OracleGISSchemaEditor, self).create_model(model)
        # Table now exists; flush any geometry DDL queued by column_sql().
        self.run_geometry_sql()

    def delete_model(self, model):
        super(OracleGISSchemaEditor, self).delete_model(model)
        # Drop every metadata row registered for the deleted table.
        self.execute(self.sql_clear_geometry_table_metadata % {
            'table': self.geo_quote_name(model._meta.db_table),
        })

    def add_field(self, model, field):
        super(OracleGISSchemaEditor, self).add_field(model, field)
        self.run_geometry_sql()

    def remove_field(self, model, field):
        # Clear the spatial bookkeeping before the column itself goes away.
        if isinstance(field, GeometryField):
            self.execute(self.sql_clear_geometry_field_metadata % {
                'table': self.geo_quote_name(model._meta.db_table),
                'column': self.geo_quote_name(field.column),
            })
            if field.spatial_index:
                self.execute(self.sql_drop_spatial_index % {
                    'index': self.quote_name(self._create_spatial_index_name(model, field)),
                })
        super(OracleGISSchemaEditor, self).remove_field(model, field)

    def run_geometry_sql(self):
        """Execute and then clear all queued geometry DDL statements."""
        for statement in self.geometry_sql:
            self.execute(statement)
        self.geometry_sql = []

    def _create_spatial_index_name(self, model, field):
        # Oracle doesn't allow object names > 30 characters. Use this scheme
        # instead of self._create_index_name() for backwards compatibility.
        return truncate_name('%s_%s_id' % (model._meta.db_table, field.column), 30)
mit
TheTypoMaster/chromium-crosswalk
chrome/common/extensions/docs/server2/host_file_system_provider_test.py
97
1715
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from copy import deepcopy
import unittest

from extensions_paths import CHROME_API
from file_system import FileNotFoundError
from host_file_system_provider import HostFileSystemProvider
from object_store_creator import ObjectStoreCreator
from test_data.canned_data import CANNED_API_FILE_SYSTEM_DATA
from test_file_system import TestFileSystem


class HostFileSystemProviderTest(unittest.TestCase):
  # Exercises HostFileSystemProvider's caching and offline behaviour against
  # an in-memory TestFileSystem seeded with canned API data.

  def setUp(self):
    self._idle_path = CHROME_API + 'idle.json'
    # Deep-copy so individual tests may mutate the canned data freely.
    self._canned_data = deepcopy(CANNED_API_FILE_SYSTEM_DATA)

  def _constructor_for_test(self, branch, **optargs):
    # Stand-in constructor handed to HostFileSystemProvider so branches are
    # backed by this test's private copy of the canned data.
    return TestFileSystem(self._canned_data[branch])

  def testWithCaching(self):
    provider = HostFileSystemProvider(
        ObjectStoreCreator.ForTest(),
        constructor_for_test=self._constructor_for_test)

    file_system = provider.GetBranch('1500')
    before_mutation = file_system.ReadSingle(self._idle_path).Get()
    # Mutate the backing store; a caching file system must keep serving the
    # value it already read.
    api_dir = self._canned_data['1500']['chrome']['common']['extensions']['api']
    api_dir['idle.json'] = 'blah blah blah'
    after_mutation = file_system.ReadSingle(self._idle_path).Get()
    self.assertEqual(before_mutation, after_mutation)

  def testWithOffline(self):
    provider = HostFileSystemProvider(
        ObjectStoreCreator.ForTest(),
        offline=True,
        constructor_for_test=self._constructor_for_test)

    file_system = provider.GetBranch('1500')
    # An offline file system must refuse reads with FileNotFoundError.
    self.assertRaises(FileNotFoundError,
                      file_system.ReadSingle(self._idle_path).Get)


if __name__ == '__main__':
  unittest.main()
bsd-3-clause
samuknet/servo
tests/wpt/harness/wptrunner/update/base.py
196
2148
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Sentinel return values a step may produce to make the runner stop early.
exit_unclean = object()
exit_clean = object()


class Step(object):
    """Base class for state-creating steps.

    When a Step is run() the current state is checked to see if the state
    from this step has already been created. If it has the restore() method
    is invoked. Otherwise the create() method is invoked with the state
    object. This is expected to add items with all the keys in
    __class__.provides to the state object.
    """

    # State keys this step is expected to add when create() succeeds.
    provides = []

    def __init__(self, logger):
        self.logger = logger

    def run(self, step_index, state):
        """Run (or restore) this step and record it in ``state.steps``.

        :param step_index: position of this step in the overall sequence,
            used to look up which step (if any) previously ran in this slot.
        :param state: shared state object; must support attribute access,
            ``keys()`` and ``in`` membership tests.
        :raises ValueError: if a *different* step's name is stored at this
            index, i.e. the stored sequence does not match this run.
        """
        name = self.__class__.__name__
        try:
            stored_step = state.steps[step_index]
        except IndexError:
            stored_step = None

        if stored_step == name:
            self.restore(state)
        elif stored_step is None:
            self.create(state)
            assert set(self.provides).issubset(set(state.keys()))
            # NOTE(review): reassignment rather than list.append looks
            # deliberate -- state objects may persist values on attribute
            # assignment, so an in-place mutation could go unrecorded.
            # Preserved as-is; confirm against the State implementation.
            state.steps = state.steps + [name]
        else:
            raise ValueError("Expected a %s step, got a %s step" % (name, stored_step))

    def create(self, data):
        """Create this step's state. Subclasses must override."""
        raise NotImplementedError

    def restore(self, state):
        """Reuse previously created state, checking all provided keys exist."""
        self.logger.debug("Step %s using stored state" % (self.__class__.__name__,))
        for key in self.provides:
            assert key in state


class StepRunner(object):
    """Class that runs a specified series of Steps with a common State."""

    # Subclasses list their Step classes here, in execution order.
    steps = []

    def __init__(self, logger, state):
        self.state = state
        self.logger = logger
        if "steps" not in state:
            state.steps = []

    def run(self):
        """Run each step in order.

        Stops early and returns the sentinel if a step yields ``exit_clean``
        or ``exit_unclean``; otherwise returns the last step's return value
        (``None`` when there are no steps).
        """
        rv = None
        for step_index, step in enumerate(self.steps):
            self.logger.debug("Starting step %s" % step.__name__)
            rv = step(self.logger).run(step_index, self.state)
            if rv in (exit_clean, exit_unclean):
                break

        return rv
mpl-2.0
hongmeister/aislib
aislib.py
1
24559
#!/usr/bin/env python

"""
    Simple AIS library.

    Supports creating and decoding NMEA formatted AIS type 1, 5 and 24
    messages.

    @author Daniel Hong
    https://github.com/doodleincode/aislib

    This program is licensed under the GNU GENERAL PUBLIC LICENSE Version 2.
    A LICENSE file should have accompanied this program.
"""

import bitstring
import binascii

# The 64-character alphabet used for AIS six-bit payload "armoring":
# index i (0..63) maps to the ASCII character that encodes it when building
# an AIS bit stream. The jump from 'W' straight to '`' is part of the
# encoding table, not a typo.
encodingchars = list(
    "0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVW`abcdefghijklmnopqrstuvw"
)

# Reverse lookup (character -> six-bit value) used when decoding payloads.
re_encodingchars = dict((ch, idx) for idx, ch in enumerate(encodingchars))
# END character encoding map

# AIS 6-bit ASCII character set used for text fields (names, destinations).
AISchars = '@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_ !"#$%&\'()*+,-./0123456789:;<=>?'
re_AISchars = {} for i in range(len(AISchars)): re_AISchars[AISchars[i]] = i def AISString2Bits(name,length=20): if len(name)>length: name = name[:length] if len(name)<length: name = name+'@'*(length-len(name)) return bitstring.Bits().join(['uint:6=%d' % re_AISchars[name[k]] for k in range(len(name))]) def int2bin6(num): """ Converts the given integer to a 6-bit binary representation """ return "".join(num & (1 << i) and '1' or '0' for i in range(5, -1, -1)) class CRCInvalidError(Exception): pass class AISMessage(object): # Contain our AIS message elements _attrs = {} # Map the number of bits for each element in the AIS message _bitmap = {} def __init__(self, elements): # Init our bit mapping and load up default message values for key, arr in elements.iteritems(): # arr[0] == data type of the element # arr[1] == number of bits for given element # arr[2] == the default value for the element self._bitmap[key] = [ arr[0], arr[1] ] # Set default value self.__setattr__(key, arr[2]) def __getattr__(self, name): """ We are overriding the behavior of __getattr__ to implement dynamic class properties. This way we can do stuff like [class].[property] without requiring a "getter" method for each property. If the AIS message element is not found in our attribute table, we'll revert to the default behavior of __getattr__ """ if name in self._attrs: return self._attrs[name] # Preserve the default behavior if our custom attributes were not found return super(AISMessage, self).__getattr__(name) def __setattr__(self, name, value): """ We are overriding the __setattr__ to implement dynamic property "setters". 
""" if type(value) not in [ int,long]: raise TypeError("Value must be an integer.") if name == "_bitmap": super(AISMessage, self).__setattr__(name, value) # Set attributes that are supported by the sub-classed AIS message type elif name in self._bitmap: # String format is: [datatype]:[num_bits]=[value] self._attrs[name] = bitstring.Bits( "%s:%d=%d" % (self._bitmap[name][0], self._bitmap[name][1], value)) else: raise AttributeError("Unsupported AIS message element.") def get_attr(self, name): """ Returns an integer representation of the binary value for the given element name. @param name Name of the AIS message element to retrieve @return Human readable int value. If invalid element, returns None """ if name in self._attrs: if self._bitmap[name][0] == "int": return self._attrs[name].int else: return self._attrs[name].uint return None ## Sub-classes should implement the methods below ## def build_bitstream(self): """ Build the bitstream which we will be using to encode the payload. This will basically involve concatenating all the message elements into one bitstring. Sub-classes that extend the AISMessage class are required to implement this method. Example implementation: return bitstring.Bits().join([ self.element_1, self.element_2, [...] ]) """ pass def unpack(self, bitstream): """ Unpack a bitstream into AIS message elements. Sub-classes can optionally implement this method to support decoding of AIS messages. Example implementation: self._attrs["element_1"] = bitstring.Bits(bin=bitstream[0:6]) self._attrs["element_2"] = bitstring.Bits(bin=bitstream[6:8]) [...] """ pass class AISPositionReportMessage(AISMessage): def __init__(self, id=1, repeat=0, mmsi=0, status=15, rot=-128, sog=0, pa=0, lon=0, lat=0, cog=3600, heading=511, ts=60, smi=0, spare=0, raim=0, comm_state=0): """ Returns an instance of an AIS Position Report Message class The parameters contain the default values, simply set the parameters who's value need to change. 
Ex: aismsg = AISPositionReportMessage(mmsi=12345, lon=4567, lat=5432) """ super(AISPositionReportMessage, self).__init__({ # message_element : ["data_type", num_bits, initial_value] 'id' : ["uint", 6, id], 'repeat' : ["uint", 2, repeat], 'mmsi' : ["uint", 30, mmsi], 'status' : ["uint", 4, status], 'rot' : ["int", 8, rot], 'sog' : ["uint", 10, sog], 'pa' : ["uint", 1, pa], 'lon' : ["int", 28, lon], 'lat' : ["int", 27, lat], 'cog' : ["uint", 12, cog], 'heading' : ["uint", 9, heading], 'ts' : ["uint", 6, ts], 'smi' : ["uint", 2, smi], 'spare' : ["uint", 3, spare], 'raim' : ["uint", 1, raim], 'comm_state' : ["uint", 19, comm_state] }) def build_bitstream(self): return bitstring.Bits().join([ self.id, self.repeat, self.mmsi, self.status, self.rot, self.sog, self.pa, self.lon, self.lat, self.cog, self.heading, self.ts, self.smi, self.spare, self.raim, self.comm_state ]) def unpack(self, bitstream): # TODO: figure out a better way to do this, but works fine for now self._attrs["id"] = bitstring.Bits(bin=bitstream[0:6]) self._attrs["repeat"] = bitstring.Bits(bin=bitstream[6:8]) self._attrs["mmsi"] = bitstring.Bits(bin=bitstream[8:38]) self._attrs["status"] = bitstring.Bits(bin=bitstream[38:42]) self._attrs["rot"] = bitstring.Bits(bin=bitstream[42:50]) self._attrs["sog"] = bitstring.Bits(bin=bitstream[50:60]) self._attrs["pa"] = bitstring.Bits(bin=bitstream[60:61]) self._attrs["lon"] = bitstring.Bits(bin=bitstream[61:89]) self._attrs["lat"] = bitstring.Bits(bin=bitstream[89:116]) self._attrs["cog"] = bitstring.Bits(bin=bitstream[116:128]) self._attrs["heading"] = bitstring.Bits(bin=bitstream[128:137]) self._attrs["ts"] = bitstring.Bits(bin=bitstream[137:143]) self._attrs["smi"] = bitstring.Bits(bin=bitstream[143:145]) self._attrs["spare"] = bitstring.Bits(bin=bitstream[145:148]) self._attrs["raim"] = bitstring.Bits(bin=bitstream[148:149]) self._attrs["comm_state"] = bitstring.Bits(bin=bitstream[149:168]) class AISStaticAndVoyageReportMessage(AISMessage): def __init__(self, 
id=5, repeat=0, mmsi=0, ais_version=0, imo=0, callsign=0, shipname=0, shiptype=0, to_bow=0, to_stern=0, to_port=0, to_starboard=0, epfd=1, month=0, day=0, hour=24, minute=60, draught=0, destination=0, dte=0, spare=0): """ Returns an instance of an AIS Position Report Message class The parameters contain the default values, simply set the parameters who's value need to change. Ex: aismsg = AISStaticAndVoyageReportMessage(mmsi=12345,shipname='ASIAN JADE') """ super(AISStaticAndVoyageReportMessage, self).__init__({ # message_element : ["data_type", num_bits, initial_value] 'id' : ["uint", 6, id], 'repeat' : ["uint", 2, repeat], 'mmsi' : ["uint", 30, mmsi], 'ais_version' : ["uint", 2, ais_version], 'imo' : ["uint", 30, imo], 'callsign' : ["uint", 42, AISString2Bits(callsign,length=42/6).int if type(callsign) == str else callsign], 'shipname' : ["uint", 120, AISString2Bits(shipname,length=120/6).int if type(shipname) == str else shipname], 'shiptype' : ["uint", 8, shiptype], 'to_bow' : ["uint", 9, to_bow], 'to_stern' : ["uint", 9, to_stern], 'to_port' : ["uint", 6, to_port], 'to_starboard' : ["uint", 6, to_starboard], 'epfd' : ["uint", 4, epfd], 'month' : ["uint", 4, month], 'day' : ["uint", 5, day], 'hour' : ["uint", 5, hour], 'minute' : ["uint", 6, minute], 'draught' : ["uint", 8, draught], 'destination' : ["uint", 120, AISString2Bits(destination,length=120/6).int if type(destination) == str else destination], 'dte' : ["uint", 1, dte], 'spare' : ["uint", 1, spare] }) def build_bitstream(self): return bitstring.Bits().join([ self.id, self.repeat, self.mmsi, self.ais_version, self.imo, self.callsign, self.shipname, self.shiptype, self.to_bow, self.to_stern, self.to_port, self.to_starboard, self.epfd, self.month, self.day, self.hour, self.minute, self.draught, self.destination, self.dte, self.spare ]) def unpack(self, bitstream): # TODO: figure out a better way to do this, but works fine for now self._attrs["id"] = bitstring.Bits(bin=bitstream[0:6]) self._attrs["repeat"] 
= bitstring.Bits(bin=bitstream[6:8]) self._attrs["mmsi"] = bitstring.Bits(bin=bitstream[8:38]) self._attrs["ais_version"] = bitstring.Bits(bin=bitstream[38:40]) self._attrs["imo"] = bitstring.Bits(bin=bitstream[40:70]) self._attrs["callsign"] = bitstring.Bits(bin=bitstream[70:112]) self._attrs["shipname"] = bitstring.Bits(bin=bitstream[112:232]) self._attrs["shiptype"] = bitstring.Bits(bin=bitstream[232:240]) self._attrs["to_bow"] = bitstring.Bits(bin=bitstream[240:249]) self._attrs["to_stern"] = bitstring.Bits(bin=bitstream[249:258]) self._attrs["to_port"] = bitstring.Bits(bin=bitstream[258:264]) self._attrs["to_starboard"] = bitstring.Bits(bin=bitstream[264:270]) self._attrs["epfd"] = bitstring.Bits(bin=bitstream[270:274]) self._attrs["month"] = bitstring.Bits(bin=bitstream[274:278]) self._attrs["day"] = bitstring.Bits(bin=bitstream[278:283]) self._attrs["hour"] = bitstring.Bits(bin=bitstream[283:288]) self._attrs["minute"] = bitstring.Bits(bin=bitstream[288:294]) self._attrs["draught"] = bitstring.Bits(bin=bitstream[294:302]) self._attrs["destination"] = bitstring.Bits(bin=bitstream[302:422]) self._attrs["dte"] = bitstring.Bits(bin=bitstream[422:423]) self._attrs["spare"] = bitstring.Bits(bin=bitstream[423:424]) class AISStaticDataReportAMessage(AISMessage): def __init__(self, id=24, repeat=0, mmsi=0, partno=0, shipname=0, spare=0): """ Returns an instance of an AIS Static Data Report Message Format A class The parameters contain the default values, simply set the parameters whose values need to change. 
Ex: aismsg = AISPositionReportAMessage(mmsi=12345, shipname='ASIAN JADE') """ super(AISStaticDataReportAMessage, self).__init__({ # message_element : ["data_type", num_bits, initial_value] 'id' : ["uint", 6, id], 'repeat' : ["uint", 2, repeat], 'mmsi' : ["uint", 30, mmsi], 'partno' : ["uint", 2, partno], 'shipname' : ["uint", 120, AISString2Bits(shipname,length=120/6).int if type(shipname) == str else shipname], 'spare' : ["uint", 8, spare] }) def build_bitstream(self): return bitstring.Bits().join([ self.id, self.repeat, self.mmsi, self.partno, self.shipname, self.spare ]) def unpack(self, bitstream): # TODO: figure out a better way to do this, but works fine for now self._attrs["id"] = bitstring.Bits(bin=bitstream[0:6]) self._attrs["repeat"] = bitstring.Bits(bin=bitstream[6:8]) self._attrs["mmsi"] = bitstring.Bits(bin=bitstream[8:38]) self._attrs["partno"] = bitstring.Bits(bin=bitstream[38:40]) self._attrs["shipname"] = bitstring.Bits(bin=bitstream[40:160]) self._attrs["spare"] = bitstring.Bits(bin=bitstream[160:168]) class AISStaticDataReportBMessage(AISMessage): def __init__(self, id=24, repeat=0, mmsi=0, partno=1, shiptype=0, vendorid=0,model=0,serial=0,callsign=0, to_bow=0,to_stern=0,to_port=0,to_starboard=0, spare=0): """ Returns an instance of an AIS Static Data Report Message Format A class The parameters contain the default values, simply set the parameters whose values need to change. 
Ex: aismsg = AISPositionReportBMessage(mmsi=12345, shiptype=60) """ super(AISStaticDataReportBMessage, self).__init__({ # message_element : ["data_type", num_bits, initial_value] 'id' : ["uint", 6, id], 'repeat' : ["uint", 2, repeat], 'mmsi' : ["uint", 30, mmsi], 'partno' : ["uint", 2, partno], 'shiptype' : ["uint", 8, shiptype], 'vendorid' : ["uint", 18, AISString2Bits(vendorid,length=18/6).int if type(vendorid) == str else vendorid], 'model' : ["uint", 4, model], 'serial' : ["uint", 20, serial], 'callsign' : ["uint", 42, AISString2Bits(callsign,length=42/6).int if type(callsign) == str else callsign], 'to_bow' : ["uint", 9, to_bow], 'to_stern' : ["uint", 9, to_stern], 'to_port' : ["uint", 6, to_port], 'to_starboard' : ["uint", 6, to_starboard], 'spare' : ["uint", 6, spare] }) def build_bitstream(self): return bitstring.Bits().join([ self.id, self.repeat, self.mmsi, self.partno, self.shiptype, self.vendorid, self.model, self.serial, self.callsign, self.to_bow, self.to_stern, self.to_port, self.to_starboard, self.spare ]) def unpack(self, bitstream): # TODO: figure out a better way to do this, but works fine for now self._attrs["id"] = bitstring.Bits(bin=bitstream[0:6]) self._attrs["repeat"] = bitstring.Bits(bin=bitstream[6:8]) self._attrs["mmsi"] = bitstring.Bits(bin=bitstream[8:38]) self._attrs["partno"] = bitstring.Bits(bin=bitstream[38:40]) self._attrs["shiptype"] = bitstring.Bits(bin=bitstream[40:48]) self._attrs["vendorid"] = bitstring.Bits(bin=bitstream[48:66]) self._attrs["model"] = bitstring.Bits(bin=bitstream[66:70]) self._attrs["serial"] = bitstring.Bits(bin=bitstream[70:90]) self._attrs["callsign"] = bitstring.Bits(bin=bitstream[90:132]) self._attrs["to_bow"] = bitstring.Bits(bin=bitstream[132:141]) self._attrs["to_stern"] = bitstring.Bits(bin=bitstream[141:150]) self._attrs["to_port"] = bitstring.Bits(bin=bitstream[150:156]) self._attrs["to_starboard"] = bitstring.Bits(bin=bitstream[156:162]) self._attrs["spare"] = 
bitstring.Bits(bin=bitstream[162:168]) class AIS(object): # Instance of the AISMessage class _ais_message = None def __init__(self, ais_message): # If the provided param was not an AISMessage object, throw exception if not isinstance(ais_message, AISMessage): raise TypeError("Variable 'ais_message' is not an instance of 'AISMessage'.") # Otherwise set the variable self._ais_message = ais_message def build_payload(self, invert_crc = False): """ Builds the AIS NMEA message string This method only supports AIVDM, single fragment, 168 bit (28-char) payload Type 1 and Type 24 format A are of this kind Field 1, !AIVDM, identifies this as an AIVDM packet. Field 2 (1) is the count of fragments in the currently accumulating message. The payload size of each sentence is limited by NMEA 0183's 82-character maximum, so it is sometimes required to split a payload over several fragment sentences. Field 3 (1) is the fragment number of this sentence. It will be one-based. A sentence with a fragment count of 1 and a fragment number of 1 is complete in itself. Field 4 (empty) is a sequential message ID for multi-sentence messages. Field 5 (A) is a radio channel code. AIS uses the high side of the duplex from two VHF radio channels: - AIS Channel A is 161.975Mhz (87B); - AIS Channel B is 162.025Mhz (88B). Field 6 is the encoded payload string Field 7 (0) is the number of fill bits requires to pad the data payload to a 6-bit boundary. This value can range from 1-5. """ payLoad = self.encode() payload = "!AIVDM,1,1,,A," + payLoad + '*' chksum = self.crc(payload) if invert_crc: chksum = ~chksum return payload + "%02X" % (chksum & 0xff) def encode(self, bitstr = None): """ Encode a bitstream into a 6-bit encoded AIS message string @param bitstr The bitstream. This should be a Bit object generated from bitstring.Bit(...). 
If this is not provided, then it will use the bitstring from the '_ais_message' property @return 6-bit encoded AIS string """ curr_index = 0 curr_offset = 6 # We'll be encoding 6 bits at a time encoded_str = [] if bitstr == None: bitstr = self._ais_message.build_bitstream() # The total AIS message is len(bitstr) bits # Since we are encoding 6-bit chunks, we are looping # round(len(bitstr)/ 6.) times (type 5 has 424 bits which does not divide by 6 #print len(bitstr),len(bitstr)/6,len(bitstr)/6.,int(round(len(bitstr)/6.)) for i in range(0, int(round(len(bitstr)/6.))): block = bitstr[curr_index:curr_offset] encoded_str.append(encodingchars[block.uint]) curr_index += 6 curr_offset += 6 remainingbits = len(bitstr) %6 fillbits = (6 -remainingbits) if remainingbits !=0 else 0 return ("".join(encoded_str))+','+chr(ord('0')+fillbits) def decode(self, msg): """ Decodes an AIS NMEA formatted message. Currently only supports the Position Report Message type. On success, returns an instance of AISPositionReportMessage. A CRC check is performed. If the CRC does not match, a CRCInvalidError exception is thrown @param msg The message to decode @return If CRC checks, returns an instance of AISPositionReportMessage """ computed_crc = self.crc(msg) given_crc = int(msg[-2:], 16) # If CRC did not match, throw exception! if given_crc != computed_crc: raise CRCInvalidError("The given CRC did not match the computed CRC.") # Otherwise we can continue with decoding the message # ... # Grap just the payload. 
The 6th index in the AIS message contains the payload payload,fillbits = msg.split(",")[5:7] # First we will reverse the 6-bit ascii encoding to its integer equivalent # using our reverse encoded character map dec = [] for c in payload: dec.append(re_encodingchars[c]) # Now we will take our list of integers and convert it to a bitstream bits = [] for i in range(len(dec)): bits.append(int2bin6(dec[i])) bitstream = "".join(bits) if fillbits[0] !='0':bitstream = bitstream[:-int(fillbits[0])] msgId = bitstream[0:6]#;print msgId if msgId == '000001': aismsg = AISPositionReportMessage() elif msgId == '011000' and bitstream[38] == '0': aismsg = AISStaticDataReportAMessage() elif msgId == '011000' and bitstream[38] == '1': aismsg = AISStaticDataReportBMessage() elif msgId == '000101': aismsg = AISStaticAndVoyageReportMessage() aismsg.unpack(bitstream) return aismsg def crc(self, msg): """ Generates the CRC for the given AIS NMEA formatted string @param msg The message used to generate the CRC. This should be a well formed NMEA formatted message @return Integer representation of the CRC. You can use hex(crc) to get the hex """ chksum = 0 # If the input contains the entire NMEA message, then we just need to # get the string between the ! and * # Otherwise we'll assume the input contains just the string to checksum astk = msg.rfind("*") if msg[0] == "!" and astk != -1: msg = msg[1:astk] for c in msg: chksum = chksum ^ ord(c) return chksum
gpl-2.0
nzjrs/overo-openembedded
recipes/python/python-native-2.6.1/sitecustomize.py
228
1125
# OpenEmbedded sitecustomize.py (C) 2002-2008 Michael 'Mickey' Lauer <mlauer@vanille-media.de> # GPLv2 or later # Version: 20081123 # Features: # * set proper default encoding # * enable readline completion in the interactive interpreter # * load command line history on startup # * save command line history on exit import os def __exithandler(): try: readline.write_history_file( "%s/.python-history" % os.getenv( "HOME", "/tmp" ) ) except IOError: pass def __registerExitHandler(): import atexit atexit.register( __exithandler ) def __enableReadlineSupport(): readline.set_history_length( 1000 ) readline.parse_and_bind( "tab: complete" ) try: readline.read_history_file( "%s/.python-history" % os.getenv( "HOME", "/tmp" ) ) except IOError: pass def __enableDefaultEncoding(): import sys try: sys.setdefaultencoding( "utf8" ) except LookupError: pass import sys try: import rlcompleter, readline except ImportError: pass else: __enableDefaultEncoding() __registerExitHandler() __enableReadlineSupport()
mit
cigalsace/ldapreader
libs/requests/models.py
410
29176
# -*- coding: utf-8 -*- """ requests.models ~~~~~~~~~~~~~~~ This module contains the primary objects that power Requests. """ import collections import datetime from io import BytesIO, UnsupportedOperation from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url from .packages.urllib3.exceptions import ( DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) from .exceptions import ( HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, ContentDecodingError, ConnectionError, StreamConsumedError) from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, is_py2, chardet, json, builtin_str, basestring) from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. REDIRECT_STATI = ( codes.moved, # 301 codes.found, # 302 codes.other, # 303 codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 json_dumps = json.dumps class RequestEncodingMixin(object): @property def path_url(self): """Build the path URL to use.""" url = [] p = urlsplit(self.url) path = p.path if not path: path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url) @staticmethod def _encode_params(data): """Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. 
Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: if v is not None: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else v)) return urlencode(result, doseq=True) else: return data @staticmethod def _encode_files(files, data): """Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ if (not files): raise ValueError("Files must be provided.") elif isinstance(data, basestring): raise ValueError("Data must not be a string.") new_fields = [] fields = to_key_val_list(data or {}) files = to_key_val_list(files or {}) for field, val in fields: if isinstance(val, basestring) or not hasattr(val, '__iter__'): val = [val] for v in val: if v is not None: # Don't call str() on bytestrings: in Py3 it all goes wrong. 
if not isinstance(v, bytes): v = str(v) new_fields.append( (field.decode('utf-8') if isinstance(field, bytes) else field, v.encode('utf-8') if isinstance(v, str) else v)) for (k, v) in files: # support for explicit filename ft = None fh = None if isinstance(v, (tuple, list)): if len(v) == 2: fn, fp = v elif len(v) == 3: fn, fp, ft = v else: fn, fp, ft, fh = v else: fn = guess_filename(v) or k fp = v if isinstance(fp, (str, bytes, bytearray)): fdata = fp else: fdata = fp.read() rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) body, content_type = encode_multipart_formdata(new_fields) return body, content_type class RequestHooksMixin(object): def register_hook(self, event, hook): """Properly register a hook.""" if event not in self.hooks: raise ValueError('Unsupported event specified, with event name "%s"' % (event)) if isinstance(hook, collections.Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable)) def deregister_hook(self, event, hook): """Deregister a previously registered hook. Returns True if the hook existed, False if not. """ try: self.hooks[event].remove(hook) return True except ValueError: return False class Request(RequestHooksMixin): """A user-created :class:`Request <Request>` object. Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. :param method: HTTP method to use. :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. :param json: json for the body to attach to the request (if data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. 
:param cookies: dictionary or CookieJar of cookies to attach to this request. :param hooks: dictionary of callback hooks, for internal usage. Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> req.prepare() <PreparedRequest [GET]> """ def __init__(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. data = [] if data is None else data files = [] if files is None else files headers = {} if headers is None else headers params = {} if params is None else params hooks = {} if hooks is None else hooks self.hooks = default_hooks() for (k, v) in list(hooks.items()): self.register_hook(event=k, hook=v) self.method = method self.url = url self.headers = headers self.files = files self.data = data self.json = json self.params = params self.auth = auth self.cookies = cookies def __repr__(self): return '<Request [%s]>' % (self.method) def prepare(self): """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" p = PreparedRequest() p.prepare( method=self.method, url=self.url, headers=self.headers, files=self.files, data=self.data, json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, hooks=self.hooks, ) return p class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, containing the exact bytes that will be sent to the server. Generated from either a :class:`Request <Request>` object or manually. Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> r = req.prepare() <PreparedRequest [GET]> >>> s = requests.Session() >>> s.send(r) <Response [200]> """ def __init__(self): #: HTTP verb to send to the server. self.method = None #: HTTP URL to send the request to. self.url = None #: dictionary of HTTP headers. 
self.headers = None # The `CookieJar` used to create the Cookie header will be stored here # after prepare_cookies is called self._cookies = None #: request body to send to the server. self.body = None #: dictionary of callback hooks, for internal usage. self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. # This MUST go after prepare_auth. Authenticators could add a hook self.prepare_hooks(hooks) def __repr__(self): return '<PreparedRequest [%s]>' % (self.method) def copy(self): p = PreparedRequest() p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks return p def prepare_method(self, method): """Prepares the given HTTP method.""" self.method = method if self.method is not None: self.method = self.method.upper() def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. #: We're unable to blindy call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. #: https://github.com/kennethreitz/requests/pull/2238 if isinstance(url, bytes): url = url.decode('utf8') else: url = unicode(url) if is_py2 else str(url) # Don't do any URL preparation for non-HTTP schemes like `mailto`, # `data` etc to work around exceptions from `url_parse`, which # handles RFC 3986 only. 
if ':' in url and not url.lower().startswith('http'): self.url = url return # Support for unicode domain names and paths. try: scheme, auth, host, port, path, query, fragment = parse_url(url) except LocationParseError as e: raise InvalidURL(*e.args) if not scheme: raise MissingSchema("Invalid URL {0!r}: No schema supplied. " "Perhaps you meant http://{0}?".format( to_native_string(url, 'utf8'))) if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) # Only want to apply IDNA to the hostname try: host = host.encode('idna').decode('utf-8') except UnicodeError: raise InvalidURL('URL has an invalid label.') # Carefully reconstruct the network location netloc = auth or '' if netloc: netloc += '@' netloc += host if port: netloc += ':' + str(port) # Bare domains aren't valid URLs. if not path: path = '/' if is_py2: if isinstance(scheme, str): scheme = scheme.encode('utf-8') if isinstance(netloc, str): netloc = netloc.encode('utf-8') if isinstance(path, str): path = path.encode('utf-8') if isinstance(query, str): query = query.encode('utf-8') if isinstance(fragment, str): fragment = fragment.encode('utf-8') enc_params = self._encode_params(params) if enc_params: if query: query = '%s&%s' % (query, enc_params) else: query = enc_params url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) self.url = url def prepare_headers(self, headers): """Prepares the given HTTP headers.""" if headers: self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items()) else: self.headers = CaseInsensitiveDict() def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" # Check if file, fo, generator, iterator. # If not, run through normal process. # Nottin' on you. 
body = None content_type = None length = None if json is not None: content_type = 'application/json' body = json_dumps(json) is_stream = all([ hasattr(data, '__iter__'), not isinstance(data, (basestring, list, tuple, dict)) ]) try: length = super_len(data) except (TypeError, AttributeError, UnsupportedOperation): length = None if is_stream: body = data if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') if length is not None: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' else: # Multi-part file uploads. if files: (body, content_type) = self._encode_files(files, data) else: if data and json is None: body = self._encode_params(data) if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. if content_type and ('content-type' not in self.headers): self.headers['Content-Type'] = content_type self.body = body def prepare_content_length(self, body): if hasattr(body, 'seek') and hasattr(body, 'tell'): body.seek(0, 2) self.headers['Content-Length'] = builtin_str(body.tell()) body.seek(0, 0) elif body is not None: l = super_len(body) if l: self.headers['Content-Length'] = builtin_str(l) elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None): self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): """Prepares the given HTTP auth data.""" # If no Auth is explicitly provided, extract it from the URL first. if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: # special-case basic HTTP auth auth = HTTPBasicAuth(*auth) # Allow auth to make its changes. r = auth(self) # Update self to reflect the auth changes. 
self.__dict__.update(r.__dict__) # Recompute Content-Length self.prepare_content_length(self.body) def prepare_cookies(self, cookies): """Prepares the given HTTP cookie data. This function eventually generates a ``Cookie`` header from the given cookies using cookielib. Due to cookielib's design, the header will not be regenerated if it already exists, meaning this function can only be called once for the life of the :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls to ``prepare_cookies`` will have no actual effect, unless the "Cookie" header is removed beforehand.""" if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies else: self._cookies = cookiejar_from_dict(cookies) cookie_header = get_cookie_header(self._cookies, self) if cookie_header is not None: self.headers['Cookie'] = cookie_header def prepare_hooks(self, hooks): """Prepares the given hooks.""" # hooks can be passed as None to the prepare method and to this # method. To prevent iterating over None, simply use an empty list # if hooks is False-y hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) class Response(object): """The :class:`Response <Response>` object, which contains a server's response to an HTTP request. """ __attrs__ = [ '_content', 'status_code', 'headers', 'url', 'history', 'encoding', 'reason', 'cookies', 'elapsed', 'request', ] def __init__(self): super(Response, self).__init__() self._content = False self._content_consumed = False #: Integer Code of responded HTTP Status, e.g. 404 or 200. self.status_code = None #: Case-insensitive Dictionary of Response Headers. #: For example, ``headers['content-encoding']`` will return the #: value of a ``'Content-Encoding'`` response header. self.headers = CaseInsensitiveDict() #: File-like object representation of response (for advanced usage). #: Use of ``raw`` requires that ``stream=True`` be set on the request. # This requirement does not apply for use internally to Requests. 
self.raw = None #: Final URL location of Response. self.url = None #: Encoding to decode with when accessing r.text. self.encoding = None #: A list of :class:`Response <Response>` objects from #: the history of the Request. Any redirect responses will end #: up here. The list is sorted from the oldest to the most recent request. self.history = [] #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". self.reason = None #: A CookieJar of Cookies the server sent back. self.cookies = cookiejar_from_dict({}) #: The amount of time elapsed between sending the request #: and the arrival of the response (as a timedelta). #: This property specifically measures the time taken between sending #: the first byte of the request and finishing parsing the headers. It #: is therefore unaffected by consuming the response content or the #: value of the ``stream`` keyword argument. self.elapsed = datetime.timedelta(0) #: The :class:`PreparedRequest <PreparedRequest>` object to which this #: is a response. self.request = None def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. 
if not self._content_consumed: self.content return dict( (attr, getattr(self, attr, None)) for attr in self.__attrs__ ) def __setstate__(self, state): for name, value in state.items(): setattr(self, name, value) # pickled objects do not have .raw setattr(self, '_content_consumed', True) setattr(self, 'raw', None) def __repr__(self): return '<Response [%s]>' % (self.status_code) def __bool__(self): """Returns true if :attr:`status_code` is 'OK'.""" return self.ok def __nonzero__(self): """Returns true if :attr:`status_code` is 'OK'.""" return self.ok def __iter__(self): """Allows you to use a response as an iterator.""" return self.iter_content(128) @property def ok(self): try: self.raise_for_status() except HTTPError: return False return True @property def is_redirect(self): """True if this Response is a well-formed HTTP redirect that could have been processed automatically (by :meth:`Session.resolve_redirects`). """ return ('location' in self.headers and self.status_code in REDIRECT_STATI) @property def is_permanent_redirect(self): """True if this Response one of the permanant versions of redirect""" return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) @property def apparent_encoding(self): """The apparent encoding, provided by the chardet library""" return chardet.detect(self.content)['encoding'] def iter_content(self, chunk_size=1, decode_unicode=False): """Iterates over the response data. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. If decode_unicode is True, content will be decoded using the best available encoding based on the response. """ def generate(): try: # Special case for urllib3. 
try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) except AttributeError: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) if not chunk: break yield chunk self._content_consumed = True if self._content_consumed and isinstance(self._content, bool): raise StreamConsumedError() # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) stream_chunks = generate() chunks = reused_chunks if self._content_consumed else stream_chunks if decode_unicode: chunks = stream_decode_response_unicode(chunks, self) return chunks def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. .. note:: This method is not reentrant safe. """ pending = None for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): if pending is not None: chunk = pending + chunk if delimiter: lines = chunk.split(delimiter) else: lines = chunk.splitlines() if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: pending = lines.pop() else: pending = None for line in lines: yield line if pending is not None: yield pending @property def content(self): """Content of the response, in bytes.""" if self._content is False: # Read the contents. 
try: if self._content_consumed: raise RuntimeError( 'The content for this response was already consumed') if self.status_code == 0: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() except AttributeError: self._content = None self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 # since we exhausted the data. return self._content @property def text(self): """Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using ``chardet``. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you should set ``r.encoding`` appropriately before accessing this property. """ # Try charset from content-type content = None encoding = self.encoding if not self.content: return str('') # Fallback to auto-detected encoding. if self.encoding is None: encoding = self.apparent_encoding # Decode unicode from given encoding. try: content = str(self.content, encoding, errors='replace') except (LookupError, TypeError): # A LookupError is raised if the encoding was not found which could # indicate a misspelling or similar mistake. # # A TypeError can be raised if encoding is None # # So we try blindly encoding. content = str(self.content, errors='replace') return content def json(self, **kwargs): """Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. """ if not self.encoding and len(self.content) > 3: # No encoding set. JSON RFC 4627 section 3 states we should expect # UTF-8, -16 or -32. Detect which one to use; If the detection or # decoding fails, fall back to `self.text` (using chardet to make # a best guess). 
encoding = guess_json_utf(self.content) if encoding is not None: try: return json.loads(self.content.decode(encoding), **kwargs) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, # and the server didn't bother to tell us what codec *was* # used. pass return json.loads(self.text, **kwargs) @property def links(self): """Returns the parsed header links of the response, if any.""" header = self.headers.get('link') # l = MultiDict() l = {} if header: links = parse_header_links(header) for link in links: key = link.get('rel') or link.get('url') l[key] = link return l def raise_for_status(self): """Raises stored :class:`HTTPError`, if one occurred.""" http_error_msg = '' if 400 <= self.status_code < 500: http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason) elif 500 <= self.status_code < 600: http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason) if http_error_msg: raise HTTPError(http_error_msg, response=self) def close(self): """Releases the connection back to the pool. Once this method has been called the underlying ``raw`` object must not be accessed again. *Note: Should not normally need to be called explicitly.* """ return self.raw.release_conn()
mit
yinjun322/quick-ng
tools/cocos2d-console/plugins/project_new/project_new.py
6
26531
#!/usr/bin/python # ---------------------------------------------------------------------------- # cocos "new" plugin # # Copyright 2013 (C) cocos2d-x.org # # License: MIT # ---------------------------------------------------------------------------- ''' "new" plugin for cocos command line tool ''' __docformat__ = 'restructuredtext' # python import os import sys import getopt import ConfigParser import json import shutil import cocos import cocos_project import re from collections import OrderedDict # # Plugins should be a sublass of CCJSPlugin # class CCPluginNew(cocos.CCPlugin): DEFAULT_PROJ_NAME = { cocos_project.Project.CPP: 'MyCppGame', cocos_project.Project.LUA: 'MyLuaGame', cocos_project.Project.JS: 'MyJSGame' } @staticmethod def plugin_name(): return "new" @staticmethod def brief_description(): return cocos.MultiLanguage.get_string('NEW_BRIEF') def init(self, args): self._projname = args.name self._projdir = unicode( os.path.abspath(os.path.join(args.directory, self._projname)), "utf-8") self._lang = args.language self._package = args.package self._tpname = args.template # new official ways to get the template and cocos paths self._templates_paths = self.get_templates_paths() self._cocosroot = self.get_cocos2d_path() # search for custom paths if args.engine_path is not None: self._cocosroot = os.path.abspath(args.engine_path) self._cocosroot = unicode(self._cocosroot, "utf-8") tp_path = os.path.join(self._cocosroot, "templates") if os.path.isdir(tp_path): self._templates_paths.append(tp_path) # remove duplicates keeping order o = OrderedDict.fromkeys(self._templates_paths) self._templates_paths = o.keys() self._other_opts = args self._mac_bundleid = args.mac_bundleid self._ios_bundleid = args.ios_bundleid self._templates = Templates(args.language, self._templates_paths, args.template) if self._templates.none_active(): self._templates.select_one() # parse arguments def parse_args(self, argv): """Custom and check param list. 
""" from argparse import ArgumentParser # set the parser to parse input params # the correspond variable name of "-x, --xxx" is parser.xxx name = CCPluginNew.plugin_name() category = CCPluginNew.plugin_category() parser = ArgumentParser(prog="cocos %s" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument( "name", metavar="PROJECT_NAME", nargs='?', help=cocos.MultiLanguage.get_string('NEW_ARG_NAME')) parser.add_argument( "-p", "--package", metavar="PACKAGE_NAME", help=cocos.MultiLanguage.get_string('NEW_ARG_PACKAGE')) parser.add_argument("-l", "--language", required=True, choices=["cpp", "lua", "js"], help=cocos.MultiLanguage.get_string('NEW_ARG_LANG')) parser.add_argument("-d", "--directory", metavar="DIRECTORY", help=cocos.MultiLanguage.get_string('NEW_ARG_DIR')) parser.add_argument("-t", "--template", metavar="TEMPLATE_NAME", help=cocos.MultiLanguage.get_string('NEW_ARG_TEMPLATE')) parser.add_argument( "--ios-bundleid", dest="ios_bundleid", help=cocos.MultiLanguage.get_string('NEW_ARG_IOS_BUNDLEID')) parser.add_argument( "--mac-bundleid", dest="mac_bundleid", help=cocos.MultiLanguage.get_string('NEW_ARG_MAC_BUNDLEID')) parser.add_argument("-e", "--engine-path", dest="engine_path", help=cocos.MultiLanguage.get_string('NEW_ARG_ENGINE_PATH')) parser.add_argument("--portrait", action="store_true", dest="portrait", help=cocos.MultiLanguage.get_string('NEW_ARG_PORTRAIT')) group = parser.add_argument_group(cocos.MultiLanguage.get_string('NEW_ARG_GROUP_SCRIPT')) group.add_argument( "--no-native", action="store_true", dest="no_native", help=cocos.MultiLanguage.get_string('NEW_ARG_NO_NATIVE')) # parse the params args = parser.parse_args(argv) if args.name is None: args.name = CCPluginNew.DEFAULT_PROJ_NAME[args.language] if not args.package: args.package = "org.cocos2dx.%s" % args.name if not args.ios_bundleid: args.ios_bundleid = args.package if not args.mac_bundleid: args.mac_bundleid = args.package if not args.directory: 
args.directory = os.getcwd() if not args.template: args.template = 'default' self.init(args) return args def _stat_engine_version(self): try: ver_str = None engine_type = None framework_ver_file = os.path.join(self._cocosroot, 'version') x_ver_file = os.path.join(self._cocosroot, 'cocos/cocos2d.cpp') js_ver_file = os.path.join(self._cocosroot, 'frameworks/js-bindings/bindings/manual/ScriptingCore.h') if os.path.isfile(framework_ver_file): # the engine is Cocos Framework f = open(framework_ver_file) ver_str = f.read() f.close() engine_type = 'cocosframework' else: ver_file = None pattern = None if os.path.isfile(x_ver_file): # the engine is cocos2d-x pattern = r".*return[ \t]+\"(.*)\";" ver_file = x_ver_file engine_type = 'cocos2d-x' elif os.path.isfile(js_ver_file): # the engine is cocos2d-js pattern = r".*#define[ \t]+ENGINE_VERSION[ \t]+\"(.*)\"" ver_file = js_ver_file engine_type = 'cocos2d-js' if ver_file is not None: f = open(ver_file) import re for line in f.readlines(): match = re.match(pattern, line) if match: ver_str = match.group(1) break f.close() if ver_str is not None: # stat the engine version info cocos.DataStatistic.stat_event('new_engine_ver', ver_str, engine_type) except: pass def _create_from_cmd(self): # check the dst project dir exists if os.path.exists(self._projdir): message = cocos.MultiLanguage.get_string('NEW_ERROR_FOLDER_EXISTED_FMT') % self._projdir raise cocos.CCPluginError(message) tp_dir = self._templates.template_path() creator = TPCreator(self._lang, self._cocosroot, self._projname, self._projdir, self._tpname, tp_dir, self._package, self._mac_bundleid, self._ios_bundleid) # do the default creating step creator.do_default_step() data = None cfg_path = os.path.join(self._projdir, cocos_project.Project.CONFIG) if os.path.isfile(cfg_path): f = open(cfg_path) data = json.load(f) f.close() if data is None: data = {} if cocos_project.Project.KEY_PROJ_TYPE not in data: data[cocos_project.Project.KEY_PROJ_TYPE] = self._lang # script project 
may add native support if self._lang in (cocos_project.Project.LUA, cocos_project.Project.JS): if not self._other_opts.no_native: creator.do_other_step('do_add_native_support') data[cocos_project.Project.KEY_HAS_NATIVE] = True else: data[cocos_project.Project.KEY_HAS_NATIVE] = False # if --portrait is specified, change the orientation if self._other_opts.portrait: creator.do_other_step("change_orientation", not_existed_error=False) # write config files with open(cfg_path, 'w') as outfile: json.dump(data, outfile, sort_keys=True, indent=4) # main entry point def run(self, argv, dependencies): self.parse_args(argv) action_str = 'new_%s' % (self._lang) cocos.DataStatistic.stat_event('new', action_str, self._tpname) self._create_from_cmd() self._stat_engine_version() def replace_string(filepath, src_string, dst_string): """ From file's content replace specified string Arg: filepath: Specify a file contains the path src_string: old string dst_string: new string """ if src_string is None or dst_string is None: raise TypeError content = "" f1 = open(filepath, "rb") for line in f1: strline = line.decode('utf8') if src_string in strline: content += strline.replace(src_string, dst_string) else: content += strline f1.close() f2 = open(filepath, "wb") f2.write(content.encode('utf8')) f2.close() # end of replace_string class Templates(object): def __init__(self, lang, templates_paths, current): self._lang = lang self._templates_paths = templates_paths self._scan() self._current = None if current is not None: if current in self._template_folders: self._current = current else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_TEMPLATE_NOT_FOUND_FMT') % current) def _scan(self): template_pattern = { "cpp": 'cpp-template-(.+)', "lua": 'lua-template-(.+)', "js": 'js-template-(.+)', } self._template_folders = {} for templates_dir in self._templates_paths: try: dirs = [name for name in os.listdir(templates_dir) if os.path.isdir( os.path.join(templates_dir, name))] except 
Exception: continue pattern = template_pattern[self._lang] for name in dirs: match = re.search(pattern, name) if match is None: continue template_name = match.group(1) if template_name in self._template_folders.keys(): continue self._template_folders[template_name] = os.path.join(templates_dir, name) if len(self._template_folders) == 0: cur_engine = "cocos2d-x" if self._lang == "js" else "cocos2d-js" need_engine = "cocos2d-js" if self._lang == "js" else "cocos2d-x" engine_tip = cocos.MultiLanguage.get_string('NEW_ERROR_ENGINE_TIP_FMT') % need_engine message = cocos.MultiLanguage.get_string('NEW_ERROR_TEMPLATE_NOT_FOUND_FMT') % (self._lang, engine_tip) raise cocos.CCPluginError(message) def none_active(self): return self._current is None def template_path(self): if self._current is None: return None return self._template_folders[self._current] def select_one(self): cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_SELECT_TEMPLATE_TIP1')) p = self._template_folders.keys() for i in range(len(p)): cocos.Logging.warning('%d %s' % (i + 1, p[i])) cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_SELECT_TEMPLATE_TIP2')) while True: option = raw_input() if option.isdigit(): option = int(option) - 1 if option in range(len(p)): break self._current = p[option] class TPCreator(object): def __init__(self, lang, cocos_root, project_name, project_dir, tp_name, tp_dir, project_package, mac_id, ios_id): self.lang = lang self.cocos_root = cocos_root self.project_dir = project_dir self.project_name = project_name self.package_name = project_package self.mac_bundleid = mac_id self.ios_bundleid = ios_id self.tp_name = tp_name self.tp_dir = tp_dir self.tp_json = 'cocos-project-template.json' tp_json_path = os.path.join(tp_dir, self.tp_json) if not os.path.exists(tp_json_path): message = cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % tp_json_path raise cocos.CCPluginError(message) f = open(tp_json_path) # keep the key order tpinfo = json.load(f, 
encoding='utf8', object_pairs_hook=OrderedDict) # read the default creating step if 'do_default' not in tpinfo: message = (cocos.MultiLanguage.get_string('NEW_ERROR_DEFAILT_CFG_NOT_FOUND_FMT') % tp_json_path) raise cocos.CCPluginError(message) self.tp_default_step = tpinfo.pop('do_default') # keep the other steps self.tp_other_step = tpinfo def cp_self(self, project_dir, exclude_files): cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_COPY_TEMPLATE_FMT') % project_dir) if not os.path.exists(self.project_dir): os.makedirs(self.project_dir) copy_cfg = { "from": self.tp_dir, "to": self.project_dir, "exclude": exclude_files } cocos.copy_files_with_config(copy_cfg, self.tp_dir, self.project_dir) def do_default_step(self): default_cmds = self.tp_default_step exclude_files = [] if "exclude_from_template" in default_cmds: exclude_files = exclude_files + \ default_cmds['exclude_from_template'] default_cmds.pop('exclude_from_template') # should ignore teh xx-template-xx.json exclude_files.append(self.tp_json) self.cp_self(self.project_dir, exclude_files) self.do_cmds(default_cmds) def do_other_step(self, step, not_existed_error=True): if step not in self.tp_other_step: if not_existed_error: # handle as error message = cocos.MultiLanguage.get_string('NEW_ERROR_STEP_NOT_FOUND_FMT') % step raise cocos.CCPluginError(message) else: # handle as warning cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_STEP_NOT_FOUND_FMT') % step) return cmds = self.tp_other_step[step] self.do_cmds(cmds) def do_cmds(self, cmds): for k, v in cmds.iteritems(): # call cmd method by method/cmd name # get from # http://stackoverflow.com/questions/3951840/python-how-to-invoke-an-function-on-an-object-dynamically-by-name try: cmd = getattr(self, k) except AttributeError: raise cocos.CCPluginError(cocos.MultiLanguage.get_string('NEW_ERROR_CMD_NOT_FOUND_FMT') % k) try: cmd(v) except Exception as e: raise cocos.CCPluginError(str(e)) # cmd methods below def 
append_h5_engine(self, v): src = os.path.join(self.cocos_root, v['from']) dst = os.path.join(self.project_dir, v['to']) # check cocos engine exist moduleConfig = 'moduleConfig.json' moudle_cfg = os.path.join(src, moduleConfig) if not os.path.exists(moudle_cfg): message = cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % moudle_cfg raise cocos.CCPluginError(message) f = open(moudle_cfg) data = json.load(f, 'utf8') f.close() modules = data['module'] # must copy moduleConfig.json & CCBoot.js file_list = [moduleConfig, data['bootFile']] for k, v in modules.iteritems(): module = modules[k] for f in module: if f[-2:] == 'js': file_list.append(f) # begin copy engine cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_COPY_H5')) for index in range(len(file_list)): srcfile = os.path.join(src, file_list[index]) dstfile = os.path.join(dst, file_list[index]) srcfile = cocos.add_path_prefix(srcfile) dstfile = cocos.add_path_prefix(dstfile) if not os.path.exists(os.path.dirname(dstfile)): os.makedirs(cocos.add_path_prefix(os.path.dirname(dstfile))) # copy file or folder if os.path.exists(srcfile): if os.path.isdir(srcfile): if os.path.exists(dstfile): shutil.rmtree(dstfile) shutil.copytree(srcfile, dstfile) else: if os.path.exists(dstfile): os.remove(dstfile) shutil.copy2(srcfile, dstfile) def append_x_engine(self, v): # FIXME this is a hack, but in order to fix it correctly the cocos-project-template.json # file probably will need to be re-designed. # As a quick (horrible) fix, we check if we are in distro mode. 
# If so, we don't do the "append_x_engine" step if cocos.CCPlugin.get_cocos2d_mode() == 'distro': return src = os.path.join(self.cocos_root, v['from']) dst = os.path.join(self.project_dir, v['to']) # check cocos engine exist cocosx_files_json = os.path.join( src, 'templates', 'cocos2dx_files.json') if not os.path.exists(cocosx_files_json): message = cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % cocosx_files_json raise cocos.CCPluginError(message) f = open(cocosx_files_json) data = json.load(f) f.close() fileList = data['common'] if self.lang == 'lua': fileList = fileList + data['lua'] if self.lang == 'js' and 'js' in data.keys(): fileList = fileList + data['js'] # begin copy engine cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_COPY_X')) for index in range(len(fileList)): srcfile = os.path.join(src, fileList[index]) dstfile = os.path.join(dst, fileList[index]) srcfile = cocos.add_path_prefix(srcfile) dstfile = cocos.add_path_prefix(dstfile) if not os.path.exists(os.path.dirname(dstfile)): os.makedirs(cocos.add_path_prefix(os.path.dirname(dstfile))) # copy file or folder if os.path.exists(srcfile): if os.path.isdir(srcfile): if os.path.exists(dstfile): shutil.rmtree(dstfile) shutil.copytree(srcfile, dstfile) else: if os.path.exists(dstfile): os.remove(dstfile) shutil.copy2(srcfile, dstfile) def append_from_template(self, v): cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_APPEND_TEMPLATE')) cocos.copy_files_with_config(v, self.tp_dir, self.project_dir) def append_dir(self, v): cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_APPEND_DIR')) for item in v: cocos.copy_files_with_config( item, self.cocos_root, self.project_dir) def append_file(self, v): cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_APPEND_FILE')) for item in v: src = os.path.join(self.cocos_root, item['from']) dst = os.path.join(self.project_dir, item['to']) src = cocos.add_path_prefix(src) dst = 
cocos.add_path_prefix(dst) shutil.copy2(src, dst) # project cmd def project_rename(self, v): """ will modify the file name of the file """ dst_project_dir = self.project_dir dst_project_name = self.project_name src_project_name = v['src_project_name'] cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_RENAME_PROJ_FMT') % (src_project_name, dst_project_name)) files = v['files'] for f in files: src = f.replace("PROJECT_NAME", src_project_name) dst = f.replace("PROJECT_NAME", dst_project_name) src_file_path = os.path.join(dst_project_dir, src) dst_file_path = os.path.join(dst_project_dir, dst) if os.path.exists(src_file_path): if os.path.exists(dst_file_path): os.remove(dst_file_path) os.rename(src_file_path, dst_file_path) else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % os.path.join(dst_project_dir, src)) def project_replace_project_name(self, v): """ will modify the content of the file """ dst_project_dir = self.project_dir dst_project_name = self.project_name src_project_name = v['src_project_name'] cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_REPLACE_PROJ_FMT') % (src_project_name, dst_project_name)) files = v['files'] for f in files: dst = f.replace("PROJECT_NAME", dst_project_name) if os.path.exists(os.path.join(dst_project_dir, dst)): replace_string( os.path.join(dst_project_dir, dst), src_project_name, dst_project_name) else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % os.path.join(dst_project_dir, dst)) def project_replace_package_name(self, v): """ will modify the content of the file """ dst_project_dir = self.project_dir dst_project_name = self.project_name src_package_name = v['src_package_name'] dst_package_name = self.package_name cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_REPLACE_PKG_FMT') % (src_package_name, dst_package_name)) files = v['files'] if not dst_package_name: raise 
cocos.CCPluginError(cocos.MultiLanguage.get_string('NEW_ERROR_PKG_NAME_NOT_SPECIFIED')) for f in files: dst = f.replace("PROJECT_NAME", dst_project_name) if os.path.exists(os.path.join(dst_project_dir, dst)): replace_string( os.path.join(dst_project_dir, dst), src_package_name, dst_package_name) else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % os.path.join(dst_project_dir, dst)) def project_replace_mac_bundleid(self, v): """ will modify the content of the file """ if self.mac_bundleid is None: return dst_project_dir = self.project_dir dst_project_name = self.project_name src_bundleid = v['src_bundle_id'] dst_bundleid = self.mac_bundleid cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_MAC_BUNDLEID_FMT') % (src_bundleid, dst_bundleid)) files = v['files'] for f in files: dst = f.replace("PROJECT_NAME", dst_project_name) if os.path.exists(os.path.join(dst_project_dir, dst)): replace_string( os.path.join(dst_project_dir, dst), src_bundleid, dst_bundleid) else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % os.path.join(dst_project_dir, dst)) def project_replace_ios_bundleid(self, v): """ will modify the content of the file """ if self.ios_bundleid is None: return dst_project_dir = self.project_dir dst_project_name = self.project_name src_bundleid = v['src_bundle_id'] dst_bundleid = self.ios_bundleid cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_IOS_BUNDLEID_FMT') % (src_bundleid, dst_bundleid)) files = v['files'] for f in files: dst = f.replace("PROJECT_NAME", dst_project_name) if os.path.exists(os.path.join(dst_project_dir, dst)): replace_string( os.path.join(dst_project_dir, dst), src_bundleid, dst_bundleid) else: cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_FILE_NOT_FOUND_FMT') % os.path.join(dst_project_dir, dst)) def modify_files(self, v): """ will modify the content of the file format of v is : [ { "file_path": The path 
related with project directory, "pattern": Find pattern, "replace_string": Replaced string }, ... ] """ cocos.Logging.info(cocos.MultiLanguage.get_string('NEW_INFO_STEP_MODIFY_FILE')) for modify_info in v: modify_file = modify_info["file_path"] if not os.path.isabs(modify_file): modify_file = os.path.abspath(os.path.join(self.project_dir, modify_file)) if not os.path.isfile(modify_file): cocos.Logging.warning(cocos.MultiLanguage.get_string('NEW_WARNING_NOT_A_FILE_FMT') % modify_file) continue pattern = modify_info["pattern"] replace_str = modify_info["replace_string"] f = open(modify_file) lines = f.readlines() f.close() new_lines = [] for line in lines: new_line = re.sub(pattern, replace_str, line) new_lines.append(new_line) f = open(modify_file, "w") f.writelines(new_lines) f.close()
mit
wehkamp/ansible
lib/ansible/executor/task_executor.py
3
19381
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json import pipes import subprocess import sys import time from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError from ansible.executor.connection_info import ConnectionInformation from ansible.playbook.conditional import Conditional from ansible.playbook.task import Task from ansible.plugins import lookup_loader, connection_loader, action_loader from ansible.template import Templar from ansible.utils.listify import listify_lookup_plugin_terms from ansible.utils.unicode import to_unicode from ansible.utils.debug import debug __all__ = ['TaskExecutor'] class TaskExecutor: ''' This is the main worker class for the executor pipeline, which handles loading an action plugin to actually dispatch the task to a given host. This class roughly corresponds to the old Runner() class. 
''' def __init__(self, host, task, job_vars, connection_info, new_stdin, loader, shared_loader_obj): self._host = host self._task = task self._job_vars = job_vars self._connection_info = connection_info self._new_stdin = new_stdin self._loader = loader self._shared_loader_obj = shared_loader_obj def run(self): ''' The main executor entrypoint, where we determine if the specified task requires looping and either runs the task with ''' debug("in run()") try: # lookup plugins need to know if this task is executing from # a role, so that it can properly find files/templates/etc. roledir = None if self._task._role: roledir = self._task._role._role_path self._job_vars['roledir'] = roledir items = self._get_loop_items() if items is not None: if len(items) > 0: item_results = self._run_loop(items) # loop through the item results, and remember the changed/failed # result flags based on any item there. changed = False failed = False for item in item_results: if 'changed' in item and item['changed']: changed = True if 'failed' in item and item['failed']: failed = True # create the overall result item, and set the changed/failed # flags there to reflect the overall result of the loop res = dict(results=item_results) if changed: res['changed'] = True if failed: res['failed'] = True res['msg'] = 'One or more items failed' else: res['msg'] = 'All items completed' else: res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[]) else: debug("calling self._execute()") res = self._execute() debug("_execute() done") # make sure changed is set in the result, if it's not present if 'changed' not in res: res['changed'] = False debug("dumping result to json") result = json.dumps(res) debug("done dumping result, returning") return result except AnsibleError, e: return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr')) def _get_loop_items(self): ''' Loads a lookup plugin to handle the with_* portion of a task (if specified), and returns the items 
result. ''' items = None if self._task.loop and self._task.loop in lookup_loader: loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, variables=self._job_vars, loader=self._loader) items = lookup_loader.get(self._task.loop, loader=self._loader).run(terms=loop_terms, variables=self._job_vars) return items def _run_loop(self, items): ''' Runs the task with the loop items specified and collates the result into an array named 'results' which is inserted into the final result along with the item for which the loop ran. ''' results = [] # make copies of the job vars and task so we can add the item to # the variables and re-validate the task with the item variable task_vars = self._job_vars.copy() items = self._squash_items(items, task_vars) for item in items: task_vars['item'] = item try: tmp_task = self._task.copy() except AnsibleParserError, e: results.append(dict(failed=True, msg=str(e))) continue # now we swap the internal task with the copy, execute, # and swap them back so we can do the next iteration cleanly (self._task, tmp_task) = (tmp_task, self._task) res = self._execute(variables=task_vars) (self._task, tmp_task) = (tmp_task, self._task) # now update the result with the item info, and append the result # to the list of results res['item'] = item results.append(res) # FIXME: we should be sending back a callback result for each item in the loop here print(res) return results def _squash_items(self, items, variables): ''' Squash items down to a comma-separated list for certain modules which support it (typically package management modules). 
''' if len(items) > 0 and self._task.action in ('apt', 'yum', 'pkgng', 'zypper'): final_items = [] for item in items: variables['item'] = item templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables) if self._task.evaluate_conditional(templar, variables): final_items.append(item) return [",".join(final_items)] else: return items def _execute(self, variables=None): ''' The primary workhorse of the executor system, this runs the task on the specified host (which may be the delegated_to host) and handles the retry/until and block rescue/always execution ''' if variables is None: variables = self._job_vars templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables) # fields set from the play/task may be based on variables, so we have to # do the same kind of post validation step on it here before we use it. self._connection_info.post_validate(templar=templar) # now that the connection information is finalized, we can add 'magic' # variables to the variable dictionary self._connection_info.update_vars(variables) # get the connection and the handler for this execution self._connection = self._get_connection(variables) self._connection.set_host_overrides(host=self._host) self._handler = self._get_action_handler(connection=self._connection, templar=templar) # Evaluate the conditional (if any) for this task, which we do before running # the final task post-validation. 
We do this before the post validation due to # the fact that the conditional may specify that the task be skipped due to a # variable not being present which would otherwise cause validation to fail if not self._task.evaluate_conditional(templar, variables): debug("when evaulation failed, skipping this task") return dict(changed=False, skipped=True, skip_reason='Conditional check failed') # Now we do final validation on the task, which sets all fields to their final values self._task.post_validate(templar=templar) # if this task is a TaskInclude, we just return now with a success code so the # main thread can expand the task list for the given host if self._task.action == 'include': include_variables = self._task.args.copy() include_file = include_variables.get('_raw_params') del include_variables['_raw_params'] return dict(changed=True, include=include_file, include_variables=include_variables) # And filter out any fields which were set to default(omit), and got the omit token value omit_token = variables.get('omit') if omit_token is not None: self._task.args = dict(filter(lambda x: x[1] != omit_token, self._task.args.iteritems())) # Read some values from the task, so that we can modify them if need be retries = self._task.retries if retries <= 0: retries = 1 delay = self._task.delay if delay < 0: delay = 1 # make a copy of the job vars here, in case we need to update them # with the registered variable value later on when testing conditions vars_copy = variables.copy() debug("starting attempt loop") result = None for attempt in range(retries): if attempt > 0: # FIXME: this should use the callback/message passing mechanism print("FAILED - RETRYING: %s (%d retries left)" % (self._task, retries-attempt)) result['attempts'] = attempt + 1 debug("running the handler") result = self._handler.run(task_vars=variables) debug("handler run complete") if self._task.async > 0: # the async_wrapper module returns dumped JSON via its stdout # response, so we parse it here and 
replace the result try: result = json.loads(result.get('stdout')) except ValueError, e: return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e)) if self._task.poll > 0: result = self._poll_async_result(result=result, templar=templar) # update the local copy of vars with the registered value, if specified, # or any facts which may have been generated by the module execution if self._task.register: vars_copy[self._task.register] = result if 'ansible_facts' in result: vars_copy.update(result['ansible_facts']) # create a conditional object to evaluate task conditions cond = Conditional(loader=self._loader) # FIXME: make sure until is mutually exclusive with changed_when/failed_when if self._task.until: cond.when = self._task.until if cond.evaluate_conditional(templar, vars_copy): break elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result: if self._task.changed_when: cond.when = [ self._task.changed_when ] result['changed'] = cond.evaluate_conditional(templar, vars_copy) if self._task.failed_when: cond.when = [ self._task.failed_when ] failed_when_result = cond.evaluate_conditional(templar, vars_copy) result['failed_when_result'] = result['failed'] = failed_when_result if failed_when_result: break elif 'failed' not in result and result.get('rc', 0) == 0: # if the result is not failed, stop trying break if attempt < retries - 1: time.sleep(delay) # do the final update of the local variables here, for both registered # values and any facts which may have been created if self._task.register: variables[self._task.register] = result if 'ansible_facts' in result: variables.update(result['ansible_facts']) # and return debug("attempt loop complete, returning result") return result def _poll_async_result(self, result, templar): ''' Polls for the specified JID to be complete ''' async_jid = result.get('ansible_job_id') if async_jid is None: return dict(failed=True, msg="No job id was returned by the async task") # 
Create a new psuedo-task to run the async_status module, and run # that (with a sleep for "poll" seconds between each retry) until the # async time limit is exceeded. async_task = Task().load(dict(action='async_status jid=%s' % async_jid)) # Because this is an async task, the action handler is async. However, # we need the 'normal' action handler for the status check, so get it # now via the action_loader normal_handler = action_loader.get( 'normal', task=async_task, connection=self._connection, connection_info=self._connection_info, loader=self._loader, templar=templar, shared_loader_obj=self._shared_loader_obj, ) time_left = self._task.async while time_left > 0: time.sleep(self._task.poll) async_result = normal_handler.run() if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result: break time_left -= self._task.poll if int(async_result.get('finished', 0)) != 1: return dict(failed=True, msg="async task did not complete within the requested time") else: return async_result def _get_connection(self, variables): ''' Reads the connection property for the host, and returns the correct connection object from the list of connection plugins ''' # FIXME: delegate_to calculation should be done here # FIXME: calculation of connection params/auth stuff should be done here if not self._connection_info.remote_addr: self._connection_info.remote_addr = self._host.ipv4_address if self._task.delegate_to is not None: self._compute_delegate(variables) conn_type = self._connection_info.connection if conn_type == 'smart': conn_type = 'ssh' if sys.platform.startswith('darwin') and self._connection_info.password: # due to a current bug in sshpass on OSX, which can trigger # a kernel panic even for non-privileged users, we revert to # paramiko on that OS when a SSH password is specified conn_type = "paramiko" else: # see if SSH can support ControlPersist if not use paramiko cmd = subprocess.Popen(['ssh','-o','ControlPersist'], 
stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = cmd.communicate() if "Bad configuration option" in err: conn_type = "paramiko" connection = connection_loader.get(conn_type, self._connection_info, self._new_stdin) if not connection: raise AnsibleError("the connection plugin '%s' was not found" % conn_type) return connection def _get_action_handler(self, connection, templar): ''' Returns the correct action plugin to handle the requestion task action ''' if self._task.action in action_loader: if self._task.async != 0: raise AnsibleError("async mode is not supported with the %s module" % module_name) handler_name = self._task.action elif self._task.async == 0: handler_name = 'normal' else: handler_name = 'async' handler = action_loader.get( handler_name, task=self._task, connection=connection, connection_info=self._connection_info, loader=self._loader, templar=templar, shared_loader_obj=self._shared_loader_obj, ) if not handler: raise AnsibleError("the handler '%s' was not found" % handler_name) return handler def _compute_delegate(self, variables): # get the vars for the delegate by its name try: this_info = variables['hostvars'][self._task.delegate_to] except: # make sure the inject is empty for non-inventory hosts this_info = {} # get the real ssh_address for the delegate and allow ansible_ssh_host to be templated #self._connection_info.remote_user = self._compute_delegate_user(self.delegate_to, delegate['inject']) self._connection_info.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to) self._connection_info.port = this_info.get('ansible_ssh_port', self._connection_info.port) self._connection_info.password = this_info.get('ansible_ssh_pass', self._connection_info.password) self._connection_info.private_key_file = this_info.get('ansible_ssh_private_key_file', self._connection_info.private_key_file) self._connection_info.connection = this_info.get('ansible_connection', self._connection_info.connection) 
self._connection_info.become_pass = this_info.get('ansible_sudo_pass', self._connection_info.become_pass) if self._connection_info.remote_addr in ('127.0.0.1', 'localhost'): self._connection_info.connection = 'local' # Last chance to get private_key_file from global variables. # this is useful if delegated host is not defined in the inventory #if delegate['private_key_file'] is None: # delegate['private_key_file'] = remote_inject.get('ansible_ssh_private_key_file', None) #if delegate['private_key_file'] is not None: # delegate['private_key_file'] = os.path.expanduser(delegate['private_key_file']) for i in this_info: if i.startswith("ansible_") and i.endswith("_interpreter"): variables[i] = this_info[i]
gpl-3.0
wemanuel/smry
smry/server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/tests/unit/s3/test_connection.py
90
7823
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# import time from tests.compat import mock, unittest from tests.unit import AWSMockServiceTestCase from tests.unit import MockServiceWithConfigTestCase from boto.s3.connection import S3Connection, HostRequiredError from boto.s3.connection import S3ResponseError, Bucket class TestSignatureAlteration(AWSMockServiceTestCase): connection_class = S3Connection def test_unchanged(self): self.assertEqual( self.service_connection._required_auth_capability(), ['s3'] ) def test_switched(self): conn = self.connection_class( aws_access_key_id='less', aws_secret_access_key='more', host='s3.cn-north-1.amazonaws.com.cn' ) self.assertEqual( conn._required_auth_capability(), ['hmac-v4-s3'] ) class TestSigV4HostError(MockServiceWithConfigTestCase): connection_class = S3Connection def test_historical_behavior(self): self.assertEqual( self.service_connection._required_auth_capability(), ['s3'] ) self.assertEqual(self.service_connection.host, 's3.amazonaws.com') def test_sigv4_opt_in(self): # Switch it at the config, so we can check to see how the host is # handled. self.config = { 's3': { 'use-sigv4': True, } } with self.assertRaises(HostRequiredError): # No host+SigV4 == KABOOM self.connection_class( aws_access_key_id='less', aws_secret_access_key='more' ) # Ensure passing a ``host`` still works. conn = self.connection_class( aws_access_key_id='less', aws_secret_access_key='more', host='s3.cn-north-1.amazonaws.com.cn' ) self.assertEqual( conn._required_auth_capability(), ['hmac-v4-s3'] ) self.assertEqual( conn.host, 's3.cn-north-1.amazonaws.com.cn' ) class TestSigV4Presigned(MockServiceWithConfigTestCase): connection_class = S3Connection def test_sigv4_presign(self): self.config = { 's3': { 'use-sigv4': True, } } conn = self.connection_class( aws_access_key_id='less', aws_secret_access_key='more', host='s3.amazonaws.com' ) # Here we force an input iso_date to ensure we always get the # same signature. 
url = conn.generate_url_sigv4(86400, 'GET', bucket='examplebucket', key='test.txt', iso_date='20140625T000000Z') self.assertIn('a937f5fbc125d98ac8f04c49e0204ea1526a7b8ca058000a54c192457be05b7d', url) def test_sigv4_presign_optional_params(self): self.config = { 's3': { 'use-sigv4': True, } } conn = self.connection_class( aws_access_key_id='less', aws_secret_access_key='more', security_token='token', host='s3.amazonaws.com' ) url = conn.generate_url_sigv4(86400, 'GET', bucket='examplebucket', key='test.txt', version_id=2) self.assertIn('VersionId=2', url) self.assertIn('X-Amz-Security-Token=token', url) def test_sigv4_presign_headers(self): self.config = { 's3': { 'use-sigv4': True, } } conn = self.connection_class( aws_access_key_id='less', aws_secret_access_key='more', host='s3.amazonaws.com' ) headers = {'x-amz-meta-key': 'val'} url = conn.generate_url_sigv4(86400, 'GET', bucket='examplebucket', key='test.txt', headers=headers) self.assertIn('host', url) self.assertIn('x-amz-meta-key', url) class TestUnicodeCallingFormat(AWSMockServiceTestCase): connection_class = S3Connection def default_body(self): return """<?xml version="1.0" encoding="UTF-8"?> <ListAllMyBucketsResult xmlns="http://doc.s3.amazonaws.com/2006-03-01"> <Owner> <ID>bcaf1ffd86f461ca5fb16fd081034f</ID> <DisplayName>webfile</DisplayName> </Owner> <Buckets> <Bucket> <Name>quotes</Name> <CreationDate>2006-02-03T16:45:09.000Z</CreationDate> </Bucket> <Bucket> <Name>samples</Name> <CreationDate>2006-02-03T16:41:58.000Z</CreationDate> </Bucket> </Buckets> </ListAllMyBucketsResult>""" def create_service_connection(self, **kwargs): kwargs['calling_format'] = u'boto.s3.connection.OrdinaryCallingFormat' return super(TestUnicodeCallingFormat, self).create_service_connection(**kwargs) def test_unicode_calling_format(self): self.set_http_response(status_code=200) self.service_connection.get_all_buckets() class TestHeadBucket(AWSMockServiceTestCase): connection_class = S3Connection def default_body(self): # HEAD 
requests always have an empty body. return "" def test_head_bucket_success(self): self.set_http_response(status_code=200) buck = self.service_connection.head_bucket('my-test-bucket') self.assertTrue(isinstance(buck, Bucket)) self.assertEqual(buck.name, 'my-test-bucket') def test_head_bucket_forbidden(self): self.set_http_response(status_code=403) with self.assertRaises(S3ResponseError) as cm: self.service_connection.head_bucket('cant-touch-this') err = cm.exception self.assertEqual(err.status, 403) self.assertEqual(err.error_code, 'AccessDenied') self.assertEqual(err.message, 'Access Denied') def test_head_bucket_notfound(self): self.set_http_response(status_code=404) with self.assertRaises(S3ResponseError) as cm: self.service_connection.head_bucket('totally-doesnt-exist') err = cm.exception self.assertEqual(err.status, 404) self.assertEqual(err.error_code, 'NoSuchBucket') self.assertEqual(err.message, 'The specified bucket does not exist') def test_head_bucket_other(self): self.set_http_response(status_code=405) with self.assertRaises(S3ResponseError) as cm: self.service_connection.head_bucket('you-broke-it') err = cm.exception self.assertEqual(err.status, 405) # We don't have special-cases for this error status. self.assertEqual(err.error_code, None) self.assertEqual(err.message, '') if __name__ == "__main__": unittest.main()
apache-2.0
i8run/BigDL-1
spark/dl/src/test/resources/tf/models/batch_norm_nchw.py
9
1139
# # Copyright 2016 The BigDL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import tensorflow as tf from sys import argv from util import run_model def main(): inputs = tf.Variable(tf.reshape(tf.range(0.0, 16), [1, 1, 4, 4]), name = 'input') inputs = tf.identity(inputs, "input_node") output = tf.layers.batch_normalization(inputs, axis=1, training=True) named_output = tf.nn.relu(output, name="output") net_outputs = map(lambda x: tf.get_default_graph().get_tensor_by_name(x), argv[2].split(',')) run_model(net_outputs, argv[1], 'batchNorm', argv[3] == 'True') if __name__ == "__main__": main()
apache-2.0
landism/pants
tests/python/pants_test/base/test_pants_ignore_scm.py
8
1301
# coding=utf-8 # Copyright 2016 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import subprocess import unittest from pants.base.scm_project_tree import ScmProjectTree from pants.scm.git import Git from pants_test.base.pants_ignore_test_base import PantsIgnoreTestBase class ScmPantsIgnoreTest(unittest.TestCase, PantsIgnoreTestBase): """ Common test cases are defined in PantsIgnoreTestBase. Special test cases can be defined here. """ def mk_project_tree(self, build_root, ignore_patterns=None): return ScmProjectTree(build_root, Git(worktree=build_root), 'HEAD', ignore_patterns) def setUp(self): super(ScmPantsIgnoreTest, self).setUp() self.prepare() subprocess.check_call(['git', 'init']) subprocess.check_call(['git', 'config', 'user.email', 'you@example.com']) subprocess.check_call(['git', 'config', 'user.name', 'Your Name']) subprocess.check_call(['git', 'add', '.']) subprocess.check_call(['git', 'commit', '-m' 'initial commit']) def tearDown(self): super(ScmPantsIgnoreTest, self).tearDown() self.cleanup()
apache-2.0
frdb194/django
tests/check_framework/test_multi_db.py
191
1682
from django.db import connections, models from django.test import TestCase, mock from django.test.utils import override_settings from .tests import IsolateModelsMixin class TestRouter(object): """ Routes to the 'other' database if the model name starts with 'Other'. """ def allow_migrate(self, db, app_label, model=None, **hints): return db == ('other' if model._meta.verbose_name.startswith('other') else 'default') @override_settings(DATABASE_ROUTERS=[TestRouter()]) class TestMultiDBChecks(IsolateModelsMixin, TestCase): multi_db = True def _patch_check_field_on(self, db): return mock.patch.object(connections[db].validation, 'check_field') def test_checks_called_on_the_default_database(self): class Model(models.Model): field = models.CharField(max_length=100) model = Model() with self._patch_check_field_on('default') as mock_check_field_default: with self._patch_check_field_on('other') as mock_check_field_other: model.check() self.assertTrue(mock_check_field_default.called) self.assertFalse(mock_check_field_other.called) def test_checks_called_on_the_other_database(self): class OtherModel(models.Model): field = models.CharField(max_length=100) model = OtherModel() with self._patch_check_field_on('other') as mock_check_field_other: with self._patch_check_field_on('default') as mock_check_field_default: model.check() self.assertTrue(mock_check_field_other.called) self.assertFalse(mock_check_field_default.called)
bsd-3-clause
lebertbe/Mines-Project
FonctionExtraction.py
1
5498
import xlrd import numpy as np def importBus(): wb = xlrd.open_workbook('Demand.xlsx') sh = wb.sheet_by_name(u'Sheet1') resultFinal=[] compteur = 0 for rownum in range(sh.nrows): inter1 = sh.row_values(rownum) if compteur > 0: nomBus = inter1[0] typeCons = 2 if compteur == 1: typeCons = 3 if inter1[1] != "": Pmax = inter1[1] else: Pmax = 0 Qmax = 0 Gs = 0 Bs = 0 area = 1 Vm = 1 Va = 0 baseKV = 380 zone = 1 Vmax = 1.05 Vmin = 0.95 inter = [nomBus, typeCons, Pmax, Qmax, Gs, Bs, area, Vm, Va, baseKV, zone, Vmax, Vmin] resultFinal.append(inter) compteur += 1 resultFinal = np.asarray(resultFinal, dtype=np.float64) return resultFinal def importGenCost(): wb = xlrd.open_workbook('Capacity_cost.xlsx') sh = wb.sheet_by_name(u'Sheet1') resultFinal=[] compteur = 0 for rownum in range(sh.nrows): LigneExcel = sh.row_values(rownum) if compteur > 0: model = 1 startup = 0 shutdown = 0 N = 10 # Each "CapacityN" reprensents the capacity (MW) of one type of technology # and is equal to : max installed capacity * capacity factor # "CostN" is in €/h [= MW*(€/MWh)] # There is one generator per bus. 
It is the sum of different generators # from the cheapest technology (in €/MWh) to the most expensive # Costs and Capacities are summed up with the last technology Capacity1 = LigneExcel[2]*LigneExcel[3] Cost1 = LigneExcel[2]*LigneExcel[3]*LigneExcel[4] Capacity2 = LigneExcel[5]*LigneExcel[6] Cost2 = LigneExcel[5]*LigneExcel[6]*LigneExcel[7] Capacity3 = LigneExcel[8]*LigneExcel[9] Cost3 = LigneExcel[8]*LigneExcel[9]*LigneExcel[10] Capacity4 = LigneExcel[11]*LigneExcel[12] Cost4 = LigneExcel[11]*LigneExcel[12]*LigneExcel[13] Capacity5 = LigneExcel[14]*LigneExcel[15] Cost5 = LigneExcel[14]*LigneExcel[15]*LigneExcel[16] Capacity6 = LigneExcel[17]*LigneExcel[18] Cost6 = LigneExcel[19]*LigneExcel[17]*LigneExcel[18] Capacity7 = LigneExcel[20]*LigneExcel[21] Cost7 = LigneExcel[22]*LigneExcel[20]*LigneExcel[21] Capacity8 = LigneExcel[23]*LigneExcel[24] Cost8 = LigneExcel[25]*LigneExcel[23]*LigneExcel[24] Capacity9 = LigneExcel[26]*LigneExcel[27] Cost9 = LigneExcel[28]*LigneExcel[26]*LigneExcel[27] Cost = [Cost1, Cost2, Cost3, Cost4, Cost5, Cost6, Cost7, Cost8, Cost9] Capacity = [Capacity1, Capacity2, Capacity3, Capacity4, Capacity5, Capacity6, Capacity7, Capacity8, Capacity9] p = [0, 0, 0, 0, 0, 0, 0, 0, 0] c = [0, 0, 0, 0, 0, 0, 0, 0, 0] p[0] = Capacity[0] c[0] = Cost[0] for x in range(8): p[x+1] = p[x] + Capacity[x+1] c[x+1] = c[x] + Cost[x+1] inter = [model, startup, shutdown, N, 0, 0, p[0], c[0], p[1], c[1], p[2], c[2], p[3], c[3], p[4], c[4], p[5], c[5], p[6], c[6], p[7], c[7], p[8], c[8]] resultFinal.append(inter) compteur += 1 resultFinal = np.asarray(resultFinal, dtype=np.float64) return resultFinal def importGen(): wb = xlrd.open_workbook('Capacity_cost.xlsx') sh = wb.sheet_by_name(u'Sheet1') resultFinal = [] compteur = 0 for rownum in range(sh.nrows): inter1 = sh.row_values(rownum) if compteur > 0: nomBus = inter1[0] if inter1[1] != "": Pg = inter1[1] else: Pg = 0 Qg = 0 Qmax = 0 Qmin = 0 Vg = 1 mBase = 100 status = 1 Pmax = Pg Pmin = 0 Pc1 = 0 Pc2 = 0 Qc1min = 0 
Qc1max = 0 Qc2min = 0 Qc2max = 0 ramp_agc = 0 ramp_10 = 0 ramp_30 = 0 ramp_q = 0 apf = 0 inter = [nomBus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf] if Pg >- 1: resultFinal.append(inter) compteur += 1 resultFinal = np.asarray(resultFinal) return resultFinal def importBranch(): wb = xlrd.open_workbook('Branch.xlsx') sh = wb.sheet_by_name(u'Sheet1') resultFinal = [] compteur = 0 for rownum in range(sh.nrows): inter1 = sh.row_values(rownum) if compteur > 0: Bus1 = int(inter1[0]) Bus2 = int(inter1[1]) r = inter1[2] x = inter1[3] b = 0.1 rateA = inter1[4] rateB = inter1[4] rateC = inter1[4] ratio = 0 angle = 0 status = 1 angmin = -360 angmax = 360 inter = [Bus1, Bus2, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax] resultFinal.append(inter) compteur += 1 resultFinal = np.asarray(resultFinal) return resultFinal
mit
maxiyommi/primeros-pasos-BBB
CODIGOS/python/LCD/display-lcd-menu.py
1
1524
# -*- coding: utf-8 -*- """Código para display HD44780 conectado via I2C con un modulo PCF8574.""" from Adafruit_BBIO import GPIO from i2c_lcd import I2cLcd import time """ Dirección por defecto del display: 0x27. Probado en una BB Black con Debian - kernel 4.4.83 - Verificar la dirección con el comando ' i2cdetect -y -r ' seguido del número de dispositivo i2c, el cual puede ser 0, 1, 2. """ DEFAULT_I2C_ADDR = 0x27 boton1= "P8_11" boton2= "P8_12" GPIO.setup (boton1, GPIO.IN) def inicio(): lcd = I2cLcd(2, DEFAULT_I2C_ADDR, 2, 16) "Conexión al lcd, resolución 16x2" lcd.blink_cursor_on() "Parpadeo de cursor" lcd.custom_char(15, bytearray([0x0E,0x1B,0x11,0x11,0x1F,0x1F,0x1F,0x1F])) lcd.move_to(15,1) "mueve el cursor a la posición 15x1" lcd.putchar(chr(15)) lcd.move_to(0,0) lcd.putstr("Iniciando!") time.sleep(5) lcd.clear() "Limpia o 'resetea' la pantalla" try: while True: GPIO.wait_for_edge (boton1, GPIO.RISING) lcd.clear() lcd.putstr("1 - 1ra opcion") time.sleep(0.2) GPIO.wait_for_edge (boton1, GPIO.RISING) lcd.clear() lcd.putstr("2 - 2da opcion") time.sleep(0.2) GPIO.wait_for_edge (boton1, GPIO.RISING) lcd.clear() lcd.putstr("3 - 3ra opcion") time.sleep(0.2) except KeyboardInterrupt: GPIO.cleanup() lcd.backlight_off() if __name__ == "__main__": inicio()
gpl-3.0
phborba/dsgtoolsop
ProfileTool/pyqtgraph/canvas/CanvasTemplate_pyqt.py
35
5126
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'acq4/pyqtgraph/canvas/CanvasTemplate.ui' # # Created: Thu Jan 2 11:13:07 2014 # by: PyQt4 UI code generator 4.9 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_Form(object): def setupUi(self, Form): Form.setObjectName(_fromUtf8("Form")) Form.resize(490, 414) self.gridLayout = QtGui.QGridLayout(Form) self.gridLayout.setMargin(0) self.gridLayout.setSpacing(0) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.splitter = QtGui.QSplitter(Form) self.splitter.setOrientation(QtCore.Qt.Horizontal) self.splitter.setObjectName(_fromUtf8("splitter")) self.view = GraphicsView(self.splitter) self.view.setObjectName(_fromUtf8("view")) self.layoutWidget = QtGui.QWidget(self.splitter) self.layoutWidget.setObjectName(_fromUtf8("layoutWidget")) self.gridLayout_2 = QtGui.QGridLayout(self.layoutWidget) self.gridLayout_2.setMargin(0) self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) self.autoRangeBtn = QtGui.QPushButton(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(1) sizePolicy.setHeightForWidth(self.autoRangeBtn.sizePolicy().hasHeightForWidth()) self.autoRangeBtn.setSizePolicy(sizePolicy) self.autoRangeBtn.setObjectName(_fromUtf8("autoRangeBtn")) self.gridLayout_2.addWidget(self.autoRangeBtn, 2, 0, 1, 2) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setSpacing(0) self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.redirectCheck = QtGui.QCheckBox(self.layoutWidget) self.redirectCheck.setObjectName(_fromUtf8("redirectCheck")) self.horizontalLayout.addWidget(self.redirectCheck) self.redirectCombo = CanvasCombo(self.layoutWidget) self.redirectCombo.setObjectName(_fromUtf8("redirectCombo")) 
self.horizontalLayout.addWidget(self.redirectCombo) self.gridLayout_2.addLayout(self.horizontalLayout, 5, 0, 1, 2) self.itemList = TreeWidget(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(100) sizePolicy.setHeightForWidth(self.itemList.sizePolicy().hasHeightForWidth()) self.itemList.setSizePolicy(sizePolicy) self.itemList.setHeaderHidden(True) self.itemList.setObjectName(_fromUtf8("itemList")) self.itemList.headerItem().setText(0, _fromUtf8("1")) self.gridLayout_2.addWidget(self.itemList, 6, 0, 1, 2) self.ctrlLayout = QtGui.QGridLayout() self.ctrlLayout.setSpacing(0) self.ctrlLayout.setObjectName(_fromUtf8("ctrlLayout")) self.gridLayout_2.addLayout(self.ctrlLayout, 10, 0, 1, 2) self.resetTransformsBtn = QtGui.QPushButton(self.layoutWidget) self.resetTransformsBtn.setObjectName(_fromUtf8("resetTransformsBtn")) self.gridLayout_2.addWidget(self.resetTransformsBtn, 7, 0, 1, 1) self.mirrorSelectionBtn = QtGui.QPushButton(self.layoutWidget) self.mirrorSelectionBtn.setObjectName(_fromUtf8("mirrorSelectionBtn")) self.gridLayout_2.addWidget(self.mirrorSelectionBtn, 3, 0, 1, 1) self.reflectSelectionBtn = QtGui.QPushButton(self.layoutWidget) self.reflectSelectionBtn.setObjectName(_fromUtf8("reflectSelectionBtn")) self.gridLayout_2.addWidget(self.reflectSelectionBtn, 3, 1, 1, 1) self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8)) self.autoRangeBtn.setText(QtGui.QApplication.translate("Form", "Auto Range", None, QtGui.QApplication.UnicodeUTF8)) self.redirectCheck.setToolTip(QtGui.QApplication.translate("Form", "Check to display all local items in a remote canvas.", None, QtGui.QApplication.UnicodeUTF8)) 
self.redirectCheck.setText(QtGui.QApplication.translate("Form", "Redirect", None, QtGui.QApplication.UnicodeUTF8)) self.resetTransformsBtn.setText(QtGui.QApplication.translate("Form", "Reset Transforms", None, QtGui.QApplication.UnicodeUTF8)) self.mirrorSelectionBtn.setText(QtGui.QApplication.translate("Form", "Mirror Selection", None, QtGui.QApplication.UnicodeUTF8)) self.reflectSelectionBtn.setText(QtGui.QApplication.translate("Form", "MirrorXY", None, QtGui.QApplication.UnicodeUTF8)) from ..widgets.TreeWidget import TreeWidget from CanvasManager import CanvasCombo from ..widgets.GraphicsView import GraphicsView
gpl-2.0
fjbatresv/odoo
addons/report/models/report.py
168
26320
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api from openerp import SUPERUSER_ID from openerp.exceptions import AccessError from openerp.osv import osv, fields from openerp.tools import config from openerp.tools.misc import find_in_path from openerp.tools.translate import _ from openerp.addons.web.http import request from openerp.tools.safe_eval import safe_eval as eval import re import time import base64 import logging import tempfile import lxml.html import os import subprocess from contextlib import closing from distutils.version import LooseVersion from functools import partial from pyPdf import PdfFileWriter, PdfFileReader #-------------------------------------------------------------------------- # Helpers #-------------------------------------------------------------------------- _logger = logging.getLogger(__name__) def _get_wkhtmltopdf_bin(): wkhtmltopdf_bin = find_in_path('wkhtmltopdf') if wkhtmltopdf_bin is None: raise IOError return wkhtmltopdf_bin #-------------------------------------------------------------------------- # Check the presence 
of Wkhtmltopdf and return its version at Odoo start-up #-------------------------------------------------------------------------- wkhtmltopdf_state = 'install' try: process = subprocess.Popen( [_get_wkhtmltopdf_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except (OSError, IOError): _logger.info('You need Wkhtmltopdf to print a pdf version of the reports.') else: _logger.info('Will use the Wkhtmltopdf binary at %s' % _get_wkhtmltopdf_bin()) out, err = process.communicate() version = re.search('([0-9.]+)', out).group(0) if LooseVersion(version) < LooseVersion('0.12.0'): _logger.info('Upgrade Wkhtmltopdf to (at least) 0.12.0') wkhtmltopdf_state = 'upgrade' else: wkhtmltopdf_state = 'ok' if config['workers'] == 1: _logger.info('You need to start Odoo with at least two workers to print a pdf version of the reports.') wkhtmltopdf_state = 'workers' class Report(osv.Model): _name = "report" _description = "Report" public_user = None #-------------------------------------------------------------------------- # Extension of ir_ui_view.render with arguments frequently used in reports #-------------------------------------------------------------------------- def translate_doc(self, cr, uid, doc_id, model, lang_field, template, values, context=None): """Helper used when a report should be translated into a specific lang. 
<t t-foreach="doc_ids" t-as="doc_id"> <t t-raw="translate_doc(doc_id, doc_model, 'partner_id.lang', account.report_invoice_document')"/> </t> :param doc_id: id of the record to translate :param model: model of the record to translate :param lang_field': field of the record containing the lang :param template: name of the template to translate into the lang_field """ ctx = context.copy() doc = self.pool[model].browse(cr, uid, doc_id, context=ctx) qcontext = values.copy() # Do not force-translate if we chose to display the report in a specific lang if ctx.get('translatable') is True: qcontext['o'] = doc else: # Reach the lang we want to translate the doc into ctx['lang'] = eval('doc.%s' % lang_field, {'doc': doc}) qcontext['o'] = self.pool[model].browse(cr, uid, doc_id, context=ctx) return self.pool['ir.ui.view'].render(cr, uid, template, qcontext, context=ctx) def render(self, cr, uid, ids, template, values=None, context=None): """Allow to render a QWeb template python-side. This function returns the 'ir.ui.view' render but embellish it with some variables/methods used in reports. 
:param values: additionnal methods/variables used in the rendering :returns: html representation of the template """ if values is None: values = {} if context is None: context = {} context = dict(context, inherit_branding=True) # Tell QWeb to brand the generated html view_obj = self.pool['ir.ui.view'] def translate_doc(doc_id, model, lang_field, template): return self.translate_doc(cr, uid, doc_id, model, lang_field, template, values, context=context) user = self.pool['res.users'].browse(cr, uid, uid) website = None if request and hasattr(request, 'website'): if request.website is not None: website = request.website context = dict(context, translatable=context.get('lang') != request.website.default_lang_code) values.update( time=time, context_timestamp=lambda t: fields.datetime.context_timestamp(cr, uid, t, context), translate_doc=translate_doc, editable=True, user=user, res_company=user.company_id, website=website, ) return view_obj.render(cr, uid, template, values, context=context) #-------------------------------------------------------------------------- # Main report methods #-------------------------------------------------------------------------- @api.v7 def get_html(self, cr, uid, ids, report_name, data=None, context=None): """This method generates and returns html version of a report. """ # If the report is using a custom model to render its html, we must use it. # Otherwise, fallback on the generic html rendering. 
try: report_model_name = 'report.%s' % report_name particularreport_obj = self.pool[report_model_name] return particularreport_obj.render_html(cr, uid, ids, data=data, context=context) except KeyError: report = self._get_report_from_name(cr, uid, report_name) report_obj = self.pool[report.model] docs = report_obj.browse(cr, uid, ids, context=context) docargs = { 'doc_ids': ids, 'doc_model': report.model, 'docs': docs, } return self.render(cr, uid, [], report.report_name, docargs, context=context) @api.v8 def get_html(self, records, report_name, data=None): return self._model.get_html(self._cr, self._uid, records.ids, report_name, data=data, context=self._context) @api.v7 def get_pdf(self, cr, uid, ids, report_name, html=None, data=None, context=None): """This method generates and returns pdf version of a report. """ if context is None: context = {} if html is None: html = self.get_html(cr, uid, ids, report_name, data=data, context=context) html = html.decode('utf-8') # Ensure the current document is utf-8 encoded. # Get the ir.actions.report.xml record we are working on. report = self._get_report_from_name(cr, uid, report_name) # Check if we have to save the report or if we have to get one from the db. save_in_attachment = self._check_attachment_use(cr, uid, ids, report) # Get the paperformat associated to the report, otherwise fallback on the company one. 
if not report.paperformat_id: user = self.pool['res.users'].browse(cr, uid, uid) paperformat = user.company_id.paperformat_id else: paperformat = report.paperformat_id # Preparing the minimal html pages css = '' # Will contain local css headerhtml = [] contenthtml = [] footerhtml = [] irconfig_obj = self.pool['ir.config_parameter'] base_url = irconfig_obj.get_param(cr, SUPERUSER_ID, 'report.url') or irconfig_obj.get_param(cr, SUPERUSER_ID, 'web.base.url') # Minimal page renderer view_obj = self.pool['ir.ui.view'] render_minimal = partial(view_obj.render, cr, uid, 'report.minimal_layout', context=context) # The received html report must be simplified. We convert it in a xml tree # in order to extract headers, bodies and footers. try: root = lxml.html.fromstring(html) match_klass = "//div[contains(concat(' ', normalize-space(@class), ' '), ' {} ')]" for node in root.xpath("//html/head/style"): css += node.text for node in root.xpath(match_klass.format('header')): body = lxml.html.tostring(node) header = render_minimal(dict(css=css, subst=True, body=body, base_url=base_url)) headerhtml.append(header) for node in root.xpath(match_klass.format('footer')): body = lxml.html.tostring(node) footer = render_minimal(dict(css=css, subst=True, body=body, base_url=base_url)) footerhtml.append(footer) for node in root.xpath(match_klass.format('page')): # Previously, we marked some reports to be saved in attachment via their ids, so we # must set a relation between report ids and report's content. 
We use the QWeb # branding in order to do so: searching after a node having a data-oe-model # attribute with the value of the current report model and read its oe-id attribute if ids and len(ids) == 1: reportid = ids[0] else: oemodelnode = node.find(".//*[@data-oe-model='%s']" % report.model) if oemodelnode is not None: reportid = oemodelnode.get('data-oe-id') if reportid: reportid = int(reportid) else: reportid = False # Extract the body body = lxml.html.tostring(node) reportcontent = render_minimal(dict(css=css, subst=False, body=body, base_url=base_url)) contenthtml.append(tuple([reportid, reportcontent])) except lxml.etree.XMLSyntaxError: contenthtml = [] contenthtml.append(html) save_in_attachment = {} # Don't save this potentially malformed document # Get paperformat arguments set in the root html tag. They are prioritized over # paperformat-record arguments. specific_paperformat_args = {} for attribute in root.items(): if attribute[0].startswith('data-report-'): specific_paperformat_args[attribute[0]] = attribute[1] # Run wkhtmltopdf process return self._run_wkhtmltopdf( cr, uid, headerhtml, footerhtml, contenthtml, context.get('landscape'), paperformat, specific_paperformat_args, save_in_attachment ) @api.v8 def get_pdf(self, records, report_name, html=None, data=None): return self._model.get_pdf(self._cr, self._uid, records.ids, report_name, html=html, data=data, context=self._context) @api.v7 def get_action(self, cr, uid, ids, report_name, data=None, context=None): """Return an action of type ir.actions.report.xml. 
:param ids: Ids of the records to print (if not used, pass an empty list) :param report_name: Name of the template to generate an action for """ if ids: if not isinstance(ids, list): ids = [ids] context = dict(context or {}, active_ids=ids) report_obj = self.pool['ir.actions.report.xml'] idreport = report_obj.search(cr, uid, [('report_name', '=', report_name)], context=context) try: report = report_obj.browse(cr, uid, idreport[0], context=context) except IndexError: raise osv.except_osv( _('Bad Report Reference'), _('This report is not loaded into the database: %s.' % report_name) ) return { 'context': context, 'data': data, 'type': 'ir.actions.report.xml', 'report_name': report.report_name, 'report_type': report.report_type, 'report_file': report.report_file, 'context': context, } @api.v8 def get_action(self, records, report_name, data=None): return self._model.get_action(self._cr, self._uid, records.ids, report_name, data=data, context=self._context) #-------------------------------------------------------------------------- # Report generation helpers #-------------------------------------------------------------------------- @api.v7 def _check_attachment_use(self, cr, uid, ids, report): """ Check attachment_use field. If set to true and an existing pdf is already saved, load this one now. Else, mark save it. 
""" save_in_attachment = {} save_in_attachment['model'] = report.model save_in_attachment['loaded_documents'] = {} if report.attachment: for record_id in ids: obj = self.pool[report.model].browse(cr, uid, record_id) filename = eval(report.attachment, {'object': obj, 'time': time}) # If the user has checked 'Reload from Attachment' if report.attachment_use: alreadyindb = [('datas_fname', '=', filename), ('res_model', '=', report.model), ('res_id', '=', record_id)] attach_ids = self.pool['ir.attachment'].search(cr, uid, alreadyindb) if attach_ids: # Add the loaded pdf in the loaded_documents list pdf = self.pool['ir.attachment'].browse(cr, uid, attach_ids[0]).datas pdf = base64.decodestring(pdf) save_in_attachment['loaded_documents'][record_id] = pdf _logger.info('The PDF document %s was loaded from the database' % filename) continue # Do not save this document as we already ignore it # If the user has checked 'Save as Attachment Prefix' if filename is False: # May be false if, for instance, the 'attachment' field contains a condition # preventing to save the file. continue else: save_in_attachment[record_id] = filename # Mark current document to be saved return save_in_attachment @api.v8 def _check_attachment_use(self, records, report): return self._model._check_attachment_use( self._cr, self._uid, records.ids, report, context=self._context) def _check_wkhtmltopdf(self): return wkhtmltopdf_state def _run_wkhtmltopdf(self, cr, uid, headers, footers, bodies, landscape, paperformat, spec_paperformat_args=None, save_in_attachment=None): """Execute wkhtmltopdf as a subprocess in order to convert html given in input into a pdf document. 
:param header: list of string containing the headers :param footer: list of string containing the footers :param bodies: list of string containing the reports :param landscape: boolean to force the pdf to be rendered under a landscape format :param paperformat: ir.actions.report.paperformat to generate the wkhtmltopf arguments :param specific_paperformat_args: dict of prioritized paperformat arguments :param save_in_attachment: dict of reports to save/load in/from the db :returns: Content of the pdf as a string """ command_args = [] # Passing the cookie to wkhtmltopdf in order to resolve internal links. try: if request: command_args.extend(['--cookie', 'session_id', request.session.sid]) except AttributeError: pass # Wkhtmltopdf arguments command_args.extend(['--quiet']) # Less verbose error messages if paperformat: # Convert the paperformat record into arguments command_args.extend(self._build_wkhtmltopdf_args(paperformat, spec_paperformat_args)) # Force the landscape orientation if necessary if landscape and '--orientation' in command_args: command_args_copy = list(command_args) for index, elem in enumerate(command_args_copy): if elem == '--orientation': del command_args[index] del command_args[index] command_args.extend(['--orientation', 'landscape']) elif landscape and not '--orientation' in command_args: command_args.extend(['--orientation', 'landscape']) # Execute WKhtmltopdf pdfdocuments = [] temporary_files = [] for index, reporthtml in enumerate(bodies): local_command_args = [] pdfreport_fd, pdfreport_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.') temporary_files.append(pdfreport_path) # Directly load the document if we already have it if save_in_attachment and save_in_attachment['loaded_documents'].get(reporthtml[0]): with closing(os.fdopen(pdfreport_fd, 'w')) as pdfreport: pdfreport.write(save_in_attachment['loaded_documents'][reporthtml[0]]) pdfdocuments.append(pdfreport_path) continue else: os.close(pdfreport_fd) # Wkhtmltopdf handles 
header/footer as separate pages. Create them if necessary. if headers: head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.') temporary_files.append(head_file_path) with closing(os.fdopen(head_file_fd, 'w')) as head_file: head_file.write(headers[index]) local_command_args.extend(['--header-html', head_file_path]) if footers: foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.') temporary_files.append(foot_file_path) with closing(os.fdopen(foot_file_fd, 'w')) as foot_file: foot_file.write(footers[index]) local_command_args.extend(['--footer-html', foot_file_path]) # Body stuff content_file_fd, content_file_path = tempfile.mkstemp(suffix='.html', prefix='report.body.tmp.') temporary_files.append(content_file_path) with closing(os.fdopen(content_file_fd, 'w')) as content_file: content_file.write(reporthtml[1]) try: wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + local_command_args wkhtmltopdf += [content_file_path] + [pdfreport_path] process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = process.communicate() if process.returncode not in [0, 1]: raise osv.except_osv(_('Report (PDF)'), _('Wkhtmltopdf failed (error code: %s). 
' 'Message: %s') % (str(process.returncode), err)) # Save the pdf in attachment if marked if reporthtml[0] is not False and save_in_attachment.get(reporthtml[0]): with open(pdfreport_path, 'rb') as pdfreport: attachment = { 'name': save_in_attachment.get(reporthtml[0]), 'datas': base64.encodestring(pdfreport.read()), 'datas_fname': save_in_attachment.get(reporthtml[0]), 'res_model': save_in_attachment.get('model'), 'res_id': reporthtml[0], } try: self.pool['ir.attachment'].create(cr, uid, attachment) except AccessError: _logger.warning("Cannot save PDF report %r as attachment", attachment['name']) else: _logger.info('The PDF document %s is now saved in the database', attachment['name']) pdfdocuments.append(pdfreport_path) except: raise # Return the entire document if len(pdfdocuments) == 1: entire_report_path = pdfdocuments[0] else: entire_report_path = self._merge_pdf(pdfdocuments) temporary_files.append(entire_report_path) with open(entire_report_path, 'rb') as pdfdocument: content = pdfdocument.read() # Manual cleanup of the temporary files for temporary_file in temporary_files: try: os.unlink(temporary_file) except (OSError, IOError): _logger.error('Error when trying to remove file %s' % temporary_file) return content def _get_report_from_name(self, cr, uid, report_name): """Get the first record of ir.actions.report.xml having the ``report_name`` as value for the field report_name. """ report_obj = self.pool['ir.actions.report.xml'] qwebtypes = ['qweb-pdf', 'qweb-html'] conditions = [('report_type', 'in', qwebtypes), ('report_name', '=', report_name)] idreport = report_obj.search(cr, uid, conditions)[0] return report_obj.browse(cr, uid, idreport) def _build_wkhtmltopdf_args(self, paperformat, specific_paperformat_args=None): """Build arguments understandable by wkhtmltopdf from a report.paperformat record. 
:paperformat: report.paperformat record :specific_paperformat_args: a dict containing prioritized wkhtmltopdf arguments :returns: list of string representing the wkhtmltopdf arguments """ command_args = [] if paperformat.format and paperformat.format != 'custom': command_args.extend(['--page-size', paperformat.format]) if paperformat.page_height and paperformat.page_width and paperformat.format == 'custom': command_args.extend(['--page-width', str(paperformat.page_width) + 'mm']) command_args.extend(['--page-height', str(paperformat.page_height) + 'mm']) if specific_paperformat_args and specific_paperformat_args.get('data-report-margin-top'): command_args.extend(['--margin-top', str(specific_paperformat_args['data-report-margin-top'])]) else: command_args.extend(['--margin-top', str(paperformat.margin_top)]) if specific_paperformat_args and specific_paperformat_args.get('data-report-dpi'): command_args.extend(['--dpi', str(specific_paperformat_args['data-report-dpi'])]) elif paperformat.dpi: if os.name == 'nt' and int(paperformat.dpi) <= 95: _logger.info("Generating PDF on Windows platform require DPI >= 96. 
Using 96 instead.") command_args.extend(['--dpi', '96']) else: command_args.extend(['--dpi', str(paperformat.dpi)]) if specific_paperformat_args and specific_paperformat_args.get('data-report-header-spacing'): command_args.extend(['--header-spacing', str(specific_paperformat_args['data-report-header-spacing'])]) elif paperformat.header_spacing: command_args.extend(['--header-spacing', str(paperformat.header_spacing)]) command_args.extend(['--margin-left', str(paperformat.margin_left)]) command_args.extend(['--margin-bottom', str(paperformat.margin_bottom)]) command_args.extend(['--margin-right', str(paperformat.margin_right)]) if paperformat.orientation: command_args.extend(['--orientation', str(paperformat.orientation)]) if paperformat.header_line: command_args.extend(['--header-line']) return command_args def _merge_pdf(self, documents): """Merge PDF files into one. :param documents: list of path of pdf files :returns: path of the merged pdf """ writer = PdfFileWriter() streams = [] # We have to close the streams *after* PdfFilWriter's call to write() for document in documents: pdfreport = file(document, 'rb') streams.append(pdfreport) reader = PdfFileReader(pdfreport) for page in range(0, reader.getNumPages()): writer.addPage(reader.getPage(page)) merged_file_fd, merged_file_path = tempfile.mkstemp(suffix='.html', prefix='report.merged.tmp.') with closing(os.fdopen(merged_file_fd, 'w')) as merged_file: writer.write(merged_file) for stream in streams: stream.close() return merged_file_path
agpl-3.0
mpasternak/pyglet-fix-issue-552
pyglet/image/codecs/gdkpixbuf2.py
9
5863
# ---------------------------------------------------------------------------- # pyglet # Copyright (c) 2006-2008 Alex Holkner # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of pyglet nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# ---------------------------------------------------------------------------- ''' ''' __docformat__ = 'restructuredtext' __version__ = '$Id$' from ctypes import * from pyglet.gl import * from pyglet.image import * from pyglet.image.codecs import * from pyglet.image.codecs import gif import pyglet.lib import pyglet.window gdk = pyglet.lib.load_library('gdk-x11-2.0') gdkpixbuf = pyglet.lib.load_library('gdk_pixbuf-2.0') GdkPixbufLoader = c_void_p GdkPixbuf = c_void_p gdkpixbuf.gdk_pixbuf_loader_new.restype = GdkPixbufLoader gdkpixbuf.gdk_pixbuf_loader_get_pixbuf.restype = GdkPixbuf gdkpixbuf.gdk_pixbuf_get_pixels.restype = c_void_p gdkpixbuf.gdk_pixbuf_loader_get_animation.restype = c_void_p gdkpixbuf.gdk_pixbuf_animation_get_iter.restype = c_void_p gdkpixbuf.gdk_pixbuf_animation_iter_get_pixbuf.restype = GdkPixbuf class GTimeVal(Structure): _fields_ = [ ('tv_sec', c_long), ('tv_usec', c_long) ] class GdkPixbuf2ImageDecoder(ImageDecoder): def get_file_extensions(self): return ['.png', '.xpm', '.jpg', '.jpeg', '.tif', '.tiff', '.pnm', '.ras', '.bmp', '.gif'] def get_animation_file_extensions(self): return ['.gif', '.ani'] def _load(self, file, filename, load_func): data = file.read() loader = gdkpixbuf.gdk_pixbuf_loader_new() gdkpixbuf.gdk_pixbuf_loader_write(loader, data, len(data), None) if not gdkpixbuf.gdk_pixbuf_loader_close(loader, None): raise ImageDecodeException(filename) result = load_func(loader) if not result: raise ImageDecodeException('Unable to load: %s' % filename) return result def _pixbuf_to_image(self, pixbuf): # Get format and dimensions width = gdkpixbuf.gdk_pixbuf_get_width(pixbuf) height = gdkpixbuf.gdk_pixbuf_get_height(pixbuf) channels = gdkpixbuf.gdk_pixbuf_get_n_channels(pixbuf) rowstride = gdkpixbuf.gdk_pixbuf_get_rowstride(pixbuf) #has_alpha = gdkpixbuf.gdk_pixbuf_get_has_alpha(pixbuf) pixels = gdkpixbuf.gdk_pixbuf_get_pixels(pixbuf) # Copy pixel data. 
buffer = (c_ubyte * (rowstride * height))() memmove(buffer, pixels, rowstride * (height - 1) + width * channels) # Release pixbuf gdk.g_object_unref(pixbuf) # Determine appropriate GL type if channels == 3: format = 'RGB' else: format = 'RGBA' return ImageData(width, height, format, buffer, -rowstride) def decode(self, file, filename): pixbuf = self._load(file, filename, gdkpixbuf.gdk_pixbuf_loader_get_pixbuf) return self._pixbuf_to_image(pixbuf) def decode_animation(self, file, filename): # Extract GIF control data. If it's not a GIF, this method will # raise. gif_stream = gif.read(file) delays = [image.delay for image in gif_stream.images] # Get GDK animation iterator file.seek(0) anim = self._load(file, filename, gdkpixbuf.gdk_pixbuf_loader_get_animation) time = GTimeVal(0, 0) iter = gdkpixbuf.gdk_pixbuf_animation_get_iter(anim, byref(time)) frames = [] # Extract each image for control_delay in delays: pixbuf = gdkpixbuf.gdk_pixbuf_animation_iter_get_pixbuf(iter) image = self._pixbuf_to_image(pixbuf) frames.append(AnimationFrame(image, control_delay)) gdk_delay = gdkpixbuf.gdk_pixbuf_animation_iter_get_delay_time(iter) gdk_delay *= 1000 # milliseconds to microseconds # Compare gdk_delay to control_delay for interest only. #print control_delay, gdk_delay / 1000000. if gdk_delay == -1: break us = time.tv_usec + gdk_delay time.tv_sec += us // 1000000 time.tv_usec = us % 1000000 gdkpixbuf.gdk_pixbuf_animation_iter_advance(iter, byref(time)) return Animation(frames) def get_decoders(): return [GdkPixbuf2ImageDecoder()] def get_encoders(): return [] def init(): gdk.g_type_init() init()
bsd-3-clause
paulsmith/geodjango
django/contrib/gis/gdal/geomtype.py
1
2745
from django.contrib.gis.gdal.error import OGRException #### OGRGeomType #### class OGRGeomType(object): "Encapulates OGR Geometry Types." # Ordered array of acceptable strings and their corresponding OGRwkbGeometryType __ogr_str = ['Unknown', 'Point', 'LineString', 'Polygon', 'MultiPoint', 'MultiLineString', 'MultiPolygon', 'GeometryCollection', 'LinearRing'] __ogr_int = [0, 1, 2, 3, 4, 5, 6, 7, 101] def __init__(self, type_input): "Figures out the correct OGR Type based upon the input." if isinstance(type_input, OGRGeomType): self._index = type_input._index elif isinstance(type_input, basestring): idx = self._has_str(self.__ogr_str, type_input) if idx == None: raise OGRException('Invalid OGR String Type "%s"' % type_input) self._index = idx elif isinstance(type_input, int): if not type_input in self.__ogr_int: raise OGRException('Invalid OGR Integer Type: %d' % type_input) self._index = self.__ogr_int.index(type_input) else: raise TypeError('Invalid OGR input type given.') def __str__(self): "Returns a short-hand string form of the OGR Geometry type." return self.__ogr_str[self._index] def __eq__(self, other): """ Does an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """ if isinstance(other, OGRGeomType): return self._index == other._index elif isinstance(other, basestring): idx = self._has_str(self.__ogr_str, other) if not (idx == None): return self._index == idx return False elif isinstance(other, int): if not other in self.__ogr_int: return False return self.__ogr_int.index(other) == self._index else: raise TypeError('Cannot compare with type: %s' % str(type(other))) def __ne__(self, other): return not (self == other) def _has_str(self, arr, s): "Case-insensitive search of the string array for the given pattern." s_low = s.lower() for i in xrange(len(arr)): if s_low == arr[i].lower(): return i return None @property def django(self): "Returns the Django GeometryField for this OGR Type." 
s = self.__ogr_str[self._index] if s in ('Unknown', 'LinearRing'): return None else: return s + 'Field' @property def num(self): "Returns the OGRwkbGeometryType number for the OGR Type." return self.__ogr_int[self._index]
bsd-3-clause
open-synergy/purchase-workflow
purchase_partial_invoicing/wizard/po_line_invoice.py
3
4153
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2013 Agile Business Group sagl (<http://www.agilebg.com>) # Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from __future__ import division from openerp import models, fields, api, exceptions import openerp.addons.decimal_precision as dp from openerp.tools.translate import _ class PurchaseLineInvoice(models.TransientModel): _inherit = 'purchase.order.line_invoice' line_ids = fields.One2many('purchase.order.line_invoice.line', 'wizard_id', string='Lines') @api.model def default_get(self, fields): ctx = self.env.context.copy() po_ids = ctx.get('active_ids', []) po_line_obj = self.env['purchase.order.line'] lines = [] for po_line in po_line_obj.browse(po_ids): max_quantity = po_line.product_qty - po_line.invoiced_qty -\ po_line.cancelled_qty lines.append({ 'po_line_id': po_line.id, 'product_qty': max_quantity, 'invoiced_qty': max_quantity, 'price_unit': po_line.price_unit, }) defaults = super(PurchaseLineInvoice, self).default_get(fields) defaults['line_ids'] = lines return defaults @api.multi def makeInvoices(self): self.ensure_one() ctx = self.env.context.copy() changed_lines = {} ctx['active_ids'] = [] 
not_invoiced_lines = self.env['purchase.order.line'] invoiced_lines = self.env['purchase.order.line'] for line in self.line_ids: if line.invoiced_qty > line.product_qty: raise exceptions.Warning( _("""Quantity to invoice is greater than available quantity""")) ctx['active_ids'].append(line.po_line_id.id) changed_lines[ line.po_line_id.id ] = line.invoiced_qty if line.po_line_id.fully_invoiced: invoiced_lines += line.po_line_id else: not_invoiced_lines += line.po_line_id not_invoiced_lines.write({'invoiced': False}) invoiced_lines.write({'invoiced': True}) ctx.update({'partial_quantity_lines': changed_lines}) res = super(PurchaseLineInvoice, self.with_context(ctx))\ .makeInvoices() po_lines = self.env['purchase.order.line'].browse(changed_lines.keys()) for po_line in po_lines: if po_line.invoiced_qty != po_line.product_qty: po_line.invoiced = False return res class PurchaseLineInvoiceLine(models.TransientModel): _name = 'purchase.order.line_invoice.line' po_line_id = fields.Many2one('purchase.order.line', 'Purchase order line', readonly=True) product_qty = fields.Float( 'Quantity', digits=dp.get_precision('Product Unit of Measure'), readonly=True) price_unit = fields.Float(related='po_line_id.price_unit', string='Unit Price', readonly=True) invoiced_qty = fields.Float( string='Quantity to invoice', digits=dp.get_precision('Product Unit of Measure')) wizard_id = fields.Many2one('purchase.order.line_invoice', 'Wizard')
agpl-3.0
saisai/phantomjs
src/qt/qtwebkit/Tools/QueueStatusServer/model/queuestatus.py
121
2109
# Copyright (C) 2013 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from config import messages from google.appengine.ext import db from model.queuepropertymixin import QueuePropertyMixin class QueueStatus(db.Model, QueuePropertyMixin): author = db.UserProperty() queue_name = db.StringProperty() bot_id = db.StringProperty() active_bug_id = db.IntegerProperty() active_patch_id = db.IntegerProperty() message = db.StringProperty(multiline=True) date = db.DateTimeProperty(auto_now_add=True) results_file = db.BlobProperty() def is_retry_request(self): return self.message == messages.retry_status
bsd-3-clause
jinverar/crits
crits/core/mongo_tools.py
17
14440
from django.conf import settings

if settings.FILE_DB == settings.S3:
    from crits.core.s3_tools import get_file_s3

import gridfs
import pymongo

import magic


class MongoError(Exception):
    """
    Generic MongoError exception.
    """

    pass


# TODO: mongo_connector() and gridfs_connector() can probably be combined into
# one function.

# Setup standard connector to the MongoDB instance for use in any functions
def mongo_connector(collection, preference=settings.MONGO_READ_PREFERENCE):
    """
    Connect to the mongo database if you need to use PyMongo directly and not
    use MongoEngine.

    :param collection: the collection to use.
    :type collection: str
    :param preference: PyMongo Read Preference for ReplicaSet/clustered DBs.
    :type preference: str.
    :returns: :class:`pymongo.Connection`,
              :class:`crits.core.mongo_tools.MongoError`
    """
    # NOTE(review): pymongo.Connection is a legacy API (removed in modern
    # PyMongo in favor of MongoClient); kept here because the rest of the
    # project depends on it.
    try:
        connection = pymongo.Connection("%s" % settings.MONGO_HOST,
                                        settings.MONGO_PORT,
                                        read_preference=preference,
                                        ssl=settings.MONGO_SSL)
        db = connection[settings.MONGO_DATABASE]
        if settings.MONGO_USER:
            db.authenticate(settings.MONGO_USER, settings.MONGO_PASSWORD)
        return db[collection]
    except pymongo.errors.ConnectionFailure as e:
        raise MongoError("Error connecting to Mongo database: %s" % e)
    except KeyError as e:
        raise MongoError("Unknown database or collection: %s" % e)
    # Any other exception propagates unchanged (the original had an
    # equivalent ``except: raise``, which is a no-op and was removed).


def gridfs_connector(collection, preference=settings.MONGO_READ_PREFERENCE):
    """
    Connect to the mongo database if you need to use PyMongo directly and not
    use MongoEngine. Used specifically for accessing GridFS.

    :param collection: the collection to use.
    :type collection: str
    :param preference: PyMongo Read Preference for ReplicaSet/clustered DBs.
    :type preference: str.
    :returns: :class:`gridfs.GridFS`,
              :class:`crits.core.mongo_tools.MongoError`
    """
    try:
        connection = pymongo.Connection("%s" % settings.MONGO_HOST,
                                        settings.MONGO_PORT,
                                        read_preference=preference,
                                        ssl=settings.MONGO_SSL)
        db = connection[settings.MONGO_DATABASE]
        if settings.MONGO_USER:
            db.authenticate(settings.MONGO_USER, settings.MONGO_PASSWORD)
        return gridfs.GridFS(db, collection)
    except pymongo.errors.ConnectionFailure as e:
        raise MongoError("Error connecting to Mongo database: %s" % e)
    except KeyError as e:
        raise MongoError("Unknown database: %s" % e)


def get_file(sample_md5, collection=settings.COL_SAMPLES):
    """
    Get a file from GridFS (or S3 if that's what you've configured).

    :param sample_md5: The MD5 of the file to download.
    :type sample_md5: str
    :param collection: The collection to grab the file from.
    :type collection: str
    :returns: str, or None if the configured FILE_DB backend is neither
              GRIDFS nor S3.
    """
    # Workaround until pcap download uses pcap object
    if settings.FILE_DB == settings.GRIDFS:
        return get_file_gridfs(sample_md5, collection)
    elif settings.FILE_DB == settings.S3:
        # For S3 the GridFS object id is stored on the metadata document.
        objs = mongo_connector(collection)
        obj = objs.find_one({"md5": sample_md5})
        oid = obj['filedata']
        return get_file_s3(oid, collection)


def put_file(m, data, collection=settings.COL_SAMPLES):
    """
    Add a file to storage.

    :param m: The filename.
    :type m: str
    :param data: The data to add.
    :type data: str
    :param collection: The collection to grab the file from.
    :type collection: str
    :returns: str
    """
    return put_file_gridfs(m, data, collection)


def get_file_gridfs(sample_md5, collection=settings.COL_SAMPLES):
    """
    Get a file from GridFS.

    :param sample_md5: The MD5 of the file to download.
    :type sample_md5: str
    :param collection: The collection to grab the file from.
    :type collection: str
    :returns: str, or None on any lookup/read failure.
    """
    data = None
    try:
        # Look up the GridFS file document by md5, then fetch its contents.
        fm = mongo_connector("%s.files" % collection)
        objectid = fm.find_one({'md5': sample_md5}, {'_id': 1})['_id']
        fs = gridfs_connector("%s" % collection)
        data = fs.get(objectid).read()
    except Exception:
        # Best-effort read: missing file or connection trouble yields None.
        return None
    return data


def put_file_gridfs(m, data, collection=settings.COL_SAMPLES):
    """
    Add a file to storage.

    :param m: The filename.
    :type m: str
    :param data: The data to add.
    :type data: str
    :param collection: The collection to grab the file from.
    :type collection: str
    :returns: str (the filename), or None on failure.
    """
    # Detect the MIME type so it can be stored as GridFS content_type.
    mimetype = magic.from_buffer(data, mime=True)
    try:
        fs = gridfs_connector("%s" % collection)
        fs.put(data, content_type="%s" % mimetype, filename="%s" % m)
    except Exception:
        return None
    return m


def delete_file(sample_md5, collection=settings.COL_SAMPLES):
    """
    delete_file allows you to delete a file from a gridfs collection specified
    in the collection parameter.
    this will only remove the file object, not metadata from assocatiated
    collections for full deletion of metadata and file use delete_sample

    :param sample_md5: The MD5 of the file to delete.
    :type sample_md5: str
    :param collection: The collection to delete the file from.
    :type collection: str
    :returns: True if the file was removed, None otherwise (not found or
              GridFS delete failed).
    """
    fm = mongo_connector("%s.files" % collection)
    sample = fm.find_one({'md5': sample_md5}, {'_id': 1})
    if sample:
        fs = gridfs_connector("%s" % collection)
        try:
            fs.delete(sample["_id"])
            return True
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            return None
    return None


####################################################
# NOTE: The following wrappers are only here for   #
#       legacy code and rare instances where we    #
#       cannot use MongoEngine to achieve our      #
#       goal. Please use these as a last resort!   #
####################################################

# Wrapper for pymongo's find_one function
def mongo_find_one(collection, query, fields=None, skip=0, sort=None,
                   *args, **kwargs):
    """
    Find one document from a collection matching the parameters.

    :param collection: The collection to query.
    :type collection: str
    :param query: The query to find the document(s).
    :type query: dict
    :param fields: The fields to return for each document.
    :type fields: dict
    :param skip: How many documents to skip before returning.
    :type skip: int
    :param sort: How to sort the results.
    :type sort: dict
    :returns: PyMongo cursor.
    """
    col = mongo_connector(collection)
    return col.find_one(query, fields, skip=skip, sort=sort, *args, **kwargs)


# Wrapper for pymongo's find function
def mongo_find(collection, query, fields=None, skip=0, limit=0, sort=None,
               count=False, *args, **kwargs):
    """
    Find documents from a collection matching the parameters.

    :param collection: The collection to query.
    :type collection: str
    :param query: The query to find the document(s).
    :type query: dict
    :param fields: The fields to return for each document.
    :type fields: dict
    :param skip: How many documents to skip before returning.
    :type skip: int
    :param limit: How many documents to return.
    :type limit: int
    :param sort: How to sort the results.
    :type sort: dict
    :param count: Only return a count of the documents.
    :type count: boolean
    :returns: PyMongo cursor, int
    """
    col = mongo_connector(collection)
    results = col.find(query, fields, skip=skip, limit=limit, sort=sort,
                       *args, **kwargs)
    # NOTE(review): the original code ran a bare ``col.close`` (no call
    # parentheses) when ``timeout`` was disabled; that was a no-op attribute
    # access on the Collection object and has been removed. Releasing the
    # server-side cursor would have to be done on the cursor/connection,
    # not the collection.
    if count:
        return results.count()
    else:
        return results


# Wrapper for pymongo's insert function
def mongo_insert(collection, doc_or_docs, username=None, safe=True,
                 *args, **kwargs):
    """
    Insert documents into a collection.

    :param collection: The collection to query.
    :type collection: str
    :param doc_or_docs: A single document or list of documents to insert.
    :type doc_or_docs: dict or list
    :param username: The user inserting these documents (currently unused).
    :type username: str
    :param safe: Whether or not to insert in safe mode.
    :type safe: boolean
    :returns: dict with keys:
              "success" (boolean),
              "message" (list),
              "object" (insertion response) if successful.
    """
    col = mongo_connector(collection)
    try:
        col.insert(doc_or_docs, safe=safe, check_keys=True, *args, **kwargs)
        return {'success': True, 'message': [], 'object': doc_or_docs}
    except Exception as e:
        # OperationFailure gets raised only if safe=True and there is some
        # error
        return {'success': False, 'message': [format_error(e)]}


# Wrapper for pymongo's update function
def mongo_update(collection, query, alter, username=None, multi=True,
                 upsert=False, safe=True, *args, **kwargs):
    """
    Update documents in a collection.

    :param collection: The collection to query.
    :type collection: str
    :param query: The query to use to find the documents to update.
    :type query: dict
    :param alter: How to update the documents.
    :type alter: dict
    :param username: The user updating the documents (currently unused).
    :type username: str
    :param multi: Whether or not to update multiple documents.
    :type multi: boolean
    :param upsert: Insert documents into the collection if they are not found.
    :type upsert: boolean
    :param safe: Use safe mode while performing the update.
    :type safe: boolean
    :returns: dict with keys "success" (boolean) and "message" (list)
    """
    col = mongo_connector(collection)
    try:
        r = col.update(query, alter, multi=multi, upsert=upsert,
                       check_keys=True, safe=safe, *args, **kwargs)
        return {'success': True, 'message': [r]}
    except Exception as e:
        return {'success': False, 'message': [format_error(e)]}


# Wrapper for pymongo's save function
def mongo_save(collection, to_save, username=None, safe=True,
               *args, **kwargs):
    """
    Save a document to a collection.

    :param collection: The collection to query.
    :type collection: str
    :param to_save: The document to save.
    :type to_save: dict
    :param username: The user saving the document (currently unused).
    :type username: str
    :param safe: Use safe mode while performing the save.
    :type safe: boolean
    :returns: dict with keys "success" (boolean) and "message" (list)
    """
    col = mongo_connector(collection)
    try:
        r = col.save(to_save, check_keys=True, manipulate=True, safe=safe,
                     *args, **kwargs)
        return {'success': True, 'message': [r]}
    except Exception as e:
        return {'success': False, 'message': [format_error(e)]}


# Wrapper for pymongo's find_and_modify function
def mongo_find_and_modify(collection, query, alter, fields=None,
                          username=None, sort=None, remove=False, new=False,
                          upsert=False, *args, **kwargs):
    """
    Find documents from a collection matching the parameters, update them,
    and return them.

    :param collection: The collection to query.
    :type collection: str
    :param query: The query to use to find the documents to update.
    :type query: dict
    :param alter: How to update the documents.
    :type alter: dict
    :param fields: The fields to return for each document.
    :type fields: dict
    :param username: The user updating the documents (currently unused).
    :type username: str
    :param sort: How to sort the results. Defaults to no sorting ({}).
    :type sort: dict
    :param remove: Remove documents instead of update.
    :type remove: boolean
    :param new: Return the updated documents instead of the original ones.
    :param upsert: Insert documents into the collection if they are not found.
    :type upsert: boolean
    :returns: dict with keys:
              "success" (boolean),
              "message" (list),
              "object" (cursor) if successful.
    """
    # The original signature used the mutable default ``sort={}``; a None
    # sentinel avoids sharing one dict object across all calls while keeping
    # the effective default behavior identical.
    if sort is None:
        sort = {}
    try:
        col = mongo_connector(collection)
        result = col.find_and_modify(query, update=alter, fields=fields,
                                     remove=remove, new=new, upsert=upsert,
                                     sort=sort, *args, **kwargs)
    except Exception as e:
        return {'success': False, 'message': [format_error(e)]}
    try:
        return {'success': True, 'message': [], 'object': result}
    except Exception as e:
        return {'success': True, 'message': [format_error(e)],
                'object': result}


# Wrapper for pymongo's remove function
def mongo_remove(collection, query=None, username=None, safe=True,
                 verify=False, *args, **kwargs):
    """
    Remove documents from a collection matching the parameters.

    :param collection: The collection to query.
    :type collection: str
    :param query: The query to use to find the documents to remove.
    :type query: dict
    :param username: The user removing the documents (currently unused).
    :type username: str
    :param safe: Use safe mode while removing the documents.
    :type safe: boolean
    :param verify: Verify the removal.
    :type verify: boolean
    :returns: dict with keys "success" (boolean) and "message" list.
    """
    if not query:
        return {'success': False, 'message': ['No query supplied to remove']}
    try:
        col = mongo_connector(collection)
        col.remove(query, safe=safe, *args, **kwargs)
        if verify:
            # If any document still matches, the removal did not take.
            if mongo_find(collection, query, count=True):
                return {'success': False,
                        'message': ['Unknown error; unable to remove item']}
        return {'success': True, 'message': []}
    except Exception as e:
        return {'success': False, 'message': [format_error(e)]}


def format_error(e):
    """
    wrapper for core/handlers format_error function. Redefined here to avoid
    circular imports.

    :param e: The error.
    :type e: :class:`Exception`
    :returns: str
    """
    # Imported lazily on purpose: crits.core.handlers imports this module.
    from crits.core.handlers import format_error as fe
    return fe(e)
mit
rlr/fjord
vendor/packages/chardet/chardet/langthaimodel.py
2930
11275
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # The following result for thai was collected from a limited sample (1M). 
# Character Mapping Table: TIS620CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, 223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, 236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, ) # Model Table: # total sequences: 100% # first 512 sequences: 92.6386% # first 1024 sequences:7.3177% # rest sequences: 1.0230% # negative sequences: 0.0436% ThaiLangModel = ( 0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, 0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, 3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, 0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, 3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, 3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, 3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, 3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, 3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, 
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, 2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, 3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, 0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, 0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, 1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, 3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, 3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, 1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, 0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, 3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, 2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, 3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, 0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, 3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, 3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, 2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, 3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, 2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, 3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, 3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, 3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, 3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, 1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, 0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, 0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, 3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, 3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, 1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, 3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, 3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, 0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, 0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, 1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, 1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, 3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, 0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, 3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, 0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, 0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, 0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, 0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, 0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, 0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, 0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, 3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, 0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, 0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, 3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, 2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, 0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, 3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, 1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, 1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, 1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, ) TIS620ThaiModel = { 'charToOrderMap': TIS620CharToOrderMap, 'precedenceMatrix': ThaiLangModel, 'mTypicalPositiveRatio': 0.926386, 'keepEnglishLetter': False, 'charsetName': 
"TIS-620" } # flake8: noqa
bsd-3-clause
pawkoz/dyplom
blender/release/scripts/startup/bl_ui/space_userpref.py
1
48048
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # <pep8 compliant> import bpy from bpy.types import Header, Menu, Panel from bpy.app.translations import pgettext_iface as iface_ from bpy.app.translations import contexts as i18n_contexts def opengl_lamp_buttons(column, lamp): split = column.split(percentage=0.1) split.prop(lamp, "use", text="", icon='OUTLINER_OB_LAMP' if lamp.use else 'LAMP_DATA') col = split.column() col.active = lamp.use row = col.row() row.label(text="Diffuse:") row.prop(lamp, "diffuse_color", text="") row = col.row() row.label(text="Specular:") row.prop(lamp, "specular_color", text="") col = split.column() col.active = lamp.use col.prop(lamp, "direction", text="") class USERPREF_HT_header(Header): bl_space_type = 'USER_PREFERENCES' def draw(self, context): layout = self.layout layout.template_header() userpref = context.user_preferences layout.operator_context = 'EXEC_AREA' layout.operator("wm.save_userpref") layout.operator_context = 'INVOKE_DEFAULT' if userpref.active_section == 'INPUT': layout.operator("wm.keyconfig_import") layout.operator("wm.keyconfig_export") elif userpref.active_section == 'ADDONS': layout.operator("wm.addon_install", icon='FILESEL') layout.operator("wm.addon_refresh", icon='FILE_REFRESH') 
layout.menu("USERPREF_MT_addons_dev_guides") elif userpref.active_section == 'THEMES': layout.operator("ui.reset_default_theme") layout.operator("wm.theme_install") class USERPREF_PT_tabs(Panel): bl_label = "" bl_space_type = 'USER_PREFERENCES' bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} def draw(self, context): layout = self.layout userpref = context.user_preferences layout.prop(userpref, "active_section", expand=True) class USERPREF_MT_interaction_presets(Menu): bl_label = "Presets" preset_subdir = "interaction" preset_operator = "script.execute_preset" draw = Menu.draw_preset class USERPREF_MT_appconfigs(Menu): bl_label = "AppPresets" preset_subdir = "keyconfig" preset_operator = "wm.appconfig_activate" def draw(self, context): self.layout.operator("wm.appconfig_default", text="Blender (default)") # now draw the presets Menu.draw_preset(self, context) class USERPREF_MT_splash(Menu): bl_label = "Splash" def draw(self, context): layout = self.layout split = layout.split() row = split.row() row.label("") row = split.row() row.label("Interaction:") text = bpy.path.display_name(context.window_manager.keyconfigs.active.name) if not text: text = "Blender (default)" row.menu("USERPREF_MT_appconfigs", text=text) # only for addons class USERPREF_MT_splash_footer(Menu): bl_label = "" def draw(self, context): pass class USERPREF_PT_interface(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Interface" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'INTERFACE') def draw(self, context): import sys layout = self.layout userpref = context.user_preferences view = userpref.view row = layout.row() col = row.column() col.label(text="Display:") col.prop(view, "show_tooltips") col.prop(view, "show_tooltips_python") col.prop(view, "show_object_info", text="Object Info") col.prop(view, "show_large_cursors") col.prop(view, "show_view_name", text="View 
Name") col.prop(view, "show_playback_fps", text="Playback FPS") col.prop(view, "use_global_scene") col.prop(view, "object_origin_size") col.separator() col.separator() col.separator() col.prop(view, "show_mini_axis", text="Display Mini Axis") sub = col.column() sub.active = view.show_mini_axis sub.prop(view, "mini_axis_size", text="Size") sub.prop(view, "mini_axis_brightness", text="Brightness") col.separator() if sys.platform[:3] == "win": col.label("Warnings") col.prop(view, "use_quit_dialog") row.separator() row.separator() col = row.column() col.label(text="View Manipulation:") col.prop(view, "use_mouse_depth_cursor") col.prop(view, "use_mouse_depth_navigate") col.prop(view, "use_zoom_to_mouse") col.prop(view, "use_rotate_around_active") col.prop(view, "use_global_pivot") col.prop(view, "use_camera_lock_parent") col.separator() col.prop(view, "use_auto_perspective") col.prop(view, "smooth_view") col.prop(view, "rotation_angle") col.separator() col.separator() col.label(text="2D Viewports:") col.prop(view, "view2d_grid_spacing_min", text="Minimum Grid Spacing") col.prop(view, "timecode_style") col.prop(view, "view_frame_type") if (view.view_frame_type == 'SECONDS'): col.prop(view, "view_frame_seconds") elif (view.view_frame_type == 'KEYFRAMES'): col.prop(view, "view_frame_keyframes") row.separator() row.separator() col = row.column() #Toolbox doesn't exist yet #col.label(text="Toolbox:") #col.prop(view, "show_column_layout") #col.label(text="Open Toolbox Delay:") #col.prop(view, "open_left_mouse_delay", text="Hold LMB") #col.prop(view, "open_right_mouse_delay", text="Hold RMB") col.prop(view, "show_manipulator") sub = col.column() sub.active = view.show_manipulator sub.prop(view, "manipulator_size", text="Size") sub.prop(view, "manipulator_handle_size", text="Handle Size") sub.prop(view, "manipulator_hotspot", text="Hotspot") col.separator() col.separator() col.separator() col.label(text="Menus:") col.prop(view, "use_mouse_over_open") sub = col.column() 
sub.active = view.use_mouse_over_open sub.prop(view, "open_toplevel_delay", text="Top Level") sub.prop(view, "open_sublevel_delay", text="Sub Level") col.separator() col.label(text="Pie Menus:") sub = col.column(align=True) sub.prop(view, "pie_animation_timeout") sub.prop(view, "pie_initial_timeout") sub.prop(view, "pie_menu_radius") sub.prop(view, "pie_menu_threshold") sub.prop(view, "pie_menu_confirm") col.separator() col.separator() col.separator() col.prop(view, "show_splash") class USERPREF_PT_edit(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Edit" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'EDITING') def draw(self, context): layout = self.layout userpref = context.user_preferences edit = userpref.edit row = layout.row() col = row.column() col.label(text="Link Materials To:") col.prop(edit, "material_link", text="") col.separator() col.separator() col.separator() col.label(text="New Objects:") col.prop(edit, "use_enter_edit_mode") col.label(text="Align To:") col.prop(edit, "object_align", text="") col.separator() col.separator() col.separator() col.label(text="Undo:") col.prop(edit, "use_global_undo") col.prop(edit, "undo_steps", text="Steps") col.prop(edit, "undo_memory_limit", text="Memory Limit") row.separator() row.separator() col = row.column() col.label(text="Grease Pencil:") col.prop(edit, "grease_pencil_eraser_radius", text="Eraser Radius") col.separator() col.prop(edit, "grease_pencil_manhattan_distance", text="Manhattan Distance") col.prop(edit, "grease_pencil_euclidean_distance", text="Euclidean Distance") col.separator() col.prop(edit, "use_grease_pencil_smooth_stroke", text="Smooth Stroke") col.prop(edit, "use_grease_pencil_simplify_stroke", text="Simplify Stroke") col.separator() col.prop(edit, "grease_pencil_default_color", text="Default Color") col.separator() col.separator() col.separator() 
col.label(text="Playback:") col.prop(edit, "use_negative_frames") col.separator() col.separator() col.separator() col.label(text="Node Editor:") col.prop(edit, "node_margin") col.label(text="Animation Editors:") col.prop(edit, "fcurve_unselected_alpha", text="F-Curve Visibility") row.separator() row.separator() col = row.column() col.label(text="Keyframing:") col.prop(edit, "use_visual_keying") col.prop(edit, "use_keyframe_insert_needed", text="Only Insert Needed") col.separator() col.prop(edit, "use_auto_keying", text="Auto Keyframing:") col.prop(edit, "use_auto_keying_warning") sub = col.column() #~ sub.active = edit.use_keyframe_insert_auto # incorrect, time-line can enable sub.prop(edit, "use_keyframe_insert_available", text="Only Insert Available") col.separator() col.label(text="New F-Curve Defaults:") col.prop(edit, "keyframe_new_interpolation_type", text="Interpolation") col.prop(edit, "keyframe_new_handle_type", text="Handles") col.prop(edit, "use_insertkey_xyz_to_rgb", text="XYZ to RGB") col.separator() col.separator() col.separator() col.label(text="Transform:") col.prop(edit, "use_drag_immediately") row.separator() row.separator() col = row.column() col.prop(edit, "sculpt_paint_overlay_color", text="Sculpt Overlay Color") col.separator() col.separator() col.separator() col.label(text="Duplicate Data:") col.prop(edit, "use_duplicate_mesh", text="Mesh") col.prop(edit, "use_duplicate_surface", text="Surface") col.prop(edit, "use_duplicate_curve", text="Curve") col.prop(edit, "use_duplicate_text", text="Text") col.prop(edit, "use_duplicate_metaball", text="Metaball") col.prop(edit, "use_duplicate_armature", text="Armature") col.prop(edit, "use_duplicate_lamp", text="Lamp") col.prop(edit, "use_duplicate_material", text="Material") col.prop(edit, "use_duplicate_texture", text="Texture") #col.prop(edit, "use_duplicate_fcurve", text="F-Curve") col.prop(edit, "use_duplicate_action", text="Action") col.prop(edit, "use_duplicate_particle", text="Particle") class 
USERPREF_PT_system(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "System" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'SYSTEM') def draw(self, context): import sys layout = self.layout userpref = context.user_preferences system = userpref.system split = layout.split() # 1. Column column = split.column() colsplit = column.split(percentage=0.85) col = colsplit.column() col.label(text="General:") col.prop(system, "dpi") col.label("Virtual Pixel Mode:") col.prop(system, "virtual_pixel_mode", text="") col.separator() col.prop(system, "frame_server_port") col.prop(system, "scrollback", text="Console Scrollback") col.separator() col.label(text="Sound:") col.row().prop(system, "audio_device", expand=False) sub = col.column() sub.active = system.audio_device != 'NONE' and system.audio_device != 'Null' #sub.prop(system, "use_preview_images") sub.prop(system, "audio_channels", text="Channels") sub.prop(system, "audio_mixing_buffer", text="Mixing Buffer") sub.prop(system, "audio_sample_rate", text="Sample Rate") sub.prop(system, "audio_sample_format", text="Sample Format") col.separator() col.label(text="Screencast:") col.prop(system, "screencast_fps") col.prop(system, "screencast_wait_time") col.separator() if hasattr(system, "compute_device_type"): col.label(text="Compute Device:") col.row().prop(system, "compute_device_type", expand=True) sub = col.row() sub.active = system.compute_device_type != 'CPU' sub.prop(system, "compute_device", text="") if hasattr(system, "opensubdiv_compute_type"): col.label(text="OpenSubdiv compute:") col.row().prop(system, "opensubdiv_compute_type", text="") # 2. 
Column column = split.column() colsplit = column.split(percentage=0.85) col = colsplit.column() col.label(text="OpenGL:") col.prop(system, "gl_clip_alpha", slider=True) col.prop(system, "use_mipmaps") col.prop(system, "use_gpu_mipmap") col.prop(system, "use_16bit_textures") col.separator() col.label(text="Selection") col.prop(system, "select_method", text="") col.separator() col.label(text="Anisotropic Filtering") col.prop(system, "anisotropic_filter", text="") col.separator() col.label(text="Window Draw Method:") col.prop(system, "window_draw_method", text="") col.prop(system, "multi_sample", text="") if sys.platform == "linux" and system.multi_sample != 'NONE': col.label(text="Might fail for Mesh editing selection!") col.separator() col.prop(system, "use_region_overlap") col.separator() col.label(text="Text Draw Options:") col.prop(system, "use_text_antialiasing") col.separator() col.label(text="Textures:") col.prop(system, "gl_texture_limit", text="Limit Size") col.prop(system, "texture_time_out", text="Time Out") col.prop(system, "texture_collection_rate", text="Collection Rate") col.separator() col.label(text="Images Draw Method:") col.prop(system, "image_draw_method", text="") col.separator() col.label(text="Sequencer / Clip Editor:") # currently disabled in the code # col.prop(system, "prefetch_frames") col.prop(system, "memory_cache_limit") # 3. 
Column column = split.column() column.label(text="Solid OpenGL lights:") split = column.split(percentage=0.1) split.label() split.label(text="Colors:") split.label(text="Direction:") lamp = system.solid_lights[0] opengl_lamp_buttons(column, lamp) lamp = system.solid_lights[1] opengl_lamp_buttons(column, lamp) lamp = system.solid_lights[2] opengl_lamp_buttons(column, lamp) column.separator() column.label(text="Color Picker Type:") column.row().prop(system, "color_picker_type", text="") column.separator() column.prop(system, "use_weight_color_range", text="Custom Weight Paint Range") sub = column.column() sub.active = system.use_weight_color_range sub.template_color_ramp(system, "weight_color_range", expand=True) column.separator() column.prop(system, "font_path_ui") if bpy.app.build_options.international: column.prop(system, "use_international_fonts") if system.use_international_fonts: column.prop(system, "language") row = column.row() row.label(text="Translate:", text_ctxt=i18n_contexts.id_windowmanager) row = column.row(align=True) row.prop(system, "use_translate_interface", text="Interface", toggle=True) row.prop(system, "use_translate_tooltips", text="Tooltips", toggle=True) row.prop(system, "use_translate_new_dataname", text="New Data", toggle=True) class USERPREF_MT_interface_theme_presets(Menu): bl_label = "Presets" preset_subdir = "interface_theme" preset_operator = "script.execute_preset" preset_type = 'XML' preset_xml_map = ( ("user_preferences.themes[0]", "Theme"), ("user_preferences.ui_styles[0]", "ThemeStyle"), ) draw = Menu.draw_preset class USERPREF_PT_theme(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Themes" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @staticmethod def _theme_generic(split, themedata): col = split.column() def theme_generic_recurse(data): col.label(data.rna_type.name) row = col.row() subsplit = row.split(percentage=0.95) padding1 = subsplit.split(percentage=0.15) padding1.column() subsplit = 
row.split(percentage=0.85) padding2 = subsplit.split(percentage=0.15) padding2.column() colsub_pair = padding1.column(), padding2.column() props_type = {} for i, prop in enumerate(data.rna_type.properties): if prop.identifier == "rna_type": continue props_type.setdefault((prop.type, prop.subtype), []).append(prop) for props_type, props_ls in sorted(props_type.items()): if props_type[0] == 'POINTER': for i, prop in enumerate(props_ls): theme_generic_recurse(getattr(data, prop.identifier)) else: for i, prop in enumerate(props_ls): colsub_pair[i % 2].row().prop(data, prop.identifier) theme_generic_recurse(themedata) @staticmethod def _theme_widget_style(layout, widget_style): row = layout.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(widget_style, "outline") colsub.row().prop(widget_style, "item", slider=True) colsub.row().prop(widget_style, "inner", slider=True) colsub.row().prop(widget_style, "inner_sel", slider=True) subsplit = row.split(percentage=0.85) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(widget_style, "text") colsub.row().prop(widget_style, "text_sel") colsub.prop(widget_style, "show_shaded") subsub = colsub.column(align=True) subsub.active = widget_style.show_shaded subsub.prop(widget_style, "shadetop") subsub.prop(widget_style, "shadedown") layout.separator() @staticmethod def _ui_font_style(layout, font_style): split = layout.split() col = split.column() col.label(text="Kerning Style:") col.row().prop(font_style, "font_kerning_style", expand=True) col.prop(font_style, "points") col = split.column() col.label(text="Shadow Offset:") col.prop(font_style, "shadow_offset_x", text="X") col.prop(font_style, "shadow_offset_y", text="Y") col = split.column() col.prop(font_style, "shadow") col.prop(font_style, "shadow_alpha") col.prop(font_style, "shadow_value") layout.separator() 
@classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'THEMES') def draw(self, context): layout = self.layout theme = context.user_preferences.themes[0] split_themes = layout.split(percentage=0.2) sub = split_themes.column() sub.label(text="Presets:") subrow = sub.row(align=True) subrow.menu("USERPREF_MT_interface_theme_presets", text=USERPREF_MT_interface_theme_presets.bl_label) subrow.operator("wm.interface_theme_preset_add", text="", icon='ZOOMIN') subrow.operator("wm.interface_theme_preset_add", text="", icon='ZOOMOUT').remove_active = True sub.separator() sub.prop(theme, "theme_area", expand=True) split = layout.split(percentage=0.4) layout.separator() layout.separator() split = split_themes.split() if theme.theme_area == 'USER_INTERFACE': col = split.column() ui = theme.user_interface col.label(text="Regular:") self._theme_widget_style(col, ui.wcol_regular) col.label(text="Tool:") self._theme_widget_style(col, ui.wcol_tool) col.label(text="Radio Buttons:") self._theme_widget_style(col, ui.wcol_radio) col.label(text="Text:") self._theme_widget_style(col, ui.wcol_text) col.label(text="Option:") self._theme_widget_style(col, ui.wcol_option) col.label(text="Toggle:") self._theme_widget_style(col, ui.wcol_toggle) col.label(text="Number Field:") self._theme_widget_style(col, ui.wcol_num) col.label(text="Value Slider:") self._theme_widget_style(col, ui.wcol_numslider) col.label(text="Box:") self._theme_widget_style(col, ui.wcol_box) col.label(text="Menu:") self._theme_widget_style(col, ui.wcol_menu) col.label(text="Pie Menu:") self._theme_widget_style(col, ui.wcol_pie_menu) col.label(text="Pulldown:") self._theme_widget_style(col, ui.wcol_pulldown) col.label(text="Menu Back:") self._theme_widget_style(col, ui.wcol_menu_back) col.label(text="Tooltip:") self._theme_widget_style(col, ui.wcol_tooltip) col.label(text="Menu Item:") self._theme_widget_style(col, ui.wcol_menu_item) col.label(text="Scroll Bar:") 
self._theme_widget_style(col, ui.wcol_scroll) col.label(text="Progress Bar:") self._theme_widget_style(col, ui.wcol_progress) col.label(text="List Item:") self._theme_widget_style(col, ui.wcol_list_item) ui_state = theme.user_interface.wcol_state col.label(text="State:") row = col.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui_state, "inner_anim") colsub.row().prop(ui_state, "inner_anim_sel") colsub.row().prop(ui_state, "inner_driven") colsub.row().prop(ui_state, "inner_driven_sel") subsplit = row.split(percentage=0.85) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui_state, "inner_key") colsub.row().prop(ui_state, "inner_key_sel") colsub.row().prop(ui_state, "blend") col.separator() col.separator() col.label("Styles:") row = col.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "menu_shadow_fac") subsplit = row.split(percentage=0.85) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "menu_shadow_width") row = col.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "icon_alpha") subsplit = row.split(percentage=0.85) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "widget_emboss") col.separator() col.separator() col.label("Axis Colors:") row = col.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "axis_x") colsub.row().prop(ui, "axis_y") colsub.row().prop(ui, "axis_z") subsplit = row.split(percentage=0.85) padding = 
subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() layout.separator() layout.separator() elif theme.theme_area == 'BONE_COLOR_SETS': col = split.column() for i, ui in enumerate(theme.bone_color_sets): col.label(text=iface_("Color Set %d:") % (i + 1), translate=False) # i starts from 0 row = col.row() subsplit = row.split(percentage=0.95) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "normal") colsub.row().prop(ui, "select") colsub.row().prop(ui, "active") subsplit = row.split(percentage=0.85) padding = subsplit.split(percentage=0.15) colsub = padding.column() colsub = padding.column() colsub.row().prop(ui, "show_colored_constraints") elif theme.theme_area == 'STYLE': col = split.column() style = context.user_preferences.ui_styles[0] col.label(text="Panel Title:") self._ui_font_style(col, style.panel_title) col.separator() col.label(text="Widget:") self._ui_font_style(col, style.widget) col.separator() col.label(text="Widget Label:") self._ui_font_style(col, style.widget_label) else: self._theme_generic(split, getattr(theme, theme.theme_area.lower())) class USERPREF_PT_file(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Files" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'FILES') def draw(self, context): layout = self.layout userpref = context.user_preferences paths = userpref.filepaths system = userpref.system split = layout.split(percentage=0.7) col = split.column() col.label(text="File Paths:") colsplit = col.split(percentage=0.95) col1 = colsplit.split(percentage=0.3) sub = col1.column() sub.label(text="Fonts:") sub.label(text="Textures:") sub.label(text="Render Output:") sub.label(text="Scripts:") sub.label(text="Sounds:") sub.label(text="Temp:") sub.label(text="Render Cache:") sub.label(text="I18n Branches:") sub.label(text="Image 
Editor:") sub.label(text="Animation Player:") sub = col1.column() sub.prop(paths, "font_directory", text="") sub.prop(paths, "texture_directory", text="") sub.prop(paths, "render_output_directory", text="") sub.prop(paths, "script_directory", text="") sub.prop(paths, "sound_directory", text="") sub.prop(paths, "temporary_directory", text="") sub.prop(paths, "render_cache_directory", text="") sub.prop(paths, "i18n_branches_directory", text="") sub.prop(paths, "image_editor", text="") subsplit = sub.split(percentage=0.3) subsplit.prop(paths, "animation_player_preset", text="") subsplit.prop(paths, "animation_player", text="") col.separator() col.separator() colsplit = col.split(percentage=0.95) sub = colsplit.column() row = sub.split(percentage=0.3) row.label(text="Auto Execution:") row.prop(system, "use_scripts_auto_execute") if system.use_scripts_auto_execute: box = sub.box() row = box.row() row.label(text="Excluded Paths:") row.operator("wm.userpref_autoexec_path_add", text="", icon='ZOOMIN', emboss=False) for i, path_cmp in enumerate(userpref.autoexec_paths): row = box.row() row.prop(path_cmp, "path", text="") row.prop(path_cmp, "use_glob", text="", icon='FILTER') row.operator("wm.userpref_autoexec_path_remove", text="", icon='X', emboss=False).index = i col = split.column() col.label(text="Save & Load:") col.prop(paths, "use_relative_paths") col.prop(paths, "use_file_compression") col.prop(paths, "use_load_ui") col.prop(paths, "use_filter_files") col.prop(paths, "show_hidden_files_datablocks") col.prop(paths, "hide_recent_locations") col.prop(paths, "hide_system_bookmarks") col.prop(paths, "show_thumbnails") col.separator() col.prop(paths, "save_version") col.prop(paths, "recent_files") col.prop(paths, "use_save_preview_images") col.separator() col.label(text="Auto Save:") col.prop(paths, "use_keep_session") col.prop(paths, "use_auto_save_temporary_files") sub = col.column() sub.active = paths.use_auto_save_temporary_files sub.prop(paths, "auto_save_time", 
text="Timer (mins)") col.separator() col.label(text="Text Editor:") col.prop(system, "use_tabs_as_spaces") colsplit = col.split(percentage=0.95) col1 = colsplit.split(percentage=0.3) sub = col1.column() sub.label(text="Author:") sub = col1.column() sub.prop(system, "author", text="") class USERPREF_MT_ndof_settings(Menu): # accessed from the window key-bindings in C (only) bl_label = "3D Mouse Settings" def draw(self, context): layout = self.layout input_prefs = context.user_preferences.inputs is_view3d = context.space_data.type == 'VIEW_3D' layout.prop(input_prefs, "ndof_sensitivity") layout.prop(input_prefs, "ndof_orbit_sensitivity") layout.prop(input_prefs, "ndof_deadzone") if is_view3d: layout.separator() layout.prop(input_prefs, "ndof_show_guide") layout.separator() layout.label(text="Orbit style") layout.row().prop(input_prefs, "ndof_view_navigate_method", text="") layout.row().prop(input_prefs, "ndof_view_rotate_method", text="") layout.separator() layout.label(text="Orbit options") layout.prop(input_prefs, "ndof_rotx_invert_axis") layout.prop(input_prefs, "ndof_roty_invert_axis") layout.prop(input_prefs, "ndof_rotz_invert_axis") # view2d use pan/zoom layout.separator() layout.label(text="Pan options") layout.prop(input_prefs, "ndof_panx_invert_axis") layout.prop(input_prefs, "ndof_pany_invert_axis") layout.prop(input_prefs, "ndof_panz_invert_axis") layout.prop(input_prefs, "ndof_pan_yz_swap_axis") layout.label(text="Zoom options") layout.prop(input_prefs, "ndof_zoom_invert") if is_view3d: layout.separator() layout.label(text="Fly/Walk options") layout.prop(input_prefs, "ndof_fly_helicopter", icon='NDOF_FLY') layout.prop(input_prefs, "ndof_lock_horizon", icon='NDOF_DOM') class USERPREF_MT_keyconfigs(Menu): bl_label = "KeyPresets" preset_subdir = "keyconfig" preset_operator = "wm.keyconfig_activate" def draw(self, context): props = self.layout.operator("wm.context_set_value", text="Blender (default)") props.data_path = "window_manager.keyconfigs.active" 
props.value = "context.window_manager.keyconfigs.default" # now draw the presets Menu.draw_preset(self, context) class USERPREF_PT_input(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Input" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'INPUT') @staticmethod def draw_input_prefs(inputs, layout): import sys # General settings row = layout.row() col = row.column() sub = col.column() sub.label(text="Presets:") subrow = sub.row(align=True) subrow.menu("USERPREF_MT_interaction_presets", text=bpy.types.USERPREF_MT_interaction_presets.bl_label) subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMIN') subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMOUT').remove_active = True sub.separator() sub.label(text="Mouse:") sub1 = sub.column() sub1.active = (inputs.select_mouse == 'RIGHT') sub1.prop(inputs, "use_mouse_emulate_3_button") sub.prop(inputs, "use_mouse_continuous") sub.prop(inputs, "drag_threshold") sub.prop(inputs, "tweak_threshold") sub.label(text="Select With:") sub.row().prop(inputs, "select_mouse", expand=True) sub = col.column() sub.label(text="Double Click:") sub.prop(inputs, "mouse_double_click_time", text="Speed") sub.separator() sub.prop(inputs, "use_emulate_numpad") sub.separator() sub.label(text="Orbit Style:") sub.row().prop(inputs, "view_rotate_method", expand=True) sub.separator() sub.label(text="Zoom Style:") sub.row().prop(inputs, "view_zoom_method", text="") if inputs.view_zoom_method in {'DOLLY', 'CONTINUE'}: sub.row().prop(inputs, "view_zoom_axis", expand=True) sub.prop(inputs, "invert_mouse_zoom", text="Invert Mouse Zoom Direction") #sub.prop(inputs, "use_mouse_mmb_paste") #col.separator() sub = col.column() sub.prop(inputs, "invert_zoom_wheel", text="Invert Wheel Zoom Direction") #sub.prop(view, "wheel_scroll_lines", text="Scroll Lines") if sys.platform == "darwin": sub = col.column() 
sub.prop(inputs, "use_trackpad_natural", text="Natural Trackpad Direction") col.separator() sub = col.column() sub.label(text="View Navigation:") sub.row().prop(inputs, "navigation_mode", expand=True) if inputs.navigation_mode == 'WALK': walk = inputs.walk_navigation sub.prop(walk, "use_mouse_reverse") sub.prop(walk, "mouse_speed") sub.prop(walk, "teleport_time") sub = col.column(align=True) sub.prop(walk, "walk_speed") sub.prop(walk, "walk_speed_factor") sub.separator() sub.prop(walk, "use_gravity") sub = col.column(align=True) sub.active = walk.use_gravity sub.prop(walk, "view_height") sub.prop(walk, "jump_height") col.separator() col.label(text="NDOF Device:") sub = col.column(align=True) sub.prop(inputs, "ndof_sensitivity", text="NDOF Sensitivity") sub.prop(inputs, "ndof_orbit_sensitivity", text="NDOF Orbit Sensitivity") sub.prop(inputs, "ndof_deadzone", text="NDOF Deadzone") sub = col.column(align=True) sub.row().prop(inputs, "ndof_view_navigate_method", expand=True) sub.row().prop(inputs, "ndof_view_rotate_method", expand=True) row.separator() def draw(self, context): from rna_keymap_ui import draw_keymaps layout = self.layout #import time #start = time.time() userpref = context.user_preferences inputs = userpref.inputs split = layout.split(percentage=0.25) # Input settings self.draw_input_prefs(inputs, split) # Keymap Settings draw_keymaps(context, split) #print("runtime", time.time() - start) class USERPREF_MT_addons_dev_guides(Menu): bl_label = "Development Guides" # menu to open web-pages with addons development guides def draw(self, context): layout = self.layout layout.operator("wm.url_open", text="API Concepts", icon='URL').url = bpy.types.WM_OT_doc_view._prefix + "/info_quickstart.html" layout.operator("wm.url_open", text="Addon Guidelines", icon='URL').url = "http://wiki.blender.org/index.php/Dev:2.5/Py/Scripts/Guidelines/Addons" layout.operator("wm.url_open", text="How to share your addon", icon='URL').url = 
"http://wiki.blender.org/index.php/Dev:Py/Sharing" class USERPREF_PT_addons(Panel): bl_space_type = 'USER_PREFERENCES' bl_label = "Add-ons" bl_region_type = 'WINDOW' bl_options = {'HIDE_HEADER'} _support_icon_mapping = { 'OFFICIAL': 'FILE_BLEND', 'COMMUNITY': 'POSE_DATA', 'TESTING': 'MOD_EXPLODE', } @classmethod def poll(cls, context): userpref = context.user_preferences return (userpref.active_section == 'ADDONS') @staticmethod def is_user_addon(mod, user_addon_paths): import os if not user_addon_paths: for path in (bpy.utils.script_path_user(), bpy.utils.script_path_pref()): if path is not None: user_addon_paths.append(os.path.join(path, "addons")) for path in user_addon_paths: if bpy.path.is_subdir(mod.__file__, path): return True return False @staticmethod def draw_error(layout, message): lines = message.split("\n") box = layout.box() sub = box.row() sub.label(lines[0]) sub.label(icon='ERROR') for l in lines[1:]: box.label(l) def draw(self, context): import os import addon_utils layout = self.layout userpref = context.user_preferences used_ext = {ext.module for ext in userpref.addons} userpref_addons_folder = os.path.join(userpref.filepaths.script_directory, "addons") scripts_addons_folder = bpy.utils.user_resource('SCRIPTS', "addons") # collect the categories that can be filtered on addons = [(mod, addon_utils.module_bl_info(mod)) for mod in addon_utils.modules(refresh=False)] split = layout.split(percentage=0.2) col = split.column() col.prop(context.window_manager, "addon_search", text="", icon='VIEWZOOM') col.label(text="Supported Level") col.prop(context.window_manager, "addon_support", expand=True) col.label(text="Categories") col.prop(context.window_manager, "addon_filter", expand=True) col = split.column() # set in addon_utils.modules_refresh() if addon_utils.error_duplicates: self.draw_error(col, "Multiple addons using the same name found!\n" "likely a problem with the script search path.\n" "(see console for details)", ) if addon_utils.error_encoding: 
self.draw_error(col, "One or more addons do not have UTF-8 encoding\n" "(see console for details)", ) filter = context.window_manager.addon_filter search = context.window_manager.addon_search.lower() support = context.window_manager.addon_support # initialized on demand user_addon_paths = [] for mod, info in addons: module_name = mod.__name__ is_enabled = module_name in used_ext if info["support"] not in support: continue # check if addon should be visible with current filters if ((filter == "All") or (filter == info["category"]) or (filter == "Enabled" and is_enabled) or (filter == "Disabled" and not is_enabled) or (filter == "User" and (mod.__file__.startswith((scripts_addons_folder, userpref_addons_folder)))) ): if search and search not in info["name"].lower(): if info["author"]: if search not in info["author"].lower(): continue else: continue # Addon UI Code col_box = col.column() box = col_box.box() colsub = box.column() row = colsub.row() row.operator("wm.addon_expand", icon='TRIA_DOWN' if info["show_expanded"] else 'TRIA_RIGHT', emboss=False).module = module_name sub = row.row() sub.active = is_enabled sub.label(text='%s: %s' % (info["category"], info["name"])) if info["warning"]: sub.label(icon='ERROR') # icon showing support level. 
sub.label(icon=self._support_icon_mapping.get(info["support"], 'QUESTION')) if is_enabled: row.operator("wm.addon_disable", icon='CHECKBOX_HLT', text="", emboss=False).module = module_name else: row.operator("wm.addon_enable", icon='CHECKBOX_DEHLT', text="", emboss=False).module = module_name # Expanded UI (only if additional info is available) if info["show_expanded"]: if info["description"]: split = colsub.row().split(percentage=0.15) split.label(text="Description:") split.label(text=info["description"]) if info["location"]: split = colsub.row().split(percentage=0.15) split.label(text="Location:") split.label(text=info["location"]) if mod: split = colsub.row().split(percentage=0.15) split.label(text="File:") split.label(text=mod.__file__, translate=False) if info["author"]: split = colsub.row().split(percentage=0.15) split.label(text="Author:") split.label(text=info["author"], translate=False) if info["version"]: split = colsub.row().split(percentage=0.15) split.label(text="Version:") split.label(text='.'.join(str(x) for x in info["version"]), translate=False) if info["warning"]: split = colsub.row().split(percentage=0.15) split.label(text="Warning:") split.label(text=' ' + info["warning"], icon='ERROR') user_addon = USERPREF_PT_addons.is_user_addon(mod, user_addon_paths) tot_row = bool(info["wiki_url"]) + bool(user_addon) if tot_row: split = colsub.row().split(percentage=0.15) split.label(text="Internet:") if info["wiki_url"]: split.operator("wm.url_open", text="Documentation", icon='HELP').url = info["wiki_url"] split.operator("wm.url_open", text="Report a Bug", icon='URL').url = info.get( "tracker_url", "http://developer.blender.org/maniphest/task/create/?project=3&type=Bug") if user_addon: split.operator("wm.addon_remove", text="Remove", icon='CANCEL').module = mod.__name__ for i in range(4 - tot_row): split.separator() # Show addon user preferences if is_enabled: addon_preferences = userpref.addons[module_name].preferences if addon_preferences is not None: 
draw = getattr(addon_preferences, "draw", None) if draw is not None: addon_preferences_class = type(addon_preferences) box_prefs = col_box.box() box_prefs.label("Preferences:") addon_preferences_class.layout = box_prefs try: draw(context) except: import traceback traceback.print_exc() box_prefs.label(text="Error (see console)", icon='ERROR') del addon_preferences_class.layout # Append missing scripts # First collect scripts that are used but have no script file. module_names = {mod.__name__ for mod, info in addons} missing_modules = {ext for ext in used_ext if ext not in module_names} if missing_modules and filter in {"All", "Enabled"}: col.column().separator() col.column().label(text="Missing script files") module_names = {mod.__name__ for mod, info in addons} for module_name in sorted(missing_modules): is_enabled = module_name in used_ext # Addon UI Code box = col.column().box() colsub = box.column() row = colsub.row() row.label(text=module_name, translate=False, icon='ERROR') if is_enabled: row.operator("wm.addon_disable", icon='CHECKBOX_HLT', text="", emboss=False).module = module_name if __name__ == "__main__": # only for live edit. bpy.utils.register_module(__name__)
gpl-2.0
Noviat/odoo
addons/calendar/__openerp__.py
269
1999
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (c) 2011 OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Calendar', 'version': '1.0', 'depends': ['base', 'mail', 'base_action_rule', 'web_calendar'], 'summary': 'Personal & Shared Calendar', 'description': """ This is a full-featured calendar system. ======================================== It supports: ------------ - Calendar of events - Recurring events If you need to manage your meetings, you should install the CRM module. """, 'author': 'OpenERP SA', 'category': 'Hidden/Dependency', 'website': 'https://www.odoo.com/page/crm', 'demo': ['calendar_demo.xml'], 'data': [ 'calendar_cron.xml', 'security/ir.model.access.csv', 'security/calendar_security.xml', 'calendar_view.xml', 'calendar_data.xml', 'views/calendar.xml', ], 'qweb': ['static/src/xml/*.xml'], 'test': [ 'test/calendar_test.yml', 'test/test_calendar_recurrent_event_case2.yml' ], 'installable': True, 'application': True, 'auto_install': False, }
agpl-3.0
gnieboer/tensorflow
tensorflow/contrib/training/python/training/failure_tolerator.py
72
4450
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A retry helper for tolerating transient failures in distributed training.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import contextlib import time from tensorflow.python.framework import errors from tensorflow.python.platform import tf_logging as logging class FailureTolerator(object): """Helper for tolerating certain exceptions. When encountering a handled exception inside tolerator.forgive(), it is suppressed (but logged). A subsequent call to tolerator.forgive() will sleep for a period of time before continuing, with exponential backoff on multiple exceptions. (The delay avoids retrying too quickly -- a subsequent attempt will often only succeed after a transient failure has resolved itself.) If more than `limit` exceptions have been encountered, the error will not be suppressed. Exceptions occurring more than `forgive_after_seconds` ago (excluding time spent waiting between retries) are forgiven and no longer count towards the limit. An example loop using FailureTolerator to retry until a successful `session.run(...)` would look like: ``` failure_tolerator = FailureTolerator() while True: with failure_tolerator.forgive(): session = make_session_somehow() while not should_stop(): session.run(...) 
break # session.run was successful ``` By using FailureTolerator, failures are logged, there are delays between retries, and there's a ceiling on the maximum number of retries available. (In the case of persistent errors, the task won't just loop forever!) """ def __init__(self, limit=5, init_delay=5.0, backoff_factor=2.0, forgive_after_seconds=6000, handled_exceptions=None): """Creates a FailureTolerator. The result will pause for `init_delay * (backoff_factor^(failure_count-1))` when re-entering `forgive()` after a failure. Args: limit: The maximum number of suppressed, unforgiven, failures. init_delay: How long to pause once the first failure is encountered. Defaults to five seconds. backoff_factor: Each subsequent failure grows the pause by this factor. forgive_after_seconds: Failures older than this are forgiven. handled_exceptions: The exceptions to forgive. Defaults to `(errors.AbortedError,)`. """ self.limit = limit self.backoff = backoff_factor self.delay = init_delay self.forgive_after = forgive_after_seconds self.exceptions = [] self.time_in_delay = 0.0 if handled_exceptions is None: self.handled = (errors.AbortedError,) else: self.handled = tuple(handled_exceptions) def _adjusted_now(self): """Returns what the current time would be if no delays had occurred.""" return time.time() - self.time_in_delay def _forgive_old(self): adjusted_now = self._adjusted_now() self.exceptions = [t for t in self.exceptions if (adjusted_now - t) < self.forgive_after] def _handle_error(self, e): if not isinstance(e, self.handled): return True self._forgive_old() self.exceptions.append(self._adjusted_now()) return len(self.exceptions) >= self.limit # pylint: disable=broad-except @contextlib.contextmanager def forgive(self): self._forgive_old() if self.exceptions: delay = self.delay * (self.backoff ** (len(self.exceptions) - 1)) logging.warning('Sleeping for %f seconds before resuming' % delay) time.sleep(delay) self.time_in_delay += delay try: yield except Exception as e: if 
self._handle_error(e): raise else: logging.warning('Forgiving an exception', exc_info=True)
apache-2.0
elitan/mybot
plugins/google.py
18
1115
import random from util import hook, http def api_get(query, key, is_image=None, num=1): url = ('https://www.googleapis.com/customsearch/v1?cx=007629729846476161907:ud5nlxktgcw' '&fields=items(title,link,snippet)&safe=off' + ('&searchType=image' if is_image else '')) return http.get_json(url, key=key, q=query, num=num) @hook.api_key('google') @hook.command def gis(inp, api_key=None): '''.gis <term> -- finds an image using google images (safesearch off)''' parsed = api_get(inp, api_key, is_image=True, num=10) if 'items' not in parsed: return 'no images found' return random.choice(parsed['items'])['link'] @hook.api_key('google') @hook.command('g') @hook.command def google(inp, api_key=None): '''.g/.google <query> -- returns first google search result''' parsed = api_get(inp, api_key) if 'items' not in parsed: return 'no results found' out = u'{link} -- \x02{title}\x02: "{snippet}"'.format(**parsed['items'][0]) out = ' '.join(out.split()) if len(out) > 300: out = out[:out.rfind(' ')] + '..."' return out
unlicense
hortonworks/hortonworks-sandbox
desktop/core/ext-py/greenlet-0.3.1/tests/test_extension_interface.py
2
2392
import sys import unittest import greenlet import _test_extension class CAPITests(unittest.TestCase): def test_switch(self): self.assertEquals( 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) def test_switch_kwargs(self): def foo(x, y): return x * y g = greenlet.greenlet(foo) self.assertEquals(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) def test_setparent(self): def foo(): def bar(): greenlet.getcurrent().parent.switch() # This final switch should go back to the main greenlet, since the # test_setparent() function in the C extension should have # reparented this greenlet. greenlet.getcurrent().parent.switch() raise AssertionError("Should never have reached this code") child = greenlet.greenlet(bar) child.switch() greenlet.getcurrent().parent.switch(child) greenlet.getcurrent().parent.throw( AssertionError("Should never reach this code")) foo_child = greenlet.greenlet(foo).switch() self.assertEquals(None, _test_extension.test_setparent(foo_child)) def test_getcurrent(self): _test_extension.test_getcurrent() def test_new_greenlet(self): self.assertEquals(-15, _test_extension.test_new_greenlet(lambda: -15)) def test_raise_greenlet_dead(self): self.assertRaises( greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) def test_raise_greenlet_error(self): self.assertRaises( greenlet.error, _test_extension.test_raise_greenlet_error) def test_throw(self): seen = [] def foo(): try: greenlet.getcurrent().parent.switch() except ValueError: seen.append(sys.exc_info()[1]) except greenlet.GreenletExit: raise AssertionError g = greenlet.greenlet(foo) g.switch() _test_extension.test_throw(g) self.assertEquals(len(seen), 1) self.assertTrue( isinstance(seen[0], ValueError), "ValueError was not raised in foo()") self.assertEquals( str(seen[0]), 'take that sucka!', "message doesn't match")
apache-2.0
liikGit/MissionPlanner
Lib/site-packages/scipy/signal/fir_filter_design.py
53
18572
"""Functions for FIR filter design.""" from math import ceil, log import numpy as np from numpy.fft import irfft from scipy.special import sinc import sigtools # Some notes on function parameters: # # `cutoff` and `width` are given as a numbers between 0 and 1. These # are relative frequencies, expressed as a fraction of the Nyquist rate. # For example, if the Nyquist rate is 2KHz, then width=0.15 is a width # of 300 Hz. # # The `order` of a FIR filter is one less than the number of taps. # This is a potential source of confusion, so in the following code, # we will always use the number of taps as the parameterization of # the 'size' of the filter. The "number of taps" means the number # of coefficients, which is the same as the length of the impulse # response of the filter. def kaiser_beta(a): """Compute the Kaiser parameter `beta`, given the attenuation `a`. Parameters ---------- a : float The desired attenuation in the stopband and maximum ripple in the passband, in dB. This should be a *positive* number. Returns ------- beta : float The `beta` parameter to be used in the formula for a Kaiser window. References ---------- Oppenheim, Schafer, "Discrete-Time Signal Processing", p.475-476. """ if a > 50: beta = 0.1102 * (a - 8.7) elif a > 21: beta = 0.5842 * (a - 21)**0.4 + 0.07886 * (a - 21) else: beta = 0.0 return beta def kaiser_atten(numtaps, width): """Compute the attenuation of a Kaiser FIR filter. Given the number of taps `N` and the transition width `width`, compute the attenuation `a` in dB, given by Kaiser's formula: a = 2.285 * (N - 1) * pi * width + 7.95 Parameters ---------- N : int The number of taps in the FIR filter. width : float The desired width of the transition region between passband and stopband (or, in general, at any discontinuity) for the filter. Returns ------- a : float The attenuation of the ripple, in dB. 
See Also -------- kaiserord, kaiser_beta """ a = 2.285 * (numtaps - 1) * np.pi * width + 7.95 return a def kaiserord(ripple, width): """Design a Kaiser window to limit ripple and width of transition region. Parameters ---------- ripple : float Positive number specifying maximum ripple in passband (dB) and minimum ripple in stopband. width : float Width of transition region (normalized so that 1 corresponds to pi radians / sample). Returns ------- numtaps : int The length of the kaiser window. beta : The beta parameter for the kaiser window. Notes ----- There are several ways to obtain the Kaiser window: signal.kaiser(numtaps, beta, sym=0) signal.get_window(beta, numtaps) signal.get_window(('kaiser', beta), numtaps) The empirical equations discovered by Kaiser are used. See Also -------- kaiser_beta, kaiser_atten References ---------- Oppenheim, Schafer, "Discrete-Time Signal Processing", p.475-476. """ A = abs(ripple) # in case somebody is confused as to what's meant if A < 8: # Formula for N is not valid in this range. raise ValueError("Requested maximum ripple attentuation %f is too " "small for the Kaiser formula." % A) beta = kaiser_beta(A) # Kaiser's formula (as given in Oppenheim and Schafer) is for the filter # order, so we have to add 1 to get the number of taps. numtaps = (A - 7.95) / 2.285 / (np.pi * width) + 1 return int(ceil(numtaps)), beta def firwin(numtaps, cutoff, width=None, window='hamming', pass_zero=True, scale=True, nyq=1.0): """ FIR filter design using the window method. This function computes the coefficients of a finite impulse response filter. The filter will have linear phase; it will be Type I if `numtaps` is odd and Type II if `numtaps` is even. Type II filters always have zero response at the Nyquist rate, so a ValueError exception is raised if firwin is called with `numtaps` even and having a passband whose right end is at the Nyquist rate. Parameters ---------- numtaps : int Length of the filter (number of coefficients, i.e. 
the filter order + 1). `numtaps` must be even if a passband includes the Nyquist frequency. cutoff : float or 1D array_like Cutoff frequency of filter (expressed in the same units as `nyq`) OR an array of cutoff frequencies (that is, band edges). In the latter case, the frequencies in `cutoff` should be positive and monotonically increasing between 0 and `nyq`. The values 0 and `nyq` must not be included in `cutoff`. width : float or None If `width` is not None, then assume it is the approximate width of the transition region (expressed in the same units as `nyq`) for use in Kaiser FIR filter design. In this case, the `window` argument is ignored. window : string or tuple of string and parameter values Desired window to use. See `scipy.signal.get_window` for a list of windows and required parameters. pass_zero : bool If True, the gain at the frequency 0 (i.e. the "DC gain") is 1. Otherwise the DC gain is 0. scale : bool Set to True to scale the coefficients so that the frequency response is exactly unity at a certain frequency. That frequency is either: 0 (DC) if the first passband starts at 0 (i.e. pass_zero is True); `nyq` (the Nyquist rate) if the first passband ends at `nyq` (i.e the filter is a single band highpass filter); center of first passband otherwise. nyq : float Nyquist frequency. Each frequency in `cutoff` must be between 0 and `nyq`. Returns ------- h : 1D ndarray Coefficients of length `numtaps` FIR filter. Raises ------ ValueError If any value in `cutoff` is less than or equal to 0 or greater than or equal to `nyq`, if the values in `cutoff` are not strictly monotonically increasing, or if `numtaps` is even but a passband includes the Nyquist frequency. 
Examples -------- Low-pass from 0 to f:: >>> firwin(numtaps, f) Use a specific window function:: >>> firwin(numtaps, f, window='nuttall') High-pass ('stop' from 0 to f):: >>> firwin(numtaps, f, pass_zero=False) Band-pass:: >>> firwin(numtaps, [f1, f2], pass_zero=False) Band-stop:: >>> firwin(numtaps, [f1, f2]) Multi-band (passbands are [0, f1], [f2, f3] and [f4, 1]):: >>>firwin(numtaps, [f1, f2, f3, f4]) Multi-band (passbands are [f1, f2] and [f3,f4]):: >>> firwin(numtaps, [f1, f2, f3, f4], pass_zero=False) See also -------- scipy.signal.firwin2 """ # The major enhancements to this function added in November 2010 were # developed by Tom Krauss (see ticket #902). cutoff = np.atleast_1d(cutoff) / float(nyq) # Check for invalid input. if cutoff.ndim > 1: raise ValueError("The cutoff argument must be at most one-dimensional.") if cutoff.size == 0: raise ValueError("At least one cutoff frequency must be given.") if cutoff.min() <= 0 or cutoff.max() >= 1: raise ValueError("Invalid cutoff frequency: frequencies must be greater than 0 and less than nyq.") if np.any(np.diff(cutoff) <= 0): raise ValueError("Invalid cutoff frequencies: the frequencies must be strictly increasing.") if width is not None: # A width was given. Find the beta parameter of the Kaiser window # and set `window`. This overrides the value of `window` passed in. atten = kaiser_atten(numtaps, float(width)/nyq) beta = kaiser_beta(atten) window = ('kaiser', beta) pass_nyquist = bool(cutoff.size & 1) ^ pass_zero if pass_nyquist and numtaps % 2 == 0: raise ValueError("A filter with an even number of coefficients must " "have zero response at the Nyquist rate.") # Insert 0 and/or 1 at the ends of cutoff so that the length of cutoff is even, # and each pair in cutoff corresponds to passband. cutoff = np.hstack(([0.0]*pass_zero, cutoff, [1.0]*pass_nyquist)) # `bands` is a 2D array; each row gives the left and right edges of a passband. bands = cutoff.reshape(-1,2) # Build up the coefficients. 
alpha = 0.5 * (numtaps-1) m = np.arange(0, numtaps) - alpha h = 0 for left, right in bands: h += right * sinc(right * m) h -= left * sinc(left * m) # Get and apply the window function. from signaltools import get_window win = get_window(window, numtaps, fftbins=False) h *= win # Now handle scaling if desired. if scale: # Get the first passband. left, right = bands[0] if left == 0: scale_frequency = 0.0 elif right == 1: scale_frequency = 1.0 else: scale_frequency = 0.5 * (left + right) c = np.cos(np.pi * m * scale_frequency) s = np.sum(h * c) h /= s return h # Original version of firwin2 from scipy ticket #457, submitted by "tash". # # Rewritten by Warren Weckesser, 2010. def firwin2(numtaps, freq, gain, nfreqs=None, window='hamming', nyq=1.0): """FIR filter design using the window method. From the given frequencies `freq` and corresponding gains `gain`, this function constructs an FIR filter with linear phase and (approximately) the given frequency response. Parameters ---------- numtaps : int The number of taps in the FIR filter. `numtaps` must be less than `nfreqs`. If the gain at the Nyquist rate, `gain[-1]`, is not 0, then `numtaps` must be odd. freq : array-like, 1D The frequency sampling points. Typically 0.0 to 1.0 with 1.0 being Nyquist. The Nyquist frequency can be redefined with the argument `nyq`. The values in `freq` must be nondecreasing. A value can be repeated once to implement a discontinuity. The first value in `freq` must be 0, and the last value must be `nyq`. gain : array-like The filter gains at the frequency sampling points. nfreqs : int, optional The size of the interpolation mesh used to construct the filter. For most efficient behavior, this should be a power of 2 plus 1 (e.g, 129, 257, etc). The default is one more than the smallest power of 2 that is not less than `numtaps`. `nfreqs` must be greater than `numtaps`. window : string or (string, float) or float, or None, optional Window function to use. Default is "hamming". 
See `scipy.signal.get_window` for the complete list of possible values. If None, no window function is applied. nyq : float Nyquist frequency. Each frequency in `freq` must be between 0 and `nyq` (inclusive). Returns ------- taps : numpy 1D array of length `numtaps` The filter coefficients of the FIR filter. Example ------- A lowpass FIR filter with a response that is 1 on [0.0, 0.5], and that decreases linearly on [0.5, 1.0] from 1 to 0: >>> taps = firwin2(150, [0.0, 0.5, 1.0], [1.0, 1.0, 0.0]) >>> print(taps[72:78]) [-0.02286961 -0.06362756 0.57310236 0.57310236 -0.06362756 -0.02286961] See also -------- scipy.signal.firwin Notes ----- From the given set of frequencies and gains, the desired response is constructed in the frequency domain. The inverse FFT is applied to the desired response to create the associated convolution kernel, and the first `numtaps` coefficients of this kernel, scaled by `window`, are returned. The FIR filter will have linear phase. The filter is Type I if `numtaps` is odd and Type II if `numtaps` is even. Because Type II filters always have a zero at the Nyquist frequency, `numtaps` must be odd if `gain[-1]` is not zero. .. versionadded:: 0.9.0 References ---------- .. [1] Oppenheim, A. V. and Schafer, R. W., "Discrete-Time Signal Processing", Prentice-Hall, Englewood Cliffs, New Jersey (1989). (See, for example, Section 7.4.) .. [2] Smith, Steven W., "The Scientist and Engineer's Guide to Digital Signal Processing", Ch. 17. 
http://www.dspguide.com/ch17/1.htm """ if len(freq) != len(gain): raise ValueError('freq and gain must be of same length.') if nfreqs is not None and numtaps >= nfreqs: raise ValueError('ntaps must be less than nfreqs, but firwin2 was ' 'called with ntaps=%d and nfreqs=%s' % (numtaps, nfreqs)) if freq[0] != 0 or freq[-1] != nyq: raise ValueError('freq must start with 0 and end with `nyq`.') d = np.diff(freq) if (d < 0).any(): raise ValueError('The values in freq must be nondecreasing.') d2 = d[:-1] + d[1:] if (d2 == 0).any(): raise ValueError('A value in freq must not occur more than twice.') if numtaps % 2 == 0 and gain[-1] != 0.0: raise ValueError("A filter with an even number of coefficients must " "have zero gain at the Nyquist rate.") if nfreqs is None: nfreqs = 1 + 2 ** int(ceil(log(numtaps,2))) # Tweak any repeated values in freq so that interp works. eps = np.finfo(float).eps for k in range(len(freq)): if k < len(freq)-1 and freq[k] == freq[k+1]: freq[k] = freq[k] - eps freq[k+1] = freq[k+1] + eps # Linearly interpolate the desired response on a uniform mesh `x`. x = np.linspace(0.0, nyq, nfreqs) fx = np.interp(x, freq, gain) # Adjust the phases of the coefficients so that the first `ntaps` of the # inverse FFT are the desired filter coefficients. shift = np.exp(-(numtaps-1)/2. * 1.j * np.pi * x / nyq) fx2 = fx * shift # Use irfft to compute the inverse FFT. out_full = irfft(fx2) if window is not None: # Create the window to apply to the filter coefficients. from signaltools import get_window wind = get_window(window, numtaps, fftbins=False) else: wind = 1 # Keep only the first `numtaps` coefficients in `out`, and multiply by # the window. out = out_full[:numtaps] * wind return out def remez(numtaps, bands, desired, weight=None, Hz=1, type='bandpass', maxiter=25, grid_density=16): """ Calculate the minimax optimal filter using the Remez exchange algorithm. 
Calculate the filter-coefficients for the finite impulse response (FIR) filter whose transfer function minimizes the maximum error between the desired gain and the realized gain in the specified frequency bands using the Remez exchange algorithm. Parameters ---------- numtaps : int The desired number of taps in the filter. The number of taps is the number of terms in the filter, or the filter order plus one. bands : array_like A monotonic sequence containing the band edges in Hz. All elements must be non-negative and less than half the sampling frequency as given by `Hz`. desired : array_like A sequence half the size of bands containing the desired gain in each of the specified bands. weight : array_like, optional A relative weighting to give to each band region. The length of `weight` has to be half the length of `bands`. Hz : scalar, optional The sampling frequency in Hz. Default is 1. type : {'bandpass', 'differentiator', 'hilbert'}, optional The type of filter: 'bandpass' : flat response in bands. This is the default. 'differentiator' : frequency proportional response in bands. 'hilbert' : filter with odd symmetry, that is, type III (for even order) or type IV (for odd order) linear phase filters. maxiter : int, optional Maximum number of iterations of the algorithm. Default is 25. grid_density : int, optional Grid density. The dense grid used in `remez` is of size ``(numtaps + 1) * grid_density``. Default is 16. Returns ------- out : ndarray A rank-1 array containing the coefficients of the optimal (in a minimax sense) filter. See Also -------- freqz : Compute the frequency response of a digital filter. References ---------- .. [1] J. H. McClellan and T. W. Parks, "A unified approach to the design of optimum FIR linear phase digital filters", IEEE Trans. Circuit Theory, vol. CT-20, pp. 697-701, 1973. .. [2] J. H. McClellan, T. W. Parks and L. R. Rabiner, "A Computer Program for Designing Optimum FIR Linear Phase Digital Filters", IEEE Trans. 
Audio Electroacoust., vol. AU-21, pp. 506-525, 1973. Examples -------- We want to construct a filter with a passband at 0.2-0.4 Hz, and stop bands at 0-0.1 Hz and 0.45-0.5 Hz. Note that this means that the behavior in the frequency ranges between those bands is unspecified and may overshoot. >>> bpass = sp.signal.remez(72, [0, 0.1, 0.2, 0.4, 0.45, 0.5], [0, 1, 0]) >>> freq, response = sp.signal.freqz(bpass) >>> ampl = np.abs(response) >>> import matplotlib.pyplot as plt >>> fig = plt.figure() >>> ax1 = fig.add_subplot(111) >>> ax1.semilogy(freq/(2*np.pi), ampl, 'b-') # freq in Hz [<matplotlib.lines.Line2D object at 0xf486790>] >>> plt.show() """ # Convert type try: tnum = {'bandpass':1, 'differentiator':2, 'hilbert':3}[type] except KeyError: raise ValueError("Type must be 'bandpass', 'differentiator', or 'hilbert'") # Convert weight if weight is None: weight = [1] * len(desired) bands = np.asarray(bands).copy() return sigtools._remez(numtaps, bands, desired, weight, tnum, Hz, maxiter, grid_density)
gpl-3.0
pepeportela/edx-platform
lms/djangoapps/course_blocks/transformers/tests/test_visibility.py
36
1429
""" Tests for VisibilityTransformer. """ import ddt from nose.plugins.attrib import attr from ..visibility import VisibilityTransformer from .helpers import BlockParentsMapTestCase, update_block @attr(shard=3) @ddt.ddt class VisibilityTransformerTestCase(BlockParentsMapTestCase): """ VisibilityTransformer Test """ TRANSFORMER_CLASS_TO_TEST = VisibilityTransformer # Following test cases are based on BlockParentsMapTestCase.parents_map @ddt.data( ({}, {0, 1, 2, 3, 4, 5, 6}, {}), ({0}, {}, {1, 2, 3, 4, 5, 6}), ({1}, {0, 2, 5, 6}, {3, 4}), ({2}, {0, 1, 3, 4, 6}, {5}), ({3}, {0, 1, 2, 4, 5, 6}, {}), ({4}, {0, 1, 2, 3, 5, 6}, {}), ({5}, {0, 1, 2, 3, 4, 6}, {}), ({6}, {0, 1, 2, 3, 4, 5}, {}), ({1, 2}, {0}, {3, 4, 5, 6}), ({2, 4}, {0, 1, 3}, {5, 6}), ({1, 2, 3, 4, 5, 6}, {0}, {}), ) @ddt.unpack def test_block_visibility( self, staff_only_blocks, expected_visible_blocks, blocks_with_differing_access ): for idx, _ in enumerate(self.parents_map): block = self.get_block(idx) block.visible_to_staff_only = (idx in staff_only_blocks) update_block(block) self.assert_transform_results( self.student, expected_visible_blocks, blocks_with_differing_access, self.transformers, )
agpl-3.0
cjparsons74/kupfer
kupfer/plugin/gtg.py
1
4891
# -*- coding: UTF-8 -*- __kupfer_name__ = _("Getting Things GNOME") __kupfer_sources__ = ("TasksSource", ) __kupfer_actions__ = ("CreateNewTask",) __description__ = _("Browse and create new tasks in GTG") __version__ = "2010-05-27" __author__ = "Karol Będkowski <karol.bedkowski@gmail.com>" import os import dbus from kupfer import plugin_support from kupfer import pretty from kupfer import textutils from kupfer.obj.base import Leaf, Action, Source from kupfer.obj.objects import TextLeaf from kupfer.obj.apps import AppLeafContentMixin from kupfer.obj.helplib import FilesystemWatchMixin plugin_support.check_dbus_connection() _SERVICE_NAME = 'org.GTG' _OBJECT_NAME = '/org/GTG' _IFACE_NAME = 'org.GTG' _GTG_HOME = "~/.local/share/gtg/" def _create_dbus_connection(activate=False): ''' Create dbus connection to GTG @activate: if True, start program if not running ''' interface = None sbus = dbus.SessionBus() try: proxy_obj = sbus.get_object('org.freedesktop.DBus', '/org/freedesktop/DBus') dbus_iface = dbus.Interface(proxy_obj, 'org.freedesktop.DBus') if activate or dbus_iface.NameHasOwner(_IFACE_NAME): obj = sbus.get_object(_SERVICE_NAME, _OBJECT_NAME) if obj: interface = dbus.Interface(obj, _IFACE_NAME) except dbus.exceptions.DBusException, err: pretty.print_debug(err) return interface def _truncate_long_text(text, maxlen=80): if len(text) > maxlen: return text[:maxlen - 1] + u'…' return text def _load_tasks(interface): ''' Load task by dbus interface ''' for task in interface.get_tasks(): title = task['title'].strip() if not title: title = task['text'].strip() title = _truncate_long_text(title) otask = Task(task['id'], title, task['status']) otask.duedate = task['duedate'] otask.startdate = task['startdate'] otask.tags = task['tags'] yield otask def _change_task_status(task_id, status): interface = _create_dbus_connection(True) task = interface.get_task(task_id) task['status'] = status interface.modify_task(task_id, task) class Task (Leaf): def __init__(self, task_id, 
title, status): Leaf.__init__(self, task_id, title) self.status = status self.tags = None self.duedate = None self.startdate = None def get_description(self): descr = [self.status] if self.duedate: descr.append(_("due: %s") % self.duedate) if self.startdate: descr.append(_("start: %s") % self.startdate) if self.tags: descr.append(_("tags: %s") % " ".join(self.tags)) return " ".join(descr) def get_icon_name(self): return 'gtg' def get_actions(self): yield OpenEditor() yield Delete() yield MarkDone() yield Dismiss() class OpenEditor (Action): rank_adjust = 1 def __init__(self): Action.__init__(self, _("Open")) def activate(self, leaf): interface = _create_dbus_connection(True) interface.open_task_editor(leaf.object) def get_icon_name(self): return 'document-open' def get_description(self): return _("Open task in Getting Things GNOME!") class Delete (Action): rank_adjust = -10 def __init__(self): Action.__init__(self, _("Delete")) def activate(self, leaf): interface = _create_dbus_connection(True) interface.delete_task(leaf.object) def get_icon_name(self): return 'edit-delete' def get_description(self): return _("Permanently remove this task") class MarkDone (Action): def __init__(self): Action.__init__(self, _("Mark Done")) def activate(self, leaf): _change_task_status(leaf.object, 'Done') def get_icon_name(self): return 'gtk-yes' def get_description(self): return _("Mark this task as done") class Dismiss (Action): def __init__(self): Action.__init__(self, _("Dismiss")) def activate(self, leaf): _change_task_status(leaf.object, 'Dismiss') def get_icon_name(self): return 'gtk-cancel' def get_description(self): return _("Mark this task as not to be done anymore") class CreateNewTask (Action): def __init__(self): Action.__init__(self, _("Create Task")) def activate(self, leaf): interface = _create_dbus_connection(True) title, body = textutils.extract_title_body(leaf.object) interface.open_new_task(title, body) def item_types(self): yield TextLeaf def 
get_icon_name(self): return 'document-new' def get_description(self): return _("Create new task in Getting Things GNOME") class TasksSource (AppLeafContentMixin, Source, FilesystemWatchMixin): appleaf_content_id = 'gtg' def __init__(self, name=None): Source.__init__(self, name or __kupfer_name__) self._tasks = [] self._version = 2 def initialize(self): self.monitor_token = \ self.monitor_directories(os.path.expanduser(_GTG_HOME)) def get_items(self): interface = _create_dbus_connection() if interface is not None: self._tasks = list(_load_tasks(interface)) return self._tasks def get_icon_name(self): return 'gtg' def provides(self): yield Task
gpl-3.0
yury-s/v8-inspector
Source/chrome/tools/perf/benchmarks/blink_style.py
2
1383
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from core import perf_benchmark from measurements import blink_style from telemetry import benchmark import page_sets @benchmark.Disabled('reference', 'win8') class BlinkStyleTop25(perf_benchmark.PerfBenchmark): """Measures performance of Blink's style engine (CSS Parsing, Style Recalc, etc.) on the top 25 pages. """ test = blink_style.BlinkStyle page_set = page_sets.Top25PageSet @classmethod def Name(cls): return 'blink_style.top_25' @benchmark.Disabled('reference') @benchmark.Enabled('android') class BlinkStyleKeyMobileSites(perf_benchmark.PerfBenchmark): """Measures performance of Blink's style engine (CSS Parsing, Style Recalc, etc.) on key mobile sites. """ test = blink_style.BlinkStyle page_set = page_sets.KeyMobileSitesPageSet @classmethod def Name(cls): return 'blink_style.key_mobile_sites' @benchmark.Disabled('reference') @benchmark.Enabled('android') class BlinkStylePolymer(perf_benchmark.PerfBenchmark): """Measures performance of Blink's style engine (CSS Parsing, Style Recalc, etc.) for Polymer cases. """ test = blink_style.BlinkStyle page_set = page_sets.PolymerPageSet @classmethod def Name(cls): return 'blink_style.polymer'
bsd-3-clause
smaffulli/libcloud
libcloud/dns/drivers/softlayer.py
26
7409
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License.You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __all__ = [ 'SoftLayerDNSDriver' ] from libcloud.common.softlayer import SoftLayerConnection from libcloud.common.softlayer import SoftLayerObjectDoesntExist from libcloud.dns.types import Provider, RecordType from libcloud.dns.types import ZoneDoesNotExistError, RecordDoesNotExistError from libcloud.dns.base import DNSDriver, Zone, Record VALID_RECORD_EXTRA_PARAMS = ['priority', 'ttl'] class SoftLayerDNSDriver(DNSDriver): type = Provider.SOFTLAYER name = 'Softlayer DNS' website = 'https://www.softlayer.com' connectionCls = SoftLayerConnection RECORD_TYPE_MAP = { RecordType.A: 'a', RecordType.AAAA: 'aaaa', RecordType.CNAME: 'cname', RecordType.MX: 'mx', RecordType.NS: 'ns', RecordType.PTR: 'ptr', RecordType.SOA: 'soa', RecordType.SPF: 'spf', RecordType.SRV: 'srv', RecordType.TXT: 'txt', } def create_zone(self, domain, ttl=None, extra=None): self.connection.set_context({'resource': 'zone', 'id': domain}) data = { 'name': domain, 'resourceRecords': [] } response = self.connection.request( 'SoftLayer_Dns_Domain', 'createObject', data ).object zone = Zone(id=response['id'], domain=domain, type='master', ttl=3600, driver=self) return zone def get_zone(self, zone_id): self.connection.set_context({'resource': 
'zone', 'id': zone_id}) try: response = self.connection.request( 'SoftLayer_Dns_Domain', 'getObject', id=zone_id ).object except SoftLayerObjectDoesntExist: raise ZoneDoesNotExistError(value='', driver=self, zone_id=zone_id) return self._to_zone(response) def delete_zone(self, zone): self.connection.set_context({'resource': 'zone', 'id': zone.id}) try: self.connection.request( 'SoftLayer_Dns_Domain', 'deleteObject', id=zone.id ).object except SoftLayerObjectDoesntExist: raise ZoneDoesNotExistError(value='', driver=self, zone_id=zone.id) else: return True def iterate_zones(self): zones_list = self.connection.request( 'SoftLayer_Dns_Domain', 'getByDomainName', '.' ).object for item in zones_list: yield self._to_zone(item) def iterate_records(self, zone): self.connection.set_context({'resource': 'zone', 'id': zone.id}) records_list = self.connection.request( 'SoftLayer_Dns_Domain', 'getResourceRecords', id=zone.id ).object for item in records_list: yield self._to_record(item, zone=zone) def get_record(self, zone_id, record_id): try: record = self.connection.request( 'SoftLayer_Dns_Domain_ResourceRecord', 'getObject', id=record_id ).object return self._to_record(record, zone=self.get_zone(zone_id)) except SoftLayerObjectDoesntExist: raise RecordDoesNotExistError(value='', driver=self, record_id=record_id) def delete_record(self, record): try: self.connection.request( 'SoftLayer_Dns_Domain_ResourceRecord', 'deleteObject', id=record.id ).object except SoftLayerObjectDoesntExist: raise RecordDoesNotExistError(value='', driver=self, record_id=record.id) else: return True def create_record(self, name, zone, type, data, extra=None): params = { 'domainId': zone.id, 'type': self.RECORD_TYPE_MAP[type], 'host': name, 'data': data } if extra: if extra.get('ttl'): params['ttl'] = extra['ttl'] if extra.get('refresh'): params['refresh'] = extra['refresh'] if extra.get('retry'): params['retry'] = extra['retry'] if extra.get('expire'): params['expire'] = extra['expire'] if 
extra.get('priority'): params['mxPriority'] = extra['priority'] response = self.connection.request( 'SoftLayer_Dns_Domain_ResourceRecord', 'createObject', params ).object return self._to_record(response, zone=zone) def update_record( self, record, name=None, type=None, data=None, extra=None): params = {} if type: params['type'] = self.RECORD_TYPE_MAP[type] if name: params['host'] = name if data: params['data'] = data if extra: if extra.get('ttl'): params['ttl'] = extra['ttl'] if extra.get('refresh'): params['refresh'] = extra['refresh'] if extra.get('retry'): params['retry'] = extra['retry'] if extra.get('expire'): params['expire'] = extra['expire'] if extra.get('priority'): params['mxPriority'] = extra['priority'] response = self.connection.request( 'SoftLayer_Dns_Domain_ResourceRecord', 'editObject', params, id=record.id, ).object if response: changed_record = self.connection.request( 'SoftLayer_Dns_Domain_ResourceRecord', 'getObject', id=record.id, ).object return self._to_record(changed_record, zone=record.zone) else: return False def _to_zone(self, item): ttl = item.get('ttl', 3600) zone = Zone(id=item['id'], domain=item['name'], type='master', ttl=ttl, driver=self) return zone def _to_record(self, item, zone=None): extra = { 'ttl': item['ttl'], 'expire': item['expire'], 'mxPriority': item['mxPriority'], 'refresh': item['refresh'], 'retry': item['retry'], } record = Record( id=item['id'], name=item['host'], type=self._string_to_record_type(item['type']), data=item['data'], zone=zone, driver=self, extra=extra ) return record
apache-2.0
IllusionRom-deprecated/android_platform_external_chromium_org
win8/util/check_sdk_patch.py
68
1559
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Script to check that the Windows 8 SDK has been appropriately patched so
that it can be used with VS 2010.

In practice, this checks for the presence of 'enum class' in asyncinfo.h.
Changing that to 'enum' is the only thing needed to build with the WinRT
headers in VS 2010.
"""

import os
import sys


def main(argv):
  """Check the Windows 8 SDK rooted at argv[1] for the VS2010 patch.

  argv[2], if present, names a stamp file written on success (used as a
  build-system output).

  Returns 0 when the SDK is patched; 1 for bad usage; 2 when asyncinfo.h
  cannot be found under the given root; 3 when the SDK is unpatched.
  """
  if len(argv) < 2:
    # Single-argument print() is valid on both Python 2 and 3.
    print("Usage: check_sdk_patch.py path_to_windows_8_sdk [dummy_output_file]")
    return 1

  # Look for asyncinfo.h
  async_info_path = os.path.join(argv[1], 'Include/winrt/asyncinfo.h')
  if not os.path.exists(async_info_path):
    print("Could not find %s in provided SDK path. Please check input." %
          async_info_path)
    print("CWD: %s" % os.getcwd())
    return 2

  # Use a context manager so the header's file handle is always closed
  # (the previous open(...).read() leaked it).
  with open(async_info_path) as async_info_file:
    unpatched = 'enum class' in async_info_file.read()

  if unpatched:
    print("\nERROR: You are using an unpatched Windows 8 SDK located at %s."
          "\nPlease see instructions at"
          "\nhttp://www.chromium.org/developers/how-tos/"
          "build-instructions-windows\nfor how to apply the patch to build "
          "with VS2010.\n" % argv[1])
    return 3

  if len(argv) > 2:
    # Touch the stamp file so the build system records that the check ran.
    with open(argv[2], 'w') as dummy_file:
      dummy_file.write('Windows 8 SDK has been patched!')

  # Patched Windows 8 SDK found.
  return 0


if '__main__' == __name__:
  sys.exit(main(sys.argv))
bsd-3-clause
LukasSukenik/faunus
src/examples/grand.py
1
1548
#!/usr/bin/env python from __future__ import print_function import json, sys, os from subprocess import call from shutil import copyfile pfx = os.path.join( os.path.dirname(__file__), "grand") try: copyfile( pfx+'.state', 'state' ) copyfile( pfx+'.test', 'grand.test' ) except: pass def mkinput(): d = { "moleculelist": { "salt": { "Ninit": 20, "atomic": True, "atoms": "Na Cl" } }, "energy": { "nonbonded": { "epsr": 80 } }, "moves": { "atomtranslate": { "salt": { "permol": True, "prob": 0.01 } }, "atomgc": { "molecule": "salt" }, "random": { "hardware": True } }, "system": { "mcloop" : { "macro": 10, "micro": 100000 }, "geometry" : { "radius": 80 }, "unittest" : { "testfile": "grand.test", "stable": False }, "coulomb" : { "epsr": 80 }, "temperature" : 298.15 }, "analysis" : { "widom" : { "nstep":20, "ninsert":15, "particles":["Na", "Cl"] }, "widomscaled" : { "nstep":20, "ninsert":15, "lB":7.0 }, "pqrfile" : { "file": "grand.pqr" }, "statefile" : { "file": "state" } }, "atomlist": { "Na": { "q": 1.0, "r": 2.0, "dp": 50, "activity": 0.05 }, "Cl": { "q": -1.0, "r": 2.0, "dp": 50, "activity": 0.05 } } } with open('grand.json', 'w+') as f: f.write(json.dumps(d, indent=4)) exe='./grand' if ( os.access( exe, os.X_OK )): mkinput() rc = call( [exe] ) sys.exit( rc )
gpl-2.0
fly19890211/edx-platform
lms/lib/courseware_search/lms_filter_generator.py
58
5634
""" This file contains implementation override of SearchFilterGenerator which will allow * Filter by all courses in which the user is enrolled in """ from microsite_configuration import microsite from student.models import CourseEnrollment from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from opaque_keys.edx.locations import SlashSeparatedCourseKey from xmodule.modulestore.django import modulestore from search.filter_generator import SearchFilterGenerator from openedx.core.djangoapps.user_api.partition_schemes import RandomUserPartitionScheme from openedx.core.djangoapps.course_groups.partition_scheme import CohortPartitionScheme from courseware.access import get_user_role INCLUDE_SCHEMES = [CohortPartitionScheme, RandomUserPartitionScheme, ] SCHEME_SUPPORTS_ASSIGNMENT = [RandomUserPartitionScheme, ] class LmsSearchFilterGenerator(SearchFilterGenerator): """ SearchFilterGenerator for LMS Search """ _user_enrollments = {} def _enrollments_for_user(self, user): """ Return the specified user's course enrollments """ if user not in self._user_enrollments: self._user_enrollments[user] = CourseEnrollment.enrollments_for_user(user) return self._user_enrollments[user] def filter_dictionary(self, **kwargs): """ LMS implementation, adds filtering by user partition, course id and user """ def get_group_for_user_partition(user_partition, course_key, user): """ Returns the specified user's group for user partition """ if user_partition.scheme in SCHEME_SUPPORTS_ASSIGNMENT: return user_partition.scheme.get_group_for_user( course_key, user, user_partition, assign=False, ) else: return user_partition.scheme.get_group_for_user( course_key, user, user_partition, ) def get_group_ids_for_user(course, user): """ Collect user partition group ids for user for this course """ partition_groups = [] for user_partition in course.user_partitions: if user_partition.scheme in INCLUDE_SCHEMES: group = get_group_for_user_partition(user_partition, course.id, user) 
if group: partition_groups.append(group) partition_group_ids = [unicode(partition_group.id) for partition_group in partition_groups] return partition_group_ids if partition_group_ids else None filter_dictionary = super(LmsSearchFilterGenerator, self).filter_dictionary(**kwargs) if 'user' in kwargs: user = kwargs['user'] if 'course_id' in kwargs and kwargs['course_id']: try: course_key = CourseKey.from_string(kwargs['course_id']) except InvalidKeyError: course_key = SlashSeparatedCourseKey.from_deprecated_string(kwargs['course_id']) # Staff user looking at course as staff user if get_user_role(user, course_key) in ('instructor', 'staff'): return filter_dictionary # Need to check course exist (if course gets deleted enrollments don't get cleaned up) course = modulestore().get_course(course_key) if course: filter_dictionary['content_groups'] = get_group_ids_for_user(course, user) else: user_enrollments = self._enrollments_for_user(user) content_groups = [] for enrollment in user_enrollments: course = modulestore().get_course(enrollment.course_id) if course: enrollment_group_ids = get_group_ids_for_user(course, user) if enrollment_group_ids: content_groups.extend(enrollment_group_ids) filter_dictionary['content_groups'] = content_groups if content_groups else None return filter_dictionary def field_dictionary(self, **kwargs): """ add course if provided otherwise add courses in which the user is enrolled in """ field_dictionary = super(LmsSearchFilterGenerator, self).field_dictionary(**kwargs) if not kwargs.get('user'): field_dictionary['course'] = [] elif not kwargs.get('course_id'): user_enrollments = self._enrollments_for_user(kwargs['user']) field_dictionary['course'] = [unicode(enrollment.course_id) for enrollment in user_enrollments] # if we have an org filter, only include results for this org filter course_org_filter = microsite.get_value('course_org_filter') if course_org_filter: field_dictionary['org'] = course_org_filter return field_dictionary def 
exclude_dictionary(self, **kwargs): """ If we are not on a microsite, then exclude any microsites that are defined """ exclude_dictionary = super(LmsSearchFilterGenerator, self).exclude_dictionary(**kwargs) course_org_filter = microsite.get_value('course_org_filter') # If we have a course filter we are ensuring that we only get those courses above if not course_org_filter: org_filter_out_set = microsite.get_all_orgs() if org_filter_out_set: exclude_dictionary['org'] = list(org_filter_out_set) return exclude_dictionary
agpl-3.0
piotroxp/scibibscan
scib/lib/python3.6/site-packages/pip/_vendor/requests/packages/urllib3/util/timeout.py
713
9596
from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time

from ..exceptions import TimeoutStateError

# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()


def current_time():
    """
    Retrieve the current time. This function is mocked out in unit testing.
    """
    return time.time()


class Timeout(object):
    """ Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/',
                                timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/, timeout=no_timeout)

    :param total: Combined budget for connect + read, in seconds.  When both
        a per-phase value and ``total`` are given, the shorter of the two
        applies.  ``None`` (the default) disables the combined limit.
    :type total: integer, float, or None

    :param connect: Maximum time to wait for a connection attempt to succeed.
        ``None`` waits forever; omitting the parameter uses the system
        default (socket.py's global default timeout).
    :type connect: integer, float, or None

    :param read: Maximum time to wait between consecutive read operations
        on the response socket.  Same ``None``/omitted semantics as
        ``connect``.
    :type read: integer, float, or None

    .. note::

        These values bound individual socket operations, not the total
        wall-clock time of a request: DNS resolution is not covered, and a
        server that streams one byte every few seconds never trips the read
        timeout even though the request may take minutes.  Use a separate
        watcher if you need a hard wall-clock cut-off.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        # Set by start_connect(); used to charge connect time against 'total'.
        self._start_connect = None

    def __str__(self):
        return '%s(connect=%r, read=%r, total=%r)' % (
            type(self).__name__, self._connect, self._read, self.total)

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If the type is not an integer or a float, or if it
            is a numeric value less than zero.
        """
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        try:
            if value < 0:
                raise ValueError("Attempted to set %s timeout to %s, but the "
                                 "timeout cannot be set to a value less "
                                 "than 0." % (name, value))
        except TypeError:  # Python 3
            # Comparing a non-numeric type with 0 raises TypeError on py3
            # (py2 would have ordered them arbitrarily).
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        return value

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        httplib applies one timeout value to both connect() and recv();
        this mirrors that by using ``timeout`` for both phases.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for timer "
                                    "that has not started.")
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total

        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        if (self.total is not None and
                self.total is not self.DEFAULT_TIMEOUT and
                self._read is not None and
                self._read is not self.DEFAULT_TIMEOUT):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            # Remaining 'total' budget, capped by the explicit read timeout,
            # floored at 0.
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self._read
mit
cbrepo/django-reversion
src/reversion/helpers.py
2
2931
"""A number of useful helper functions to automate common tasks.""" from django.contrib import admin from django.contrib.admin.sites import NotRegistered from reversion.admin import VersionAdmin def patch_admin(model, admin_site=None): """ Enables version control with full admin integration for a model that has already been registered with the django admin site. This is excellent for adding version control to existing Django contrib applications. """ admin_site = admin_site or admin.site try: ModelAdmin = admin_site._registry[model].__class__ except KeyError: raise NotRegistered, "The model %r has not been registered with the admin site." % model # Unregister existing admin class. admin_site.unregister(model) # Register patched admin class. class PatchedModelAdmin(VersionAdmin, ModelAdmin): pass admin_site.register(model, PatchedModelAdmin) # Patch generation methods, only available if the google-diff-match-patch # library is installed. # # http://code.google.com/p/google-diff-match-patch/ try: from diff_match_patch import diff_match_patch except ImportError: pass else: dmp = diff_match_patch() def generate_diffs(old_version, new_version, field_name, cleanup): """Generates a diff array for the named field between the two versions.""" # Extract the text from the versions. old_text = old_version.field_dict[field_name] or u"" new_text = new_version.field_dict[field_name] or u"" # Generate the patch. diffs = dmp.diff_main(unicode(old_text), unicode(new_text)) if cleanup == "semantic": dmp.diff_cleanupSemantic(diffs) elif cleanup == "efficiency": dmp.diff_cleanupEfficiency(diffs) elif cleanup is None: pass else: raise ValueError("cleanup parameter should be one of 'semantic', 'efficiency' or None.") return diffs def generate_patch(old_version, new_version, field_name, cleanup=None): """ Generates a text patch of the named field between the two versions. The cleanup parameter can be None, "semantic" or "efficiency" to clean up the diff for greater human readibility. 
""" diffs = generate_diffs(old_version, new_version, field_name, cleanup) patch = dmp.patch_make(diffs) return dmp.patch_toText(patch) def generate_patch_html(old_version, new_version, field_name, cleanup=None): """ Generates a pretty html version of the differences between the named field in two versions. The cleanup parameter can be None, "semantic" or "efficiency" to clean up the diff for greater human readibility. """ diffs = generate_diffs(old_version, new_version, field_name, cleanup) return dmp.diff_prettyHtml(diffs)
bsd-3-clause
Mapkin/tally
tally/conf/wsgi.py
1
1146
""" WSGI config for tally project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tally.conf.settings.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
mit
epiqc/ScaffCC
llvm/utils/lit/lit/discovery.py
12
8926
""" Test discovery functions. """ import copy import os import sys from lit.TestingConfig import TestingConfig from lit import LitConfig, Test def chooseConfigFileFromDir(dir, config_names): for name in config_names: p = os.path.join(dir, name) if os.path.exists(p): return p return None def dirContainsTestSuite(path, lit_config): cfgpath = chooseConfigFileFromDir(path, lit_config.site_config_names) if not cfgpath: cfgpath = chooseConfigFileFromDir(path, lit_config.config_names) return cfgpath def getTestSuite(item, litConfig, cache): """getTestSuite(item, litConfig, cache) -> (suite, relative_path) Find the test suite containing @arg item. @retval (None, ...) - Indicates no test suite contains @arg item. @retval (suite, relative_path) - The suite that @arg item is in, and its relative path inside that suite. """ def search1(path): # Check for a site config or a lit config. cfgpath = dirContainsTestSuite(path, litConfig) # If we didn't find a config file, keep looking. if not cfgpath: parent,base = os.path.split(path) if parent == path: return (None, ()) ts, relative = search(parent) return (ts, relative + (base,)) # This is a private builtin parameter which can be used to perform # translation of configuration paths. Specifically, this parameter # can be set to a dictionary that the discovery process will consult # when it finds a configuration it is about to load. If the given # path is in the map, the value of that key is a path to the # configuration to load instead. config_map = litConfig.params.get('config_map') if config_map: cfgpath = os.path.realpath(cfgpath) cfgpath = os.path.normcase(cfgpath) target = config_map.get(cfgpath) if target: cfgpath = target # We found a test suite, create a new config for it and load it. 
if litConfig.debug: litConfig.note('loading suite config %r' % cfgpath) cfg = TestingConfig.fromdefaults(litConfig) cfg.load_from_path(cfgpath, litConfig) source_root = os.path.realpath(cfg.test_source_root or path) exec_root = os.path.realpath(cfg.test_exec_root or path) return Test.TestSuite(cfg.name, source_root, exec_root, cfg), () def search(path): # Check for an already instantiated test suite. real_path = os.path.realpath(path) res = cache.get(real_path) if res is None: cache[real_path] = res = search1(path) return res # Canonicalize the path. item = os.path.normpath(os.path.join(os.getcwd(), item)) # Skip files and virtual components. components = [] while not os.path.isdir(item): parent,base = os.path.split(item) if parent == item: return (None, ()) components.append(base) item = parent components.reverse() ts, relative = search(item) return ts, tuple(relative + tuple(components)) def getLocalConfig(ts, path_in_suite, litConfig, cache): def search1(path_in_suite): # Get the parent config. if not path_in_suite: parent = ts.config else: parent = search(path_in_suite[:-1]) # Check if there is a local configuration file. source_path = ts.getSourcePath(path_in_suite) cfgpath = chooseConfigFileFromDir(source_path, litConfig.local_config_names) # If not, just reuse the parent config. if not cfgpath: return parent # Otherwise, copy the current config and load the local configuration # file into it. config = copy.deepcopy(parent) if litConfig.debug: litConfig.note('loading local config %r' % cfgpath) config.load_from_path(cfgpath, litConfig) return config def search(path_in_suite): key = (ts, path_in_suite) res = cache.get(key) if res is None: cache[key] = res = search1(path_in_suite) return res return search(path_in_suite) def getTests(path, litConfig, testSuiteCache, localConfigCache): # Find the test suite for this input and its relative path. 
ts,path_in_suite = getTestSuite(path, litConfig, testSuiteCache) if ts is None: litConfig.warning('unable to find test suite for %r' % path) return (),() if litConfig.debug: litConfig.note('resolved input %r to %r::%r' % (path, ts.name, path_in_suite)) return ts, getTestsInSuite(ts, path_in_suite, litConfig, testSuiteCache, localConfigCache) def getTestsInSuite(ts, path_in_suite, litConfig, testSuiteCache, localConfigCache): # Check that the source path exists (errors here are reported by the # caller). source_path = ts.getSourcePath(path_in_suite) if not os.path.exists(source_path): return # Check if the user named a test directly. if not os.path.isdir(source_path): lc = getLocalConfig(ts, path_in_suite[:-1], litConfig, localConfigCache) yield Test.Test(ts, path_in_suite, lc) return # Otherwise we have a directory to search for tests, start by getting the # local configuration. lc = getLocalConfig(ts, path_in_suite, litConfig, localConfigCache) # Search for tests. if lc.test_format is not None: for res in lc.test_format.getTestsInDirectory(ts, path_in_suite, litConfig, lc): yield res # Search subdirectories. for filename in os.listdir(source_path): # FIXME: This doesn't belong here? if filename in ('Output', '.svn', '.git') or filename in lc.excludes: continue # Ignore non-directories. file_sourcepath = os.path.join(source_path, filename) if not os.path.isdir(file_sourcepath): continue # Check for nested test suites, first in the execpath in case there is a # site configuration and then in the source path. 
subpath = path_in_suite + (filename,) file_execpath = ts.getExecPath(subpath) if dirContainsTestSuite(file_execpath, litConfig): sub_ts, subpath_in_suite = getTestSuite(file_execpath, litConfig, testSuiteCache) elif dirContainsTestSuite(file_sourcepath, litConfig): sub_ts, subpath_in_suite = getTestSuite(file_sourcepath, litConfig, testSuiteCache) else: sub_ts = None # If the this directory recursively maps back to the current test suite, # disregard it (this can happen if the exec root is located inside the # current test suite, for example). if sub_ts is ts: continue # Otherwise, load from the nested test suite, if present. if sub_ts is not None: subiter = getTestsInSuite(sub_ts, subpath_in_suite, litConfig, testSuiteCache, localConfigCache) else: subiter = getTestsInSuite(ts, subpath, litConfig, testSuiteCache, localConfigCache) N = 0 for res in subiter: N += 1 yield res if sub_ts and not N: litConfig.warning('test suite %r contained no tests' % sub_ts.name) def find_tests_for_inputs(lit_config, inputs): """ find_tests_for_inputs(lit_config, inputs) -> [Test] Given a configuration object and a list of input specifiers, find all the tests to execute. """ # Expand '@...' form in inputs. actual_inputs = [] for input in inputs: if input.startswith('@'): f = open(input[1:]) try: for ln in f: ln = ln.strip() if ln: actual_inputs.append(ln) finally: f.close() else: actual_inputs.append(input) # Load the tests from the inputs. tests = [] test_suite_cache = {} local_config_cache = {} for input in actual_inputs: prev = len(tests) tests.extend(getTests(input, lit_config, test_suite_cache, local_config_cache)[1]) if prev == len(tests): lit_config.warning('input %r contained no tests' % input) # If there were any errors during test discovery, exit now. if lit_config.numErrors: sys.stderr.write('%d errors, exiting.\n' % lit_config.numErrors) sys.exit(2) return tests
bsd-2-clause
simod/geonode
geonode/proxy/views.py
1
14175
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################

import os
import re
import json
import shutil
import logging
import requests
import tempfile
import traceback

from slugify import Slugify
from httplib import HTTPConnection, HTTPSConnection
from urlparse import urlsplit, urljoin

from django.conf import settings
from django.http import HttpResponse
from django.utils.http import is_safe_url
from django.http.request import validate_host
from django.views.decorators.csrf import requires_csrf_token
from django.middleware.csrf import get_token
from distutils.version import StrictVersion
from django.utils.translation import ugettext as _
from django.core.files.storage import default_storage as storage

from geonode.base.models import Link
from geonode.layers.models import Layer, LayerFile
from geonode.utils import (resolve_object,
                           check_ogc_backend,
                           get_dir_time_suffix,
                           zip_dir)
from geonode import geoserver, qgis_server  # noqa

# Timeout (seconds) for outbound requests made while assembling downloads.
TIMEOUT = 30

logger = logging.getLogger(__name__)

custom_slugify = Slugify(separator='_')

# Matches exactly "version=<d.d.d>&request=GetCapabilities&service=<3 letters>"
# (case-insensitive) -- i.e. a bare OWS GetCapabilities query string.
ows_regexp = re.compile(
    "^(?i)(version)=(\d\.\d\.\d)(?i)&(?i)request=(?i)(GetCapabilities)&(?i)service=(?i)(\w\w\w)$")


@requires_csrf_token
def proxy(request, url=None, response_callback=None,
          sec_chk_hosts=True, sec_chk_rules=True, **kwargs):
    """Forward *request* to a remote host on behalf of the browser.

    The target comes from ``url`` or the ``url`` GET parameter.  Unless
    DEBUG is on, the target host must pass ``validate_host`` against
    PROXY_ALLOWED_HOSTS (plus the site host, the OGC backend host, and --
    for plain GetCapabilities queries -- the requested host).  Session
    cookies, the CSRF token, and the OAuth access token are propagated.
    ``response_callback``, when given, receives the upstream response
    instead of it being returned directly.
    """
    # Security rules and settings
    PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ())

    # Sanity url checks
    if 'url' not in request.GET and not url:
        return HttpResponse("The proxy service requires a URL-encoded URL as a parameter.",
                            status=400,
                            content_type="text/plain"
                            )

    raw_url = url or request.GET['url']
    # Relative URLs are resolved against this site.
    raw_url = urljoin(
        settings.SITEURL,
        raw_url) if raw_url.startswith("/") else raw_url
    url = urlsplit(raw_url)
    locator = str(url.path)
    if url.query != "":
        locator += '?' + url.query
    if url.fragment != "":
        locator += '#' + url.fragment

    access_token = None
    if request and 'access_token' in request.session:
        access_token = request.session['access_token']

    # White-Black Listing Hosts
    if sec_chk_hosts and not settings.DEBUG:
        site_url = urlsplit(settings.SITEURL)
        if site_url.hostname not in PROXY_ALLOWED_HOSTS:
            PROXY_ALLOWED_HOSTS += (site_url.hostname, )

        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.geoserver.helpers import ogc_server_settings
            hostname = (ogc_server_settings.hostname, ) if ogc_server_settings else ()
            # BUGFIX: the previous check tested the *tuple* for membership
            # ("hostname not in PROXY_ALLOWED_HOSTS"), which was always true;
            # test the hostname string itself.
            if hostname and hostname[0] not in PROXY_ALLOWED_HOSTS:
                PROXY_ALLOWED_HOSTS += hostname

        if url.query and ows_regexp.match(url.query):
            ows_tokens = ows_regexp.match(url.query).groups()
            # BUGFIX: ('getcapabilities') is a plain string, so the previous
            # "in" was a substring test; use a one-element tuple.
            if len(ows_tokens) == 4 and 'version' == ows_tokens[0] and StrictVersion(
                    ows_tokens[1]) >= StrictVersion("1.0.0") and StrictVersion(
                    ows_tokens[1]) <= StrictVersion("3.0.0") and ows_tokens[2].lower() in (
                    'getcapabilities',) and ows_tokens[3].upper() in ('OWS', 'WCS', 'WFS', 'WMS', 'WPS', 'CSW'):
                if url.hostname not in PROXY_ALLOWED_HOSTS:
                    PROXY_ALLOWED_HOSTS += (url.hostname, )

        if not validate_host(
                url.hostname, PROXY_ALLOWED_HOSTS):
            return HttpResponse("DEBUG is set to False but the host of the path provided to the proxy service"
                                " is not in the PROXY_ALLOWED_HOSTS setting.",
                                status=403,
                                content_type="text/plain"
                                )

    # Security checks based on rules; allow only specific requests
    if sec_chk_rules:
        # TODO: Not yet implemented
        pass

    # Collecting headers and cookies
    headers = {}
    cookies = None
    csrftoken = None

    if settings.SESSION_COOKIE_NAME in request.COOKIES and is_safe_url(
            url=raw_url, host=url.hostname):
        cookies = request.META["HTTP_COOKIE"]

    for cook in request.COOKIES:
        name = str(cook)
        value = request.COOKIES.get(name)
        if name == 'csrftoken':
            csrftoken = value
        cook = "%s=%s" % (name, value)
        cookies = cook if not cookies else (cookies + '; ' + cook)

    csrftoken = get_token(request) if not csrftoken else csrftoken

    if csrftoken:
        headers['X-Requested-With'] = "XMLHttpRequest"
        headers['X-CSRFToken'] = csrftoken
        cook = "%s=%s" % ('csrftoken', csrftoken)
        cookies = cook if not cookies else (cookies + '; ' + cook)

    if cookies:
        if 'JSESSIONID' in request.session and request.session['JSESSIONID']:
            cookies = cookies + '; JSESSIONID=' + \
                request.session['JSESSIONID']
        headers['Cookie'] = cookies

    if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META:
        headers["Content-Type"] = request.META["CONTENT_TYPE"]

    access_token = None
    if request and 'access_token' in request.session:
        access_token = request.session['access_token']
    if access_token:
        # TODO: Bearer is currently cutted of by Djano / GeoServer
        if request.method in ("POST", "PUT"):
            headers['Authorization'] = 'Bearer %s' % access_token
        if access_token and 'access_token' not in locator:
            # Fall back to passing the token as a query parameter.
            query_separator = '&' if '?' in locator else '?'
            locator = ('%s%saccess_token=%s' %
                       (locator, query_separator, access_token))
    elif 'HTTP_AUTHORIZATION' in request.META:
        auth = request.META.get(
            'HTTP_AUTHORIZATION',
            request.META.get('HTTP_AUTHORIZATION2'))
        if auth:
            headers['Authorization'] = auth

    site_url = urlsplit(settings.SITEURL)

    pragma = "no-cache"
    referer = request.META[
        "HTTP_REFERER"] if "HTTP_REFERER" in request.META else \
        "{scheme}://{netloc}/".format(scheme=site_url.scheme,
                                      netloc=site_url.netloc)
    encoding = request.META["HTTP_ACCEPT_ENCODING"] if "HTTP_ACCEPT_ENCODING" in request.META else "gzip"

    headers.update({"Pragma": pragma,
                    "Referer": referer,
                    "Accept-encoding": encoding, })

    if url.scheme == 'https':
        conn = HTTPSConnection(url.hostname, url.port)
    else:
        conn = HTTPConnection(url.hostname, url.port)
    conn.request(request.method, locator.encode('utf8'), request.body, headers)

    response = conn.getresponse()
    content = response.read()
    status = response.status
    content_type = response.getheader("Content-Type", "text/plain")

    # decompress GZipped responses if not enabled
    if content and response.getheader('Content-Encoding') == 'gzip':
        from StringIO import StringIO
        import gzip
        buf = StringIO(content)
        f = gzip.GzipFile(fileobj=buf)
        content = f.read()

    if response_callback:
        kwargs = {} if not kwargs else kwargs
        kwargs.update({
            'response': response,
            'content': content,
            'status': status,
            'content_type': content_type
        })
        return response_callback(**kwargs)
    else:
        # If we get a redirect, let's add a useful message.
        if status in (301, 302, 303, 307):
            _response = HttpResponse(('This proxy does not support redirects. The server in "%s" '
                                      'asked for a redirect to "%s"' % (url, response.getheader('Location'))),
                                     status=status,
                                     content_type=content_type
                                     )
            _response['Location'] = response.getheader('Location')
            return _response
        else:
            return HttpResponse(
                content=content,
                status=status,
                content_type=content_type)


def download(request, resourceid, sender=Layer):
    """Bundle a Layer's files, styles and metadata into a zip and return it.

    Requires the 'base.download_resourcebase' permission on the resource.
    Non-Layer resources get a 403 JSON error; a Layer whose files cannot be
    located gets a 404 JSON error.
    """
    instance = resolve_object(request,
                              sender,
                              {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_("You are not permitted to save or edit this resource."))

    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = [item for idx, item in enumerate(LayerFile.objects.filter(upload_session=upload_session))]

            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            # Copy all Layer related files into a temporary folder
            for l in layer_files:
                if storage.exists(l.file):
                    geonode_layer_path = storage.path(l.file)
                    base_filename, original_ext = os.path.splitext(geonode_layer_path)
                    shutil.copy2(geonode_layer_path, target_folder)

            # Let's check for associated SLD files (if any).  Style dumping
            # is best-effort: failures are logged and the download proceeds.
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(target_folder, "".join([s.name, ".sld"]))
                    sld_file = open(sld_file_path, "w")
                    sld_file.write(s.sld_body.strip())
                    sld_file.close()

                    try:
                        sld_file = open(sld_file_path, "r")
                        response = requests.get(s.sld_url, timeout=TIMEOUT)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(target_folder, "".join([s.name, "_remote.sld"]))
                        sld_file = open(sld_file_path, "w")
                        sld_file.write(sld_remote_content.strip())
                        sld_file.close()
                    except BaseException:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)
            except BaseException:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = custom_slugify(link.name)
                    link_file = os.path.join(target_md_folder, "".join([link_name, ".%s" % link.extension]))
                    # BUGFIX: ('data') is a plain string, so the previous
                    # "in" test was a substring check; use a tuple.
                    if link.link_type in ('data',):
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        link_file = open(link_file, "wb")
                        try:
                            response = requests.get(link.url, stream=True, timeout=TIMEOUT)
                            response.raw.decode_content = True
                            shutil.copyfileobj(response.raw, link_file)
                        except BaseException:
                            traceback.print_exc()
                            tb = traceback.format_exc()
                            logger.debug(tb)
                        finally:
                            link_file.close()
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        link_file = open(link_file, "w")
                        link_file.write(link.url.strip())
                        link_file.close()
            except BaseException:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            # NOTE(review): the file handle passed to HttpResponse is never
            # explicitly closed; Django reads it while streaming the response.
            response = HttpResponse(
                content=open(target_file),
                status=200,
                content_type="application/zip")
            response['Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return HttpResponse(
                json.dumps({
                    'error': 'file_not_found'
                }),
                status=404,
                content_type="application/json"
            )

    return HttpResponse(
        json.dumps({
            'error': 'unauthorized_request'
        }),
        status=403,
        content_type="application/json"
    )
gpl-3.0
yamateh/robotframework
src/robot/output/librarylogger.py
1
1938
# Copyright 2008-2013 Nokia Siemens Networks Oyj # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Implementation of the public test library logging API. This is exposed via :py:mod:`robot.api.logger`. Implementation must reside here to avoid cyclic imports. """ import sys import threading from robot.utils import unic, encode_output from .logger import LOGGER from .loggerhelper import Message LOGGING_THREADS = ('MainThread', 'RobotFrameworkTimeoutThread') def write(msg, level, html=False): # Callable messages allow lazy logging internally, but we don't want to # expose this functionality publicly. See the following issue for details: # http://code.google.com/p/robotframework/issues/detail?id=1505 if callable(msg): msg = unic(msg) if threading.currentThread().getName() in LOGGING_THREADS: LOGGER.log_message(Message(msg, level, html)) def trace(msg, html=False): write(msg, 'TRACE', html) def debug(msg, html=False): write(msg, 'DEBUG', html) def info(msg, html=False, also_console=False): write(msg, 'INFO', html) if also_console: console(msg) def warn(msg, html=False): write(msg, 'WARN', html) def console(msg, newline=True, stream='stdout'): msg = unic(msg) if newline: msg += '\n' stream = sys.__stdout__ if stream.lower() != 'stderr' else sys.__stderr__ stream.write(encode_output(msg)) stream.flush()
apache-2.0
dennis-sheil/commandergenius
project/jni/python/src/Tools/pynche/PyncheWidget.py
100
10501
"""Main Pynche (Pythonically Natural Color and Hue Editor) widget. This window provides the basic decorations, primarily including the menubar. It is used to bring up other windows. """ import sys import os from Tkinter import * import tkMessageBox import tkFileDialog import ColorDB # Milliseconds between interrupt checks KEEPALIVE_TIMER = 500 class PyncheWidget: def __init__(self, version, switchboard, master=None, extrapath=[]): self.__sb = switchboard self.__version = version self.__textwin = None self.__listwin = None self.__detailswin = None self.__helpwin = None self.__dialogstate = {} modal = self.__modal = not not master # If a master was given, we are running as a modal dialog servant to # some other application. We rearrange our UI in this case (there's # no File menu and we get `Okay' and `Cancel' buttons), and we do a # grab_set() to make ourselves modal if modal: self.__tkroot = tkroot = Toplevel(master, class_='Pynche') tkroot.grab_set() tkroot.withdraw() else: # Is there already a default root for Tk, say because we're # running under Guido's IDE? :-) Two conditions say no, either the # import fails or _default_root is None. 
tkroot = None try: from Tkinter import _default_root tkroot = self.__tkroot = _default_root except ImportError: pass if not tkroot: tkroot = self.__tkroot = Tk(className='Pynche') # but this isn't our top level widget, so make it invisible tkroot.withdraw() # create the menubar menubar = self.__menubar = Menu(tkroot) # # File menu # filemenu = self.__filemenu = Menu(menubar, tearoff=0) filemenu.add_command(label='Load palette...', command=self.__load, underline=0) if not modal: filemenu.add_command(label='Quit', command=self.__quit, accelerator='Alt-Q', underline=0) # # View menu # views = make_view_popups(self.__sb, self.__tkroot, extrapath) viewmenu = Menu(menubar, tearoff=0) for v in views: viewmenu.add_command(label=v.menutext(), command=v.popup, underline=v.underline()) # # Help menu # helpmenu = Menu(menubar, name='help', tearoff=0) helpmenu.add_command(label='About Pynche...', command=self.__popup_about, underline=0) helpmenu.add_command(label='Help...', command=self.__popup_usage, underline=0) # # Tie them all together # menubar.add_cascade(label='File', menu=filemenu, underline=0) menubar.add_cascade(label='View', menu=viewmenu, underline=0) menubar.add_cascade(label='Help', menu=helpmenu, underline=0) # now create the top level window root = self.__root = Toplevel(tkroot, class_='Pynche', menu=menubar) root.protocol('WM_DELETE_WINDOW', modal and self.__bell or self.__quit) root.title('Pynche %s' % version) root.iconname('Pynche') # Only bind accelerators for the File->Quit menu item if running as a # standalone app if not modal: root.bind('<Alt-q>', self.__quit) root.bind('<Alt-Q>', self.__quit) else: # We're a modal dialog so we have a new row of buttons bframe = Frame(root, borderwidth=1, relief=RAISED) bframe.grid(row=4, column=0, columnspan=2, sticky='EW', ipady=5) okay = Button(bframe, text='Okay', command=self.__okay) okay.pack(side=LEFT, expand=1) cancel = Button(bframe, text='Cancel', command=self.__cancel) cancel.pack(side=LEFT, expand=1) def 
__quit(self, event=None): self.__tkroot.quit() def __bell(self, event=None): self.__tkroot.bell() def __okay(self, event=None): self.__sb.withdraw_views() self.__tkroot.grab_release() self.__quit() def __cancel(self, event=None): self.__sb.canceled() self.__okay() def __keepalive(self): # Exercise the Python interpreter regularly so keyboard interrupts get # through. self.__tkroot.tk.createtimerhandler(KEEPALIVE_TIMER, self.__keepalive) def start(self): if not self.__modal: self.__keepalive() self.__tkroot.mainloop() def window(self): return self.__root def __popup_about(self, event=None): from Main import __version__ tkMessageBox.showinfo('About Pynche ' + __version__, '''\ Pynche %s The PYthonically Natural Color and Hue Editor For information contact: Barry A. Warsaw email: bwarsaw@python.org''' % __version__) def __popup_usage(self, event=None): if not self.__helpwin: self.__helpwin = Helpwin(self.__root, self.__quit) self.__helpwin.deiconify() def __load(self, event=None): while 1: idir, ifile = os.path.split(self.__sb.colordb().filename()) file = tkFileDialog.askopenfilename( filetypes=[('Text files', '*.txt'), ('All files', '*'), ], initialdir=idir, initialfile=ifile) if not file: # cancel button return try: colordb = ColorDB.get_colordb(file) except IOError: tkMessageBox.showerror('Read error', '''\ Could not open file for reading: %s''' % file) continue if colordb is None: tkMessageBox.showerror('Unrecognized color file type', '''\ Unrecognized color file type in file: %s''' % file) continue break self.__sb.set_colordb(colordb) def withdraw(self): self.__root.withdraw() def deiconify(self): self.__root.deiconify() class Helpwin: def __init__(self, master, quitfunc): from Main import docstring self.__root = root = Toplevel(master, class_='Pynche') root.protocol('WM_DELETE_WINDOW', self.__withdraw) root.title('Pynche Help Window') root.iconname('Pynche Help Window') root.bind('<Alt-q>', quitfunc) root.bind('<Alt-Q>', quitfunc) root.bind('<Alt-w>', 
self.__withdraw) root.bind('<Alt-W>', self.__withdraw) # more elaborate help is available in the README file readmefile = os.path.join(sys.path[0], 'README') try: fp = None try: fp = open(readmefile) contents = fp.read() # wax the last page, it contains Emacs cruft i = contents.rfind('\f') if i > 0: contents = contents[:i].rstrip() finally: if fp: fp.close() except IOError: sys.stderr.write("Couldn't open Pynche's README, " 'using docstring instead.\n') contents = docstring() self.__text = text = Text(root, relief=SUNKEN, width=80, height=24) self.__text.focus_set() text.insert(0.0, contents) scrollbar = Scrollbar(root) scrollbar.pack(fill=Y, side=RIGHT) text.pack(fill=BOTH, expand=YES) text.configure(yscrollcommand=(scrollbar, 'set')) scrollbar.configure(command=(text, 'yview')) def __withdraw(self, event=None): self.__root.withdraw() def deiconify(self): self.__root.deiconify() class PopupViewer: def __init__(self, module, name, switchboard, root): self.__m = module self.__name = name self.__sb = switchboard self.__root = root self.__menutext = module.ADDTOVIEW # find the underline character underline = module.ADDTOVIEW.find('%') if underline == -1: underline = 0 else: self.__menutext = module.ADDTOVIEW.replace('%', '', 1) self.__underline = underline self.__window = None def menutext(self): return self.__menutext def underline(self): return self.__underline def popup(self, event=None): if not self.__window: # class and module must have the same name class_ = getattr(self.__m, self.__name) self.__window = class_(self.__sb, self.__root) self.__sb.add_view(self.__window) self.__window.deiconify() def __cmp__(self, other): return cmp(self.__menutext, other.__menutext) def make_view_popups(switchboard, root, extrapath): viewers = [] # where we are in the file system dirs = [os.path.dirname(__file__)] + extrapath for dir in dirs: if dir == '': dir = '.' 
for file in os.listdir(dir): if file[-9:] == 'Viewer.py': name = file[:-3] try: module = __import__(name) except ImportError: # Pynche is running from inside a package, so get the # module using the explicit path. pkg = __import__('pynche.'+name) module = getattr(pkg, name) if hasattr(module, 'ADDTOVIEW') and module.ADDTOVIEW: # this is an external viewer v = PopupViewer(module, name, switchboard, root) viewers.append(v) # sort alphabetically viewers.sort() return viewers
lgpl-2.1
krintoxi/NoobSec-Toolkit
NoobSecToolkit /tools/sqli/plugins/dbms/mysql/connector.py
10
2046
#!/usr/bin/env python """ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ try: import pymysql except ImportError: pass import logging from lib.core.data import conf from lib.core.data import logger from lib.core.exception import SqlmapConnectionException from plugins.generic.connector import Connector as GenericConnector class Connector(GenericConnector): """ Homepage: http://code.google.com/p/pymysql/ User guide: http://code.google.com/p/pymysql/ API: http://code.google.com/p/pymysql/ Debian package: <none> License: MIT Possible connectors: http://wiki.python.org/moin/MySQL """ def __init__(self): GenericConnector.__init__(self) def connect(self): self.initConnection() try: self.connector = pymysql.connect(host=self.hostname, user=self.user, passwd=self.password, db=self.db, port=self.port, connect_timeout=conf.timeout, use_unicode=True) except (pymysql.OperationalError, pymysql.InternalError), msg: raise SqlmapConnectionException(msg[1]) self.initCursor() self.printConnected() def fetchall(self): try: return self.cursor.fetchall() except pymysql.ProgrammingError, msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) return None def execute(self, query): retVal = False try: self.cursor.execute(query) retVal = True except (pymysql.OperationalError, pymysql.ProgrammingError), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1]) except pymysql.InternalError, msg: raise SqlmapConnectionException(msg[1]) self.connector.commit() return retVal def select(self, query): retVal = None if self.execute(query): retVal = self.fetchall() return retVal
gpl-2.0
DoomTaper/ptp
tests/tools/burpsuite/test_parser.py
2
4008
# -*- coding: UTF-8 -*- import mock import unittest from lxml import etree from hamcrest import assert_that, has_entry, equal_to from ptp.libptp.exceptions import NotSupportedVersionError from ptp.tools.burpsuite.parser import BurpXMLParser def lxml_etree_parse(string): return etree.fromstring(string).getroottree() class TestBurpXMLParser(unittest.TestCase): ### # BurpXMLParser.is_mine ### @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_is_mine(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_true]): BurpXMLParser.__format__ = '' self.assertTrue(BurpXMLParser.is_mine('foo', 'bar', first=True)) @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_is_not_mine(self, mock_lxml_etree_parse): with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=['foo.bar']): BurpXMLParser.__format__ = '' self.assertFalse(BurpXMLParser.is_mine('foo', 'bar', first=True)) @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_is_mine_no_version(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true stripped_report = report_true.replace('<items burpVersion="1.6.30"', '<items NOTVERSIONNEVER="1.6.30"') with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[stripped_report]): BurpXMLParser.__format__ = '' self.assertFalse(BurpXMLParser.is_mine('foo', 'bar', first=True)) @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_is_mine_version_not_supported(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true stripped_report = report_true.replace('<items burpVersion="1.6.30"', '<items burpVersion="NOTSUPPORTEDVERSION"') with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[stripped_report]): BurpXMLParser.__format__ = '' 
self.assertFalse(BurpXMLParser.is_mine('foo', 'bar', first=True)) ### # BurpXMLParser.parse_metadata ### @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_parse_metadata(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_true]): BurpXMLParser.__format__ = '' my_burp = BurpXMLParser('foo', 'bar', first=True) assert_that(my_burp.parse_metadata(), has_entry('version', '1.6.30')) @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_parse_metadata_version_not_supported(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true stripped_report = report_true.replace('<items burpVersion="1.6.30"', '<items burpVersion="NOTSUPPORTEDVERsION"') with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[stripped_report]): BurpXMLParser.__format__ = '' my_burp = BurpXMLParser('foo', 'bar', first=True) with self.assertRaises(NotSupportedVersionError): my_burp.parse_metadata() ### # BurpXMLParser.parse_report ### @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) def test_parser_burp_xml_parse_report(self, mock_lxml_etree_parse): from .burp_1_6_30_report import report_true with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_true]): BurpXMLParser.__format__ = '' my_burp = BurpXMLParser() report = my_burp.parse_report() assert_that(9, equal_to(len(report[-1]['transactions'])))
bsd-3-clause
solintegra/addons
mrp_product_variants_configurable_timing/__openerp__.py
15
1296
# -*- encoding: utf-8 -*- ############################################################################## # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## { "name": "MRP Variants Configurable Timing", "version": "1.0", "author": "OdooMRP team," "Avanzosc," "Serv. Tecnol. Avanzados - Pedro M. Baeza", "website": "http://www.odoomrp.com", "category": "Manufacturing", "depends": ["mrp_configurable_timing", "mrp_product_variants_operations"], "data": [], "auto_install": True, "installable": True, }
agpl-3.0
ygol/odoo
addons/account_payment/__init__.py
436
1279
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## #---------------------------------------------------------- # Init Sales #---------------------------------------------------------- import account_payment import wizard import account_move_line import account_invoice import report # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/django/core/management/commands/makemigrations.py
46
15083
import io import os import sys import warnings from itertools import takewhile from django.apps import apps from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db import DEFAULT_DB_ALIAS, connections, router from django.db.migrations import Migration from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import ( InteractiveMigrationQuestioner, MigrationQuestioner, NonInteractiveMigrationQuestioner, ) from django.db.migrations.state import ProjectState from django.db.migrations.utils import get_migration_name_timestamp from django.db.migrations.writer import MigrationWriter from django.utils.deprecation import RemovedInDjango20Warning from django.utils.six import iteritems from django.utils.six.moves import zip class Command(BaseCommand): help = "Creates new migration(s) for apps." def add_arguments(self, parser): parser.add_argument( 'args', metavar='app_label', nargs='*', help='Specify the app label(s) to create migrations for.', ) parser.add_argument( '--dry-run', action='store_true', dest='dry_run', default=False, help="Just show what migrations would be made; don't actually write them.", ) parser.add_argument( '--merge', action='store_true', dest='merge', default=False, help="Enable fixing of migration conflicts.", ) parser.add_argument( '--empty', action='store_true', dest='empty', default=False, help="Create an empty migration.", ) parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '-n', '--name', action='store', dest='name', default=None, help="Use this name for migration file(s).", ) parser.add_argument( '-e', '--exit', action='store_true', dest='exit_code', default=False, help='Exit with error code 1 if no changes needing migrations are found. 
' 'Deprecated, use the --check option instead.', ) parser.add_argument( '--check', action='store_true', dest='check_changes', help='Exit with a non-zero status if model changes are missing migrations.', ) def handle(self, *app_labels, **options): self.verbosity = options['verbosity'] self.interactive = options['interactive'] self.dry_run = options['dry_run'] self.merge = options['merge'] self.empty = options['empty'] self.migration_name = options['name'] self.exit_code = options['exit_code'] check_changes = options['check_changes'] if self.exit_code: warnings.warn( "The --exit option is deprecated in favor of the --check option.", RemovedInDjango20Warning ) # Make sure the app they asked for exists app_labels = set(app_labels) bad_app_labels = set() for app_label in app_labels: try: apps.get_app_config(app_label) except LookupError: bad_app_labels.add(app_label) if bad_app_labels: for app_label in bad_app_labels: self.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label) sys.exit(2) # Load the current graph state. Pass in None for the connection so # the loader doesn't try to resolve replaced migrations from DB. loader = MigrationLoader(None, ignore_no_migrations=True) # Raise an error if any migrations are applied before their dependencies. consistency_check_labels = set(config.label for config in apps.get_app_configs()) # Non-default databases are only checked if database routers used. aliases_to_check = connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS] for alias in sorted(aliases_to_check): connection = connections[alias] if (connection.settings_dict['ENGINE'] != 'django.db.backends.dummy' and any( # At least one model must be migrated to the database. 
router.allow_migrate(connection.alias, app_label, model_name=model._meta.object_name) for app_label in consistency_check_labels for model in apps.get_app_config(app_label).get_models() )): loader.check_consistent_history(connection) # Before anything else, see if there's conflicting apps and drop out # hard if there are any and they don't want to merge conflicts = loader.detect_conflicts() # If app_labels is specified, filter out conflicting migrations for unspecified apps if app_labels: conflicts = { app_label: conflict for app_label, conflict in iteritems(conflicts) if app_label in app_labels } if conflicts and not self.merge: name_str = "; ".join( "%s in %s" % (", ".join(names), app) for app, names in conflicts.items() ) raise CommandError( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (%s).\nTo fix them run " "'python manage.py makemigrations --merge'" % name_str ) # If they want to merge and there's nothing to merge, then politely exit if self.merge and not conflicts: self.stdout.write("No conflicts detected to merge.") return # If they want to merge and there is something to merge, then # divert into the merge code if self.merge and conflicts: return self.handle_merge(loader, conflicts) if self.interactive: questioner = InteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run) else: questioner = NonInteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run) # Set up autodetector autodetector = MigrationAutodetector( loader.project_state(), ProjectState.from_apps(apps), questioner, ) # If they want to make an empty migration, make one for each app if self.empty: if not app_labels: raise CommandError("You must supply at least one app label when using --empty.") # Make a fake changes() result we can pass to arrange_for_graph changes = { app: [Migration("custom", app)] for app in app_labels } changes = autodetector.arrange_for_graph( changes=changes, graph=loader.graph, 
migration_name=self.migration_name, ) self.write_migration_files(changes) return # Detect changes changes = autodetector.changes( graph=loader.graph, trim_to_apps=app_labels or None, convert_apps=app_labels or None, migration_name=self.migration_name, ) if not changes: # No changes? Tell them. if self.verbosity >= 1: if len(app_labels) == 1: self.stdout.write("No changes detected in app '%s'" % app_labels.pop()) elif len(app_labels) > 1: self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels))) else: self.stdout.write("No changes detected") if self.exit_code: sys.exit(1) else: self.write_migration_files(changes) if check_changes: sys.exit(1) def write_migration_files(self, changes): """ Takes a changes dict and writes them out as migration files. """ directory_created = {} for app_label, app_migrations in changes.items(): if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n") for migration in app_migrations: # Describe the migration writer = MigrationWriter(migration) if self.verbosity >= 1: # Display a relative path if it's below the current working # directory, or an absolute path otherwise. try: migration_string = os.path.relpath(writer.path) except ValueError: migration_string = writer.path if migration_string.startswith('..'): migration_string = writer.path self.stdout.write(" %s\n" % (self.style.MIGRATE_LABEL(migration_string),)) for operation in migration.operations: self.stdout.write(" - %s\n" % operation.describe()) if not self.dry_run: # Write the migrations file to the disk. 
migrations_directory = os.path.dirname(writer.path) if not directory_created.get(app_label): if not os.path.isdir(migrations_directory): os.mkdir(migrations_directory) init_path = os.path.join(migrations_directory, "__init__.py") if not os.path.isfile(init_path): open(init_path, "w").close() # We just do this once per app directory_created[app_label] = True migration_string = writer.as_string() with io.open(writer.path, "w", encoding='utf-8') as fh: fh.write(migration_string) elif self.verbosity == 3: # Alternatively, makemigrations --dry-run --verbosity 3 # will output the migrations to stdout rather than saving # the file to the disk. self.stdout.write(self.style.MIGRATE_HEADING( "Full migrations file '%s':" % writer.filename) + "\n" ) self.stdout.write("%s\n" % writer.as_string()) def handle_merge(self, loader, conflicts): """ Handles merging together conflicted migrations interactively, if it's safe; otherwise, advises on how to fix it. """ if self.interactive: questioner = InteractiveMigrationQuestioner() else: questioner = MigrationQuestioner(defaults={'ask_merge': True}) for app_label, migration_names in conflicts.items(): # Grab out the migrations in question, and work out their # common ancestor. 
merge_migrations = [] for migration_name in migration_names: migration = loader.get_migration(app_label, migration_name) migration.ancestry = [ mig for mig in loader.graph.forwards_plan((app_label, migration_name)) if mig[0] == migration.app_label ] merge_migrations.append(migration) def all_items_equal(seq): return all(item == seq[0] for item in seq[1:]) merge_migrations_generations = zip(*[m.ancestry for m in merge_migrations]) common_ancestor_count = sum(1 for common_ancestor_generation in takewhile(all_items_equal, merge_migrations_generations)) if not common_ancestor_count: raise ValueError("Could not find common ancestor of %s" % migration_names) # Now work out the operations along each divergent branch for migration in merge_migrations: migration.branch = migration.ancestry[common_ancestor_count:] migrations_ops = (loader.get_migration(node_app, node_name).operations for node_app, node_name in migration.branch) migration.merged_operations = sum(migrations_ops, []) # In future, this could use some of the Optimizer code # (can_optimize_through) to automatically see if they're # mergeable. For now, we always just prompt the user. if self.verbosity > 0: self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label)) for migration in merge_migrations: self.stdout.write(self.style.MIGRATE_LABEL(" Branch %s" % migration.name)) for operation in migration.merged_operations: self.stdout.write(" - %s\n" % operation.describe()) if questioner.ask_merge(app_label): # If they still want to merge it, then write out an empty # file depending on the migrations needing merging. 
numbers = [ MigrationAutodetector.parse_number(migration.name) for migration in merge_migrations ] try: biggest_number = max(x for x in numbers if x is not None) except ValueError: biggest_number = 1 subclass = type("Migration", (Migration, ), { "dependencies": [(app_label, migration.name) for migration in merge_migrations], }) migration_name = "%04i_%s" % ( biggest_number + 1, self.migration_name or ("merge_%s" % get_migration_name_timestamp()) ) new_migration = subclass(migration_name, app_label) writer = MigrationWriter(new_migration) if not self.dry_run: # Write the merge migrations file to the disk with io.open(writer.path, "w", encoding='utf-8') as fh: fh.write(writer.as_string()) if self.verbosity > 0: self.stdout.write("\nCreated new merge migration %s" % writer.path) elif self.verbosity == 3: # Alternatively, makemigrations --merge --dry-run --verbosity 3 # will output the merge migrations to stdout rather than saving # the file to the disk. self.stdout.write(self.style.MIGRATE_HEADING( "Full merge migrations file '%s':" % writer.filename) + "\n" ) self.stdout.write("%s\n" % writer.as_string())
gpl-3.0
trickv/fspot_browser
urls.py
1
1469
from django.conf.urls.defaults import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^render/(?P<photo_id>\d+)/$', 'render.views.raw'), url(r'^render/(?P<photo_id>\d+)/thumbnail/$', 'render.views.thumbnail'), url(r'^render/(?P<photo_id>\d+)/scale/(?P<request_width>\d+)/$', 'render.views.scale'), url(r'^$', 'browser.views.tag_list'), url(r'^tag/(?P<tag_id>\d+)/$', 'browser.views.tag'), url(r'^photo/(?P<photo_id>\d+)/$', 'browser.views.photo'), url(r'^time/$', 'browser.views.time'), url(r'^time/month/(?P<year_int>\d+)-(?P<month_int>\d+)/$', 'browser.views.month'), url(r'^hack/tag-best-2011/(?P<photo_id>\d+)/', 'browser.views.hack_best_2011'), url(r'^hack/add_tag/(?P<photo_id>\d+)/(?P<tag_id>\d+)/$', 'browser.views.hack_add_tag'), url(r'^hack/remove_tag/(?P<photo_id>\d+)/(?P<tag_id>\d+)/$', 'browser.views.hack_remove_tag'), url(r'^hack/remove-best-tag/(?P<photo_id>\d+)/$', 'browser.views.hack_remove_best'), # Examples: # url(r'^$', 'fspot_browser.views.home', name='home'), # url(r'^fspot_browser/', include('fspot_browser.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), )
bsd-3-clause
boneknuckleskin/libforensics
code/lf/win/shell/link/consts.py
13
3381
# Copyright 2010 Michael Murr # # This file is part of LibForensics. # # LibForensics is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # LibForensics is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with LibForensics. If not, see <http://www.gnu.org/licenses/>. """Constants for working with shell link files""" __docformat__ = "restructuredtext en" __all__ = [ "SLDF_DEFAULT", "SLDF_HAS_ID_LIST", "SLDF_HAS_LINK_INFO", "SLDF_HAS_NAME", "SLDF_HAS_RELPATH", "SLDF_HAS_WORKINGDIR", "SLDF_HAS_ARGS", "SLDF_HAS_ICONLOCATION", "SLDF_UNICODE","SLDF_FORCE_NO_LINKINFO", "SLDF_HAS_EXP_SZ", "SLDF_RUN_IN_SEPARATE", "SLDF_HAS_LOGO3ID", "SLDF_HAS_DARWINID", "SLDF_RUNAS_USER", "SLDF_HAS_EXP_ICON_SZ", "SLDF_NO_PIDL_ALIAS", "SLDF_FORCE_UNCNAME", "SLDF_RUN_WITH_SHIMLAYER", "SLDF_FORCE_NO_LINKTRACK", "SLDF_ENABLE_TARGET_METADATA", "SLDF_DISABLE_LINK_PATH_TRACKING", "SLDF_DISABLE_KNOWNFOLDER_RELATIVE_TRACKING", "SLDF_NO_KF_ALIAS", "SLDF_ALLOW_LINK_TO_LINK", "SLDF_UNALIAS_ON_SAVE", "SLDF_PREFER_ENVIRONMENT_PATH", "SLDF_KEEP_LOCAL_IDLIST_FOR_UNC_TARGET", "SLIF_HAS_VOLUME_ID_LOCAL_BASE_PATH", "SLIF_HAS_CNRL_AND_PATH_SUFFIX", "CNRLF_HAS_VALID_DEVICE", "CNRLF_HAS_VALID_NET_TYPE", "CONSOLE_PROPS_SIG", "CONSOLE_FE_PROPS_SIG", "DARWIN_PROPS_SIG", "ENVIRONMENT_PROPS_SIG", "ICON_ENVIRONMENT_PROPS_SIG", "KNOWN_FOLDER_PROPS_SIG", "PROPERTY_STORE_PROPS_SIG", "SHIM_PROPS_SIG", "SPECIAL_FOLDER_PROPS_SIG", "TRACKER_PROPS_SIG", "VISTA_AND_ABOVE_IDLIST_PROPS_SIG" ] SLDF_DEFAULT = 0 SLDF_HAS_ID_LIST = 1 SLDF_HAS_LINK_INFO = 2 SLDF_HAS_NAME = 4 
# Shell link data flags (LinkFlags), per MS-SHLLINK section 2.1.1.
# Each flag is a distinct bit; values continue from those defined above.
SLDF_HAS_RELPATH = 8
SLDF_HAS_WORKINGDIR = 0x10
SLDF_HAS_ARGS = 0x20
SLDF_HAS_ICONLOCATION = 0x40
SLDF_UNICODE = 0x80
SLDF_FORCE_NO_LINKINFO = 0x100
SLDF_HAS_EXP_SZ = 0x200
SLDF_RUN_IN_SEPARATE = 0x400
SLDF_HAS_LOGO3ID = 0x800
SLDF_HAS_DARWINID = 0x1000
SLDF_RUNAS_USER = 0x2000
SLDF_HAS_EXP_ICON_SZ = 0x4000
SLDF_NO_PIDL_ALIAS = 0x8000
SLDF_FORCE_UNCNAME = 0x10000
SLDF_RUN_WITH_SHIMLAYER = 0x20000
SLDF_FORCE_NO_LINKTRACK = 0x40000
# Fixed: was 0x800000, which collided with SLDF_ALLOW_LINK_TO_LINK and
# left bit 19 unused; MS-SHLLINK defines EnableTargetMetadata as 0x80000.
SLDF_ENABLE_TARGET_METADATA = 0x80000
SLDF_DISABLE_LINK_PATH_TRACKING = 0x100000
SLDF_DISABLE_KNOWNFOLDER_RELATIVE_TRACKING = 0x200000
SLDF_NO_KF_ALIAS = 0x400000
SLDF_ALLOW_LINK_TO_LINK = 0x800000
SLDF_UNALIAS_ON_SAVE = 0x1000000
SLDF_PREFER_ENVIRONMENT_PATH = 0x2000000
SLDF_KEEP_LOCAL_IDLIST_FOR_UNC_TARGET = 0x4000000

# LinkInfoFlags (MS-SHLLINK section 2.3); CNRL = CommonNetworkRelativeLink
SLIF_HAS_VOLUME_ID_LOCAL_BASE_PATH = 1
SLIF_HAS_CNRL_AND_PATH_SUFFIX = 2

# CommonNetworkRelativeLinkFlags (MS-SHLLINK section 2.3.2)
CNRLF_HAS_VALID_DEVICE = 1
CNRLF_HAS_VALID_NET_TYPE = 2

# Extra-data block signatures (MS-SHLLINK section 2.5)
CONSOLE_PROPS_SIG = 0xA0000002
CONSOLE_FE_PROPS_SIG = 0xA0000004
DARWIN_PROPS_SIG = 0xA0000006
ENVIRONMENT_PROPS_SIG = 0xA0000001
ICON_ENVIRONMENT_PROPS_SIG = 0xA0000007
KNOWN_FOLDER_PROPS_SIG = 0xA000000B
PROPERTY_STORE_PROPS_SIG = 0xA0000009
SHIM_PROPS_SIG = 0xA0000008
SPECIAL_FOLDER_PROPS_SIG = 0xA0000005
TRACKER_PROPS_SIG = 0xA0000003
VISTA_AND_ABOVE_IDLIST_PROPS_SIG = 0xA000000C
gpl-3.0
citrix/netscaler-ansible-modules
docs/conf.py
1
10060
# Copyright (c) 2017 Citrix Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) sys.path.insert(0, os.path.abspath('.')) VERSION = "0.1" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', ] autodoc_default_flags = ['inherited-members', 'show-inheritance'] autodoc_member_order = 'bysource' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'Citrix ADC Ansible' copyright = u'2017, Citrix Systems' author = u'Citrix Systems' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = VERSION # The full version, including alpha/beta/rc tags. release = VERSION # links to sections rst_epilog = """ """ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', '**test**'] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. 
# html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'NetscalerAnsibledoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', # Latex figure (float) alignment # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'NetscalerAnsible.tex', u'Netscaler Ansible Documentation', u'Netscaler Networks', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'netscaleransible', u'Netscaler Ansible Documentation', [author], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'NetscalerAnsible', u'Netscaler Ansible Documentation', author, 'NetscalerAnsible', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { 'requests': ('http://docs.python-requests.org/en/latest/', None) }
gpl-3.0
mottosso/mindbender-setup
bin/windows/python36/Lib/curses/textpad.py
40
7657
"""Simple textbox editing widget with Emacs-like keybindings.""" import curses import curses.ascii def rectangle(win, uly, ulx, lry, lrx): """Draw a rectangle with corners at the provided upper-left and lower-right coordinates. """ win.vline(uly+1, ulx, curses.ACS_VLINE, lry - uly - 1) win.hline(uly, ulx+1, curses.ACS_HLINE, lrx - ulx - 1) win.hline(lry, ulx+1, curses.ACS_HLINE, lrx - ulx - 1) win.vline(uly+1, lrx, curses.ACS_VLINE, lry - uly - 1) win.addch(uly, ulx, curses.ACS_ULCORNER) win.addch(uly, lrx, curses.ACS_URCORNER) win.addch(lry, lrx, curses.ACS_LRCORNER) win.addch(lry, ulx, curses.ACS_LLCORNER) class Textbox: """Editing widget using the interior of a window object. Supports the following Emacs-like key bindings: Ctrl-A Go to left edge of window. Ctrl-B Cursor left, wrapping to previous line if appropriate. Ctrl-D Delete character under cursor. Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on). Ctrl-F Cursor right, wrapping to next line when appropriate. Ctrl-G Terminate, returning the window contents. Ctrl-H Delete character backward. Ctrl-J Terminate if the window is 1 line, otherwise insert newline. Ctrl-K If line is blank, delete it, otherwise clear to end of line. Ctrl-L Refresh screen. Ctrl-N Cursor down; move down one line. Ctrl-O Insert a blank line at cursor location. Ctrl-P Cursor up; move up one line. Move operations do nothing if the cursor is at an edge where the movement is not possible. 
The following synonyms are supported where possible: KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N KEY_BACKSPACE = Ctrl-h """ def __init__(self, win, insert_mode=False): self.win = win self.insert_mode = insert_mode self._update_max_yx() self.stripspaces = 1 self.lastcmd = None win.keypad(1) def _update_max_yx(self): maxy, maxx = self.win.getmaxyx() self.maxy = maxy - 1 self.maxx = maxx - 1 def _end_of_line(self, y): """Go to the location of the first blank on the given line, returning the index of the last non-blank character.""" self._update_max_yx() last = self.maxx while True: if curses.ascii.ascii(self.win.inch(y, last)) != curses.ascii.SP: last = min(self.maxx, last+1) break elif last == 0: break last = last - 1 return last def _insert_printable_char(self, ch): self._update_max_yx() (y, x) = self.win.getyx() backyx = None while y < self.maxy or x < self.maxx: if self.insert_mode: oldch = self.win.inch() # The try-catch ignores the error we trigger from some curses # versions by trying to write into the lowest-rightmost spot # in the window. try: self.win.addch(ch) except curses.error: pass if not self.insert_mode or not curses.ascii.isprint(oldch): break ch = oldch (y, x) = self.win.getyx() # Remember where to put the cursor back since we are in insert_mode if backyx is None: backyx = y, x if backyx is not None: self.win.move(*backyx) def do_command(self, ch): "Process a single editing command." 
self._update_max_yx() (y, x) = self.win.getyx() self.lastcmd = ch if curses.ascii.isprint(ch): if y < self.maxy or x < self.maxx: self._insert_printable_char(ch) elif ch == curses.ascii.SOH: # ^a self.win.move(y, 0) elif ch in (curses.ascii.STX,curses.KEY_LEFT, curses.ascii.BS,curses.KEY_BACKSPACE): if x > 0: self.win.move(y, x-1) elif y == 0: pass elif self.stripspaces: self.win.move(y-1, self._end_of_line(y-1)) else: self.win.move(y-1, self.maxx) if ch in (curses.ascii.BS, curses.KEY_BACKSPACE): self.win.delch() elif ch == curses.ascii.EOT: # ^d self.win.delch() elif ch == curses.ascii.ENQ: # ^e if self.stripspaces: self.win.move(y, self._end_of_line(y)) else: self.win.move(y, self.maxx) elif ch in (curses.ascii.ACK, curses.KEY_RIGHT): # ^f if x < self.maxx: self.win.move(y, x+1) elif y == self.maxy: pass else: self.win.move(y+1, 0) elif ch == curses.ascii.BEL: # ^g return 0 elif ch == curses.ascii.NL: # ^j if self.maxy == 0: return 0 elif y < self.maxy: self.win.move(y+1, 0) elif ch == curses.ascii.VT: # ^k if x == 0 and self._end_of_line(y) == 0: self.win.deleteln() else: # first undo the effect of self._end_of_line self.win.move(y, x) self.win.clrtoeol() elif ch == curses.ascii.FF: # ^l self.win.refresh() elif ch in (curses.ascii.SO, curses.KEY_DOWN): # ^n if y < self.maxy: self.win.move(y+1, x) if x > self._end_of_line(y+1): self.win.move(y+1, self._end_of_line(y+1)) elif ch == curses.ascii.SI: # ^o self.win.insertln() elif ch in (curses.ascii.DLE, curses.KEY_UP): # ^p if y > 0: self.win.move(y-1, x) if x > self._end_of_line(y-1): self.win.move(y-1, self._end_of_line(y-1)) return 1 def gather(self): "Collect and return the contents of the window." 
result = "" self._update_max_yx() for y in range(self.maxy+1): self.win.move(y, 0) stop = self._end_of_line(y) if stop == 0 and self.stripspaces: continue for x in range(self.maxx+1): if self.stripspaces and x > stop: break result = result + chr(curses.ascii.ascii(self.win.inch(y, x))) if self.maxy > 0: result = result + "\n" return result def edit(self, validate=None): "Edit in the widget window and collect the results." while 1: ch = self.win.getch() if validate: ch = validate(ch) if not ch: continue if not self.do_command(ch): break self.win.refresh() return self.gather() if __name__ == '__main__': def test_editbox(stdscr): ncols, nlines = 9, 4 uly, ulx = 15, 20 stdscr.addstr(uly-2, ulx, "Use Ctrl-G to end editing.") win = curses.newwin(nlines, ncols, uly, ulx) rectangle(stdscr, uly-1, ulx-1, uly + nlines, ulx + ncols) stdscr.refresh() return Textbox(win).edit() str = curses.wrapper(test_editbox) print('Contents of text box:', repr(str))
mit
hayderimran7/tempest
tempest/api/compute/limits/test_absolute_limits.py
17
2089
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.api.compute import base
from tempest import test


class AbsoluteLimitsTestJSON(base.BaseV2ComputeTest):
    """Verify the compute absolute-limits API response is complete."""

    @classmethod
    def setup_clients(cls):
        super(AbsoluteLimitsTestJSON, cls).setup_clients()
        cls.client = cls.limits_client

    @test.idempotent_id('b54c66af-6ab6-4cf0-a9e5-a0cb58d75e0b')
    def test_absLimits_get(self):
        # Every documented absolute limit must appear in the response body.
        absolute_limits = self.client.show_limits()['absolute']
        expected_elements = ['maxImageMeta', 'maxPersonality',
                             'maxPersonalitySize',
                             'maxServerMeta', 'maxTotalCores',
                             'maxTotalFloatingIps', 'maxSecurityGroups',
                             'maxSecurityGroupRules', 'maxTotalInstances',
                             'maxTotalKeypairs', 'maxTotalRAMSize',
                             'totalCoresUsed', 'totalFloatingIpsUsed',
                             'totalSecurityGroupsUsed', 'totalInstancesUsed',
                             'totalRAMUsed']

        # Collect any expected keys the response failed to include.
        missing_elements = [
            element for element in expected_elements
            if element not in absolute_limits
        ]
        self.assertEqual(0, len(missing_elements),
                         "Failed to find element %s in absolute limits list"
                         % ', '.join(missing_elements))
apache-2.0
nickleefly/youtube-dl
youtube_dl/extractor/iwara.py
11
2611
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..compat import compat_urllib_parse_urlparse from ..utils import remove_end class IwaraIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.|ecchi\.)?iwara\.tv/videos/(?P<id>[a-zA-Z0-9]+)' _TESTS = [{ 'url': 'http://iwara.tv/videos/amVwUl1EHpAD9RD', 'md5': '1d53866b2c514b23ed69e4352fdc9839', 'info_dict': { 'id': 'amVwUl1EHpAD9RD', 'ext': 'mp4', 'title': '【MMD R-18】ガールフレンド carry_me_off', 'age_limit': 18, }, }, { 'url': 'http://ecchi.iwara.tv/videos/Vb4yf2yZspkzkBO', 'md5': '7e5f1f359cd51a027ba4a7b7710a50f0', 'info_dict': { 'id': '0B1LvuHnL-sRFNXB1WHNqbGw4SXc', 'ext': 'mp4', 'title': '[3D Hentai] Kyonyu Ã\x97 Genkai Ã\x97 Emaki Shinobi Girls.mp4', 'age_limit': 18, }, 'add_ie': ['GoogleDrive'], }, { 'url': 'http://www.iwara.tv/videos/nawkaumd6ilezzgq', 'md5': '1d85f1e5217d2791626cff5ec83bb189', 'info_dict': { 'id': '6liAP9s2Ojc', 'ext': 'mp4', 'age_limit': 0, 'title': '[MMD] Do It Again Ver.2 [1080p 60FPS] (Motion,Camera,Wav+DL)', 'description': 'md5:590c12c0df1443d833fbebe05da8c47a', 'upload_date': '20160910', 'uploader': 'aMMDsork', 'uploader_id': 'UCVOFyOSCyFkXTYYHITtqB7A', }, 'add_ie': ['Youtube'], }] def _real_extract(self, url): video_id = self._match_id(url) webpage, urlh = self._download_webpage_handle(url, video_id) hostname = compat_urllib_parse_urlparse(urlh.geturl()).hostname # ecchi is 'sexy' in Japanese age_limit = 18 if hostname.split('.')[0] == 'ecchi' else 0 entries = self._parse_html5_media_entries(url, webpage, video_id) if not entries: iframe_url = self._html_search_regex( r'<iframe[^>]+src=([\'"])(?P<url>[^\'"]+)\1', webpage, 'iframe URL', group='url') return { '_type': 'url_transparent', 'url': iframe_url, 'age_limit': age_limit, } title = remove_end(self._html_search_regex( r'<title>([^<]+)</title>', webpage, 'title'), ' | Iwara') info_dict = entries[0] info_dict.update({ 'id': video_id, 'title': title, 'age_limit': age_limit, }) return info_dict
unlicense
rhyolight/nupic.research
projects/encoder_quality/scalar_encoder_check_demo.py
12
1668
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2015, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import encoder_check import numpy as np from nupic.encoders.scalar import ScalarEncoder if __name__ == "__main__": print "Testing ScalarEncoder Quality" maxval = 100.0 minval = -100.0 Nsamples = 1000 encoder = ScalarEncoder(name="scalar", n=14, w=3, minval=minval, maxval=maxval, periodic=True, forced=True) distance_function = lambda x,y : abs(x-y) sample_generator = lambda : np.random.uniform(minval, maxval) input_pairs_source = encoder_check.InputTripleCreator(sample_generator) err = encoder_check.encoderCheck(encoder, distance_function, input_pairs_source) print "Average error: ", print err
gpl-3.0
chyeh727/django
django/contrib/gis/db/backends/base/operations.py
263
4865
class BaseSpatialOperations(object): """ This module holds the base `BaseSpatialBackend` object, which is instantiated by each spatial database backend with the features it has. """ truncate_params = {} # Quick booleans for the type of this spatial backend, and # an attribute for the spatial database version tuple (if applicable) postgis = False spatialite = False mysql = False oracle = False spatial_version = None # How the geometry column should be selected. select = None # Does the spatial database have a geometry or geography type? geography = False geometry = False area = False bounding_circle = False centroid = False difference = False distance = False distance_sphere = False distance_spheroid = False envelope = False force_rhr = False mem_size = False num_geom = False num_points = False perimeter = False perimeter3d = False point_on_surface = False polygonize = False reverse = False scale = False snap_to_grid = False sym_difference = False transform = False translate = False union = False # Aggregates disallowed_aggregates = () geom_func_prefix = '' # Mapping between Django function names and backend names, when names do not # match; used in spatial_function_name(). function_names = {} # Blacklist/set of known unsupported functions of the backend unsupported_functions = { 'Area', 'AsGeoHash', 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle', 'Centroid', 'Difference', 'Distance', 'Envelope', 'ForceRHR', 'Intersection', 'Length', 'MemSize', 'NumGeometries', 'NumPoints', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'SymDifference', 'Transform', 'Translate', 'Union', } # Serialization geohash = False geojson = False gml = False kml = False svg = False # Constructors from_text = False from_wkb = False # Default conversion functions for aggregates; will be overridden if implemented # for the spatial backend. 
def convert_extent(self, box, srid): raise NotImplementedError('Aggregate extent not implemented for this spatial backend.') def convert_extent3d(self, box, srid): raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.') def convert_geom(self, geom_val, geom_field): raise NotImplementedError('Aggregate method not implemented for this spatial backend.') # For quoting column values, rather than columns. def geo_quote_name(self, name): return "'%s'" % name # GeometryField operations def geo_db_type(self, f): """ Returns the database column type for the geometry field on the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method') def get_distance(self, f, value, lookup_type): """ Returns the distance parameters for the given geometry field, lookup value, and lookup type. """ raise NotImplementedError('Distance operations not available on this spatial backend.') def get_geom_placeholder(self, f, value, compiler): """ Returns the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method') def check_expression_support(self, expression): if isinstance(expression, self.disallowed_aggregates): raise NotImplementedError( "%s spatial aggregation is not supported by this database backend." % expression.name ) super(BaseSpatialOperations, self).check_expression_support(expression) def spatial_aggregate_name(self, agg_name): raise NotImplementedError('Aggregate support not implemented for this spatial backend.') def spatial_function_name(self, func_name): if func_name in self.unsupported_functions: raise NotImplementedError("This backend doesn't support the %s function." 
% func_name) return self.function_names.get(func_name, self.geom_func_prefix + func_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a geometry_columns() method.') def spatial_ref_sys(self): raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
bsd-3-clause
jkarnows/scikit-learn
sklearn/utils/fixes.py
29
12072
"""Compatibility fixes for older version of python, numpy and scipy If you add content to this file, please give the version of the package at which the fixe is no longer needed. """ # Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org> # Gael Varoquaux <gael.varoquaux@normalesup.org> # Fabian Pedregosa <fpedregosa@acm.org> # Lars Buitinck # # License: BSD 3 clause import inspect import warnings import sys import functools import numpy as np import scipy.sparse as sp import scipy def _parse_version(version_string): version = [] for x in version_string.split('.'): try: version.append(int(x)) except ValueError: # x may be of the form dev-1ea1592 version.append(x) return tuple(version) np_version = _parse_version(np.__version__) sp_version = _parse_version(scipy.__version__) try: from scipy.special import expit # SciPy >= 0.10 with np.errstate(invalid='ignore', over='ignore'): if np.isnan(expit(1000)): # SciPy < 0.14 raise ImportError("no stable expit in scipy.special") except ImportError: def expit(x, out=None): """Logistic sigmoid function, ``1 / (1 + exp(-x))``. See sklearn.utils.extmath.log_logistic for the log of this function. """ if out is None: out = np.empty(np.atleast_1d(x).shape, dtype=np.float64) out[:] = x # 1 / (1 + exp(-x)) = (1 + tanh(x / 2)) / 2 # This way of computing the logistic is both fast and stable. 
out *= .5 np.tanh(out, out) out += 1 out *= .5 return out.reshape(np.shape(x)) # little danse to see if np.copy has an 'order' keyword argument if 'order' in inspect.getargspec(np.copy)[0]: def safe_copy(X): # Copy, but keep the order return np.copy(X, order='K') else: # Before an 'order' argument was introduced, numpy wouldn't muck with # the ordering safe_copy = np.copy try: if (not np.allclose(np.divide(.4, 1, casting="unsafe"), np.divide(.4, 1, casting="unsafe", dtype=np.float)) or not np.allclose(np.divide(.4, 1), .4)): raise TypeError('Divide not working with dtype: ' 'https://github.com/numpy/numpy/issues/3484') divide = np.divide except TypeError: # Compat for old versions of np.divide that do not provide support for # the dtype args def divide(x1, x2, out=None, dtype=None): out_orig = out if out is None: out = np.asarray(x1, dtype=dtype) if out is x1: out = x1.copy() else: if out is not x1: out[:] = x1 if dtype is not None and out.dtype != dtype: out = out.astype(dtype) out /= x2 if out_orig is None and np.isscalar(x1): out = np.asscalar(out) return out try: np.array(5).astype(float, copy=False) except TypeError: # Compat where astype accepted no copy argument def astype(array, dtype, copy=True): if not copy and array.dtype == dtype: return array return array.astype(dtype) else: astype = np.ndarray.astype try: with warnings.catch_warnings(record=True): # Don't raise the numpy deprecation warnings that appear in # 1.9, but avoid Python bug due to simplefilter('ignore') warnings.simplefilter('always') sp.csr_matrix([1.0, 2.0, 3.0]).max(axis=0) except (TypeError, AttributeError): # in scipy < 14.0, sparse matrix min/max doesn't accept an `axis` argument # the following code is taken from the scipy 0.14 codebase def _minor_reduce(X, ufunc): major_index = np.flatnonzero(np.diff(X.indptr)) if X.data.size == 0 and major_index.size == 0: # Numpy < 1.8.0 don't handle empty arrays in reduceat value = np.zeros_like(X.data) else: value = ufunc.reduceat(X.data, 
X.indptr[major_index]) return major_index, value def _min_or_max_axis(X, axis, min_or_max): N = X.shape[axis] if N == 0: raise ValueError("zero-size array to reduction operation") M = X.shape[1 - axis] mat = X.tocsc() if axis == 0 else X.tocsr() mat.sum_duplicates() major_index, value = _minor_reduce(mat, min_or_max) not_full = np.diff(mat.indptr)[major_index] < N value[not_full] = min_or_max(value[not_full], 0) mask = value != 0 major_index = np.compress(mask, major_index) value = np.compress(mask, value) from scipy.sparse import coo_matrix if axis == 0: res = coo_matrix((value, (np.zeros(len(value)), major_index)), dtype=X.dtype, shape=(1, M)) else: res = coo_matrix((value, (major_index, np.zeros(len(value)))), dtype=X.dtype, shape=(M, 1)) return res.A.ravel() def _sparse_min_or_max(X, axis, min_or_max): if axis is None: if 0 in X.shape: raise ValueError("zero-size array to reduction operation") zero = X.dtype.type(0) if X.nnz == 0: return zero m = min_or_max.reduce(X.data.ravel()) if X.nnz != np.product(X.shape): m = min_or_max(zero, m) return m if axis < 0: axis += 2 if (axis == 0) or (axis == 1): return _min_or_max_axis(X, axis, min_or_max) else: raise ValueError("invalid axis, use 0 for rows, or 1 for columns") def sparse_min_max(X, axis): return (_sparse_min_or_max(X, axis, np.minimum), _sparse_min_or_max(X, axis, np.maximum)) else: def sparse_min_max(X, axis): return (X.min(axis=axis).toarray().ravel(), X.max(axis=axis).toarray().ravel()) try: from numpy import argpartition except ImportError: # numpy.argpartition was introduced in v 1.8.0 def argpartition(a, kth, axis=-1, kind='introselect', order=None): return np.argsort(a, axis=axis, order=order) try: from itertools import combinations_with_replacement except ImportError: # Backport of itertools.combinations_with_replacement for Python 2.6, # from Python 3.4 documentation (http://tinyurl.com/comb-w-r), copyright # Python Software Foundation (https://docs.python.org/3/license.html) def 
combinations_with_replacement(iterable, r): # combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC pool = tuple(iterable) n = len(pool) if not n and r: return indices = [0] * r yield tuple(pool[i] for i in indices) while True: for i in reversed(range(r)): if indices[i] != n - 1: break else: return indices[i:] = [indices[i] + 1] * (r - i) yield tuple(pool[i] for i in indices) try: from numpy import isclose except ImportError: def isclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False): """ Returns a boolean array where two arrays are element-wise equal within a tolerance. This function was added to numpy v1.7.0, and the version you are running has been backported from numpy v1.8.1. See its documentation for more details. """ def within_tol(x, y, atol, rtol): with np.errstate(invalid='ignore'): result = np.less_equal(abs(x - y), atol + rtol * abs(y)) if np.isscalar(a) and np.isscalar(b): result = bool(result) return result x = np.array(a, copy=False, subok=True, ndmin=1) y = np.array(b, copy=False, subok=True, ndmin=1) xfin = np.isfinite(x) yfin = np.isfinite(y) if all(xfin) and all(yfin): return within_tol(x, y, atol, rtol) else: finite = xfin & yfin cond = np.zeros_like(finite, subok=True) # Since we're using boolean indexing, x & y must be the same shape. # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in # lib.stride_tricks, though, so we can't import it here. x = x * np.ones_like(cond) y = y * np.ones_like(cond) # Avoid subtraction with infinite/nan values... cond[finite] = within_tol(x[finite], y[finite], atol, rtol) # Check for equality of infinite values... cond[~finite] = (x[~finite] == y[~finite]) if equal_nan: # Make NaN == NaN cond[np.isnan(x) & np.isnan(y)] = True return cond if np_version < (1, 7): # Prior to 1.7.0, np.frombuffer wouldn't work for empty first arg. 
def frombuffer_empty(buf, dtype): if len(buf) == 0: return np.empty(0, dtype=dtype) else: return np.frombuffer(buf, dtype=dtype) else: frombuffer_empty = np.frombuffer if np_version < (1, 8): def in1d(ar1, ar2, assume_unique=False, invert=False): # Backport of numpy function in1d 1.8.1 to support numpy 1.6.2 # Ravel both arrays, behavior for the first array could be different ar1 = np.asarray(ar1).ravel() ar2 = np.asarray(ar2).ravel() # This code is significantly faster when the condition is satisfied. if len(ar2) < 10 * len(ar1) ** 0.145: if invert: mask = np.ones(len(ar1), dtype=np.bool) for a in ar2: mask &= (ar1 != a) else: mask = np.zeros(len(ar1), dtype=np.bool) for a in ar2: mask |= (ar1 == a) return mask # Otherwise use sorting if not assume_unique: ar1, rev_idx = np.unique(ar1, return_inverse=True) ar2 = np.unique(ar2) ar = np.concatenate((ar1, ar2)) # We need this to be a stable sort, so always use 'mergesort' # here. The values from the first array should always come before # the values from the second array. 
order = ar.argsort(kind='mergesort') sar = ar[order] if invert: bool_ar = (sar[1:] != sar[:-1]) else: bool_ar = (sar[1:] == sar[:-1]) flag = np.concatenate((bool_ar, [invert])) indx = order.argsort(kind='mergesort')[:len(ar1)] if assume_unique: return flag[indx] else: return flag[indx][rev_idx] else: from numpy import in1d if sp_version < (0, 15): # Backport fix for scikit-learn/scikit-learn#2986 / scipy/scipy#4142 from ._scipy_sparse_lsqr_backport import lsqr as sparse_lsqr else: from scipy.sparse.linalg import lsqr as sparse_lsqr if sys.version_info < (2, 7, 0): # partial cannot be pickled in Python 2.6 # http://bugs.python.org/issue1398 class partial(object): def __init__(self, func, *args, **keywords): functools.update_wrapper(self, func) self.func = func self.args = args self.keywords = keywords def __call__(self, *args, **keywords): args = self.args + args kwargs = self.keywords.copy() kwargs.update(keywords) return self.func(*args, **kwargs) else: from functools import partial if np_version < (1, 6, 2): # Allow bincount to accept empty arrays # https://github.com/numpy/numpy/commit/40f0844846a9d7665616b142407a3d74cb65a040 def bincount(x, weights=None, minlength=None): if len(x) > 0: return np.bincount(x, weights, minlength) else: if minlength is None: minlength = 0 minlength = np.asscalar(np.asarray(minlength, dtype=np.intp)) return np.zeros(minlength, dtype=np.intp) else: from numpy import bincount
bsd-3-clause
mozilla/addons-server
src/olympia/addons/admin.py
4
18785
import functools from urllib.parse import urlencode, urljoin from django import http, forms from django.conf import settings from django.contrib import admin from django.core import validators from django.forms.models import modelformset_factory from django.http.response import ( HttpResponseForbidden, HttpResponseNotAllowed, HttpResponseRedirect, ) from django.shortcuts import get_object_or_404 from django.urls import re_path, resolve, reverse from django.utils.encoding import force_str from django.utils.html import format_html, format_html_join from django.utils.translation import gettext, gettext_lazy as _ import olympia.core.logger from olympia import amo from olympia.access import acl from olympia.activity.models import ActivityLog from olympia.addons.models import Addon, AddonUser from olympia.amo.utils import send_mail from olympia.files.models import File from olympia.git.models import GitExtractionEntry from olympia.ratings.models import Rating from olympia.versions.models import Version from olympia.zadmin.admin import related_content_link from . import models from .forms import AdminBaseFileFormSet, FileStatusForm log = olympia.core.logger.getLogger('z.addons.admin') class AddonUserInline(admin.TabularInline): model = AddonUser raw_id_fields = ('user',) readonly_fields = ('user_profile_link',) extra = 0 def user_profile_link(self, obj): if obj.pk: return format_html( '<a href="{}">Admin User Profile</a> ({})', reverse('admin:users_userprofile_change', args=(obj.user.pk,)), obj.user.email, ) else: return '' user_profile_link.short_description = 'User Profile' class FileInlineChecks(admin.checks.InlineModelAdminChecks): def _check_relation(self, obj, parent_model): """File doesn't have a direct FK to Addon (it's via Version) so we have to bypass this check. 
""" return [] class FileInline(admin.TabularInline): model = File extra = 0 max_num = 0 fields = ( 'created', 'version__version', 'version__channel', 'status', 'version__is_blocked', 'hash_link', ) editable_fields = ('status',) readonly_fields = tuple(set(fields) - set(editable_fields)) can_delete = False view_on_site = False template = 'admin/addons/file_inline.html' checks_class = FileInlineChecks def version__version(self, obj): return obj.version.version + (' - Deleted' if obj.version.deleted else '') version__version.short_description = 'Version' def version__channel(self, obj): return obj.version.get_channel_display() version__channel.short_description = 'Channel' def version__is_blocked(self, obj): block = self.instance.block if not (block and block.is_version_blocked(obj.version.version)): return '' url = block.get_admin_url_path() template = '<a href="{}">Blocked ({} - {})</a>' return format_html(template, url, block.min_version, block.max_version) version__is_blocked.short_description = 'Block status' def hash_link(self, obj): url = reverse('zadmin.recalc_hash', args=(obj.id,)) template = '<a href="{}" class="recalc" title="{}">Recalc Hash</a>' return format_html(template, url, obj.hash) hash_link.short_description = 'Hash' def get_formset(self, request, obj=None, **kwargs): self.instance = obj Formset = modelformset_factory( File, form=FileStatusForm, formset=AdminBaseFileFormSet, extra=self.get_extra(request, obj, **kwargs), min_num=self.get_min_num(request, obj, **kwargs), max_num=self.get_max_num(request, obj, **kwargs), ) return Formset def has_add_permission(self, request, obj=None): return False def get_queryset(self, request): self.pager = amo.utils.paginate( request, Version.unfiltered.filter(addon=self.instance).values_list('pk', flat=True), 30, ) # A list coercion so this doesn't result in a subquery with a LIMIT # which MySQL doesn't support (at this time). 
versions = list(self.pager.object_list) qs = ( super() .get_queryset(request) .filter(version__in=versions) .order_by('-version__id') ) return qs.select_related('version') class AddonAdmin(admin.ModelAdmin): class Media: css = { 'all': ( 'css/admin/l10n.css', 'css/admin/pagination.css', 'css/admin/addons.css', ) } js = ('admin/js/jquery.init.js', 'js/admin/l10n.js', 'js/admin/recalc_hash.js') list_display = ( '__str__', 'type', 'guid', 'status', 'average_daily_users', 'average_rating', 'authors_links', 'reviewer_links', ) list_filter = ('type', 'status') search_fields = ('id', '^guid', '^slug') inlines = (AddonUserInline, FileInline) readonly_fields = ( 'id', 'created', 'average_rating', 'bayesian_rating', 'guid', 'total_ratings_link', 'text_ratings_count', 'weekly_downloads', 'average_daily_users', ) fieldsets = ( ( None, { 'fields': ( 'id', 'created', 'name', 'slug', 'guid', 'default_locale', 'type', 'status', ), }, ), ( 'Details', { 'fields': ( 'summary', 'description', 'homepage', 'eula', 'privacy_policy', 'developer_comments', 'icon_type', ), }, ), ( 'Support', { 'fields': ('support_url', 'support_email'), }, ), ( 'Stats', { 'fields': ( 'total_ratings_link', 'average_rating', 'bayesian_rating', 'text_ratings_count', 'weekly_downloads', 'average_daily_users', ), }, ), ( 'Flags', { 'fields': ( 'disabled_by_user', 'requires_payment', 'is_experimental', 'reputation', ), }, ), ( 'Dictionaries and Language Packs', { 'fields': ('target_locale',), }, ), ) actions = ['git_extract_action'] def get_queryset(self, request): # We want to _unlisted_versions_exists/_listed_versions_exists to avoid # repeating that query for each add-on in the list. 
A cleaner way to do this # would be to use annotate like this: # sub_qs = Version.unfiltered.filter(addon=OuterRef('pk')).values_list('id') # (...).annotate( # _unlisted_versions_exists=Exists( # sub_qs.filter(channel=amo.RELEASE_CHANNEL_UNLISTED) # ), # _listed_versions_exists=Exists( # sub_qs.filter(channel=amo.RELEASE_CHANNEL_LISTED) # ), # ) # But while this works, the subquery is a lot less optimized (it does a full # query instead of the SELECT 1 ... LIMIT 1) and to make things worse django # admin doesn't know it's only for displayed data (it doesn't realize we aren't # filtering on it, and even if it did can't remove the annotations from the # queryset anyway) so it uses it for the count() queries as well, making them a # lot slower. subquery = ( 'SELECT 1 FROM `versions` WHERE `channel` = %s' ' AND `addon_id` = `addons`.`id` LIMIT 1' ) extra = { 'select': { '_unlisted_versions_exists': subquery, '_listed_versions_exists': subquery, }, 'select_params': ( amo.RELEASE_CHANNEL_UNLISTED, amo.RELEASE_CHANNEL_LISTED, ), } return ( Addon.unfiltered.all() .only_translations() .transform(Addon.attach_all_authors) .extra(**extra) ) def get_urls(self): def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) return functools.update_wrapper(wrapper, view) urlpatterns = super(AddonAdmin, self).get_urls() custom_urlpatterns = [ re_path( r'^(?P<object_id>.+)/git_extract/$', wrap(self.git_extract_view), name='addons_git_extract', ), ] return custom_urlpatterns + urlpatterns def authors_links(self, obj): # Note: requires .transform(Addon.attach_all_authors) to have been # applied to fill all_authors property and role on each user in it. 
authors = obj.all_authors return ( format_html( '<ul>{}</ul>', format_html_join( '', '<li><a href="{}">{} ({}{})</a></li>', ( ( urljoin( settings.EXTERNAL_SITE_URL, reverse( 'admin:users_userprofile_change', args=(author.pk,) ), ), author.email, dict(amo.AUTHOR_CHOICES_UNFILTERED)[author.role], ', Not listed' if author.listed is False else '', ) for author in authors ), ), ) if authors else '-' ) authors_links.short_description = _('Authors') def total_ratings_link(self, obj): return related_content_link( obj, Rating, 'addon', related_manager='without_replies', text=obj.total_ratings, ) total_ratings_link.short_description = _('Ratings') def reviewer_links(self, obj): links = [] # _has_listed_versions_exists and _has_unlisted_versions_exists are # provided by annotations made in get_queryset() if obj._listed_versions_exists: links.append( '<a href="{}">{}</a>'.format( urljoin( settings.EXTERNAL_SITE_URL, reverse('reviewers.review', args=['listed', obj.id]), ), _('Reviewer Tools (listed)'), ) ) if obj._unlisted_versions_exists: links.append( '<a href="{}">{}</a>'.format( urljoin( settings.EXTERNAL_SITE_URL, reverse('reviewers.review', args=['unlisted', obj.id]), ), _('Reviewer Tools (unlisted)'), ) ) return format_html('&nbsp;|&nbsp;'.join(links)) reviewer_links.short_description = _('Reviewer links') def change_view(self, request, object_id, form_url='', extra_context=None): lookup_field = Addon.get_lookup_field(object_id) if lookup_field != 'pk': addon = None try: if lookup_field in ('slug', 'guid'): addon = self.get_queryset(request).get(**{lookup_field: object_id}) except Addon.DoesNotExist: raise http.Http404 # Don't get in an infinite loop if addon.slug.isdigit(). if addon and addon.id and addon.id != object_id: url = request.path.replace(object_id, str(addon.id), 1) if request.GET: url += '?' 
+ request.GET.urlencode() return http.HttpResponsePermanentRedirect(url) return super().change_view( request, object_id, form_url, extra_context=extra_context ) def render_change_form( self, request, context, add=False, change=False, form_url='', obj=None ): context.update( { 'external_site_url': settings.EXTERNAL_SITE_URL, 'has_listed_versions': obj.has_listed_versions(include_deleted=True) if obj else False, 'has_unlisted_versions': obj.has_unlisted_versions(include_deleted=True) if obj else False, } ) return super().render_change_form( request=request, context=context, add=add, change=change, form_url=form_url, obj=obj, ) def save_model(self, request, obj, form, change): super().save_model(request, obj, form, change) if 'status' in form.changed_data: ActivityLog.create(amo.LOG.CHANGE_STATUS, obj, form.cleaned_data['status']) log.info( 'Addon "%s" status changed to: %s' % (obj.slug, form.cleaned_data['status']) ) def git_extract_action(self, request, qs): addon_ids = [] for addon in qs: GitExtractionEntry.objects.create(addon=addon) addon_ids.append(force_str(addon)) kw = {'addons': ', '.join(addon_ids)} self.message_user( request, gettext('Git extraction triggered for "%(addons)s".' 
% kw) ) git_extract_action.short_description = 'Git-Extract' def git_extract_view(self, request, object_id, extra_context=None): if request.method != 'POST': return HttpResponseNotAllowed(['POST']) if not acl.action_allowed(request, amo.permissions.ADDONS_EDIT): return HttpResponseForbidden() obj = get_object_or_404(Addon, id=object_id) self.git_extract_action(request, (obj,)) return HttpResponseRedirect( reverse('admin:addons_addon_change', args=(obj.pk,)) ) class FrozenAddonAdmin(admin.ModelAdmin): raw_id_fields = ('addon',) class ReplacementAddonForm(forms.ModelForm): def clean_path(self): path = None try: path = self.data.get('path') site = settings.SITE_URL if models.ReplacementAddon.path_is_external(path): if path.startswith(site): raise forms.ValidationError( 'Paths for [%s] should be relative, not full URLs ' 'including the domain name' % site ) validators.URLValidator()(path) else: path = ('/' if not path.startswith('/') else '') + path resolve(path) except forms.ValidationError as validation_error: # Re-raise the ValidationError about full paths for SITE_URL. raise validation_error except Exception: raise forms.ValidationError('Path [%s] is not valid' % path) return path class ReplacementAddonAdmin(admin.ModelAdmin): list_display = ('guid', 'path', 'guid_slug', '_url') form = ReplacementAddonForm def _url(self, obj): guid_param = urlencode({'guid': obj.guid}) return format_html( '<a href="{}">Test</a>', reverse('addons.find_replacement') + '?%s' % guid_param, ) def guid_slug(self, obj): try: slug = models.Addon.objects.get(guid=obj.guid).slug except models.Addon.DoesNotExist: slug = gettext('- Add-on not on AMO -') return slug def has_module_permission(self, request): # If one can see the changelist, then they have access to the module. 
return self.has_change_permission(request) def has_change_permission(self, request, obj=None): # If an obj is passed, then we're looking at the individual change page # for a replacement addon, otherwise we're looking at the list. When # looking at the list, we also allow users with Addons:Edit - they # won't be able to make any changes but they can see the list. if obj is not None: return super(ReplacementAddonAdmin, self).has_change_permission( request, obj=obj ) else: return acl.action_allowed(request, amo.permissions.ADDONS_EDIT) or super( ReplacementAddonAdmin, self ).has_change_permission(request, obj=obj) @admin.register(models.AddonRegionalRestrictions) class AddonRegionalRestrictionsAdmin(admin.ModelAdmin): list_display = ('addon__name', 'excluded_regions') fields = ('created', 'modified', 'addon', 'excluded_regions') raw_id_fields = ('addon',) readonly_fields = ('created', 'modified') def get_readonly_fields(self, request, obj=None): return self.readonly_fields + (('addon',) if obj else ()) def addon__name(self, obj): return str(obj.addon) addon__name.short_description = 'Addon' def _send_mail(self, obj, action): message = ( f'Regional restriction for addon "{obj.addon.name}" ' f'[{obj.addon.id}] {action}: {obj.excluded_regions}' ) send_mail( f'Regional Restriction {action} for Add-on', message, recipient_list=('amo-admins@mozilla.com',), ) def delete_model(self, request, obj): self._send_mail(obj, 'deleted') super().delete_model(request, obj) def save_model(self, request, obj, form, change): super().save_model(request, obj, form, change) self._send_mail(obj, 'changed' if change else 'added') admin.site.register(models.DeniedGuid) admin.site.register(models.Addon, AddonAdmin) admin.site.register(models.FrozenAddon, FrozenAddonAdmin) admin.site.register(models.ReplacementAddon, ReplacementAddonAdmin)
bsd-3-clause
drpaneas/linuxed.gr
lib/python2.7/site-packages/Crypto/SelfTest/Util/__init__.py
116
1743
# -*- coding: utf-8 -*- # # SelfTest/Util/__init__.py: Self-test for utility modules # # Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Self-test for utility modules""" __revision__ = "$Id$" import os def get_tests(config={}): tests = [] if os.name == 'nt': from Crypto.SelfTest.Util import test_winrandom; tests += test_winrandom.get_tests(config=config) from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) return tests if __name__ == '__main__': import unittest suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
mit
ne9een/Movie-Trailer-Website
entertainment_centre.py
1
1894
import media import fresh_tomatoes print (media.Movie.__doc__) # Instantiate of class Movie inglourious_basterds = media.Movie("Inglourious Basterds", "Story of war between France and Germany", "http://goo.gl/rz9p8v", "https://goo.gl/QkVIuK") in_better_world = media.Movie("In a Better World", "The lives of two Danish" "families cross each other, and an extraordinary" "but risky friendship comes into bud. But" "loneliness, frailty and sorrow lie in wait.", "http://goo.gl/SHltht", "https://www.youtube.com/watch?v=ava0Rn8nrVs") a_second_chance = media.Movie("A Second Chance", "How far would decent human" "beings be willing to go, when tragedy blurs" "the line between just and unjust?", "http://goo.gl/z01Ydr", "https://www.youtube.com/watch?v=yDQ7mX3SA80") secretary = media.Movie("Secretary", "A young woman, recently released from" "a mental hospital, gets a job as a secretary to a" "demanding lawyer, where their employer-employee " "relationship turns into a sexual," "sadomasochistic one.", "http://goo.gl/TlC6Dr", "https://www.youtube.com/watch?v=AFma24S-Uvw") # Saving list of all movies in "movies" movies = [inglourious_basterds, in_better_world, a_second_chance, secretary] # Calling the open_movie_page function from fresh_tomatoes # class to open up the html containig my favoirte movies # together with trailer. fresh_tomatoes.open_movies_page(movies)
unlicense
theherk/django-theherk-resources
setup.py
1
1124
import os from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='django-theherk-resources', version='1.6', packages=find_packages(), include_package_data=True, install_requires=[ 'django-localflavor', ], license='see file LICENSE', description='Django app for keeping data on organizations and people.', long_description=read('README.md'), url='https://github.com/theherk/django-theherk-resources', download_url='https://github.com/theherk/django-theherk-resources/archive/1.6.zip', author='Adam Sherwood', author_email='theherk@gmail.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
bsd-3-clause
Spoken-tutorial/spoken-website
donate/views.py
1
13338
from config import TARGET, CHANNEL_ID, CHANNEL_KEY, EXPIRY_DAYS from .helpers import PURPOSE from django.shortcuts import render from creation.models import FossCategory, Language from cms.models import Profile from django.contrib.auth.decorators import login_required from django.contrib.auth import authenticate, login, logout from django.core.exceptions import PermissionDenied from django.shortcuts import render, redirect from django.http import HttpResponse from django.template.context_processors import csrf from donate.forms import PayeeForm, TransactionForm from donate.models import * from cms.views import create_profile, email_otp,send_registration_confirmation from django import forms from django.views.decorators.csrf import csrf_protect, csrf_exempt from django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic import CreateView, DetailView from certificate.views import _clean_certificate_certificate from django.urls import reverse_lazy from cdcontent.forms import CDContentForm from cdcontent.views import internal_computation,is_organizer_paid from django.contrib import messages from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned from datetime import datetime from datetime import timedelta, date from events import display import requests import json from string import Template import subprocess import os from events.models import AcademicKey import random from training.views import reg_success from .models import * from .forms import * # @csrf_exempt # def donatehome(request): # form = PayeeForm(initial={'country': 'India'}) # if request.method == 'POST': # type = request.POST.get("type", "") # amount = request.POST.get("amount", "") # if type == 'initiate': # form.fields['amount'].widget = forms.NumberInput(attrs={'min': amount, 'step': 50.00}) # form.initial = {'amount': amount} # else: # form = DonateForm(initial={'country': 'India', 'amount': 50.00}) # form.fields['amount'].widget = 
forms.NumberInput(attrs={'min': 50.00, 'step': 50.00}) # context = { # 'form': form # } # context.update(csrf(request)) # # return render(request, 'donate/templates/cd_payment_success.html', context) # return render(request, 'donate/donate.html', context) @csrf_exempt def donatehome(request): form = DonateForm() if request.method == 'POST': type = request.POST.get("type", "") amount = request.POST.get("amount", "") if type == 'initiate': form.fields['amount'].widget = forms.NumberInput(attrs={'min': amount, 'step': 50.00}) form.initial = {'amount': amount} else: initial = {'amount': 500} form = DonateForm(initial = initial) context = { 'form': form } context.update(csrf(request)) # return render(request, 'donate/templates/cd_payment_success.html', context) return render(request, 'donate/donate.html', context) @csrf_exempt def purchase(request): form = GoodiesForm() if request.method == 'POST': type = request.POST.get("type", "") amount = request.POST.get("amount", "") if type == 'initiate': form.fields['amount'].widget = forms.NumberInput(attrs={'min': amount, 'step': 50.00}) form.initial = {'amount': amount} else: initial = {'amount': 1000} form = GoodiesForm(initial=initial) context = { 'form': form } context.update(csrf(request)) # return render(request, 'donate/templates/cd_payment_success.html', context) return render(request, 'donate/purchase.html', context) @csrf_exempt def pay_now(request, purpose): if request.method=='POST': if 'Donate' in purpose: form = DonateForm(request.POST) if 'Goodie' in purpose: form = GoodiesForm(request.POST) if form.is_valid(): form.save(commit=False) form.reqId = CHANNEL_ID+str(display.value(datetime.now().strftime('%Y%m%d%H%M%S'))[0:20]) obj = form.save() data = get_final_data(request, obj, purpose) else: messages.errors(request,'Invalid Form') return render(request, 'payment_status.html', data) @csrf_exempt def form_valid(request, form, purpose): """ If the form is valid, save the associated model. 
""" form_data = form.save(commit=False) form_data.reqId = CHANNEL_ID+str(display.value(datetime.now().strftime('%Y%m%d%H%M%S'))[0:20]) form_data.user = request.user form_data.status = 0 form_data.expiry = calculate_expiry() form_data.purpose = purpose form_data.save() payee_obj = form_data foss_ids = form.cleaned_data.get('foss_id') languages = form.cleaned_data.get('language_id') level_ids = form.cleaned_data.get('level_id') fosses = foss_ids.split(',') foss_languages = languages.split(',|') levels = level_ids.split(',') payee_id = payee_obj.pk foss_level = 0 for i in range(len(fosses)): foss_category = FossCategory.objects.get(pk=int(fosses[i])) if int(levels[i]): foss_level = Level.objects.get(pk=int(levels[i])) languages = foss_languages[i].split(',') try: custom_lang = request.POST.get('foss_language') languages = languages + [str(custom_lang)] except: # this is coming from Events' page pass for language in languages: if language not in ('','None'): foss_language = Language.objects.get(pk=int(language)) cd_foss_langs = CdFossLanguages() cd_foss_langs.payment = Payee.objects.get(pk=payee_id) cd_foss_langs.foss = foss_category cd_foss_langs.lang = foss_language if foss_level: cd_foss_langs.level = foss_level cd_foss_langs.save() form.save_m2m() return payee_obj @csrf_exempt def form_invalid(request, form): """ If the form is invalid, re-render the context data with the data-filled form and errors. 
""" messages.warning(request, 'Invalid form payment request.') return redirect('cdcontent:cdcontenthome') @csrf_exempt def controller(request, purpose): form = PayeeForm(request.POST) if request.method == 'POST': if form.is_valid(): payee_obj_new = form_valid(request, form, purpose) else: form_invalid(request, form) if purpose != 'cdcontent': participant_form = reg_success(request, 'general') participant_form.payment_status = payee_obj_new try : participant_form.save() except : return redirect('training:list_events', status='myevents') data = get_final_data(request, payee_obj_new, purpose) return render(request, 'payment_status.html', data) @csrf_exempt def calculate_expiry(): return datetime.now() + timedelta(days = EXPIRY_DAYS) @csrf_exempt def encrypted_data(request, obj, purpose): STdata = '' user_name = obj.name amount = obj.amount #amount = 1.00 purpose = purpose+"NEW"+str(obj.pk) request_id = obj.reqId STdata = request_id + str(request.user.id) + str(user_name) + str(amount) + purpose + CHANNEL_ID + CHANNEL_KEY s = display.value(str(STdata)) return s @csrf_exempt def get_final_data(request, obj, purpose): data = { 'reqId' : obj.reqId, 'userId': str(request.user.id), 'name': obj.name, 'amount': obj.amount, #'amount': 1.00, 'purpose': purpose+"NEW"+str(obj.pk) , 'channelId': CHANNEL_ID, 'target': TARGET, 'random': encrypted_data(request, obj, purpose) } return data @csrf_protect def send_onetime(request): from django.core.validators import validate_email from django.core.exceptions import ValidationError from django.core.validators import EmailValidator context = {} user_name = request.POST.get('username') email = request.POST.get('email') temp = user_name.split(" ") try: fname, lname = temp[0], temp[1] except: fname, lname = user_name, "" password = fname+'@ST'+str(random.random()).split('.')[1][:5] # validate email try: validate_email( email ) context['email_validation']="" context['valid_email']='1' except ValidationError as e: context['valid_email']='0' 
try: context['email_validation']=e.messages[0] except: context['email_validation']="Please Enter Valid Email" try: user = User.objects.get(email=email) if user.is_active: context['message'] = "active_user" else: send_registration_confirmation(user) context['message'] = "inactive_user" except MultipleObjectsReturned as e: pass except ObjectDoesNotExist: user = User.objects.create_user(email, email, password) user.first_name = fname user.last_name = lname user.is_active = False user.save() create_profile(user, '') email_otp(user) context['message'] = "new" return HttpResponse(json.dumps(context), content_type='application/json') @csrf_exempt def validate_user(request): context = {} user_name = request.POST.get('username') email = request.POST.get('email') password = request.POST.get('password') if email and password: user = authenticate(username=email, password=password) if user is not None: if user.is_active: login(request, user) msg = '' context['organizer_paid'] = is_organizer_paid(request) else: msg = "Your account is disabled.<br>\ Kindly activate your account by clicking on the activation link which has been sent to your registered email %s.<br>\ In case if you do not receive any activation mail kindly verify and activate your account from below link :<br>\ <a href='https://spoken-tutorial.org/accounts/verify/'>https://spoken-tutorial.org/accounts/verify/</a>"% (user.email) else: msg = 'Invalid username / password' else: msg = 'Please enter username and Password' context['msg']=msg return HttpResponse(json.dumps(context), content_type='application/json') @csrf_exempt def validate(request): context = {} user_pass = request.POST.get("otp") email = request.POST.get("email") user = User.objects.get(email=email) profile = Profile.objects.get(user=user) print(profile.confirmation_code, " - ", user_pass) if profile.confirmation_code == user_pass: user.is_active = True user.save() user.backend = 'django.contrib.auth.backends.ModelBackend' login(request, user) 
context['validate'] = "success" else: context["validate"] = "fail" return HttpResponse(json.dumps(context), content_type='application/json') def receipt(request): response = HttpResponse(content_type='application/pdf') file_name = request.POST.get("name").split(" ")[0] try: download_file_name = None template = 'receipt_template' download_file_name = "ST_" + file_name + '.pdf' certificate_path = os.path.dirname(os.path.realpath(__file__))+"/receipt/" template_file = open('{0}{1}'.format (certificate_path, template), 'r') content = Template(template_file.read()) template_file.close() content_tex = content.safe_substitute( image_dir = certificate_path+"images/", name = request.POST.get("name"), amount = request.POST.get("amount"), reqId = request.POST.get("reqId"), transId = request.POST.get("transId"), refNo = request.POST.get("refNo"), provId = request.POST.get("provId"), status = request.POST.get("status"), msg = request.POST.get("msg"), expiry = request.POST.get("expiry"), email = request.POST.get("email")) create_tex = open('{0}{1}.tex'.format (certificate_path, file_name), 'w') create_tex.write(content_tex) create_tex.close() out = certificate_path command = 'user_receipt' process = subprocess.Popen('make -C {0} {1} file_name={2}'.format(certificate_path, command, file_name), stderr=subprocess.PIPE, shell=True) err = process.communicate()[1] if process.returncode == 0: pdf = open('{0}{1}.pdf'.format(certificate_path, file_name), 'rb') response['Content-Disposition'] = 'attachment; \ filename=%s' % (download_file_name) response.write(pdf.read()) clean_process = subprocess.Popen('make -C {0} clean file_name={1}'.format( certificate_path, file_name), shell=True) return response except Exception as e: print("error is ",e) return response
gpl-3.0
OpenPymeMx/OCB
addons/account/report/account_partner_ledger.py
14
13132
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time import re from openerp.report import report_sxw from common_report_header import common_report_header from openerp import SUPERUSER_ID from openerp.tools.translate import _ class third_party_ledger(report_sxw.rml_parse, common_report_header): def __init__(self, cr, uid, name, context=None): super(third_party_ledger, self).__init__(cr, uid, name, context=context) self.init_bal_sum = 0.0 self.localcontext.update({ 'time': time, 'lines': self.lines, 'sum_debit_partner': self._sum_debit_partner, 'sum_credit_partner': self._sum_credit_partner, 'get_currency': self._get_currency, 'get_start_period': self.get_start_period, 'get_end_period': self.get_end_period, 'get_account': self._get_account, 'get_filter': self._get_filter, 'get_start_date': self._get_start_date, 'get_end_date': self._get_end_date, 'get_fiscalyear': self._get_fiscalyear, 'get_journal': self._get_journal, 'get_partners':self._get_partners, 'get_intial_balance':self._get_intial_balance, 'display_initial_balance':self._display_initial_balance, 
'display_currency':self._display_currency, 'get_target_move': self._get_target_move, }) def _get_filter(self, data): if data['form']['filter'] == 'unreconciled': return _('Unreconciled Entries') return super(third_party_ledger, self)._get_filter(data) def set_context(self, objects, data, ids, report_type=None): obj_move = self.pool.get('account.move.line') obj_partner = self.pool.get('res.partner') self.query = obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {})) ctx2 = data['form'].get('used_context',{}).copy() self.initial_balance = data['form'].get('initial_balance', True) if self.initial_balance: ctx2.update({'initial_bal': True}) self.init_query = obj_move._query_get(self.cr, self.uid, obj='l', context=ctx2) self.reconcil = True if data['form']['filter'] == 'unreconciled': self.reconcil = False self.result_selection = data['form'].get('result_selection', 'customer') self.amount_currency = data['form'].get('amount_currency', False) self.target_move = data['form'].get('target_move', 'all') PARTNER_REQUEST = '' move_state = ['draft','posted'] if self.target_move == 'posted': move_state = ['posted'] if self.result_selection == 'supplier': self.ACCOUNT_TYPE = ['payable'] elif self.result_selection == 'customer': self.ACCOUNT_TYPE = ['receivable'] else: self.ACCOUNT_TYPE = ['payable','receivable'] self.cr.execute( "SELECT a.id " \ "FROM account_account a " \ "LEFT JOIN account_account_type t " \ "ON (a.type=t.code) " \ 'WHERE a.type IN %s' \ "AND a.active", (tuple(self.ACCOUNT_TYPE), )) self.account_ids = [a for (a,) in self.cr.fetchall()] params = [tuple(move_state), tuple(self.account_ids)] #if we print from the partners, add a clause on active_ids if (data['model'] == 'res.partner') and ids: PARTNER_REQUEST = "AND l.partner_id IN %s" params += [tuple(ids)] reconcile = "" if self.reconcil else "AND l.reconcile_id IS NULL " self.cr.execute( "SELECT DISTINCT l.partner_id " \ "FROM account_move_line AS l, account_account AS 
account, " \ " account_move AS am " \ "WHERE l.partner_id IS NOT NULL " \ "AND l.account_id = account.id " \ "AND am.id = l.move_id " \ "AND am.state IN %s" "AND " + self.query +" " \ "AND l.account_id IN %s " \ " " + PARTNER_REQUEST + " " \ "AND account.active " + reconcile + " ", params) self.partner_ids = [res['partner_id'] for res in self.cr.dictfetchall()] objects = obj_partner.browse(self.cr, SUPERUSER_ID, self.partner_ids) objects.sort(key=lambda x: (x.ref, x.name)) return super(third_party_ledger, self).set_context(objects, data, self.partner_ids, report_type) def lines(self, partner): move_state = ['draft','posted'] if self.target_move == 'posted': move_state = ['posted'] full_account = [] if self.reconcil: RECONCILE_TAG = " " else: RECONCILE_TAG = "AND l.reconcile_id IS NULL" self.cr.execute( "SELECT l.id, l.date, j.code, acc.code as a_code, acc.name as a_name, l.ref, m.name as move_name, l.name, l.debit, l.credit, l.amount_currency,l.currency_id, c.symbol AS currency_code " \ "FROM account_move_line l " \ "LEFT JOIN account_journal j " \ "ON (l.journal_id = j.id) " \ "LEFT JOIN account_account acc " \ "ON (l.account_id = acc.id) " \ "LEFT JOIN res_currency c ON (l.currency_id=c.id)" \ "LEFT JOIN account_move m ON (m.id=l.move_id)" \ "WHERE l.partner_id = %s " \ "AND l.account_id IN %s AND " + self.query +" " \ "AND m.state IN %s " \ " " + RECONCILE_TAG + " "\ "ORDER BY l.date", (partner.id, tuple(self.account_ids), tuple(move_state))) res = self.cr.dictfetchall() sum = 0.0 if self.initial_balance: sum = self.init_bal_sum for r in res: sum += r['debit'] - r['credit'] r['progress'] = sum full_account.append(r) return full_account def _get_intial_balance(self, partner): move_state = ['draft','posted'] if self.target_move == 'posted': move_state = ['posted'] if self.reconcil: RECONCILE_TAG = " " else: RECONCILE_TAG = "AND l.reconcile_id IS NULL" self.cr.execute( "SELECT COALESCE(SUM(l.debit),0.0), COALESCE(SUM(l.credit),0.0), COALESCE(sum(debit-credit), 0.0) 
" \ "FROM account_move_line AS l, " \ "account_move AS m " "WHERE l.partner_id = %s " \ "AND m.id = l.move_id " \ "AND m.state IN %s " "AND account_id IN %s" \ " " + RECONCILE_TAG + " "\ "AND " + self.init_query + " ", (partner.id, tuple(move_state), tuple(self.account_ids))) res = self.cr.fetchall() self.init_bal_sum = res[0][2] return res def _sum_debit_partner(self, partner): move_state = ['draft','posted'] if self.target_move == 'posted': move_state = ['posted'] result_tmp = 0.0 result_init = 0.0 if self.reconcil: RECONCILE_TAG = " " else: RECONCILE_TAG = "AND reconcile_id IS NULL" if self.initial_balance: self.cr.execute( "SELECT sum(debit) " \ "FROM account_move_line AS l, " \ "account_move AS m " "WHERE l.partner_id = %s" \ "AND m.id = l.move_id " \ "AND m.state IN %s " "AND account_id IN %s" \ " " + RECONCILE_TAG + " " \ "AND " + self.init_query + " ", (partner.id, tuple(move_state), tuple(self.account_ids))) contemp = self.cr.fetchone() if contemp != None: result_init = contemp[0] or 0.0 else: result_init = result_tmp + 0.0 self.cr.execute( "SELECT sum(debit) " \ "FROM account_move_line AS l, " \ "account_move AS m " "WHERE l.partner_id = %s " \ "AND m.id = l.move_id " \ "AND m.state IN %s " "AND account_id IN %s" \ " " + RECONCILE_TAG + " " \ "AND " + self.query + " ", (partner.id, tuple(move_state), tuple(self.account_ids),)) contemp = self.cr.fetchone() if contemp != None: result_tmp = contemp[0] or 0.0 else: result_tmp = result_tmp + 0.0 return result_tmp + result_init def _sum_credit_partner(self, partner): move_state = ['draft','posted'] if self.target_move == 'posted': move_state = ['posted'] result_tmp = 0.0 result_init = 0.0 if self.reconcil: RECONCILE_TAG = " " else: RECONCILE_TAG = "AND reconcile_id IS NULL" if self.initial_balance: self.cr.execute( "SELECT sum(credit) " \ "FROM account_move_line AS l, " \ "account_move AS m " "WHERE l.partner_id = %s" \ "AND m.id = l.move_id " \ "AND m.state IN %s " "AND account_id IN %s" \ " " + RECONCILE_TAG 
+ " " \ "AND " + self.init_query + " ", (partner.id, tuple(move_state), tuple(self.account_ids))) contemp = self.cr.fetchone() if contemp != None: result_init = contemp[0] or 0.0 else: result_init = result_tmp + 0.0 self.cr.execute( "SELECT sum(credit) " \ "FROM account_move_line AS l, " \ "account_move AS m " "WHERE l.partner_id=%s " \ "AND m.id = l.move_id " \ "AND m.state IN %s " "AND account_id IN %s" \ " " + RECONCILE_TAG + " " \ "AND " + self.query + " ", (partner.id, tuple(move_state), tuple(self.account_ids),)) contemp = self.cr.fetchone() if contemp != None: result_tmp = contemp[0] or 0.0 else: result_tmp = result_tmp + 0.0 return result_tmp + result_init def _get_partners(self): # TODO: deprecated, to remove in trunk if self.result_selection == 'customer': return _('Receivable Accounts') elif self.result_selection == 'supplier': return _('Payable Accounts') elif self.result_selection == 'customer_supplier': return _('Receivable and Payable Accounts') return '' def _sum_currency_amount_account(self, account, form): self._set_get_account_currency_code(account.id) self.cr.execute("SELECT sum(aml.amount_currency) FROM account_move_line as aml,res_currency as rc WHERE aml.currency_id = rc.id AND aml.account_id= %s ", (account.id,)) total = self.cr.fetchone() if self.account_currency: return_field = str(total[0]) + self.account_currency return return_field else: currency_total = self.tot_currency = 0.0 return currency_total def _display_initial_balance(self, data): if self.initial_balance: return True return False def _display_currency(self, data): if self.amount_currency: return True return False report_sxw.report_sxw('report.account.third_party_ledger', 'res.partner', 'addons/account/report/account_partner_ledger.rml',parser=third_party_ledger, header='internal') report_sxw.report_sxw('report.account.third_party_ledger_other', 'res.partner', 'addons/account/report/account_partner_ledger_other.rml',parser=third_party_ledger, header='internal') # 
vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Friedbaumer/litecoin
test/functional/test_framework/blocktools.py
53
3932
#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Utilities for manipulating blocks and transactions.""" from .mininode import * from .script import CScript, OP_TRUE, OP_CHECKSIG, OP_RETURN # Create a block (with regtest difficulty) def create_block(hashprev, coinbase, nTime=None): block = CBlock() if nTime is None: import time block.nTime = int(time.time()+600) else: block.nTime = nTime block.hashPrevBlock = hashprev block.nBits = 0x207fffff # Will break after a difficulty adjustment... block.vtx.append(coinbase) block.hashMerkleRoot = block.calc_merkle_root() block.calc_sha256() return block # From BIP141 WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed" def get_witness_script(witness_root, witness_nonce): witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce))) output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment) return CScript([OP_RETURN, output_data]) # According to BIP141, blocks with witness rules active must commit to the # hash of all in-block transactions including witness. def add_witness_commitment(block, nonce=0): # First calculate the merkle root of the block's # transactions, with witnesses. witness_nonce = nonce witness_root = block.calc_witness_merkle_root() # witness_nonce should go to coinbase witness. 
block.vtx[0].wit.vtxinwit = [CTxInWitness()] block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)] # witness commitment is the last OP_RETURN output in coinbase block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce))) block.vtx[0].rehash() block.hashMerkleRoot = block.calc_merkle_root() block.rehash() def serialize_script_num(value): r = bytearray(0) if value == 0: return r neg = value < 0 absvalue = -value if neg else value while (absvalue): r.append(int(absvalue & 0xff)) absvalue >>= 8 if r[-1] & 0x80: r.append(0x80 if neg else 0) elif neg: r[-1] |= 0x80 return r # Create a coinbase transaction, assuming no miner fees. # If pubkey is passed in, the coinbase output will be a P2PK output; # otherwise an anyone-can-spend output. def create_coinbase(height, pubkey = None): coinbase = CTransaction() coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), ser_string(serialize_script_num(height)), 0xffffffff)) coinbaseoutput = CTxOut() coinbaseoutput.nValue = 50 * COIN halvings = int(height/150) # regtest coinbaseoutput.nValue >>= halvings if (pubkey != None): coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG]) else: coinbaseoutput.scriptPubKey = CScript([OP_TRUE]) coinbase.vout = [ coinbaseoutput ] coinbase.calc_sha256() return coinbase # Create a transaction. # If the scriptPubKey is not specified, make it anyone-can-spend. 
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()): tx = CTransaction() assert(n < len(prevtx.vout)) tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff)) tx.vout.append(CTxOut(value, scriptPubKey)) tx.calc_sha256() return tx def get_legacy_sigopcount_block(block, fAccurate=True): count = 0 for tx in block.vtx: count += get_legacy_sigopcount_tx(tx, fAccurate) return count def get_legacy_sigopcount_tx(tx, fAccurate=True): count = 0 for i in tx.vout: count += i.scriptPubKey.GetSigOpCount(fAccurate) for j in tx.vin: # scriptSig might be of type bytes, so convert to CScript for the moment count += CScript(j.scriptSig).GetSigOpCount(fAccurate) return count
mit
Perferom/android_external_chromium_org
remoting/tools/build/remoting_copy_locales.py
142
5150
#!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Helper script to repack paks for a list of locales. Gyp doesn't have any built-in looping capability, so this just provides a way to loop over a list of locales when repacking pak files, thus avoiding a proliferation of mostly duplicate, cut-n-paste gyp actions. """ import optparse import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'tools', 'grit')) from grit.format import data_pack # Some build paths defined by gyp. GRIT_DIR = None INT_DIR = None # The target platform. If it is not defined, sys.platform will be used. OS = None # Extra input files. EXTRA_INPUT_FILES = [] class Usage(Exception): def __init__(self, msg): self.msg = msg def calc_output(locale): """Determine the file that will be generated for the given locale.""" #e.g. '<(INTERMEDIATE_DIR)/remoting_locales/da.pak', if OS == 'mac' or OS == 'ios': # For Cocoa to find the locale at runtime, it needs to use '_' instead # of '-' (http://crbug.com/20441). return os.path.join(INT_DIR, 'remoting', 'resources', '%s.lproj' % locale.replace('-', '_'), 'locale.pak') else: return os.path.join(INT_DIR, 'remoting_locales', locale + '.pak') def calc_inputs(locale): """Determine the files that need processing for the given locale.""" inputs = [] #e.g. '<(grit_out_dir)/remoting/resources/da.pak' inputs.append(os.path.join(GRIT_DIR, 'remoting/resources/%s.pak' % locale)) # Add any extra input files. for extra_file in EXTRA_INPUT_FILES: inputs.append('%s_%s.pak' % (extra_file, locale)) return inputs def list_outputs(locales): """Returns the names of files that will be generated for the given locales. This is to provide gyp the list of output files, so build targets can properly track what needs to be built. 
""" outputs = [] for locale in locales: outputs.append(calc_output(locale)) # Quote each element so filename spaces don't mess up gyp's attempt to parse # it into a list. return " ".join(['"%s"' % x for x in outputs]) def list_inputs(locales): """Returns the names of files that will be processed for the given locales. This is to provide gyp the list of input files, so build targets can properly track their prerequisites. """ inputs = [] for locale in locales: inputs += calc_inputs(locale) # Quote each element so filename spaces don't mess up gyp's attempt to parse # it into a list. return " ".join(['"%s"' % x for x in inputs]) def repack_locales(locales): """ Loop over and repack the given locales.""" for locale in locales: inputs = calc_inputs(locale) output = calc_output(locale) data_pack.DataPack.RePack(output, inputs) def DoMain(argv): global GRIT_DIR global INT_DIR global OS global EXTRA_INPUT_FILES parser = optparse.OptionParser("usage: %prog [options] locales") parser.add_option("-i", action="store_true", dest="inputs", default=False, help="Print the expected input file list, then exit.") parser.add_option("-o", action="store_true", dest="outputs", default=False, help="Print the expected output file list, then exit.") parser.add_option("-g", action="store", dest="grit_dir", help="GRIT build files output directory.") parser.add_option("-x", action="store", dest="int_dir", help="Intermediate build files output directory.") parser.add_option("-e", action="append", dest="extra_input", default=[], help="Full path to an extra input pak file without the\ locale suffix and \".pak\" extension.") parser.add_option("-p", action="store", dest="os", help="The target OS. (e.g. 
mac, linux, win, etc.)") options, locales = parser.parse_args(argv) if not locales: parser.error('Please specificy at least one locale to process.\n') print_inputs = options.inputs print_outputs = options.outputs GRIT_DIR = options.grit_dir INT_DIR = options.int_dir EXTRA_INPUT_FILES = options.extra_input OS = options.os if not OS: if sys.platform == 'darwin': OS = 'mac' elif sys.platform.startswith('linux'): OS = 'linux' elif sys.platform in ('cygwin', 'win32'): OS = 'win' else: OS = sys.platform if print_inputs and print_outputs: parser.error('Please specify only one of "-i" or "-o".\n') if print_inputs and not GRIT_DIR: parser.error('Please specify "-g".\n') if print_outputs and not INT_DIR: parser.error('Please specify "-x".\n') if not (print_inputs or print_outputs or (GRIT_DIR and INT_DIR)): parser.error('Please specify both "-g" and "-x".\n') if print_inputs: return list_inputs(locales) if print_outputs: return list_outputs(locales) return repack_locales(locales) if __name__ == '__main__': results = DoMain(sys.argv[1:]) if results: print results
bsd-3-clause