repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
BehavioralInsightsTeam/edx-platform | common/lib/xmodule/xmodule/tests/test_word_cloud.py | 13 | 1806 | # -*- coding: utf-8 -*-
"""Test for Word cloud Xmodule functional logic."""
from webob.multidict import MultiDict
from xmodule.word_cloud_module import WordCloudDescriptor
from . import LogicTest
class WordCloudModuleTest(LogicTest):
    """Logic tests for the Word Cloud XModule AJAX handlers."""
    shard = 1
    descriptor_class = WordCloudDescriptor

    raw_field_data = {
        'all_words': {'cat': 10, 'dog': 5, 'mom': 1, 'dad': 2},
        'top_words': {'cat': 10, 'dog': 5, 'dad': 2},
        'submitted': False
    }

    def test_bad_ajax_request(self):
        """An unknown dispatch name must produce a JSON error payload."""
        resp = self.ajax_request('bad_dispatch', {})
        expected_error = {
            'status': 'fail',
            'error': 'Unknown Command!'
        }
        self.assertDictEqual(resp, expected_error)

    def test_good_ajax_request(self):
        """A valid 'submit' dispatch updates per-word counts and totals."""
        submitted = ['cat', 'cat', 'dog', 'sun']
        post_data = MultiDict([('student_words[]', word) for word in submitted])

        resp = self.ajax_request('submit', post_data)

        self.assertEqual(resp['status'], 'success')
        self.assertEqual(resp['submitted'], True)
        self.assertEqual(resp['total_count'], 22)
        self.assertDictEqual(
            resp['student_words'],
            {'sun': 1, 'dog': 6, 'cat': 12}
        )
        self.assertListEqual(
            resp['top_words'],
            [
                {'text': 'dad', 'size': 2, 'percent': 9.0},
                {'text': 'sun', 'size': 1, 'percent': 5.0},
                {'text': 'dog', 'size': 6, 'percent': 27.0},
                {'text': 'mom', 'size': 1, 'percent': 5.0},
                {'text': 'cat', 'size': 12, 'percent': 54.0},
            ]
        )
        # Reported percentages must always add up to exactly 100%.
        self.assertEqual(
            100.0,
            sum(entry['percent'] for entry in resp['top_words']))
| agpl-3.0 |
rjschof/gem5 | configs/example/arm/devices.py | 3 | 9394 | # Copyright (c) 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
# Gabor Dozsa
# System components used by the bigLITTLE.py configuration script
import m5
from m5.objects import *
m5.util.addToPath('../../')
from common.Caches import *
from common import CpuConfig
class L1I(L1_ICache):
    """L1 instruction cache parameters (latencies are in CPU cycles)."""
    tag_latency = 1
    data_latency = 1
    response_latency = 1
    mshrs = 4
    tgts_per_mshr = 8
    size = '48kB'
    assoc = 3
class L1D(L1_DCache):
    """L1 data cache parameters (latencies are in CPU cycles)."""
    tag_latency = 2
    data_latency = 2
    response_latency = 1
    mshrs = 16
    tgts_per_mshr = 16
    size = '32kB'
    assoc = 2
    write_buffers = 16
class WalkCache(PageTableWalkerCache):
    """Small dedicated cache for page-table-walker requests."""
    tag_latency = 4
    data_latency = 4
    response_latency = 4
    mshrs = 6
    tgts_per_mshr = 8
    size = '1kB'
    assoc = 8
    write_buffers = 16
class L2(L2Cache):
    """Per-cluster shared L2 cache parameters."""
    tag_latency = 12
    data_latency = 12
    response_latency = 5
    mshrs = 32
    tgts_per_mshr = 8
    size = '1MB'
    assoc = 16
    write_buffers = 8
    # Mostly-exclusive with respect to upstream (L1) caches.
    clusivity='mostly_excl'
class L3(Cache):
    """System-level L3 cache shared by all clusters."""
    size = '16MB'
    assoc = 16
    tag_latency = 20
    data_latency = 20
    response_latency = 20
    mshrs = 20
    tgts_per_mshr = 12
    # Mostly-exclusive with respect to upstream caches.
    clusivity='mostly_excl'
class MemBus(SystemXBar):
    """System memory crossbar with a warning responder for bad addresses."""
    badaddr_responder = BadAddr(warn_access="warn")
    # Accesses that match no mapped port are routed to the BadAddr responder.
    default = Self.badaddr_responder.pio
class CpuCluster(SubSystem):
    """A cluster of CPUs sharing one clock/voltage domain and, optionally,
    a cluster-local L2 cache.

    The cluster registers itself with the parent system on construction.
    """

    def __init__(self, system, num_cpus, cpu_clock, cpu_voltage,
                 cpu_type, l1i_type, l1d_type, wcache_type, l2_type):
        super(CpuCluster, self).__init__()
        self._cpu_type = cpu_type
        self._l1i_type = l1i_type
        self._l1d_type = l1d_type
        self._wcache_type = wcache_type
        self._l2_type = l2_type

        assert num_cpus > 0

        # All CPUs in the cluster share a single voltage and clock domain.
        self.voltage_domain = VoltageDomain(voltage=cpu_voltage)
        self.clk_domain = SrcClockDomain(clock=cpu_clock,
                                         voltage_domain=self.voltage_domain)

        # CPU ids continue from the ones the system has already handed out.
        self.cpus = [ self._cpu_type(cpu_id=system.numCpus() + idx,
                                     clk_domain=self.clk_domain)
                      for idx in range(num_cpus) ]

        for cpu in self.cpus:
            cpu.createThreads()
            cpu.createInterruptController()
            cpu.socket_id = system.numCpuClusters()
        system.addCpuCluster(self, num_cpus)

    def requireCaches(self):
        """Return True if the configured CPU model requires caches."""
        return self._cpu_type.require_caches()

    def memoryMode(self):
        """Return the memory mode the configured CPU model needs."""
        return self._cpu_type.memory_mode()

    def addL1(self):
        """Give every CPU private split L1 caches plus walker caches."""
        for cpu in self.cpus:
            l1i = None if self._l1i_type is None else self._l1i_type()
            l1d = None if self._l1d_type is None else self._l1d_type()
            iwc = None if self._wcache_type is None else self._wcache_type()
            dwc = None if self._wcache_type is None else self._wcache_type()
            cpu.addPrivateSplitL1Caches(l1i, l1d, iwc, dwc)

    def addL2(self, clk_domain):
        """Add a shared L2 behind a cluster-local crossbar (no-op if the
        cluster was configured without an L2 type)."""
        if self._l2_type is None:
            return
        self.toL2Bus = L2XBar(width=64, clk_domain=clk_domain)
        self.l2 = self._l2_type()
        for cpu in self.cpus:
            cpu.connectAllPorts(self.toL2Bus)
        self.toL2Bus.master = self.l2.cpu_side

    def connectMemSide(self, bus):
        """Connect the cluster's memory side to the given bus: through the
        L2 if one was added, otherwise each CPU connects directly.

        Fix: removed a stray, side-effect-free 'bus.slave' expression
        statement that evaluated a port reference and discarded it.
        """
        try:
            self.l2.mem_side = bus.slave
        except AttributeError:
            # No L2 present: connect every CPU directly to the bus.
            for cpu in self.cpus:
                cpu.connectAllPorts(bus)
class AtomicCluster(CpuCluster):
    """CPU cluster built from atomic CPUs, which run without any caches."""
    def __init__(self, system, num_cpus, cpu_clock, cpu_voltage="1.0V"):
        # Atomic CPUs take no cache types: all four cache slots are None.
        cpu_config = [ CpuConfig.get("atomic"), None, None, None, None ]
        super(AtomicCluster, self).__init__(system, num_cpus, cpu_clock,
                                            cpu_voltage, *cpu_config)
    def addL1(self):
        # Atomic CPUs have no caches, so adding L1s is deliberately a no-op.
        pass
class SimpleSystem(LinuxArmSystem):
    """A simple ARM Linux system: VExpress platform, IO/memory buses, and
    externally-added CPU clusters with up to three cache levels."""
    cache_line_size = 64

    def __init__(self, caches, mem_size, **kwargs):
        super(SimpleSystem, self).__init__(**kwargs)

        self.voltage_domain = VoltageDomain(voltage="1.0V")
        self.clk_domain = SrcClockDomain(clock="1GHz",
                                         voltage_domain=Parent.voltage_domain)

        self.realview = VExpress_GEM5_V1()

        self.gic_cpu_addr = self.realview.gic.cpu_addr
        # Flags register lives at a fixed offset in the realview IO block.
        self.flags_addr = self.realview.realview_io.pio_addr + 0x30

        self.membus = MemBus()

        self.intrctrl = IntrControl()
        self.terminal = Terminal()
        self.vncserver = VncServer()

        self.iobus = IOXBar()
        # CPUs->PIO
        self.iobridge = Bridge(delay='50ns')

        # Device DMA -> MEM
        # NOTE(review): 'long' makes this Python-2 only.
        mem_range = self.realview._mem_regions[0]
        mem_range_size = long(mem_range[1]) - long(mem_range[0])
        # The requested memory must fit in the platform's first mem region.
        assert mem_range_size >= long(Addr(mem_size))
        self._mem_range = AddrRange(start=mem_range[0], size=mem_size)

        self._caches = caches
        if self._caches:
            # With caches, DMA traffic goes through a coherent IO cache.
            self.iocache = IOCache(addr_ranges=[self._mem_range])
        else:
            # Without caches, a plain bridge forwards DMA to memory.
            self.dmabridge = Bridge(delay='50ns',
                                    ranges=[self._mem_range])

        self._pci_devices = 0
        self._clusters = []
        self._num_cpus = 0

    def attach_pci(self, dev):
        # PCI devices are allocated sequentially on bus 0, function 0.
        dev.pci_bus, dev.pci_dev, dev.pci_func = (0, self._pci_devices + 1, 0)
        self._pci_devices += 1
        self.realview.attachPciDevice(dev, self.iobus)

    def connect(self):
        """Wire up the IO bridge, DMA path, platform devices and system port."""
        self.iobridge.master = self.iobus.slave
        self.iobridge.slave = self.membus.master

        if self._caches:
            self.iocache.mem_side = self.membus.slave
            self.iocache.cpu_side = self.iobus.master
        else:
            self.dmabridge.master = self.membus.slave
            self.dmabridge.slave = self.iobus.master

        self.gic_cpu_addr = self.realview.gic.cpu_addr
        self.realview.attachOnChipIO(self.membus, self.iobridge)
        self.realview.attachIO(self.iobus)
        self.system_port = self.membus.slave

    def numCpuClusters(self):
        return len(self._clusters)

    def addCpuCluster(self, cpu_cluster, num_cpus):
        # Called by CpuCluster.__init__ to register itself with the system.
        assert cpu_cluster not in self._clusters
        assert num_cpus > 0
        self._clusters.append(cpu_cluster)
        self._num_cpus += num_cpus

    def numCpus(self):
        return self._num_cpus

    def addCaches(self, need_caches, last_cache_level):
        """Build the cache hierarchy up to last_cache_level (1-3) and
        connect every cluster to the memory system."""
        if not need_caches:
            # connect each cluster to the memory hierarchy
            for cluster in self._clusters:
                cluster.connectMemSide(self.membus)
            return

        cluster_mem_bus = self.membus

        assert last_cache_level >= 1 and last_cache_level <= 3
        for cluster in self._clusters:
            cluster.addL1()
        if last_cache_level > 1:
            for cluster in self._clusters:
                cluster.addL2(cluster.clk_domain)
        if last_cache_level > 2:
            # The shared L3 runs in the fastest cluster's clock domain.
            max_clock_cluster = max(self._clusters,
                                    key=lambda c: c.clk_domain.clock[0])
            self.l3 = L3(clk_domain=max_clock_cluster.clk_domain)
            self.toL3Bus = L2XBar(width=64)
            self.toL3Bus.master = self.l3.cpu_side
            self.l3.mem_side = self.membus.slave
            cluster_mem_bus = self.toL3Bus

        # connect each cluster to the memory hierarchy
        for cluster in self._clusters:
            cluster.connectMemSide(cluster_mem_bus)
| bsd-3-clause |
jonnatas/codeschool | old/cs_questions/migrations/old/0002_auto_20160501_1934.py | 3 | 1252 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-01 22:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import picklefield.fields
class Migration(migrations.Migration):
    # Recreates CodingIoResponse from scratch: the old model is dropped and
    # rebuilt as a multi-table child of cs_activities.Response with
    # source/feedback/language fields.

    dependencies = [
        ('cs_core', '0001_initial'),
        ('cs_activities', '0011_auto_20160421_1618'),
        ('cs_questions', '0001_initial'),
    ]

    operations = [
        migrations.DeleteModel(
            name='CodingIoResponse',
        ),
        migrations.CreateModel(
            name='CodingIoResponse',
            fields=[
                # Parent link implementing multi-table inheritance from Response.
                ('response_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='cs_activities.Response')),
                ('source', models.TextField(blank=True)),
                # Pickled feedback object; not editable through forms.
                ('feedback', picklefield.fields.PickledObjectField(blank=True, editable=False, null=True)),
                ('language', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cs_core.ProgrammingLanguage')),
            ],
            options={
                'abstract': False,
            },
            bases=('cs_activities.response',),
        ),
    ]
| gpl-3.0 |
Pluto-tv/blink-crosswalk | Tools/Scripts/webkitpy/layout_tests/breakpad/dump_reader_win.py | 50 | 5429 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import shlex
from webkitpy.layout_tests.breakpad.dump_reader import DumpReader
_log = logging.getLogger(__name__)
class DumpReaderWin(DumpReader):
    """DumpReader for windows breakpad."""

    def __init__(self, host, build_dir):
        super(DumpReaderWin, self).__init__(host, build_dir)
        # Tri-state: None = not probed yet, then True/False once checked.
        self._cdb_available = None

    def check_is_functional(self):
        """Return True if minidumps can be symbolized on this host."""
        return self._check_cdb_available()

    def _file_extension(self):
        return 'txt'

    def _get_pid_from_dump(self, dump_file):
        """Extract the crashing process id from the breakpad sidecar file,
        which holds one 'key:value' pair per line. Returns None if absent."""
        with self._host.filesystem.open_text_file_for_reading(dump_file) as f:
            crash_keys = dict([l.split(':', 1) for l in f.read().splitlines()])
            if 'pid' in crash_keys:
                return crash_keys['pid']
        return None

    def _get_stack_from_dump(self, dump_file):
        """Symbolize the .dmp next to dump_file with cdb; return the stack
        text, or None if cdb failed."""
        minidump = dump_file[:-3] + 'dmp'
        cmd = [self._cdb_path, '-y', self._build_dir, '-c', '.lines;.ecxr;k30;q', '-z', minidump]
        try:
            stack = self._host.executive.run_command(cmd)
        # Fix: was a bare 'except:', which also swallowed KeyboardInterrupt
        # and SystemExit.
        except Exception:
            _log.warning('Failed to execute "%s"' % ' '.join(cmd))
        else:
            return stack
        return None

    def _find_depot_tools_path(self):
        """Attempt to find depot_tools location in PATH."""
        for i in os.environ['PATH'].split(os.pathsep):
            if os.path.isfile(os.path.join(i, 'gclient')):
                return i

    def _check_cdb_available(self):
        """Checks whether we can use cdb to symbolize minidumps."""
        if self._cdb_available is not None:
            return self._cdb_available

        CDB_LOCATION_TEMPLATES = [
            '%s\\Debugging Tools For Windows',
            '%s\\Debugging Tools For Windows (x86)',
            '%s\\Debugging Tools For Windows (x64)',
            '%s\\Windows Kits\\8.0\\Debuggers\\x86',
            '%s\\Windows Kits\\8.0\\Debuggers\\x64',
            '%s\\Windows Kits\\8.1\\Debuggers\\x86',
            '%s\\Windows Kits\\8.1\\Debuggers\\x64',
        ]

        program_files_directories = ['C:\\Program Files']
        program_files = os.environ.get('ProgramFiles')
        if program_files:
            program_files_directories.append(program_files)
        program_files = os.environ.get('ProgramFiles(x86)')
        if program_files:
            program_files_directories.append(program_files)

        possible_cdb_locations = []
        for template in CDB_LOCATION_TEMPLATES:
            for program_files in program_files_directories:
                possible_cdb_locations.append(template % program_files)

        # Fix: GYP_DEFINES is a space-separated list of 'key=value' tokens.
        # The old code tested membership on the raw token list (which never
        # matched 'windows_sdk_path') and would have indexed a list with a
        # string (TypeError) if it somehow had. Parse into a dict instead.
        gyp_defines = os.environ.get('GYP_DEFINES', '')
        if gyp_defines:
            defines = dict(token.split('=', 1)
                           for token in shlex.split(gyp_defines)
                           if '=' in token)
            if 'windows_sdk_path' in defines:
                possible_cdb_locations.extend([
                    '%s\\Debuggers\\x86' % defines['windows_sdk_path'],
                    '%s\\Debuggers\\x64' % defines['windows_sdk_path'],
                ])

        # Look in depot_tools win_toolchain too.
        depot_tools = self._find_depot_tools_path()
        if depot_tools:
            win8sdk = os.path.join(depot_tools, 'win_toolchain', 'vs2013_files', 'win8sdk')
            possible_cdb_locations.extend([
                '%s\\Debuggers\\x86' % win8sdk,
                '%s\\Debuggers\\x64' % win8sdk,
            ])

        for cdb_path in possible_cdb_locations:
            cdb = self._host.filesystem.join(cdb_path, 'cdb.exe')
            try:
                _ = self._host.executive.run_command([cdb, '-version'])
            # A candidate that fails to run is simply skipped.
            except Exception:
                pass
            else:
                self._cdb_path = cdb
                self._cdb_available = True
                return self._cdb_available

        _log.warning("CDB is not installed; can't symbolize minidumps.")
        _log.warning('')
        self._cdb_available = False
        return self._cdb_available
| bsd-3-clause |
pokowaka/xbmc | lib/gtest/test/gtest_xml_output_unittest.py | 1815 | 14580 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by Google Test binaries.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# Stack traces are not emitted by this build, so the template interpolated
# into the expected XML below is empty.
SUPPORTS_STACK_TRACES = False

if SUPPORTS_STACK_TRACES:
    STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
    STACK_TRACE_TEMPLATE = ''

# Expected XML for a full run of the companion test binary; '*' is a
# wildcard matched by the XML-comparison helpers.
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}

# Expected XML when only SuccessfulTest.* is selected via --gtest_filter.
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""

# Expected XML for a binary that contains no tests at all.
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""

GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)

# Probe the test binary once: typed tests are optional platform support.
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
re.match,
'XML datettime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is avalable only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def testFilteredTestXmlOutput(self):
"""Verifies XML output when a filter is applied.
Runs a test program that executes only some tests and verifies that
non-selected tests do not show up in the XML output.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
extra_args)
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
def _TestXmlOutput(self, gtest_prog_name, expected_xml,
expected_exit_code, extra_args=None):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
expected_exit_code)
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == '__main__':
    # Keep stack traces shallow so failure messages match the expected XML.
    os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
    gtest_test_utils.Main()
| gpl-2.0 |
Beyond-Imagination/BlubBlub | RaspberryPI/django-env/lib/python3.4/site-packages/django/db/backends/base/creation.py | 44 | 12033 | import sys
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.db import router
from django.utils.six import StringIO
from django.utils.six.moves import input
# The prefix to put on the default database name when creating
# the test database (e.g. 'mydb' -> 'test_mydb').
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
"""
This class encapsulates all backend-specific differences that pertain to
creation and destruction of the test database.
"""
    def __init__(self, connection):
        # The database connection whose test database this object manages.
        self.connection = connection
    @property
    def _nodb_connection(self):
        """
        Used to be defined here, now moved to DatabaseWrapper.

        Kept as a property for backwards compatibility with callers that
        still access it via the creation object.
        """
        return self.connection._nodb_connection
    def create_test_db(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.

        If ``keepdb`` is True, an existing test database is reused and kept
        afterwards. If ``serialize`` is True, the database's initial state is
        serialized and stored on the connection (used to restore state for
        TransactionTestCase runs).
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command
        test_database_name = self._get_test_db_name()
        if verbosity >= 1:
            action = 'Creating'
            if keepdb:
                action = "Using existing"
            print("%s test database for alias %s..." % (
                action,
                self._get_database_display_str(verbosity, test_database_name),
            ))
        # We could skip this call if keepdb is True, but we instead
        # give it the keepdb param. This is to handle the case
        # where the test DB doesn't exist, in which case we need to
        # create it, then just not destroy it. If we instead skip
        # this, we will get an exception.
        self._create_test_db(verbosity, autoclobber, keepdb)
        self.connection.close()
        # Point both the live settings and this connection at the test DB.
        settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
        self.connection.settings_dict["NAME"] = test_database_name
        # We report migrate messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded).
        call_command(
            'migrate',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            run_syncdb=True,
        )
        # We then serialize the current state of the database into a string
        # and store it on the connection. This slightly horrific process is so people
        # who are testing on databases without transactions or who are using
        # a TransactionTestCase still get a clean database on every test run.
        if serialize:
            self.connection._test_serialized_contents = self.serialize_db_to_string()
        call_command('createcachetable', database=self.connection.alias)
        # Ensure a connection for the side effect of initializing the test database.
        self.connection.ensure_connection()
        return test_database_name
    def set_as_test_mirror(self, primary_settings_dict):
        """
        Set this database up to be used in testing as a mirror of a primary database
        whose settings are given.

        Only the database NAME is copied; the mirror shares the primary's data.
        """
        self.connection.settings_dict['NAME'] = primary_settings_dict['NAME']
    def serialize_db_to_string(self):
        """
        Serializes all data in the database into a JSON string.
        Designed only for test runner usage; will not handle large
        amounts of data.
        """
        # Build list of all apps to serialize
        from django.db.migrations.loader import MigrationLoader
        loader = MigrationLoader(self.connection)
        app_list = []
        for app_config in apps.get_app_configs():
            # Only apps with models, that are migrated, and not excluded
            # via TEST_NON_SERIALIZED_APPS take part in serialization.
            if (
                app_config.models_module is not None and
                app_config.label in loader.migrated_apps and
                app_config.name not in settings.TEST_NON_SERIALIZED_APPS
            ):
                app_list.append((app_config, None))

        # Make a function to iteratively return every object
        def get_objects():
            # sort_dependencies orders models so FK targets come first.
            for model in serializers.sort_dependencies(app_list):
                if (model._meta.can_migrate(self.connection) and
                        router.allow_migrate_model(self.connection.alias, model)):
                    queryset = model._default_manager.using(self.connection.alias).order_by(model._meta.pk.name)
                    for obj in queryset.iterator():
                        yield obj
        # Serialize to a string
        out = StringIO()
        serializers.serialize("json", get_objects(), indent=None, stream=out)
        return out.getvalue()
def deserialize_db_from_string(self, data):
    """
    Reloads the database with data from a string generated by
    the serialize_db_to_string method.
    """
    # Wrap the JSON text in a stream and replay every serialized object
    # into this connection's database.
    stream = StringIO(data)
    deserialized_objects = serializers.deserialize(
        "json", stream, using=self.connection.alias)
    for deserialized in deserialized_objects:
        deserialized.save()
def _get_database_display_str(self, verbosity, database_name):
"""
Return display string for a database for use in various actions.
"""
return "'%s'%s" % (
self.connection.alias,
(" ('%s')" % database_name) if verbosity >= 2 else '',
)
def _get_test_db_name(self):
"""
Internal implementation - returns the name of the test DB that will be
created. Only useful when called from create_test_db() and
_create_test_db() and when no external munging is done with the 'NAME'
settings.
"""
if self.connection.settings_dict['TEST']['NAME']:
return self.connection.settings_dict['TEST']['NAME']
return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
def _create_test_db(self, verbosity, autoclobber, keepdb=False):
    """
    Internal implementation - creates the test db tables.

    Returns the test database name.  On a creation error: returns early
    if keepdb is set (the DB presumably already exists), otherwise offers
    to drop and recreate it (interactively unless autoclobber is set),
    and exits the process if that fails or the user declines.
    """
    suffix = self.sql_table_creation_suffix()
    test_database_name = self._get_test_db_name()

    # Quote identifiers through the backend to guard against unusual names.
    qn = self.connection.ops.quote_name

    # Create the test database and connect to it. The "no db" connection
    # is used because the target database does not exist yet.
    with self._nodb_connection.cursor() as cursor:
        try:
            cursor.execute(
                "CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
        except Exception as e:
            # if we want to keep the db, then no need to do any of the below,
            # just return and skip it all.
            if keepdb:
                return test_database_name

            sys.stderr.write(
                "Got an error creating the test database: %s\n" % e)
            if not autoclobber:
                confirm = input(
                    "Type 'yes' if you would like to try deleting the test "
                    "database '%s', or 'no' to cancel: " % test_database_name)
            # 'confirm' is only read when autoclobber is False, so the
            # short-circuit below never touches an unbound name.
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        print("Destroying old test database for alias %s..." % (
                            self._get_database_display_str(verbosity, test_database_name),
                        ))
                    cursor.execute(
                        "DROP DATABASE %s" % qn(test_database_name))
                    cursor.execute(
                        "CREATE DATABASE %s %s" % (qn(test_database_name),
                                                   suffix))
                except Exception as e:
                    sys.stderr.write(
                        "Got an error recreating the test database: %s\n" % e)
                    sys.exit(2)
            else:
                print("Tests cancelled.")
                sys.exit(1)

    return test_database_name
def clone_test_db(self, number, verbosity=1, autoclobber=False, keepdb=False):
    """
    Clone a test database (clone "number" of the current test DB).
    """
    source_database_name = self.connection.settings_dict['NAME']

    if verbosity >= 1:
        action = 'Using existing clone' if keepdb else 'Cloning test database'
        print("%s for alias %s..." % (
            action,
            self._get_database_display_str(verbosity, source_database_name),
        ))

    # keepdb is forwarded rather than short-circuited here, so backends can
    # create a missing clone while preserving an existing one.
    self._clone_test_db(number, verbosity, keepdb)
def get_test_db_clone_settings(self, number):
    """
    Return a modified connection settings dict for the n-th clone of a DB.
    """
    # The test database already exists at this point and its name has been
    # copied into settings_dict['NAME'], so _get_test_db_name() is not
    # needed here.
    cloned_settings = self.connection.settings_dict.copy()
    cloned_settings['NAME'] = '%s_%s' % (cloned_settings['NAME'], number)
    return cloned_settings
def _clone_test_db(self, number, verbosity, keepdb=False):
"""
Internal implementation - duplicate the test db tables.
"""
raise NotImplementedError(
"The database backend doesn't support cloning databases. "
"Disable the option to run tests in parallel processes.")
def destroy_test_db(self, old_database_name=None, verbosity=1, keepdb=False, number=None):
    """
    Destroy a test database, prompting the user for confirmation if the
    database already exists.

    When "number" is given, the n-th parallel clone is targeted instead of
    the main test database.  With keepdb the database is preserved and only
    the original NAME setting is restored.
    """
    self.connection.close()
    if number is None:
        test_database_name = self.connection.settings_dict['NAME']
    else:
        test_database_name = self.get_test_db_clone_settings(number)['NAME']

    if verbosity >= 1:
        action = 'Destroying'
        if keepdb:
            action = 'Preserving'
        print("%s test database for alias %s..." % (
            action,
            self._get_database_display_str(verbosity, test_database_name),
        ))

    # if we want to preserve the database
    # skip the actual destroying piece.
    if not keepdb:
        self._destroy_test_db(test_database_name, verbosity)

    # Restore the original database name
    if old_database_name is not None:
        settings.DATABASES[self.connection.alias]["NAME"] = old_database_name
        self.connection.settings_dict["NAME"] = old_database_name
def _destroy_test_db(self, test_database_name, verbosity):
    """
    Internal implementation - remove the test db tables.
    """
    # Remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    with self.connection._nodb_connection.cursor() as cursor:
        # quote_name guards the identifier; the value is not user input.
        cursor.execute("DROP DATABASE %s"
                       % self.connection.ops.quote_name(test_database_name))
def sql_table_creation_suffix(self):
    """
    SQL to append to the end of the test table creation statements.
    """
    # Backends override this to pass e.g. charset/collation options to
    # CREATE DATABASE; the base implementation adds nothing.
    return ''
def test_db_signature(self):
"""
Returns a tuple with elements of self.connection.settings_dict (a
DATABASES setting value) that uniquely identify a database
accordingly to the RDBMS particularities.
"""
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
self._get_test_db_name(),
)
| gpl-3.0 |
bmihelac/django-shop | shop/migrations/0009_auto__chg_field_order_order_total__chg_field_order_order_subtotal__chg.py | 14 | 13276 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Widen the money columns: keep max_digits=30, use 2 decimal places."""
    # Same columns and order as the original generated migration.
    for table, column in (
        ('shop_order', 'order_total'),
        ('shop_order', 'order_subtotal'),
        ('shop_orderpayment', 'amount'),
        ('shop_extraorderpricefield', 'value'),
        ('shop_orderitem', 'line_total'),
        ('shop_orderitem', 'unit_price'),
        ('shop_orderitem', 'line_subtotal'),
        ('shop_product', 'unit_price'),
        ('shop_extraorderitempricefield', 'value'),
    ):
        db.alter_column(
            table, column,
            self.gf('django.db.models.fields.DecimalField')(max_digits=30, decimal_places=2))
def backwards(self, orm):
    """Revert the money columns to their previous 10 decimal places."""
    # Mirror of forwards(): same columns and order, old precision restored.
    for table, column in (
        ('shop_order', 'order_total'),
        ('shop_order', 'order_subtotal'),
        ('shop_orderpayment', 'amount'),
        ('shop_extraorderpricefield', 'value'),
        ('shop_orderitem', 'line_total'),
        ('shop_orderitem', 'unit_price'),
        ('shop_orderitem', 'line_subtotal'),
        ('shop_product', 'unit_price'),
        ('shop_extraorderitempricefield', 'value'),
    ):
        db.alter_column(
            table, column,
            self.gf('django.db.models.fields.DecimalField')(max_digits=30, decimal_places=10))
# Frozen snapshot of the ORM state at the time this South migration was
# generated (auto-generated; do not edit by hand).  Each entry maps
# 'app.modelname' to its field definitions as (field class, args, kwargs).
models = {
    'auth.group': {
        'Meta': {'object_name': 'Group'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
        'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
    },
    'auth.permission': {
        'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
        'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
    },
    'auth.user': {
        'Meta': {'object_name': 'User'},
        'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
        'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
        'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
        'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
    },
    'contenttypes.contenttype': {
        'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
        'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
    },
    # Shop models: the DecimalField entries below reflect the new
    # decimal_places=2 precision applied by this migration.
    'shop.cart': {
        'Meta': {'object_name': 'Cart'},
        'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
        'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
    },
    'shop.cartitem': {
        'Meta': {'object_name': 'CartItem'},
        'cart': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['shop.Cart']"}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Product']"}),
        'quantity': ('django.db.models.fields.IntegerField', [], {})
    },
    'shop.extraorderitempricefield': {
        'Meta': {'object_name': 'ExtraOrderItemPriceField'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'order_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.OrderItem']"}),
        'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'})
    },
    'shop.extraorderpricefield': {
        'Meta': {'object_name': 'ExtraOrderPriceField'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Order']"}),
        'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'})
    },
    'shop.order': {
        'Meta': {'object_name': 'Order'},
        'billing_address_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
        'order_subtotal': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'}),
        'order_total': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'}),
        'shipping_address_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
        'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
    },
    'shop.orderextrainfo': {
        'Meta': {'object_name': 'OrderExtraInfo'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'extra_info'", 'to': "orm['shop.Order']"}),
        'text': ('django.db.models.fields.TextField', [], {})
    },
    'shop.orderitem': {
        'Meta': {'object_name': 'OrderItem'},
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'line_subtotal': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'}),
        'line_total': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'}),
        'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['shop.Order']"}),
        'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
        'product_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
        'product_reference': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'quantity': ('django.db.models.fields.IntegerField', [], {}),
        'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'})
    },
    'shop.orderpayment': {
        'Meta': {'object_name': 'OrderPayment'},
        'amount': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Order']"}),
        'payment_method': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '255'})
    },
    'shop.product': {
        'Meta': {'object_name': 'Product'},
        'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
        'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
        'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '30', 'decimal_places': '2'})
    }
}
complete_apps = ['shop'] | bsd-3-clause |
Logicalmars/appengine-keeptrack | pytz/zoneinfo/PRC.py | 9 | 1090 | '''tzinfo timezone information for PRC.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class PRC(DstTzInfo):
    '''PRC timezone definition. See datetime.tzinfo for details'''

    zone = 'PRC'

    # Auto-generated from the Olson/IANA tz database: UTC instants at which
    # the offset changes.  The sentinel d(1,1,1,...) anchors the first era.
    _utc_transition_times = [
        d(1,1,1,0,0,0),
        d(1927,12,31,15,54,8),
        d(1940,6,2,16,0,0),
        d(1940,9,30,15,0,0),
        d(1941,3,15,16,0,0),
        d(1941,9,30,15,0,0),
        d(1986,5,3,16,0,0),
        d(1986,9,13,15,0,0),
        d(1987,4,11,16,0,0),
        d(1987,9,12,15,0,0),
        d(1988,4,9,16,0,0),
        d(1988,9,10,15,0,0),
        d(1989,4,15,16,0,0),
        d(1989,9,16,15,0,0),
        d(1990,4,14,16,0,0),
        d(1990,9,15,15,0,0),
        d(1991,4,13,16,0,0),
        d(1991,9,14,15,0,0),
        ]

    # One (utcoffset seconds, dst seconds, tzname) entry per transition above.
    _transition_info = [
        i(29160,0,'LMT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        i(32400,3600,'CDT'),
        i(28800,0,'CST'),
        ]

# Module-level singleton: the class is replaced by its only instance,
# matching the pattern used throughout pytz.zoneinfo.
PRC = PRC()
| bsd-3-clause |
fduraffourg/servo | tests/wpt/web-platform-tests/tools/py/py/_process/cmdexec.py | 273 | 1814 | import sys
import subprocess
import py
from subprocess import Popen, PIPE
def cmdexec(cmd):
    """Run 'cmd' through a shell and return its unicode standard output.

    A non-zero exit status raises cmdexec.Error (ExecutionFailed); its
    'err' attribute carries the error output of the command.  On Python 2
    the byte output is decoded using the pipe's encoding, falling back to
    sys.getdefaultencoding() and finally 'UTF-8'.
    """
    child = Popen(cmd, shell=True,
                  universal_newlines=True,
                  stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    if sys.version_info[0] < 3:  # py3 already yields text; py2 yields bytes
        try:
            fallback_encoding = sys.getdefaultencoding()  # jython may not have it
        except AttributeError:
            fallback_encoding = sys.stdout.encoding or 'UTF-8'
        out = unicode(out, child.stdout.encoding or fallback_encoding)
        err = unicode(err, child.stderr.encoding or fallback_encoding)
    returncode = child.poll()
    if returncode:
        raise ExecutionFailed(returncode, returncode, cmd, out, err)
    return out
class ExecutionFailed(py.error.Error):
    """Raised by cmdexec() when the command exits with a non-zero status.

    Attributes:
        status:       the command's exit status (same value as systemstatus).
        systemstatus: raw status as returned by the process poll.
        cmd:          the shell command line that was executed.
        out:          captured standard output.
        err:          captured error output.
    """
    def __init__(self, status, systemstatus, cmd, out, err):
        Exception.__init__(self)
        self.status = status
        self.systemstatus = systemstatus
        self.cmd = cmd
        self.err = err
        self.out = out

    def __str__(self):
        return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
# export the exception under the name 'py.process.cmdexec.Error'
cmdexec.Error = ExecutionFailed
# Rebind the dunder attributes so tracebacks show the public name.
# NOTE(review): guarded because some interpreters appear to forbid
# assigning __module__/__name__ on classes — TODO confirm which.
try:
    ExecutionFailed.__module__ = 'py.process.cmdexec'
    ExecutionFailed.__name__ = 'Error'
except (AttributeError, TypeError):
    pass
| mpl-2.0 |
mancoast/CPythonPyc_test | cpython/274_test_threading_local.py | 95 | 6671 | import unittest
from doctest import DocTestSuite
from test import test_support
import weakref
import gc
# Modules under test
_thread = test_support.import_module('thread')
threading = test_support.import_module('threading')
import _threading_local
class Weak(object):
    """Trivial object used as a weak-reference target in the tests below."""
    pass
def target(local, weaklist):
    """Thread body: park a fresh Weak() on the thread-local and record a
    weak reference to it so the caller can check collectability."""
    anchor = Weak()
    local.weak = anchor
    weaklist.append(weakref.ref(anchor))
class BaseLocalTest:
    """Mixin exercising a thread-local implementation.

    Concrete subclasses supply the implementation under test via the
    ``_local`` class attribute (the C ``thread._local`` or the pure-Python
    ``_threading_local.local``) and also mix in ``unittest.TestCase``.
    """

    def test_local_refs(self):
        self._local_refs(20)
        self._local_refs(50)
        self._local_refs(100)

    def _local_refs(self, n):
        # Run n threads that each store a Weak() on the same local object;
        # after the threads die those objects must become collectable.
        local = self._local()
        weaklist = []
        for i in range(n):
            t = threading.Thread(target=target, args=(local, weaklist))
            t.start()
            t.join()
        del t

        gc.collect()
        self.assertEqual(len(weaklist), n)

        # XXX _threading_local keeps the local of the last stopped thread alive.
        deadlist = [weak for weak in weaklist if weak() is None]
        self.assertIn(len(deadlist), (n-1, n))

        # Assignment to the same thread local frees it sometimes (!)
        local.someothervar = None
        gc.collect()
        deadlist = [weak for weak in weaklist if weak() is None]
        self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))

    def test_derived(self):
        # Issue 3088: if there is a threads switch inside the __init__
        # of a threading.local derived class, the per-thread dictionary
        # is created but not correctly set on the object.
        # The first member set may be bogus.
        import time
        class Local(self._local):
            def __init__(self):
                time.sleep(0.01)
        local = Local()
        def f(i):
            local.x = i
            # Simply check that the variable is correctly set
            self.assertEqual(local.x, i)

        threads = []
        for i in range(10):
            t = threading.Thread(target=f, args=(i,))
            t.start()
            threads.append(t)

        for t in threads:
            t.join()

    def test_derived_cycle_dealloc(self):
        # http://bugs.python.org/issue6990
        class Local(self._local):
            pass
        locals = None
        passed = [False]
        e1 = threading.Event()
        e2 = threading.Event()

        def f():
            # 1) Involve Local in a cycle
            cycle = [Local()]
            cycle.append(cycle)
            cycle[0].foo = 'bar'

            # 2) GC the cycle (triggers threadmodule.c::local_clear
            # before local_dealloc)
            del cycle
            gc.collect()
            e1.set()
            e2.wait()

            # 4) New Locals should be empty
            passed[0] = all(not hasattr(local, 'foo') for local in locals)

        t = threading.Thread(target=f)
        t.start()
        e1.wait()

        # 3) New Locals should recycle the original's address. Creating
        # them in the thread overwrites the thread state and avoids the
        # bug
        locals = [Local() for i in range(10)]
        e2.set()
        t.join()

        self.assertTrue(passed[0])

    def test_arguments(self):
        # Issue 1522237: extra constructor arguments are accepted by a
        # subclass that overrides __init__, but rejected by the base class.
        from thread import _local as local
        from _threading_local import local as py_local

        for cls in (local, py_local):
            class MyLocal(cls):
                def __init__(self, *args, **kwargs):
                    pass

            MyLocal(a=1)
            MyLocal(1)
            self.assertRaises(TypeError, cls, a=1)
            self.assertRaises(TypeError, cls, 1)

    def _test_one_class(self, c):
        # A value set on the local in one thread must not leak into another.
        self._failed = "No error message set or cleared."
        obj = c()
        e1 = threading.Event()
        e2 = threading.Event()

        def f1():
            obj.x = 'foo'
            obj.y = 'bar'
            del obj.y
            e1.set()
            e2.wait()

        def f2():
            try:
                foo = obj.x
            except AttributeError:
                # This is expected -- we haven't set obj.x in this thread yet!
                self._failed = ""  # passed
            else:
                self._failed = ('Incorrectly got value %r from class %r\n' %
                                (foo, c))
                # BUG FIX: 'sys' is never imported at module level, so this
                # failure path used to die with a NameError instead of
                # reporting the real problem.
                import sys
                sys.stderr.write(self._failed)

        t1 = threading.Thread(target=f1)
        t1.start()
        e1.wait()

        t2 = threading.Thread(target=f2)
        t2.start()
        t2.join()
        # The test is done; just let t1 know it can exit, and wait for it.
        e2.set()
        t1.join()

        self.assertFalse(self._failed, self._failed)

    def test_threading_local(self):
        self._test_one_class(self._local)

    def test_threading_local_subclass(self):
        class LocalSubclass(self._local):
            """To test that subclasses behave properly."""
        self._test_one_class(LocalSubclass)

    def _test_dict_attribute(self, cls):
        # __dict__ on a local is readable but must not be replaced/deleted.
        obj = cls()
        obj.x = 5
        self.assertEqual(obj.__dict__, {'x': 5})
        with self.assertRaises(AttributeError):
            obj.__dict__ = {}
        with self.assertRaises(AttributeError):
            del obj.__dict__

    def test_dict_attribute(self):
        self._test_dict_attribute(self._local)

    def test_dict_attribute_subclass(self):
        class LocalSubclass(self._local):
            """To test that subclasses behave properly."""
        self._test_dict_attribute(LocalSubclass)
class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
    """Runs the shared tests against the C implementation."""
    _local = _thread._local

    # Fails for the pure Python implementation
    def test_cycle_collection(self):
        # A local participating in a reference cycle must still be
        # reclaimed by the cyclic garbage collector.
        class X:
            pass

        x = X()
        x.local = self._local()
        x.local.x = x
        wr = weakref.ref(x)
        del x
        gc.collect()
        self.assertIs(wr(), None)
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
    """Runs the shared tests against the pure-Python implementation."""
    _local = _threading_local.local
def test_main():
    """Collect and run every test in this module, plus the module doctests."""
    suite = unittest.TestSuite()
    suite.addTest(DocTestSuite('_threading_local'))
    suite.addTest(unittest.makeSuite(ThreadLocalTest))
    suite.addTest(unittest.makeSuite(PyThreadingLocalTest))

    try:
        from thread import _local
    except ImportError:
        pass
    else:
        # Run the _threading_local doctests a second time with the
        # C implementation temporarily patched in, restoring afterwards.
        import _threading_local
        local_orig = _threading_local.local
        def setUp(test):
            _threading_local.local = _local
        def tearDown(test):
            _threading_local.local = local_orig
        suite.addTest(DocTestSuite('_threading_local',
                                   setUp=setUp, tearDown=tearDown)
                      )

    test_support.run_unittest(suite)

if __name__ == '__main__':
    test_main()
| gpl-3.0 |
xuld/dark | apps/node_modules/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/make.py | 30 | 87831 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This is all roughly based on the Makefile system used by the Linux
# kernel, but is a non-recursive make -- we put the entire dependency
# graph in front of make and let it figure it out.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level Makefile. This means that all
# variables in .mk-files clobber one another. Be careful to use :=
# where appropriate for immediate evaluation, and similarly to watch
# that you're not relying on a variable value to last beween different
# .mk files.
#
# TODOs:
#
# Global settings and utility functions are currently stuffed in the
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the the files readable.
import errno
import os
import re
import sys

import gyp
import gyp.common
import gyp.system_test
import gyp.xcode_emulation
# Default values gyp substitutes for the generator variables; the $(...)
# forms are expanded later by make itself.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
  'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
  'PRODUCT_DIR': '$(builddir)',
  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',  # This gets expanded by Python.
  'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',  # This gets expanded by Python.
  'RULE_INPUT_PATH': '$(abspath $<)',
  'RULE_INPUT_EXT': '$(suffix $<)',
  'RULE_INPUT_NAME': '$(notdir $<)',

  # This appears unused --- ?
  'CONFIGURATION_NAME': '$(BUILDTYPE)',
}

# Make supports multiple toolsets
generator_supports_multiple_toolsets = True

# Request sorted dependencies in the order from dependents to dependencies.
generator_wants_sorted_dependencies = False
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  # NOTE(review): cc_target is computed but never used in this function —
  # looks like leftover code; confirm before removing.
  cc_target = os.environ.get('CC.target', os.environ.get('CC', 'cc'))
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])

    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Make generator.
    import gyp.generator.xcode as xcode_generator
    global generator_additional_non_configuration_keys
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    global generator_additional_path_sections
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
    global COMPILABLE_EXTENSIONS
    # Objective-C(++) sources are compilable on mac.
    COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)')
    default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  flags = params.get('generator_flags', {})
  if flags.get('android_ndk_version', None):
    # The Android NDK requires a strict link order, so ask gyp's input
    # stage for dependencies sorted from dependents to dependencies.
    global generator_wants_sorted_dependencies
    generator_wants_sorted_dependencies = True
def ensure_directory_exists(path):
  """Create the directory that will contain |path|, if it is missing.

  The original exists()/makedirs() pair raced with concurrent makefile
  writers: another process could create the directory between the check
  and the mkdir, making makedirs raise.  EEXIST is now tolerated, while
  any other failure (e.g. permissions) still raises OSError.
  """
  import errno
  dir = os.path.dirname(path)
  if dir and not os.path.exists(dir):
    try:
      os.makedirs(dir)
    except OSError as e:
      # Lost the race to another process creating the same directory.
      if e.errno != errno.EEXIST:
        raise
# The .d checking code below uses these functions:
# wildcard, sort, foreach, shell, wordlist
# wildcard can handle spaces, the rest can't.
# Since I could find no way to make foreach work with spaces in filenames
# correctly, the .d files have spaces replaced with another character. The .d
# file for
# Chromium\ Framework.framework/foo
# is for example
# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
# This is the replacement character.
# The make-level helpers replace_spaces/unreplace_spaces (in SHARED_HEADER
# below) convert paths to and from this encoding.
SPACE_REPLACEMENT = '?'
# Makefile fragment defining the archive/link commands for Linux; it is
# spliced into SHARED_HEADER via the %(link_commands)s placeholder.
LINK_COMMANDS_LINUX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
# into a link line.
# 2) loadable_module, which is generating a module intended for dlopen().
#
# They differ only slightly:
# In the former case, we want to package all dependent code into the .so.
# In the latter case, we want to package just the API exposed by the
# outermost module.
# This means shared_library uses --whole-archive, while loadable_module doesn't.
# (Note that --whole-archive is incompatible with the --start-group used in
# normal linking.)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
"""
# Mac variant of the link commands: archiving goes through gyp-mac-tool's
# libtool filter instead of ar.
LINK_COMMANDS_MAC = """\
quiet_cmd_alink = LIBTOOL-STATIC $@
cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool -static -o $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
# TODO(thakis): Find out and document the difference between shared_library and
# loadable_module on mac.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
# -bundle -single_module here (for osmesa.so).
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
# Android variant: adds separate *_host rules, since host-side tools link
# without the --start-group/--end-group wrapping used for target code.
LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
quiet_cmd_link_host = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
# Header of toplevel Makefile.
# This should go into the build tree, but it's easier to keep it here for now.
SHARED_HEADER = ("""\
# We borrow heavily from the kernel build setup, though we are simpler since
# we don't have Kconfig tweaking settings on us.
# The implicit make rules have it looking for RCS files, among other things.
# We instead explicitly write all the rules we care about.
# It's even quicker (saves ~200ms) to pass -r on the command line.
MAKEFLAGS=-r
# The source directory tree.
srcdir := %(srcdir)s
abs_srcdir := $(abspath $(srcdir))
# The name of the builddir.
builddir_name ?= %(builddir)s
# The V=1 flag on command line makes us verbosely print command lines.
ifdef V
quiet=
else
quiet=quiet_
endif
# Specify BUILDTYPE=Release on the command line for a release build.
BUILDTYPE ?= %(default_configuration)s
# Directory all our build output goes into.
# Note that this must be two directories beneath src/ for unit tests to pass,
# as they reach into the src/ directory for data with relative paths.
builddir ?= $(builddir_name)/$(BUILDTYPE)
abs_builddir := $(abspath $(builddir))
depsdir := $(builddir)/.deps
# Object output directory.
obj := $(builddir)/obj
abs_obj := $(abspath $(obj))
# We build up a list of every single one of the targets so we can slurp in the
# generated dependency rule Makefiles in one pass.
all_deps :=
%(make_global_settings)s
# C++ apps need to be linked with g++.
#
# Note: flock is used to seralize linking. Linking is a memory-intensive
# process so running parallel links can often lead to thrashing. To disable
# the serialization, override LINK via an envrionment variable as follows:
#
# export LINK=g++
#
# This will allow make to invoke N linker processes as specified in -jN.
LINK ?= %(flock)s $(builddir)/linker.lock $(CXX)
CC.target ?= $(CC)
CFLAGS.target ?= $(CFLAGS)
CXX.target ?= $(CXX)
CXXFLAGS.target ?= $(CXXFLAGS)
LINK.target ?= $(LINK)
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
ARFLAGS.target ?= %(ARFLAGS.target)s
# N.B.: the logic of which commands to run should match the computation done
# in gyp's make.py where ARFLAGS.host etc. is computed.
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= gcc
CFLAGS.host ?=
CXX.host ?= g++
CXXFLAGS.host ?=
LINK.host ?= g++
LDFLAGS.host ?=
AR.host ?= ar
ARFLAGS.host := %(ARFLAGS.host)s
# Define a dir function that can handle spaces.
# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
# "leading spaces cannot appear in the text of the first argument as written.
# These characters can be put into the argument value by variable substitution."
empty :=
space := $(empty) $(empty)
# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
# Flags to make gcc output dependency info. Note that you need to be
# careful here to use the flags that ccache and distcc can understand.
# We write to a dep file on the side first and then rename at the end
# so we can't end up with a broken dep file.
depfile = $(depsdir)/$(call replace_spaces,$@).d
DEPFLAGS = -MMD -MF $(depfile).raw
# We have to fixup the deps output in a few ways.
# (1) the file output should mention the proper .o file.
# ccache or distcc lose the path to the target, so we convert a rule of
# the form:
# foobar.o: DEP1 DEP2
# into
# path/to/foobar.o: DEP1 DEP2
# (2) we want missing files not to cause us to fail to build.
# We want to rewrite
# foobar.o: DEP1 DEP2 \\
# DEP3
# to
# DEP1:
# DEP2:
# DEP3:
# so if the files are missing, they're just considered phony rules.
# We have to do some pretty insane escaping to get those backslashes
# and dollar signs past make, the shell, and sed at the same time.
# Doesn't work with spaces, but that's fine: .d files have spaces in
# their names replaced with other characters."""
r"""
define fixup_dep
# The depfile may not exist if the input file didn't have any #includes.
touch $(depfile).raw
# Fixup path as in (1).
sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
# Add extra rules as in (2).
# We remove slashes and replace spaces with new lines;
# remove blank lines;
# delete the first line and append a colon to the remaining lines.
sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
grep -v '^$$' |\
sed -e 1d -e 's|$$|:|' \
>> $(depfile)
rm $(depfile).raw
endef
"""
"""
# Command definitions:
# - cmd_foo is the actual command to run;
# - quiet_cmd_foo is the brief-output summary of the command.
quiet_cmd_cc = CC($(TOOLSET)) $@
cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_cxx = CXX($(TOOLSET)) $@
cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
%(extra_commands)s
quiet_cmd_touch = TOUCH $@
cmd_touch = touch $@
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
%(link_commands)s
"""
r"""
# Define an escape_quotes function to escape single quotes.
# This allows us to handle quotes properly as long as we always use
# use single quotes and escape_quotes.
escape_quotes = $(subst ','\'',$(1))
# This comment is here just to include a ' to unconfuse syntax highlighting.
# Define an escape_vars function to escape '$' variable syntax.
# This allows us to read/write command lines with shell variables (e.g.
# $LD_LIBRARY_PATH), without triggering make substitution.
escape_vars = $(subst $$,$$$$,$(1))
# Helper that expands to a shell command to echo a string exactly as it is in
# make. This uses printf instead of echo because printf's behaviour with respect
# to escape sequences is more portable than echo's across different shells
# (e.g., dash, bash).
exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
"""
"""
# Helper to compare the command we're about to run against the command
# we logged the last time we ran the command. Produces an empty
# string (false) when the commands match.
# Tricky point: Make has no string-equality test function.
# The kernel uses the following, but it seems like it would have false
# positives, where one string reordered its arguments.
# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
# $(filter-out $(cmd_$@), $(cmd_$(1))))
# We instead substitute each for the empty string into the other, and
# say they're equal if both substitutions produce the empty string.
# .d files contain """ + SPACE_REPLACEMENT + \
""" instead of spaces, take that into account.
command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
$(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
# Helper that is non-empty when a prerequisite changes.
# Normally make does this implicitly, but we force rules to always run
# so we can check their command lines.
# $? -- new prerequisites
# $| -- order-only dependencies
prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# Helper that executes all postbuilds, and deletes the output file when done
# if any of the postbuilds failed.
define do_postbuilds
@E=0;\\
for p in $(POSTBUILDS); do\\
eval $$p;\\
F=$$?;\\
if [ $$F -ne 0 ]; then\\
E=$$F;\\
fi;\\
done;\\
if [ $$E -ne 0 ]; then\\
rm -rf "$@";\\
exit $$E;\\
fi
endef
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
# Third argument, if non-zero, makes it do POSTBUILDS processing.
# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
SPACE_REPLACEMENT + """ for
# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
""" characters.
define do_cmd
$(if $(or $(command_changed),$(prereq_changed)),
@$(call exact_echo, $($(quiet)cmd_$(1)))
@mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
$(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
@$(cmd_$(1))
@echo " $(quiet_cmd_$(1)): Finished",
@$(cmd_$(1))
)
@$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
@$(if $(2),$(fixup_dep))
$(if $(and $(3), $(POSTBUILDS)),
$(call do_postbuilds)
)
)
endef
# Declare the "%(default_target)s" target first so it is the default,
# even though we don't have the deps yet.
.PHONY: %(default_target)s
%(default_target)s:
# Use FORCE_DO_CMD to force a target to run. Should be coupled with
# do_cmd.
.PHONY: FORCE_DO_CMD
FORCE_DO_CMD:
""")
# Extra command definitions for mac (Objective-C, precompiled headers, and
# the gyp-mac-tool helpers); presumably substituted as %(extra_commands)s in
# SHARED_HEADER -- confirm against the (out-of-view) GenerateOutput code.
SHARED_HEADER_MAC_COMMANDS = """
quiet_cmd_objc = CXX($(TOOLSET)) $@
cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_objcxx = CXX($(TOOLSET)) $@
cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# Commands for precompiled header files.
quiet_cmd_pch_c = CXX($(TOOLSET)) $@
cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_m = CXX($(TOOLSET)) $@
cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# gyp-mac-tool is written next to the root Makefile by gyp.
# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
# already.
quiet_cmd_mac_tool = MACTOOL $(4) $<
cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
"""
# Solaris counterpart of the mac commands above: just the gyp-sun-tool hook.
SHARED_HEADER_SUN_COMMANDS = """
# gyp-sun-tool is written next to the root Makefile by gyp.
# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
# already.
quiet_cmd_sun_tool = SUNTOOL $(4) $<
cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@"
"""
def WriteRootHeaderSuffixRules(writer):
  """Emit the top-level suffix rules to |writer|.

  Three groups are written, one per source location ($(srcdir),
  $(obj).$(TOOLSET), $(obj)); every group maps each compilable
  extension onto a do_cmd recipe that puts its output into $(obj).
  """
  extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)

  def emit_group(rule_template):
    # One pattern rule plus its recipe per compilable extension.
    for ext in extensions:
      writer.write(rule_template % ext)
      writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])

  writer.write('# Suffix rules, putting all outputs into $(obj).\n')
  emit_group('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n')
  writer.write('\n# Try building from generated source, too.\n')
  emit_group('$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n')
  writer.write('\n')
  emit_group('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n')
  writer.write('\n')
# Banner comments written between the suffix-rule groups; the same text is
# emitted by WriteRootHeaderSuffixRules above, and these constants are used
# by MakefileWriter.Write below.
SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
# Suffix rules, putting all outputs into $(obj).
""")
SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
# Try building from generated source, too.
""")
# Trailer of the toplevel Makefile: concatenates every target's .d file into
# a single all.deps so make loads all dependency info in one include.
SHARED_FOOTER = """\
# "all" is a concatenation of the "all" targets from all the included
# sub-makefiles. This is just here to clarify.
all:
# Add in dependency-tracking rules. $(all_deps) is the list of every single
# target in our tree. Only consider the ones with .d (dependency) info:
d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
ifneq ($(d_files),)
# Rather than include each individual .d file, concatenate them into a
# single file which make is able to load faster. We split this into
# commands that take 512 files at a time to avoid overflowing the
# command line.
$(shell cat $(wordlist 1,512,$(d_files)) > $(depsdir)/all.deps)
%(generate_all_deps)s
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
$(depsdir)/all.deps: ;
include $(depsdir)/all.deps
endif
"""
# Banner written at the top of every generated makefile (see
# MakefileWriter.Write and WriteSubMake below).
header = """\
# This file is generated by gyp; do not edit.
"""
# Maps every compilable file extension to the do_cmd that compiles it.
# On mac, CalculateVariables() above extends this with '.m' (objc) and
# '.mm' (objcxx).
COMPILABLE_EXTENSIONS = {
  '.c': 'cc',
  '.cc': 'cxx',
  '.cpp': 'cxx',
  '.cxx': 'cxx',
  '.s': 'cc',
  '.S': 'cc',
}
def Compilable(filename):
  """Return true if the file is compilable (should be in OBJS)."""
  # any() short-circuits exactly like the original hand-rolled
  # generator-plus-loop, and always returns a bool.
  return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
def Linkable(filename):
  """Return true if the file is linkable (should be on the link line)."""
  # Object files are exactly the names whose last two characters are '.o'.
  return filename[-2:] == '.o'
def Target(filename):
  """Translate a compilable filename to its .o target."""
  (root, _ext) = os.path.splitext(filename)
  return root + '.o'
def EscapeShellArgument(s):
  """Quote |s| so a POSIX shell interprets it literally.

  Wraps the string in single quotes, closing and reopening the quoting
  around every embedded single quote (the '\\'' trick).  Taken from
  http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
  """
  return "'%s'" % s.replace("'", "'\\''")
def EscapeMakeVariableExpansion(s):
  """Double every '$' so make treats |s| literally instead of expanding it."""
  return s.replace('$', '$$')
def EscapeCppDefine(s):
  """Escape a CPP define so that it will reach the compiler unaltered.

  Applies shell quoting first, then make's '$' escaping on the result.
  """
  return EscapeMakeVariableExpansion(EscapeShellArgument(s))
def QuoteIfNecessary(string):
  """Wrap |string| in double quotes (escaping embedded ones) when it
  contains a double quote; otherwise return it unchanged.

  TODO: Should this ideally be replaced with one or more of the Escape*
  helpers above?
  """
  if '"' not in string:
    return string
  return '"%s"' % string.replace('"', '\\"')
def StringToMakefileVariable(string):
  """Convert a string to a value that is acceptable as a make variable name.

  Spaces, braces and '$' are each replaced by a single '_'.
  TODO: replace other metacharacters that we encounter.
  """
  for bad in ' {}$':
    string = string.replace(bad, '_')
  return string
# Prefix prepended to relative paths by Sourceify() below.  Empty here;
# NOTE(review): presumably reassigned during generation -- confirm.
srcdir_prefix = ''
def Sourceify(path):
  """Convert a path to its source directory form."""
  # Paths containing make variables and absolute paths pass through
  # untouched; only plain relative paths get the srcdir prefix.
  if '$(' in path or os.path.isabs(path):
    return path
  return srcdir_prefix + path
def QuoteSpaces(s, quote=r'\ '):
  """Return |s| with every space replaced by |quote| (backslash-space)."""
  return quote.join(s.split(' '))
# Map from qualified target to path to output.
# (Populated by MakefileWriter.Write below.)
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class MakefileWriter:
  """MakefileWriter packages up the writing of one target-specific foobar.mk.
  Its only real entry point is Write(), and is mostly used for namespacing.
  """
  def __init__(self, generator_flags, flavor):
    # generator_flags: dict of generator options (e.g. android_ndk_version).
    self.generator_flags = generator_flags
    self.flavor = flavor
    # Keep track of the total number of outputs for this makefile.
    self._num_outputs = 0
    # Per-extension makefile suffix-rule templates, already specialized to
    # this target ($(TARGET)-scoped), one dict per source location:
    #   suffix_rules_srcdir:  sources in the source tree ($(srcdir))
    #   suffix_rules_objdir1: generated sources under $(obj).$(TOOLSET)
    #   suffix_rules_objdir2: generated sources under $(obj)
    self.suffix_rules_srcdir = {}
    self.suffix_rules_objdir1 = {}
    self.suffix_rules_objdir2 = {}
    # Generate suffix rules for all compilable extensions.
    for ext in COMPILABLE_EXTENSIONS.keys():
      # Suffix rules for source folder.
      self.suffix_rules_srcdir.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
      # Suffix rules for generated source files.
      self.suffix_rules_objdir1.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
      self.suffix_rules_objdir2.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
def NumOutputs(self):
return self._num_outputs
  def Write(self, qualified_target, base_path, output_filename, spec, configs,
            part_of_all):
    """The main entry point: writes a .mk file for a single target.

    Arguments:
      qualified_target: target we're generating
      base_path: path relative to source root we're building in, used to
                 resolve target-relative paths
      output_filename: output .mk file name to write
      spec, configs: gyp info
      part_of_all: flag indicating this target is part of 'all'
    """
    ensure_directory_exists(output_filename)
    # self.fp stays open for the duration of this Write() call only.
    self.fp = open(output_filename, 'w')
    self.fp.write(header)
    self.path = base_path
    self.target = spec['target_name']
    self.type = spec['type']
    self.toolset = spec['toolset']
    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    else:
      self.xcode_settings = None
    deps, link_deps = self.ComputeDeps(spec)
    # Some of the generation below can add extra output, sources, or
    # link dependencies. All of the out params of the functions that
    # follow use names like extra_foo.
    extra_outputs = []
    extra_sources = []
    extra_link_deps = []
    extra_mac_bundle_resources = []
    mac_bundle_deps = []
    if self.is_mac_bundle:
      self.output = self.ComputeMacBundleOutput(spec)
      self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
    else:
      self.output = self.output_binary = self.ComputeOutput(spec)
    # NOTE(review): assigned per Write() call; could live at class scope.
    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
                                 'shared_library')
    if self.type in self._INSTALLABLE_TARGETS:
      self.alias = os.path.basename(self.output)
      install_path = self._InstallableTargetInstallPath()
    else:
      self.alias = self.output
      install_path = self.output
    self.WriteLn("TOOLSET := " + self.toolset)
    self.WriteLn("TARGET := " + self.target)
    # Actions must come first, since they can generate more OBJs for use below.
    if 'actions' in spec:
      self.WriteActions(spec['actions'], extra_sources, extra_outputs,
                        extra_mac_bundle_resources, part_of_all)
    # Rules must be early like actions.
    if 'rules' in spec:
      self.WriteRules(spec['rules'], extra_sources, extra_outputs,
                      extra_mac_bundle_resources, part_of_all)
    if 'copies' in spec:
      self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
    # Bundle resources.
    if self.is_mac_bundle:
      all_mac_bundle_resources = (
          spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
      self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
      self.WriteMacInfoPlist(mac_bundle_deps)
    # Sources.
    all_sources = spec.get('sources', []) + extra_sources
    if all_sources:
      self.WriteSources(
          configs, deps, all_sources, extra_outputs,
          extra_link_deps, part_of_all,
          gyp.xcode_emulation.MacPrefixHeader(
              self.xcode_settings, self.Absolutify, self.Pchify))
      # NOTE(review): relies on Python 2 filter() returning a list for the
      # truthiness test below.
      sources = filter(Compilable, all_sources)
      if sources:
        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
        extensions = set([os.path.splitext(s)[1] for s in sources])
        for ext in extensions:
          if ext in self.suffix_rules_srcdir:
            self.WriteLn(self.suffix_rules_srcdir[ext])
        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
        for ext in extensions:
          if ext in self.suffix_rules_objdir1:
            self.WriteLn(self.suffix_rules_objdir1[ext])
        for ext in extensions:
          if ext in self.suffix_rules_objdir2:
            self.WriteLn(self.suffix_rules_objdir2[ext])
        self.WriteLn('# End of this set of suffix rules')
    # Add dependency from bundle to bundle binary.
    if self.is_mac_bundle:
      mac_bundle_deps.append(self.output_binary)
    self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
                     mac_bundle_deps, extra_outputs, part_of_all)
    # Update global list of target outputs, used in dependency tracking.
    target_outputs[qualified_target] = install_path
    # Update global list of link dependencies.
    if self.type in ('static_library', 'shared_library'):
      target_link_deps[qualified_target] = self.output_binary
    # Currently any versions have the same effect, but in future the behavior
    # could be different.
    if self.generator_flags.get('android_ndk_version', None):
      self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
    self.fp.close()
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
"""Write a "sub-project" Makefile.
This is a small, wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
ensure_directory_exists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
# For consistency with other builders, put sub-project build output in the
# sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
self.WriteLn('export builddir_name ?= %s' %
os.path.join(os.path.dirname(output_filename), build_dir))
self.WriteLn('.PHONY: all')
self.WriteLn('all:')
if makefile_path:
makefile_path = ' -C ' + makefile_path
self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
self.fp.close()
  def WriteActions(self, actions, extra_sources, extra_outputs,
                   extra_mac_bundle_resources, part_of_all):
    """Write Makefile code for any 'actions' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   actions (used to make other pieces dependent on these
                   actions)
    part_of_all: flag indicating this target is part of 'all'
    """
    for action in actions:
      name = self.target + '_' + StringToMakefileVariable(action['action_name'])
      self.WriteLn('### Rules for action "%s":' % action['action_name'])
      inputs = action['inputs']
      outputs = action['outputs']
      # Build up a list of outputs.
      # Collect the output dirs we'll need.
      dirs = set()
      for out in outputs:
        dir = os.path.split(out)[0]
        if dir:
          dirs.add(dir)
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += outputs
      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += outputs
      # Write the actual command.
      command = gyp.common.EncodePOSIXShellList(action['action'])
      if 'message' in action:
        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
      else:
        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
      if len(dirs) > 0:
        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
      cd_action = 'cd %s; ' % Sourceify(self.path or '.')
      # command and cd_action get written to a toplevel variable called
      # cmd_foo. Toplevel variables can't handle things that change per
      # makefile like $(TARGET), so hardcode the target.
      command = command.replace('$(TARGET)', self.target)
      cd_action = cd_action.replace('$(TARGET)', self.target)
      # Set LD_LIBRARY_PATH in case the action runs an executable from this
      # build which links to shared libs from this build.
      # actions run on the host, so they should in theory only use host
      # libraries, but until everything is made cross-compile safe, also use
      # target libraries.
      # TODO(piman): when everything is cross-compile safe, remove lib.target
      self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
                   '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
                   'export LD_LIBRARY_PATH; '
                   '%s%s'
                   % (name, cd_action, command))
      self.WriteLn()
      # NOTE(review): relies on Python 2 map() returning a list -- outputs
      # is indexed ([0]) just below.
      outputs = map(self.Absolutify, outputs)
      # The makefile rules are all relative to the top dir, but the gyp actions
      # are defined relative to their containing dir. This replaces the obj
      # variable for the action rule with an absolute version so that the output
      # goes in the right place.
      # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
      # it's superfluous for the "extra outputs", and this avoids accidentally
      # writing duplicate dummy rules for those outputs.
      # Same for environment.
      self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
      self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
      self.WriteXcodeEnv(outputs[0], self.GetXcodeEnv())
      for input in inputs:
        assert ' ' not in input, (
            "Spaces in action input filenames not supported (%s)" % input)
      for output in outputs:
        assert ' ' not in output, (
            "Spaces in action output filenames not supported (%s)" % output)
      # See the comment in WriteCopies about expanding env vars.
      env = self.GetXcodeEnv()
      outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
      inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
      self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
                      part_of_all=part_of_all, command=name)
      # Stuff the outputs in a variable so we can refer to them later.
      outputs_variable = 'action_%s_outputs' % name
      self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
      extra_outputs.append('$(%s)' % outputs_variable)
      self.WriteLn()
    self.WriteLn()
  def WriteRules(self, rules, extra_sources, extra_outputs,
                 extra_mac_bundle_resources, part_of_all):
    """Write Makefile code for any 'rules' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   rules (used to make other pieces dependent on these rules)
    part_of_all: flag indicating this target is part of 'all'
    """
    for rule in rules:
      name = self.target + '_' + StringToMakefileVariable(rule['rule_name'])
      # count distinguishes the per-source cmd_<name>_<count> variables.
      count = 0
      self.WriteLn('### Generated for rule %s:' % name)
      all_outputs = []
      for rule_source in rule.get('rule_sources', []):
        dirs = set()
        (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
        (rule_source_root, rule_source_ext) = \
            os.path.splitext(rule_source_basename)
        outputs = [self.ExpandInputRoot(out, rule_source_root,
                                        rule_source_dirname)
                   for out in rule['outputs']]
        # If an output is just the file name, turn it into a path so
        # FixupArgPath() will know to Absolutify() it.
        outputs = map(
            lambda x : os.path.dirname(x) and x or os.path.join('.', x),
            outputs)
        for out in outputs:
          dir = os.path.dirname(out)
          if dir:
            dirs.add(dir)
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources += outputs
        if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          extra_mac_bundle_resources += outputs
        all_outputs += outputs
        inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
                                    rule.get('inputs', [])))
        actions = ['$(call do_cmd,%s_%d)' % (name, count)]
        if name == 'resources_grit':
          # HACK: This is ugly. Grit intentionally doesn't touch the
          # timestamp of its output file when the file doesn't change,
          # which is fine in hash-based dependency systems like scons
          # and forge, but not kosher in the make world. After some
          # discussion, hacking around it here seems like the least
          # amount of pain.
          actions += ['@touch --no-create $@']
        # Only write the 'obj' and 'builddir' rules for the "primary" output
        # (:1); it's superfluous for the "extra outputs", and this avoids
        # accidentally writing duplicate dummy rules for those outputs.
        # NOTE(review): unlike WriteActions, outputs[0] is not QuoteSpaces()d
        # here; spaces are rejected by the assert below anyway.
        self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
        self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
        self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
        for output in outputs:
          assert ' ' not in output, (
              "Spaces in rule filenames not yet supported (%s)" % output)
        self.WriteLn('all_deps += %s' % ' '.join(outputs))
        self._num_outputs += len(outputs)
        action = [self.ExpandInputRoot(ac, rule_source_root,
                                       rule_source_dirname)
                  for ac in rule['action']]
        mkdirs = ''
        if len(dirs) > 0:
          mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
        cd_action = 'cd %s; ' % Sourceify(self.path or '.')
        # action, cd_action, and mkdirs get written to a toplevel variable
        # called cmd_foo. Toplevel variables can't handle things that change
        # per makefile like $(TARGET), so hardcode the target.
        action = gyp.common.EncodePOSIXShellList(action)
        action = action.replace('$(TARGET)', self.target)
        cd_action = cd_action.replace('$(TARGET)', self.target)
        mkdirs = mkdirs.replace('$(TARGET)', self.target)
        # Set LD_LIBRARY_PATH in case the rule runs an executable from this
        # build which links to shared libs from this build.
        # rules run on the host, so they should in theory only use host
        # libraries, but until everything is made cross-compile safe, also use
        # target libraries.
        # TODO(piman): when everything is cross-compile safe, remove lib.target
        self.WriteLn(
            "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
            "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
            "export LD_LIBRARY_PATH; "
            "%(cd_action)s%(mkdirs)s%(action)s" % {
              'action': action,
              'cd_action': cd_action,
              'count': count,
              'mkdirs': mkdirs,
              'name': name,
            })
        self.WriteLn(
            'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
              'count': count,
              'name': name,
            })
        self.WriteLn()
        count += 1
      outputs_variable = 'rule_%s_outputs' % name
      self.WriteList(all_outputs, outputs_variable)
      extra_outputs.append('$(%s)' % outputs_variable)
      self.WriteLn('### Finished generating for rule: %s' % name)
      self.WriteLn()
    self.WriteLn('### Finished generating for all rules')
    self.WriteLn('')
  def WriteCopies(self, copies, extra_outputs, part_of_all):
    """Write Makefile code for any 'copies' from the gyp input.
    extra_outputs: a list that will be filled in with any outputs of this action
      (used to make other pieces dependent on this action)
    part_of_all: flag indicating this target is part of 'all'
    """
    self.WriteLn('### Generated for copy rule.')
    # All copy outputs are collected into one make variable named after the
    # target, so dependents can depend on every copy at once.
    variable = self.target + '_copies'
    outputs = []
    for copy in copies:
      for path in copy['files']:
        # Absolutify() calls normpath, stripping trailing slashes.
        path = Sourceify(self.Absolutify(path))
        filename = os.path.split(path)[1]
        output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
                                                        filename)))
        # If the output path has variables in it, which happens in practice for
        # 'copies', writing the environment as target-local doesn't work,
        # because the variables are already needed for the target name.
        # Copying the environment variables into global make variables doesn't
        # work either, because then the .d files will potentially contain spaces
        # after variable expansion, and .d file handling cannot handle spaces.
        # As a workaround, manually expand variables at gyp time. Since 'copies'
        # can't run scripts, there's no need to write the env then.
        # WriteDoCmd() will escape spaces for .d files.
        env = self.GetXcodeEnv()
        output = gyp.xcode_emulation.ExpandEnvVars(output, env)
        path = gyp.xcode_emulation.ExpandEnvVars(path, env)
        self.WriteDoCmd([output], [path], 'copy', part_of_all)
        outputs.append(output)
    self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
    extra_outputs.append('$(%s)' % variable)
    self.WriteLn()
def WriteMacBundleResources(self, resources, bundle_deps):
"""Writes Makefile code for 'mac_bundle_resources'."""
self.WriteLn('### Generated for mac_bundle_resources')
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
map(Sourceify, map(self.Absolutify, resources))):
self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
part_of_all=True)
bundle_deps.append(output)
  def WriteMacInfoPlist(self, bundle_deps):
    """Write Makefile code for bundle Info.plist files."""
    info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
        self.Absolutify)
    if not info_plist:
      return
    if defines:
      # Create an intermediate file to store preprocessed results.
      # The plist is run through the preprocessor (do_cmd,infoplist) with
      # INFOPLIST_DEFINES, and the preprocessed file replaces the original
      # as the copy source below.
      intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
          os.path.basename(info_plist))
      self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
          quoter=EscapeCppDefine)
      self.WriteMakeRule([intermediate_plist], [info_plist],
          ['$(call do_cmd,infoplist)',
           # "Convert" the plist so that any weird whitespace changes from the
           # preprocessor do not affect the XML parser in mac_tool.
           '@plutil -convert xml1 $@ $@'])
      info_plist = intermediate_plist
    # plists can contain envvars and substitute them into the file.
    # The env must be exported before the copy rule so mac_tool sees it.
    self.WriteXcodeEnv(out, self.GetXcodeEnv(additional_settings=extra_env))
    self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
                    part_of_all=True)
    bundle_deps.append(out)
def WriteSources(self, configs, deps, sources,
extra_outputs, extra_link_deps,
part_of_all, precompiled_header):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
configs, deps, sources: input from gyp.
extra_outputs: a list of extra outputs this action should be dependent on;
used to serialize action/rules before compilation
extra_link_deps: a list that will be filled in with any outputs of
compilation (to be used in link lines)
part_of_all: flag indicating this target is part of 'all'
"""
# Write configuration-specific variables for CFLAGS, etc.
for configname in sorted(configs.keys()):
config = configs[configname]
self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
quoter=EscapeCppDefine)
if self.flavor == 'mac':
cflags = self.xcode_settings.GetCflags(configname)
cflags_c = self.xcode_settings.GetCflagsC(configname)
cflags_cc = self.xcode_settings.GetCflagsCC(configname)
cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
else:
cflags = config.get('cflags')
cflags_c = config.get('cflags_c')
cflags_cc = config.get('cflags_cc')
self.WriteLn("# Flags passed to all source files.");
self.WriteList(cflags, 'CFLAGS_%s' % configname)
self.WriteLn("# Flags passed to only C files.");
self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
self.WriteLn("# Flags passed to only C++ files.");
self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
if self.flavor == 'mac':
self.WriteLn("# Flags passed to only ObjC files.");
self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
self.WriteLn("# Flags passed to only ObjC++ files.");
self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
includes = config.get('include_dirs')
if includes:
includes = map(Sourceify, map(self.Absolutify, includes))
self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
compilable = filter(Compilable, sources)
objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
self.WriteList(objs, 'OBJS')
for obj in objs:
assert ' ' not in obj, (
"Spaces in object filenames not supported (%s)" % obj)
self.WriteLn('# Add to the list of files we specially track '
'dependencies for.')
self.WriteLn('all_deps += $(OBJS)')
self._num_outputs += len(objs)
self.WriteLn()
# Make sure our dependencies are built first.
if deps:
self.WriteMakeRule(['$(OBJS)'], deps,
comment = 'Make sure our dependencies are built '
'before any of us.',
order_only = True)
# Make sure the actions and rules run first.
# If they generate any extra headers etc., the per-.o file dep tracking
# will catch the proper rebuilds, so order only is still ok here.
if extra_outputs:
self.WriteMakeRule(['$(OBJS)'], extra_outputs,
comment = 'Make sure our actions/rules run '
'before any of us.',
order_only = True)
pchdeps = precompiled_header.GetObjDependencies(compilable, objs )
if pchdeps:
self.WriteLn('# Dependencies from obj files to their precompiled headers')
for source, obj, gch in pchdeps:
self.WriteLn('%s: %s' % (obj, gch))
self.WriteLn('# End precompiled header dependencies')
if objs:
extra_link_deps.append('$(OBJS)')
self.WriteLn("""\
# CFLAGS et al overrides must be target-local.
# See "Target-specific Variable Values" in the GNU Make manual.""")
self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
self.WriteLn("$(OBJS): GYP_CFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('c') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_C_$(BUILDTYPE))")
self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('cc') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_CC_$(BUILDTYPE))")
if self.flavor == 'mac':
self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('m') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_C_$(BUILDTYPE)) "
"$(CFLAGS_OBJC_$(BUILDTYPE))")
self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('mm') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_CC_$(BUILDTYPE)) "
"$(CFLAGS_OBJCC_$(BUILDTYPE))")
self.WritePchTargets(precompiled_header.GetGchBuildCommands())
# If there are any object files in our input file list, link them into our
# output.
extra_link_deps += filter(Linkable, sources)
self.WriteLn()
def WritePchTargets(self, pch_commands):
"""Writes make rules to compile prefix headers."""
if not pch_commands:
return
for gch, lang_flag, lang, input in pch_commands:
extra_flags = {
'c': '$(CFLAGS_C_$(BUILDTYPE))',
'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
}[lang]
var_name = {
'c': 'GYP_PCH_CFLAGS',
'cc': 'GYP_PCH_CXXFLAGS',
'm': 'GYP_PCH_OBJCFLAGS',
'mm': 'GYP_PCH_OBJCXXFLAGS',
}[lang]
self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"$(CFLAGS_$(BUILDTYPE)) " +
extra_flags)
self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
self.WriteLn('')
assert ' ' not in gch, (
"Spaces in gch filenames not supported (%s)" % gch)
self.WriteLn('all_deps += %s' % gch)
self.WriteLn('')
  def ComputeOutputBasename(self, spec):
    """Return the 'output basename' of a gyp spec.
    E.g., the loadable module 'foobar' in directory 'baz' will produce
      'libfoobar.so'
    """
    assert not self.is_mac_bundle
    # On mac, xcode_settings knows the platform-specific executable name.
    if self.flavor == 'mac' and self.type in (
        'static_library', 'executable', 'shared_library', 'loadable_module'):
      return self.xcode_settings.GetExecutablePath()
    target = spec['target_name']
    target_prefix = ''
    target_ext = ''
    if self.type == 'static_library':
      # Strip an existing 'lib' prefix so it is not doubled below.
      if target[:3] == 'lib':
        target = target[3:]
      target_prefix = 'lib'
      target_ext = '.a'
    elif self.type in ('loadable_module', 'shared_library'):
      if target[:3] == 'lib':
        target = target[3:]
      target_prefix = 'lib'
      target_ext = '.so'
    elif self.type == 'none':
      # 'none' targets produce only a stamp file.
      target = '%s.stamp' % target
    elif self.type != 'executable':
      # NOTE(review): under Python 2 this 'print (...)' emits a tuple, not a
      # formatted message; presumably a plain print was intended -- confirm.
      print ("ERROR: What output file should be generated?",
             "type", self.type, "target", target)
    # The spec may override the prefix, the base name and the extension.
    target_prefix = spec.get('product_prefix', target_prefix)
    target = spec.get('product_name', target)
    product_ext = spec.get('product_extension')
    if product_ext:
      target_ext = '.' + product_ext
    return target_prefix + target + target_ext
def _InstallImmediately(self):
return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
'static_library', 'executable', 'shared_library', 'loadable_module')
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
assert not self.is_mac_bundle
path = os.path.join('$(obj).' + self.toolset, self.path)
if self.type == 'executable' or self._InstallImmediately():
path = '$(builddir)'
path = spec.get('product_dir', path)
return os.path.join(path, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return os.path.join(path, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self, spec):
"""Return the 'output' (full output path) to the binary in a bundle."""
path = generator_default_variables['PRODUCT_DIR']
return os.path.join(path, self.xcode_settings.GetExecutablePath())
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
# TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
# This hack makes it work:
# link_deps.extend(spec.get('libraries', []))
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
  def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
    # Emit an order-only rule making the binary depend on extra outputs
    # (action/rule products) so they are built first.
    # NOTE(review): the |target| parameter is unused -- the rule is always
    # written against self.output_binary; some callers pass self.output.
    # Confirm whether that is intended.
    self.WriteMakeRule([self.output_binary], extra_outputs,
                       comment = 'Build our special outputs first.',
                       order_only = True)
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
extra_outputs, part_of_all):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
extra_outputs: any extra outputs that our target should depend on
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn('### Rules for final target.')
if extra_outputs:
self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
self.WriteMakeRule(extra_outputs, deps,
comment=('Preserve order dependency of '
'special output on deps.'),
order_only = True,
multiple_output_trick = False)
target_postbuilds = {}
if self.type != 'none':
for configname in sorted(configs.keys()):
config = configs[configname]
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(configname,
generator_default_variables['PRODUCT_DIR'], self.Absolutify)
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
target_postbuild = self.xcode_settings.GetTargetPostbuilds(
configname,
QuoteSpaces(self.output),
QuoteSpaces(self.output_binary))
if target_postbuild:
target_postbuilds[configname] = target_postbuild
else:
ldflags = config.get('ldflags', [])
# Compute an rpath for this output if needed.
if any(dep.endswith('.so') for dep in deps):
# We want to get the literal string "$ORIGIN" into the link command,
# so we need lots of escaping.
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset)
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
libraries = spec.get('libraries')
if libraries:
# Remove duplicate entries
libraries = gyp.common.uniquer(libraries)
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries)
self.WriteList(libraries, 'LIBS')
self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
# Postbuild actions. Like actions, but implicitly depend on the target's
# output.
postbuilds = []
if self.flavor == 'mac':
if target_postbuilds:
postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
postbuilds.extend(
gyp.xcode_emulation.GetSpecPostbuildCommands(spec, self.Absolutify))
if postbuilds:
# Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
# so we must output its definition first, since we declare variables
# using ":=".
self.WriteXcodeEnv(self.output, self.GetXcodePostbuildEnv())
for configname in target_postbuilds:
self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
(QuoteSpaces(self.output),
configname,
gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
for i in xrange(len(postbuilds)):
if not postbuilds[i].startswith('$'):
postbuilds[i] = EscapeShellArgument(postbuilds[i])
self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
self.WriteLn('%s: POSTBUILDS := %s' % (
QuoteSpaces(self.output), ' '.join(postbuilds)))
# A bundle directory depends on its dependencies such as bundle resources
# and bundle binary. When all dependencies have been built, the bundle
# needs to be packaged.
if self.is_mac_bundle:
# If the framework doesn't contain a binary, then nothing depends
# on the actions -- make the framework depend on them directly too.
self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
# Bundle dependencies. Note that the code below adds actions to this
# target, so if you move these two lines, move the lines below as well.
self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
# After the framework is built, package it. Needs to happen before
# postbuilds, since postbuilds depend on this.
if self.type in ('shared_library', 'loadable_module'):
self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
self.xcode_settings.GetFrameworkVersion())
# Bundle postbuilds can depend on the whole bundle, so run them after
# the bundle is packaged, not already after the bundle binary is done.
if postbuilds:
self.WriteLn('\t@$(call do_postbuilds)')
postbuilds = [] # Don't write postbuilds for target's output.
# Needed by test/mac/gyptest-rebuild.py.
self.WriteLn('\t@true # No-op, used by tests')
# Since this target depends on binary and resources which are in
# nested subfolders, the framework directory will be older than
# its dependencies usually. To prevent this rule from executing
# on every build (expensive, especially with postbuilds), expliclity
# update the time on the framework directory.
self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
if postbuilds:
assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
'on the bundle, not the binary (target \'%s\')' % self.target)
assert 'product_dir' not in spec, ('Postbuilds do not work with '
'custom product_dir')
if self.type == 'executable':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
postbuilds=postbuilds)
elif self.type == 'static_library':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'shared_library':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'loadable_module':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in module input filenames not supported (%s)" % link_dep)
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd(
[self.output_binary], link_deps, 'solink_module', part_of_all,
postbuilds=postbuilds)
elif self.type == 'none':
# Write a stamp line.
self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
postbuilds=postbuilds)
else:
print "WARNING: no output for", self.type, target
# Add an alias for each target (if there are any outputs).
# Installable target aliases are created below.
if ((self.output and self.output != self.target) and
(self.type not in self._INSTALLABLE_TARGETS)):
self.WriteMakeRule([self.target], [self.output],
comment='Add target alias', phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [self.target],
comment = 'Add target alias to "all" target.',
phony = True)
# Add special-case rules for our installable targets.
# 1) They need to install to the build dir or "product" dir.
# 2) They get shortcuts for building (e.g. "make chrome").
# 3) They are part of "make all".
if self.type in self._INSTALLABLE_TARGETS:
if self.type == 'shared_library':
file_desc = 'shared library'
else:
file_desc = 'executable'
install_path = self._InstallableTargetInstallPath()
installable_deps = [self.output]
if self.flavor == 'mac' and not 'product_dir' in spec:
# On mac, products are created in install_path immediately.
assert install_path == self.output, '%s != %s' % (
install_path, self.output)
# Point the target alias to the final binary output.
self.WriteMakeRule([self.target], [install_path],
comment='Add target alias', phony = True)
if install_path != self.output:
assert not self.is_mac_bundle # See comment a few lines above.
self.WriteDoCmd([install_path], [self.output], 'copy',
comment = 'Copy this to the %s output path.' %
file_desc, part_of_all=part_of_all)
installable_deps.append(install_path)
if self.output != self.alias and self.alias != self.target:
self.WriteMakeRule([self.alias], installable_deps,
comment = 'Short alias for building this %s.' %
file_desc, phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [install_path],
comment = 'Add %s to "all" target.' % file_desc,
phony = True)
def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
self.fp.write(variable + " := ")
if list:
list = [quoter(prefix + l) for l in list]
self.fp.write(" \\\n\t".join(list))
self.fp.write("\n\n")
def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
postbuilds=False):
"""Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as support the V= make command line flag.
"""
suffix = ''
if postbuilds:
assert ',' not in command
suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
self.WriteMakeRule(outputs, inputs,
actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
comment = comment,
force = True)
# Add our outputs to the list of targets we read depfiles from.
# all_deps is only used for deps file reading, and for deps files we replace
# spaces with ? because escaping doesn't work with make's $(sort) and
# other functions.
outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
self.WriteLn('all_deps += %s' % ' '.join(outputs))
self._num_outputs += len(outputs)
  def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
                    order_only=False, force=False, phony=False,
                    multiple_output_trick=True):
    """Write a Makefile rule, with some extra tricks.
    outputs: a list of outputs for the rule (note: this is not directly
      supported by make; see comments below)
    inputs: a list of inputs for the rule
    actions: a list of shell commands to run for the rule
    comment: a comment to put in the Makefile above the rule (also useful
      for making this Python script's code self-documenting)
    order_only: if true, makes the dependency order-only
    force: if true, include FORCE_DO_CMD as an order-only dep
    phony: if true, the rule does not actually generate the named output, the
      output is just a name to run the rule
    multiple_output_trick: if true (the default), perform tricks such as dummy
      rules to avoid problems with multiple outputs.
    """
    # Spaces in filenames must be escaped up front; make cannot otherwise
    # represent them in rules.
    outputs = map(QuoteSpaces, outputs)
    inputs = map(QuoteSpaces, inputs)
    if comment:
      self.WriteLn('# ' + comment)
    if phony:
      self.WriteLn('.PHONY: ' + ' '.join(outputs))
    # TODO(evanm): just make order_only a list of deps instead of these hacks.
    if order_only:
      order_insert = '| '
    else:
      order_insert = ''
    if force:
      force_append = ' FORCE_DO_CMD'
    else:
      force_append = ''
    if actions:
      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
    # The rule proper: the first output depends on the inputs (optionally
    # order-only) plus FORCE_DO_CMD when forced.
    self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs),
                                 force_append))
    if actions:
      for action in actions:
        self.WriteLn('\t%s' % action)
    if multiple_output_trick and len(outputs) > 1:
      # If we have more than one output, a rule like
      #   foo bar: baz
      # that for *each* output we must run the action, potentially
      # in parallel. That is not what we're trying to write -- what
      # we want is that we run the action once and it generates all
      # the files.
      # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
      # discusses this problem and has this solution:
      # 1) Write the naive rule that would produce parallel runs of
      # the action.
      # 2) Make the outputs seralized on each other, so we won't start
      # a parallel run until the first run finishes, at which point
      # we'll have generated all the outputs and we're done.
      self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
      # Add a dummy command to the "extra outputs" rule, otherwise make seems to
      # think these outputs haven't (couldn't have?) changed, and thus doesn't
      # flag them as changed (i.e. include in '$?') when evaluating dependent
      # rules, which in turn causes do_cmd() to skip running dependent commands.
      self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
    self.WriteLn()
  def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
    """Write a set of LOCAL_XXX definitions for Android NDK.
    These variable definitions will be used by Android NDK but do nothing for
    non-Android applications.
    Arguments:
      module_name: Android NDK module name, which must be unique among all
        module names.
      all_sources: A list of source files (will be filtered by Compilable).
      link_deps: A list of link dependencies, which must be sorted in
        the order from dependencies to dependents.
    """
    # Only these target types map onto NDK build modules.
    if self.type not in ('executable', 'shared_library', 'static_library'):
      return
    self.WriteLn('# Variable definitions for Android applications')
    self.WriteLn('include $(CLEAR_VARS)')
    self.WriteLn('LOCAL_MODULE := ' + module_name)
    self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
                 '$(DEFS_$(BUILDTYPE)) '
                 # LOCAL_CFLAGS is applied to both of C and C++. There is
                 # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
                 # sources.
                 '$(CFLAGS_C_$(BUILDTYPE)) '
                 # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
                 # LOCAL_C_INCLUDES does not expect it. So put it in
                 # LOCAL_CFLAGS.
                 '$(INCS_$(BUILDTYPE))')
    # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
    self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
    self.WriteLn('LOCAL_C_INCLUDES :=')
    self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
    # Detect the C++ extension: pick the most frequent of .cc/.cpp/.cxx
    # among the sources (LOCAL_CPP_EXTENSION accepts a single extension).
    cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
    default_cpp_ext = '.cpp'
    for filename in all_sources:
      ext = os.path.splitext(filename)[1]
      if ext in cpp_ext:
        cpp_ext[ext] += 1
        if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
          default_cpp_ext = ext
    self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
    self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
                   'LOCAL_SRC_FILES')
    # Filter out those which do not match prefix and suffix and produce
    # the resulting list without prefix and suffix.
    def DepsToModules(deps, prefix, suffix):
      modules = []
      for filepath in deps:
        filename = os.path.basename(filepath)
        if filename.startswith(prefix) and filename.endswith(suffix):
          modules.append(filename[len(prefix):-len(suffix)])
      return modules
    # Retrieve the default value of 'SHARED_LIB_SUFFIX'
    # (CalculateVariables with a 'linux' flavor fills in the generic
    # defaults used when stripping shared-library names).
    params = {'flavor': 'linux'}
    default_variables = {}
    CalculateVariables(default_variables, params)
    self.WriteList(
        DepsToModules(link_deps,
                      generator_default_variables['SHARED_LIB_PREFIX'],
                      default_variables['SHARED_LIB_SUFFIX']),
        'LOCAL_SHARED_LIBRARIES')
    self.WriteList(
        DepsToModules(link_deps,
                      generator_default_variables['STATIC_LIB_PREFIX'],
                      generator_default_variables['STATIC_LIB_SUFFIX']),
        'LOCAL_STATIC_LIBRARIES')
    if self.type == 'executable':
      self.WriteLn('include $(BUILD_EXECUTABLE)')
    elif self.type == 'shared_library':
      self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
    elif self.type == 'static_library':
      self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
    self.WriteLn()
def WriteLn(self, text=''):
self.fp.write(text + '\n')
  def GetXcodeEnv(self, additional_settings=None):
    # Return the Xcode environment dict for this target by delegating to
    # gyp.xcode_emulation with this writer's settings, the absolute
    # build/source dirs, and the current $(BUILDTYPE).
    # additional_settings: optional dict merged into the returned env.
    return gyp.xcode_emulation.GetXcodeEnv(
        self.xcode_settings, "$(abs_builddir)",
        os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
        additional_settings)
def GetXcodePostbuildEnv(self):
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE')
if strip_save_file:
strip_save_file = self.Absolutify(strip_save_file)
else:
# Explicitly clear this out, else a postbuild might pick up an export
# from an earlier target.
strip_save_file = ''
return self.GetXcodeEnv(
additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
def WriteXcodeEnv(self, target, env):
for k in gyp.xcode_emulation.TopologicallySortedEnvVarKeys(env):
# For
# foo := a\ b
# the escaped space does the right thing. For
# export foo := a\ b
# it does not -- the backslash is written to the env as literal character.
# So don't escape spaces in |env[k]|.
self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, env[k]))
def Objectify(self, path):
"""Convert a path to its output directory form."""
if '$(' in path:
path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
return path
return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
def Pchify(self, path, lang):
"""Convert a prefix header path to its output directory form."""
path = self.Absolutify(path)
if '$(' in path:
path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
(self.toolset, lang))
return path
return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
def Absolutify(self, path):
"""Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables."""
if '$(' in path:
# path is no existing file in this case, but calling normpath is still
# important for trimming trailing slashes.
return os.path.normpath(path)
return os.path.normpath(os.path.join(self.path, path))
def FixupArgPath(self, arg):
if '/' in arg or '.h.' in arg:
return self.Absolutify(arg)
return arg
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return path
def _InstallableTargetInstallPath(self):
"""Returns the location of the final output for an installable target."""
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
# rely on this. Emulate this behavior for mac.
if self.type == 'shared_library' and self.flavor != 'mac':
# Install all shared libs into a common directory (per toolset) for
# convenient access with LD_LIBRARY_PATH.
return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
return '$(builddir)/' + self.alias
def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
                              build_files):
  """Write the target to regenerate the Makefile."""
  options = params['options']
  # Gyp files are referenced relative to the toplevel dir so the rule works
  # regardless of where make is invoked from.
  build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
                      for filename in params['build_files_arg']]
  gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
                                            options.toplevel_dir)
  # Ensure a relative gyp binary is invoked as './gyp' rather than relying
  # on $PATH lookup.
  if not gyp_binary.startswith(os.sep):
    gyp_binary = os.path.join('.', gyp_binary)
  # The rule re-runs gyp (with the original flags) whenever any .gyp input
  # file is newer than the generated Makefile.
  root_makefile.write(
      "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
      "cmd_regen_makefile = %(cmd)s\n"
      "%(makefile_name)s: %(deps)s\n"
      "\t$(call do_cmd,regen_makefile)\n\n" % {
          'makefile_name': makefile_name,
          'deps': ' '.join(map(Sourceify, build_files)),
          'cmd': gyp.common.EncodePOSIXShellList(
                     [gyp_binary, '-fmake'] +
                     gyp.RegenerateFlags(options) +
                     build_files_args)})
def RunSystemTests(flavor):
  """Run tests against the system to compute default settings for commands.
  Returns:
    dictionary of settings matching the block of command-lines used in
    SHARED_HEADER. E.g. the dictionary will contain a ARFLAGS.target
    key for the default ARFLAGS for the target ar command.
  """
  # Compute flags used for building static archives.
  # N.B.: this fallback logic should match the logic in SHARED_HEADER.
  # See comment there for more details.
  ar_target = os.environ.get('AR.target', os.environ.get('AR', 'ar'))
  cc_target = os.environ.get('CC.target', os.environ.get('CC', 'cc'))
  arflags_target = 'crs'
  # ar -T enables thin archives on Linux. OS X's ar supports a -T flag, but it
  # does something useless (it limits filenames in the archive to 15 chars).
  if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_target,
                                                         cc_command=cc_target):
    arflags_target = 'crsT'
  # Same probe for the host toolchain.
  ar_host = os.environ.get('AR.host', 'ar')
  cc_host = os.environ.get('CC.host', 'gcc')
  arflags_host = 'crs'
  # It feels redundant to compute this again given that most builds aren't
  # cross-compiles, but due to quirks of history CC.host defaults to 'gcc'
  # while CC.target defaults to 'cc', so the commands really are different
  # even though they're nearly guaranteed to run the same code underneath.
  if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_host,
                                                         cc_command=cc_host):
    arflags_host = 'crsT'
  return { 'ARFLAGS.target': arflags_target,
           'ARFLAGS.host': arflags_host }
def GenerateOutput(target_list, target_dicts, data, params):
  """Write the root Makefile, per-target .mk includes and per-gyp sub-makes.
  Args:
    target_list: list of qualified targets ('path/file.gyp:name#toolset').
    target_dicts: mapping of qualified target name to its target spec dict.
    data: mapping of build-file path to the parsed gyp data for that file.
    params: generator parameters (options, flavor, generator_flags, ...).
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')
  android_ndk_version = generator_flags.get('android_ndk_version', None)
  default_target = generator_flags.get('default_target', 'all')
  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    if options.generator_output:
      output_file = os.path.join(options.generator_output, output_file)
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file
  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'
  srcdir = '.'
  makefile_name = 'Makefile' + options.suffix
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  if options.generator_output:
    # NOTE: mutates the module-level srcdir_prefix (declared elsewhere in
    # this file) that the Makefile writers read when emitting rules.
    global srcdir_prefix
    makefile_path = os.path.join(options.generator_output, makefile_path)
    srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
    srcdir_prefix = '$(srcdir)/'
  flock_command= 'flock'
  # Parameters substituted into SHARED_HEADER when writing the root Makefile.
  header_params = {
      'default_target': default_target,
      'builddir': builddir_name,
      'default_configuration': default_configuration,
      'flock': flock_command,
      'flock_index': 1,
      'link_commands': LINK_COMMANDS_LINUX,
      'extra_commands': '',
      'srcdir': srcdir,
    }
  # Per-flavor overrides: mac and solaris ship their own flock shims (with
  # the file argument at a different index), and link commands differ.
  if flavor == 'mac':
    flock_command = './gyp-mac-tool flock'
    header_params.update({
        'flock': flock_command,
        'flock_index': 2,
        'link_commands': LINK_COMMANDS_MAC,
        'extra_commands': SHARED_HEADER_MAC_COMMANDS,
    })
  elif flavor == 'android':
    header_params.update({
        'link_commands': LINK_COMMANDS_ANDROID,
    })
  elif flavor == 'solaris':
    header_params.update({
        'flock': './gyp-sun-tool flock',
        'flock_index': 2,
        'extra_commands': SHARED_HEADER_SUN_COMMANDS,
    })
  elif flavor == 'freebsd':
    header_params.update({
        'flock': 'lockf',
    })
  header_params.update(RunSystemTests(flavor))
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  make_global_settings = ''
  # NOTE(review): despite the '_dict' name this iterates (key, value) pairs,
  # so the gyp input must supply make_global_settings as a list of pairs; a
  # plain non-empty dict here would yield keys only and fail to unpack.
  for key, value in make_global_settings_dict:
    if value[0] != '$':
      value = '$(abspath %s)' % value
    if key == 'LINK':
      make_global_settings += ('%s ?= %s $(builddir)/linker.lock %s\n' %
                               (key, flock_command, value))
    elif key in ['CC', 'CXX']:
      make_global_settings += (
          'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
      # Let gyp-time envvars win over global settings.
      if key in os.environ:
        value = os.environ[key]
      make_global_settings += '  %s = %s\n' % (key, value)
      make_global_settings += 'endif\n'
    else:
      make_global_settings += '%s ?= %s\n' % (key, value)
  header_params['make_global_settings'] = make_global_settings
  ensure_directory_exists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(SHARED_HEADER % header_params)
  # Currently any versions have the same effect, but in future the behavior
  # could be different.
  if android_ndk_version:
    root_makefile.write(
        '# Define LOCAL_PATH for build of Android applications.\n'
        'LOCAL_PATH := $(call my-dir)\n'
        '\n')
  for toolset in toolsets:
    root_makefile.write('TOOLSET := %s\n' % toolset)
    WriteRootHeaderSuffixRules(root_makefile)
  # Put build-time support tools next to the root Makefile.
  dest_path = os.path.dirname(makefile_path)
  gyp.common.CopyTool(flavor, dest_path)
  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)
  num_outputs = 0
  build_files = set()
  include_list = set()
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    this_make_global_settings = data[build_file].get('make_global_settings', {})
    assert make_global_settings_dict == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets.")
    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)
    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')
    spec = target_dicts[qualified_target]
    configs = spec['configurations']
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
    writer = MakefileWriter(generator_flags, flavor)
    writer.Write(qualified_target, base_path, output_file, spec, configs,
                 part_of_all=qualified_target in needed_targets)
    num_outputs += writer.NumOutputs()
    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add(mkfile_rel_path)
  # Write out per-gyp (sub-project) Makefiles.
  depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
  for build_file in build_files:
    # The paths in build_files were relativized above, so undo that before
    # testing against the non-relativized items in target_list and before
    # calculating the Makefile path.
    build_file = os.path.join(depth_rel_path, build_file)
    gyp_targets = [target_dicts[target]['target_name'] for target in target_list
                   if target.startswith(build_file) and
                   target in needed_targets]
    # Only generate Makefiles for gyp files with targets.
    if not gyp_targets:
      continue
    base_path, output_file = CalculateMakefilePath(build_file,
        os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
    makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
                                                os.path.dirname(output_file))
    # NOTE(review): this reuses the MakefileWriter instance left over from
    # the last iteration of the loop above; WriteSubMake appears not to
    # depend on per-target writer state -- confirm before refactoring.
    writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
                        builddir_name)
  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include_file in sorted(include_list):
    # We wrap each .mk include in an if statement so users can tell make to
    # not load a file by setting NO_LOAD. The below make code says, only
    # load the .mk file if the .mk filename doesn't start with a token in
    # NO_LOAD.
    root_makefile.write(
        "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
        "    $(findstring $(join ^,$(prefix)),\\\n"
        "    $(join ^," + include_file + ")))),)\n")
    root_makefile.write("  include " + include_file + "\n")
    root_makefile.write("endif\n")
  root_makefile.write('\n')
  if generator_flags.get('auto_regeneration', True):
    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
  # Write the rule to load dependencies. We batch 512 files at a time to
  # avoid overflowing the command line.
  # NOTE(review): the batches start at word 513 and step by 512, but 'end'
  # is start + 999, so consecutive wordlist ranges overlap and some .d
  # files are cat'd into all.deps twice (harmless for make prerequisites,
  # but inconsistent with the stated batch size; 511 would match) -- confirm
  # intent before changing.
  all_deps = ""
  for i in range(513, num_outputs, 512):
    all_deps += ("""
  ifneq ($(word %(start)d,$(d_files)),)
    $(shell cat $(wordlist %(start)d,%(end)d,$(d_files)) >> $(depsdir)/all.deps)
  endif""" % { 'start': i, 'end': i + 999 })
  # Add a check to make sure we tried to process all the .d files.
  # NOTE(review): relies on Python 2 integer division for num_outputs / 512;
  # under Python 3 this would need // to stay an int.
  all_deps += """
  ifneq ($(word %(last)d,$(d_files)),)
    $(error Found unprocessed dependency files (gyp didn't generate enough rules!))
  endif
""" % { 'last': ((num_outputs / 512) + 1) * 512 + 1 }
  root_makefile.write(SHARED_FOOTER % { 'generate_all_deps': all_deps })
  root_makefile.close()
| bsd-3-clause |
UTSA-ICS/keystone-SID | keystone/token/backends/kvs.py | 5 | 14879 | # -*- coding: utf-8 -*-
# Copyright 2013 Metacloud, Inc.
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import copy
import six
from keystone.common import kvs
from keystone import config
from keystone import exception
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log
from keystone.openstack.common import timeutils
from keystone import token
# Module-level handles shared by the driver below.
CONF = config.CONF  # global keystone configuration object
LOG = log.getLogger(__name__)  # logger scoped to this module's name
class Token(token.Driver):
    """KeyValueStore backend for tokens.
    This is the base implementation for any/all key-value-stores (e.g.
    memcached) for the Token backend. It is recommended to only use the base
    in-memory implementation for testing purposes.
    """
    # KVS key under which the list of revoked tokens is stored.
    revocation_key = 'revocation-list'
    # Default dogpile backend; subclasses/configuration may override it.
    kvs_backend = 'openstack.kvs.Memory'
    def __init__(self, backing_store=None, **kwargs):
        """Configure the underlying key-value store.
        :param backing_store: optional dogpile backend name overriding
            the class default kvs_backend.
        :param kwargs: passed through to the store's configure() call.
        """
        super(Token, self).__init__()
        self._store = kvs.get_key_value_store('token-driver')
        if backing_store is not None:
            self.kvs_backend = backing_store
        self._store.configure(backing_store=self.kvs_backend, **kwargs)
        if self.__class__ == Token:
            # NOTE(morganfainberg): Only warn if the base KVS implementation
            # is instantiated.
            LOG.warn(_('It is recommended to only use the base '
                       'key-value-store implementation for the token driver '
                       'for testing purposes. '
                       'Please use keystone.token.backends.memcache.Token '
                       'or keystone.token.backends.sql.Token instead.'))
    def _prefix_token_id(self, token_id):
        # Namespace token entries so they cannot collide with the per-user
        # index keys below.
        return 'token-%s' % token_id.encode('utf-8')
    def _prefix_user_id(self, user_id):
        # Key for the per-user index of (token_id, expiry) tuples.
        return 'usertokens-%s' % user_id.encode('utf-8')
    def _get_key_or_default(self, key, default=None):
        """Return the stored value for key, or default if absent."""
        try:
            return self._store.get(key)
        except exception.NotFound:
            return default
    def _get_key(self, key):
        # Raises exception.NotFound when the key does not exist.
        return self._store.get(key)
    def _set_key(self, key, value, lock=None):
        # 'lock' is an optional lock handle obtained from the store; passing
        # it lets the store validate the write happens under that lock.
        self._store.set(key, value, lock)
    def _delete_key(self, key):
        return self._store.delete(key)
    def get_token(self, token_id):
        """Return the token data for token_id.
        :raises exception.TokenNotFound: if the token does not exist.
        """
        ptk = self._prefix_token_id(token_id)
        try:
            token_ref = self._get_key(ptk)
        except exception.NotFound:
            raise exception.TokenNotFound(token_id=token_id)
        return token_ref
    def create_token(self, token_id, data):
        """Create a token by id and data.
        It is assumed the caller has performed data validation on the "data"
        parameter.
        """
        data_copy = copy.deepcopy(data)
        ptk = self._prefix_token_id(token_id)
        if not data_copy.get('expires'):
            data_copy['expires'] = token.default_expire_time()
        if not data_copy.get('user_id'):
            data_copy['user_id'] = data_copy['user']['id']
        # NOTE(morganfainberg): for ease of manipulating the data without
        # concern about the backend, always store the value(s) in the
        # index as the isotime (string) version so this is where the string is
        # built.
        expires_str = timeutils.isotime(data_copy['expires'], subsecond=True)
        self._set_key(ptk, data_copy)
        user_id = data['user']['id']
        user_key = self._prefix_user_id(user_id)
        self._update_user_token_list(user_key, token_id, expires_str)
        if CONF.trust.enabled and data.get('trust_id'):
            # NOTE(morganfainberg): If trusts are enabled and this is a trust
            # scoped token, we add the token to the trustee list as well. This
            # allows password changes of the trustee to also expire the token.
            # There is no harm in placing the token in multiple lists, as
            # _list_tokens is smart enough to handle almost any case of
            # valid/invalid/expired for a given token.
            token_data = data_copy['token_data']
            if data_copy['token_version'] == token.provider.V2:
                trustee_user_id = token_data['access']['trust'][
                    'trustee_user_id']
            elif data_copy['token_version'] == token.provider.V3:
                trustee_user_id = token_data['OS-TRUST:trust'][
                    'trustee_user_id']
            else:
                raise token.provider.UnsupportedTokenVersionException(
                    _('Unknown token version %s') %
                    data_copy.get('token_version'))
            trustee_key = self._prefix_user_id(trustee_user_id)
            self._update_user_token_list(trustee_key, token_id, expires_str)
        return data_copy
    def _get_user_token_list_with_expiry(self, user_key):
        """Return a list of tuples in the format (token_id, token_expiry) for
        the user_key.
        """
        return self._get_key_or_default(user_key, default=[])
    def _get_user_token_list(self, user_key):
        """Return a list of token_ids for the user_key."""
        token_list = self._get_user_token_list_with_expiry(user_key)
        # Each element is a tuple of (token_id, token_expiry). Most code does
        # not care about the expiry, it is stripped out and only a
        # list of token_ids are returned.
        return [t[0] for t in token_list]
    def _update_user_token_list(self, user_key, token_id, expires_isotime_str):
        """Append (token_id, expiry) to the user's index under a store lock.
        Expired and revoked entries are pruned while the list is rewritten,
        keeping the index as small as possible.
        """
        current_time = self._get_current_time()
        revoked_token_list = set([t['id'] for t in
                                  self.list_revoked_tokens()])
        with self._store.get_lock(user_key) as lock:
            filtered_list = []
            token_list = self._get_user_token_list_with_expiry(user_key)
            for item in token_list:
                try:
                    item_id, expires = self._format_token_index_item(item)
                except (ValueError, TypeError):
                    # NOTE(morganfainberg): Skip on expected errors
                    # possibilities from the `_format_token_index_item` method.
                    continue
                if expires < current_time:
                    LOG.debug(_('Token `%(token_id)s` is expired, removing '
                                'from `%(user_key)s`.'),
                              {'token_id': item_id, 'user_key': user_key})
                    continue
                if item_id in revoked_token_list:
                    # NOTE(morganfainberg): If the token has been revoked, it
                    # can safely be removed from this list. This helps to keep
                    # the user_token_list as reasonably small as possible.
                    LOG.debug(_('Token `%(token_id)s` is revoked, removing '
                                'from `%(user_key)s`.'),
                              {'token_id': item_id, 'user_key': user_key})
                    continue
                filtered_list.append(item)
            filtered_list.append((token_id, expires_isotime_str))
            self._set_key(user_key, filtered_list, lock)
            return filtered_list
    def _get_current_time(self):
        # Normalized (naive UTC) time, comparable with normalized expiries.
        return timeutils.normalize_time(timeutils.utcnow())
    def _add_to_revocation_list(self, data, lock):
        """Record a token in the revocation list (caller holds 'lock').
        Expired entries are pruned from the list while it is rewritten.
        """
        filtered_list = []
        revoked_token_data = {}
        current_time = self._get_current_time()
        expires = data['expires']
        if isinstance(expires, six.string_types):
            expires = timeutils.parse_isotime(expires)
        expires = timeutils.normalize_time(expires)
        if expires < current_time:
            LOG.warning(_('Token `%s` is expired, not adding to the '
                          'revocation list.'), data['id'])
            return
        revoked_token_data['expires'] = timeutils.isotime(expires,
                                                          subsecond=True)
        revoked_token_data['id'] = data['id']
        token_list = self._get_key_or_default(self.revocation_key, default=[])
        if not isinstance(token_list, list):
            # NOTE(morganfainberg): In the case that the revocation list is not
            # in a format we understand, reinitialize it. This is an attempt to
            # not allow the revocation list to be completely broken if
            # somehow the key is changed outside of keystone (e.g. memcache
            # that is shared by multiple applications). Logging occurs at error
            # level so that the cloud administrators have some awareness that
            # the revocation_list needed to be cleared out. In all, this should
            # be recoverable. Keystone cannot control external applications
            # from changing a key in some backends, however, it is possible to
            # gracefully handle and notify of this event.
            LOG.error(_('Reinitializing revocation list due to error '
                        'in loading revocation list from backend. '
                        'Expected `list` type got `%(type)s`. Old '
                        'revocation list data: %(list)r'),
                      {'type': type(token_list), 'list': token_list})
            token_list = []
        # NOTE(morganfainberg): on revocation, cleanup the expired entries, try
        # to keep the list of tokens revoked at the minimum.
        for token_data in token_list:
            try:
                expires_at = timeutils.normalize_time(
                    timeutils.parse_isotime(token_data['expires']))
            except ValueError:
                LOG.warning(_('Removing `%s` from revocation list due to '
                              'invalid expires data in revocation list.'),
                            token_data.get('id', 'INVALID_TOKEN_DATA'))
                continue
            if expires_at > current_time:
                filtered_list.append(token_data)
        filtered_list.append(revoked_token_data)
        self._set_key(self.revocation_key, filtered_list, lock)
    def delete_token(self, token_id):
        """Delete a token and record it on the revocation list.
        :raises exception.TokenNotFound: if the token does not exist.
        """
        # Test for existence
        with self._store.get_lock(self.revocation_key) as lock:
            data = self.get_token(token_id)
            ptk = self._prefix_token_id(token_id)
            result = self._delete_key(ptk)
            self._add_to_revocation_list(data, lock)
        return result
    def delete_tokens(self, user_id, tenant_id=None, trust_id=None,
                      consumer_id=None):
        # NOTE(review): this override delegates to the base implementation
        # with identical arguments; it exists only as an explicit hook point.
        return super(Token, self).delete_tokens(
            user_id=user_id,
            tenant_id=tenant_id,
            trust_id=trust_id,
            consumer_id=consumer_id,
        )
    def _format_token_index_item(self, item):
        """Unpack an index entry into (token_id, normalized_expiry).
        :raises TypeError or ValueError: when the entry is malformed;
            callers are expected to skip such entries.
        """
        try:
            token_id, expires = item
        except (TypeError, ValueError):
            LOG.debug(_('Invalid token entry expected tuple of '
                        '`(<token_id>, <expires>)` got: `%(item)r`'),
                      dict(item=item))
            raise
        try:
            expires = timeutils.normalize_time(
                timeutils.parse_isotime(expires))
        except ValueError:
            LOG.debug(_('Invalid expires time on token `%(token_id)s`:'
                        ' %(expires)r'),
                      dict(token_id=token_id, expires=expires))
            raise
        return token_id, expires
    def _token_match_tenant(self, token_ref, tenant_id):
        # True when the token is scoped to the given tenant/project.
        if token_ref.get('tenant'):
            return token_ref['tenant'].get('id') == tenant_id
        return False
    def _token_match_trust(self, token_ref, trust_id):
        # True when the token was issued for the given trust.
        if not token_ref.get('trust_id'):
            return False
        return token_ref['trust_id'] == trust_id
    def _token_match_consumer(self, token_ref, consumer_id):
        # True when the token is an OAuth1 token for the given consumer.
        try:
            oauth = token_ref['token_data']['token']['OS-OAUTH1']
            return oauth.get('consumer_id') == consumer_id
        except KeyError:
            return False
    def _list_tokens(self, user_id, tenant_id=None, trust_id=None,
                     consumer_id=None):
        """Return the ids of this user's live tokens matching the filters."""
        # This function is used to generate the list of tokens that should be
        # revoked when revoking by token identifiers. This approach will be
        # deprecated soon, probably in the Juno release. Setting revoke_by_id
        # to False indicates that this kind of recording should not be
        # performed. In order to test the revocation events, tokens shouldn't
        # be deleted from the backends. This check ensures that tokens are
        # still recorded.
        if not CONF.token.revoke_by_id:
            return []
        tokens = []
        user_key = self._prefix_user_id(user_id)
        token_list = self._get_user_token_list_with_expiry(user_key)
        current_time = self._get_current_time()
        for item in token_list:
            try:
                token_id, expires = self._format_token_index_item(item)
            except (TypeError, ValueError):
                # NOTE(morganfainberg): Skip on expected error possibilities
                # from the `_format_token_index_item` method.
                continue
            if expires < current_time:
                continue
            try:
                token_ref = self.get_token(token_id)
            except exception.TokenNotFound:
                # NOTE(morganfainberg): Token doesn't exist, skip it.
                continue
            if token_ref:
                if tenant_id is not None:
                    if not self._token_match_tenant(token_ref, tenant_id):
                        continue
                if trust_id is not None:
                    if not self._token_match_trust(token_ref, trust_id):
                        continue
                if consumer_id is not None:
                    if not self._token_match_consumer(token_ref, consumer_id):
                        continue
                tokens.append(token_id)
        return tokens
    def list_revoked_tokens(self):
        """Return the revocation list, or [] if missing or malformed."""
        revoked_token_list = self._get_key_or_default(self.revocation_key,
                                                      default=[])
        if isinstance(revoked_token_list, list):
            return revoked_token_list
        return []
    def flush_expired_tokens(self):
        """Archive or delete tokens that have expired."""
        raise exception.NotImplemented()
| apache-2.0 |
CAST-Extend/com.castsoftware.uc.checkanalysiscompleteness | xlsxwriter/chart_area.py | 14 | 2710 | ###############################################################################
#
# ChartArea - A class for writing the Excel XLSX Area charts.
#
# Copyright 2013-2015, John McNamara, jmcnamara@cpan.org
#
from . import chart
class ChartArea(chart.Chart):
    """
    A class for writing the Excel XLSX Area charts.
    """
    ###########################################################################
    #
    # Public API.
    #
    ###########################################################################
    def __init__(self, options=None):
        """
        Constructor.
        Args:
            options: dict of chart options; only 'subtype' is consulted
                here ('standard', 'stacked' or 'percent_stacked').
        """
        super(ChartArea, self).__init__()
        opts = options or {}
        self.subtype = opts.get('subtype') or 'standard'
        self.cross_between = 'midCat'
        self.show_crosses = 0
        # Percent-stacked area charts default the value axis to a
        # percentage number format.
        if self.subtype == 'percent_stacked':
            self.y_axis['defaults']['num_format'] = '0%'
        # Area charts only support centred data labels.
        self.label_position_default = 'center'
        self.label_positions = {'center': 'ctr'}
        self.set_y_axis({})
    ###########################################################################
    #
    # Private API.
    #
    ###########################################################################
    def _write_chart_type(self, args):
        # Chart-specific override of the superclass hook: delegate to the
        # <c:areaChart> writer.
        self._write_area_chart(args)
    ###########################################################################
    #
    # XML methods.
    #
    ###########################################################################
    def _write_area_chart(self, args):
        # Write the <c:areaChart> element and all of its children.
        if args['primary_axes']:
            series = self._get_primary_axes_series()
        else:
            series = self._get_secondary_axes_series()
        if not series:
            return
        # The OOXML schema spells this grouping value in camelCase.
        grouping = self.subtype
        if grouping == 'percent_stacked':
            grouping = 'percentStacked'
        self._xml_start_tag('c:areaChart')
        # Write the c:grouping element.
        self._write_grouping(grouping)
        # Write one c:ser element per series.
        for series_data in series:
            self._write_ser(series_data)
        # Write the c:dropLines element.
        self._write_drop_lines()
        # Write the c:marker element.
        self._write_marker_value()
        # Write the c:axId elements.
        self._write_axis_ids(args)
        self._xml_end_tag('c:areaChart')
| mit |
rrampage/rethinkdb | external/v8_3.30.33.16/build/gyp/pylib/gyp/win_tool.py | 58 | 11526 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
import os
import re
import shutil
import subprocess
import string
import sys
# Directory containing this script, for locating resources relative to it.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# A regex matching an argument corresponding to a PDB filename passed as an
# argument to link.exe.
_LINK_EXE_PDB_ARG = re.compile('/PDB:(?P<pdb>.+\.exe\.pdb)$', re.IGNORECASE)
def main(args):
  """Dispatch |args| to WinTool and exit with its return code, if any."""
  code = WinTool().Dispatch(args)
  if code is not None:
    sys.exit(code)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def _MaybeUseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe for the linkers linking
an .exe target if GYP_USE_SEPARATE_MSPDBSRV has been set."""
if not os.environ.get('GYP_USE_SEPARATE_MSPDBSRV'):
return
if len(args) < 1:
raise Exception("Not enough arguments")
if args[0] != 'link.exe':
return
# Checks if this linker produces a PDB for an .exe target. If so use the
# name of this PDB to generate an endpoint name for mspdbsrv.exe.
endpoint_name = None
for arg in args:
m = _LINK_EXE_PDB_ARG.match(arg)
if m:
endpoint_name = '%s_%d' % (m.group('pdb'), os.getpid())
break
if endpoint_name is None:
return
# Adds the appropriate environment variable. This will be read by link.exe
# to know which instance of mspdbsrv.exe it should connect to (if it's
# not set then the default endpoint is used).
env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
return name_string.title().replace('-', '')
def _GetEnv(self, arch):
"""Gets the saved environment from a file for a given architecture."""
# The environment is saved as an "environment block" (see CreateProcess
# and msvs_emulation for details). We convert to a dict here.
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
pairs = open(arch).read()[:-2].split('\0')
kvs = [item.split('=', 1) for item in pairs]
return dict(kvs)
def ExecStamp(self, path):
"""Simple stamp command."""
open(path, 'w').close()
def ExecRecursiveMirror(self, source, dest):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
shutil.rmtree(dest)
else:
os.unlink(dest)
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
shutil.copy2(source, dest)
def ExecLinkWrapper(self, arch, *args):
"""Filter diagnostic output from link that looks like:
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
env = self._GetEnv(arch)
self._MaybeUseSeparateMspdbsrv(env, args)
link = subprocess.Popen(args,
shell=True,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
if not line.startswith(' Creating library '):
print line
return link.returncode
def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
mt, rc, intermediate_manifest, *manifests):
"""A wrapper for handling creating a manifest resource and then executing
a link command."""
# The 'normal' way to do manifests is to have link generate a manifest
# based on gathering dependencies from the object files, then merge that
# manifest with other manifests supplied as sources, convert the merged
# manifest to a resource, and then *relink*, including the compiled
# version of the manifest resource. This breaks incremental linking, and
# is generally overly complicated. Instead, we merge all the manifests
# provided (along with one that includes what would normally be in the
# linker-generated one, see msvs_emulation.py), and include that into the
# first and only link. We still tell link to generate a manifest, but we
# only use that to assert that our simpler process did not miss anything.
variables = {
'python': sys.executable,
'arch': arch,
'out': out,
'ldcmd': ldcmd,
'resname': resname,
'mt': mt,
'rc': rc,
'intermediate_manifest': intermediate_manifest,
'manifests': ' '.join(manifests),
}
add_to_ld = ''
if manifests:
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(manifests)s -out:%(out)s.manifest' % variables)
if embed_manifest == 'True':
subprocess.check_call(
'%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
' %(out)s.manifest.rc %(resname)s' % variables)
subprocess.check_call(
'%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
'%(out)s.manifest.rc' % variables)
add_to_ld = ' %(out)s.manifest.res' % variables
subprocess.check_call(ldcmd + add_to_ld)
# Run mt.exe on the theoretically complete manifest we generated, merging
# it with the one the linker generated to confirm that the linker
# generated one does not add anything. This is strictly unnecessary for
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
# used in a #pragma comment.
if manifests:
# Merge the intermediate one with ours to .assert.manifest, then check
# that .assert.manifest is identical to ours.
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(out)s.manifest %(intermediate_manifest)s '
'-out:%(out)s.assert.manifest' % variables)
assert_manifest = '%(out)s.assert.manifest' % variables
our_manifest = '%(out)s.manifest' % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
# and sometimes doesn't unfortunately.
with open(our_manifest, 'rb') as our_f:
with open(assert_manifest, 'rb') as assert_f:
our_data = our_f.read().translate(None, string.whitespace)
assert_data = assert_f.read().translate(None, string.whitespace)
if our_data != assert_data:
os.unlink(out)
def dump(filename):
sys.stderr.write('%s\n-----\n' % filename)
with open(filename, 'rb') as f:
sys.stderr.write(f.read() + '\n-----\n')
dump(intermediate_manifest)
dump(our_manifest)
dump(assert_manifest)
sys.stderr.write(
'Linker generated manifest "%s" added to final manifest "%s" '
'(result in "%s"). '
'Were /MANIFEST switches used in #pragma statements? ' % (
intermediate_manifest, our_manifest, assert_manifest))
return 1
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest
tool)."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if line and 'manifest authoring warning 81010002' not in line:
print line
return popen.returncode
def ExecManifestToRc(self, arch, *args):
"""Creates a resource file pointing a SxS assembly manifest.
|args| is tuple containing path to resource file, path to manifest file
and resource name which can be "1" (for executables) or "2" (for DLLs)."""
manifest_path, resource_path, resource_name = args
with open(resource_path, 'wb') as output:
output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
resource_name,
os.path.abspath(manifest_path).replace('\\', '/')))
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't
quietable via command line flags.
"""
args = ['midl', '/nologo'] + list(flags) + [
'/out', outdir,
'/tlb', tlb,
'/h', h,
'/dlldata', dlldata,
'/iid', iid,
'/proxy', proxy,
idl]
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefix = 'Processing '
processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
for line in lines:
if not line.startswith(prefix) and line not in processing:
print line
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
# MSVS doesn't assemble x64 asm files.
if arch == 'environment.x64':
return 0
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Copyright (C) Microsoft Corporation') and
not line.startswith('Microsoft (R) Macro Assembler') and
not line.startswith(' Assembling: ') and
line):
print line
return popen.returncode
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
not line.startswith('Copyright (C) Microsoft Corporation') and
line):
print line
return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
    """Runs an action command line from a response file using the environment
    for |arch|. If |dir| is supplied, use that as the working directory.

    Returns the command's exit code.
    """
    env = self._GetEnv(arch)
    # Close the response file deterministically instead of leaking the
    # handle until garbage collection (the original used a bare open()).
    with open(rspfile) as f:
        args = f.read()
    dir = dir[0] if dir else None
    return subprocess.call(args, shell=True, env=env, cwd=dir)
# Command-line entry point: dispatch to main() with the program arguments
# and propagate its return value as the process exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| agpl-3.0 |
LLNL/spack | var/spack/repos/builtin/packages/r-yapsa/package.py | 5 | 2102 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RYapsa(RPackage):
    """Yet Another Package for Signature Analysis.
    This package provides functions and routines useful in the analysis of
    somatic signatures (cf. L. Alexandrov et al., Nature 2013). In
    particular, functions to perform a signature analysis with known
    signatures (LCD = linear combination decomposition) and a signature
    analysis on stratified mutational catalogue (SMC = stratify mutational
    catalogue) are provided."""
    # Upstream homepage and the Bioconductor git mirror that Spack clones
    # the pinned commits below from.
    homepage = "https://bioconductor.org/packages/YAPSA"
    git      = "https://git.bioconductor.org/packages/YAPSA.git"
    # Each release is pinned to an exact commit on the Bioconductor
    # release branch rather than a tarball checksum.
    version('1.10.0', commit='06af18e424868eb0f0be6c80e90cbab1eabf3d73')
    version('1.8.0', commit='402f3f7774fdf8afc7883579ad651c26df0f8fdb')
    version('1.6.0', commit='2455d272b076835ddb36ad21c01ef15af66abc36')
    version('1.4.0', commit='6f24150a0689d5215983975ece96c8c205923c72')
    version('1.2.0', commit='320809b69e470e30a777a383f8341f93064ec24d')
    # R interpreter plus CRAN/Bioconductor package dependencies; all are
    # needed both when installing the package and when running it.
    depends_on('r@3.3.0:', type=('build', 'run'))
    depends_on('r-genomicranges', type=('build', 'run'))
    depends_on('r-ggplot2', type=('build', 'run'))
    depends_on('r-lsei', type=('build', 'run'))
    depends_on('r-somaticsignatures', type=('build', 'run'))
    depends_on('r-variantannotation', type=('build', 'run'))
    depends_on('r-genomeinfodb', type=('build', 'run'))
    depends_on('r-reshape2', type=('build', 'run'))
    depends_on('r-gridextra', type=('build', 'run'))
    depends_on('r-corrplot', type=('build', 'run'))
    depends_on('r-dendextend', type=('build', 'run'))
    depends_on('r-getoptlong', type=('build', 'run'))
    depends_on('r-circlize', type=('build', 'run'))
    depends_on('r-gtrellis', type=('build', 'run'))
    depends_on('r-pmcmr', type=('build', 'run'))
    depends_on('r-complexheatmap', type=('build', 'run'))
    depends_on('r-keggrest', type=('build', 'run'))
| lgpl-2.1 |
miptliot/edx-platform | lms/djangoapps/shoppingcart/processors/tests/test_CyberSource2.py | 10 | 18446 | # -*- coding: utf-8 -*-
"""
Tests for the newer CyberSource API implementation.
"""
import ddt
from django.conf import settings
from django.test import TestCase
from mock import patch
from shoppingcart.models import Order, OrderItem
from shoppingcart.processors.CyberSource2 import (
_get_processor_exception_html,
get_signed_purchase_params,
process_postpay_callback,
processor_hash,
render_purchase_form_html
)
from shoppingcart.processors.exceptions import (
CCProcessorDataException,
CCProcessorSignatureException,
CCProcessorWrongAmountException
)
from student.tests.factories import UserFactory
@ddt.ddt
class CyberSource2Test(TestCase):
    """
    Test the CyberSource API implementation. As much as possible,
    this test case should use ONLY the public processor interface
    (defined in shoppingcart.processors.__init__.py).
    Some of the tests in this suite rely on Django settings
    to be configured a certain way.
    """
    # Fixture constants shared by every test below.
    COST = "10.00"
    CALLBACK_URL = "/test_callback_url"
    # Decisions for which CyberSource omits the auth_amount field from the
    # signed callback parameters (see _signed_callback_params).
    FAILED_DECISIONS = ["DECLINE", "CANCEL", "ERROR"]
    def setUp(self):
        """ Create a user and an order. """
        super(CyberSource2Test, self).setUp()
        self.user = UserFactory()
        self.order = Order.get_cart_for_user(self.user)
        self.order_item = OrderItem.objects.create(
            order=self.order,
            user=self.user,
            unit_cost=self.COST,
            line_cost=self.COST
        )
    def assert_dump_recorded(self, order):
        """
        Verify that this order does have a dump of information from the
        payment processor.
        """
        self.assertNotEqual(order.processor_reply_dump, '')
    def test_render_purchase_form_html(self):
        # Verify that the HTML form renders with the payment URL specified
        # in the test settings.
        # This does NOT test that all the form parameters are correct;
        # we verify that by testing `get_signed_purchase_params()` directly.
        html = render_purchase_form_html(self.order, callback_url=self.CALLBACK_URL)
        self.assertIn('<form action="/shoppingcart/payment_fake" method="post">', html)
        self.assertIn('transaction_uuid', html)
        self.assertIn('signature', html)
        self.assertIn(self.CALLBACK_URL, html)
    def test_get_signed_purchase_params(self):
        params = get_signed_purchase_params(self.order, callback_url=self.CALLBACK_URL)
        # Check the callback URL override
        self.assertEqual(params['override_custom_receipt_page'], self.CALLBACK_URL)
        # Parameters determined by the order model
        self.assertEqual(params['amount'], '10.00')
        self.assertEqual(params['currency'], 'usd')
        self.assertEqual(params['orderNumber'], 'OrderId: {order_id}'.format(order_id=self.order.id))
        self.assertEqual(params['reference_number'], self.order.id)
        # Parameters determined by the Django (test) settings
        self.assertEqual(params['access_key'], '0123456789012345678901')
        self.assertEqual(params['profile_id'], 'edx')
        # Some fields will change depending on when the test runs,
        # so we just check that they're set to a non-empty string
        self.assertGreater(len(params['signed_date_time']), 0)
        self.assertGreater(len(params['transaction_uuid']), 0)
        # Constant parameters
        self.assertEqual(params['transaction_type'], 'sale')
        self.assertEqual(params['locale'], 'en')
        self.assertEqual(params['payment_method'], 'card')
        self.assertEqual(
            params['signed_field_names'],
            ",".join([
                'amount',
                'currency',
                'orderNumber',
                'access_key',
                'profile_id',
                'reference_number',
                'transaction_type',
                'locale',
                'signed_date_time',
                'signed_field_names',
                'unsigned_field_names',
                'transaction_uuid',
                'payment_method',
                'override_custom_receipt_page',
                'override_custom_cancel_page',
            ])
        )
        self.assertEqual(params['unsigned_field_names'], '')
        # Check the signature
        self.assertEqual(params['signature'], self._signature(params))
    # We patch the purchased callback because
    # we're using the OrderItem base class, which throws an exception
    # when item doest not have a course id associated
    @patch.object(OrderItem, 'purchased_callback')
    def test_process_payment_raises_exception(self, purchased_callback): # pylint: disable=unused-argument
        # NOTE(review): despite its name, this test contains no assertRaises
        # (or any assertion at all); it only verifies the call completes
        # without error. Confirm whether an exception assertion was intended.
        self.order.clear()
        OrderItem.objects.create(
            order=self.order,
            user=self.user,
            unit_cost=self.COST,
            line_cost=self.COST,
        )
        params = self._signed_callback_params(self.order.id, self.COST, self.COST)
        process_postpay_callback(params)
    # We patch the purchased callback because
    # (a) we're using the OrderItem base class, which doesn't implement this method, and
    # (b) we want to verify that the method gets called on success.
    @patch.object(OrderItem, 'purchased_callback')
    @patch.object(OrderItem, 'pdf_receipt_display_name')
    def test_process_payment_success(self, pdf_receipt_display_name, purchased_callback): # pylint: disable=unused-argument
        # Simulate a callback from CyberSource indicating that payment was successful
        params = self._signed_callback_params(self.order.id, self.COST, self.COST)
        result = process_postpay_callback(params)
        # Expect that we processed the payment successfully
        self.assertTrue(
            result['success'],
            msg="Payment was not successful: {error}".format(error=result.get('error_html'))
        )
        self.assertEqual(result['error_html'], '')
        # Expect that the item's purchased callback was invoked
        purchased_callback.assert_called_with()
        # Expect that the order has been marked as purchased
        self.assertEqual(result['order'].status, 'purchased')
        self.assert_dump_recorded(result['order'])
    def test_process_payment_rejected(self):
        # Simulate a callback from CyberSource indicating that the payment was rejected
        params = self._signed_callback_params(self.order.id, self.COST, self.COST, decision='REJECT')
        result = process_postpay_callback(params)
        # Expect that we get an error message
        self.assertFalse(result['success'])
        self.assertIn(u"did not accept your payment", result['error_html'])
        self.assert_dump_recorded(result['order'])
    def test_process_payment_invalid_signature(self):
        # Simulate a callback from CyberSource indicating that the payment was rejected
        params = self._signed_callback_params(self.order.id, self.COST, self.COST, signature="invalid!")
        result = process_postpay_callback(params)
        # Expect that we get an error message
        self.assertFalse(result['success'])
        self.assertIn(u"corrupted message regarding your charge", result['error_html'])
    def test_process_payment_invalid_order(self):
        # Use an invalid order ID
        params = self._signed_callback_params("98272", self.COST, self.COST)
        result = process_postpay_callback(params)
        # Expect an error
        self.assertFalse(result['success'])
        self.assertIn(u"inconsistent data", result['error_html'])
    def test_process_invalid_payment_amount(self):
        # Change the payment amount (no longer matches the database order record)
        params = self._signed_callback_params(self.order.id, "145.00", "145.00")
        result = process_postpay_callback(params)
        # Expect an error
        self.assertFalse(result['success'])
        self.assertIn(u"different amount than the order total", result['error_html'])
        # refresh data for current order
        order = Order.objects.get(id=self.order.id)
        self.assert_dump_recorded(order)
    def test_process_amount_paid_not_decimal(self):
        # Change the payment amount to a non-decimal
        params = self._signed_callback_params(self.order.id, self.COST, "abcd")
        result = process_postpay_callback(params)
        # Expect an error
        self.assertFalse(result['success'])
        self.assertIn(u"badly-typed value", result['error_html'])
    def test_process_user_cancelled(self):
        # Change the payment amount to a non-decimal
        params = self._signed_callback_params(self.order.id, self.COST, "abcd")
        params['decision'] = u'CANCEL'
        result = process_postpay_callback(params)
        # Expect an error
        self.assertFalse(result['success'])
        self.assertIn(u"you have cancelled this transaction", result['error_html'])
    @patch.object(OrderItem, 'purchased_callback')
    @patch.object(OrderItem, 'pdf_receipt_display_name')
    def test_process_no_credit_card_digits(self, pdf_receipt_display_name, purchased_callback): # pylint: disable=unused-argument
        # Use a credit card number with no digits provided
        params = self._signed_callback_params(
            self.order.id, self.COST, self.COST,
            card_number='nodigits'
        )
        result = process_postpay_callback(params)
        # Expect that we processed the payment successfully
        self.assertTrue(
            result['success'],
            msg="Payment was not successful: {error}".format(error=result.get('error_html'))
        )
        self.assertEqual(result['error_html'], '')
        self.assert_dump_recorded(result['order'])
        # Expect that the order has placeholders for the missing credit card digits
        self.assertEqual(result['order'].bill_to_ccnum, '####')
    @ddt.data('req_reference_number', 'req_currency', 'decision', 'auth_amount')
    def test_process_missing_parameters(self, missing_param):
        # Remove a required parameter
        params = self._signed_callback_params(self.order.id, self.COST, self.COST)
        del params[missing_param]
        # Recalculate the signature with no signed fields so we can get past
        # signature validation.
        params['signed_field_names'] = 'reason_code,message'
        params['signature'] = self._signature(params)
        result = process_postpay_callback(params)
        # Expect an error
        self.assertFalse(result['success'])
        self.assertIn(u"did not return a required parameter", result['error_html'])
    @patch.object(OrderItem, 'purchased_callback')
    @patch.object(OrderItem, 'pdf_receipt_display_name')
    def test_sign_then_verify_unicode(self, pdf_receipt_display_name, purchased_callback): # pylint: disable=unused-argument
        params = self._signed_callback_params(
            self.order.id, self.COST, self.COST,
            first_name=u'\u2699'
        )
        # Verify that this executes without a unicode error
        result = process_postpay_callback(params)
        self.assertTrue(result['success'])
        self.assert_dump_recorded(result['order'])
    @ddt.data('string', u'üñîçø∂é')
    def test_get_processor_exception_html(self, error_string):
        """
        Tests the processor exception html message
        """
        for exception_type in [CCProcessorSignatureException, CCProcessorWrongAmountException, CCProcessorDataException]:
            error_msg = error_string
            exception = exception_type(error_msg)
            html = _get_processor_exception_html(exception)
            self.assertIn(settings.PAYMENT_SUPPORT_EMAIL, html)
            self.assertIn('Sorry!', html)
            self.assertIn(error_msg, html)
    def _signed_callback_params(
        self, order_id, order_amount, paid_amount,
        decision='ACCEPT', signature=None, card_number='xxxxxxxxxxxx1111',
        first_name='John'
    ):
        """
        Construct parameters that could be returned from CyberSource
        to our payment callback.
        Some values can be overridden to simulate different test scenarios,
        but most are fake values captured from interactions with
        a CyberSource test account.
        Args:
            order_id (string or int): The ID of the `Order` model.
            order_amount (string): The cost of the order.
            paid_amount (string): The amount the user paid using CyberSource.
        Keyword Args:
            decision (string): Whether the payment was accepted or rejected or declined.
            signature (string): If provided, use this value instead of calculating the signature.
            card_numer (string): If provided, use this value instead of the default credit card number.
            first_name (string): If provided, the first name of the user.
        Returns:
            dict
        """
        # Parameters sent from CyberSource to our callback implementation
        # These were captured from the CC test server.
        signed_field_names = ["transaction_id",
                              "decision",
                              "req_access_key",
                              "req_profile_id",
                              "req_transaction_uuid",
                              "req_transaction_type",
                              "req_reference_number",
                              "req_amount",
                              "req_currency",
                              "req_locale",
                              "req_payment_method",
                              "req_override_custom_receipt_page",
                              "req_bill_to_forename",
                              "req_bill_to_surname",
                              "req_bill_to_email",
                              "req_bill_to_address_line1",
                              "req_bill_to_address_city",
                              "req_bill_to_address_state",
                              "req_bill_to_address_country",
                              "req_bill_to_address_postal_code",
                              "req_card_number",
                              "req_card_type",
                              "req_card_expiry_date",
                              "message",
                              "reason_code",
                              "auth_avs_code",
                              "auth_avs_code_raw",
                              "auth_response",
                              "auth_amount",
                              "auth_code",
                              "auth_trans_ref_no",
                              "auth_time",
                              "bill_trans_ref_no",
                              "signed_field_names",
                              "signed_date_time"]
        # if decision is in FAILED_DECISIONS list then remove auth_amount from
        # signed_field_names list.
        if decision in self.FAILED_DECISIONS:
            signed_field_names.remove("auth_amount")
        params = {
            # Parameters that change based on the test
            "decision": decision,
            "req_reference_number": str(order_id),
            "req_amount": order_amount,
            "auth_amount": paid_amount,
            "req_card_number": card_number,
            # Stub values
            "utf8": u"✓",
            "req_bill_to_address_country": "US",
            "auth_avs_code": "X",
            "req_card_expiry_date": "01-2018",
            "bill_trans_ref_no": "85080648RYI23S6I",
            "req_bill_to_address_state": "MA",
            "signed_field_names": ",".join(signed_field_names),
            "req_payment_method": "card",
            "req_transaction_type": "sale",
            "auth_code": "888888",
            "req_locale": "en",
            "reason_code": "100",
            "req_bill_to_address_postal_code": "02139",
            "req_bill_to_address_line1": "123 Fake Street",
            "req_card_type": "001",
            "req_bill_to_address_city": "Boston",
            "signed_date_time": "2014-08-18T14:07:10Z",
            "req_currency": "usd",
            "auth_avs_code_raw": "I1",
            "transaction_id": "4083708299660176195663",
            "auth_time": "2014-08-18T140710Z",
            "message": "Request was processed successfully.",
            "auth_response": "100",
            "req_profile_id": "0000001",
            "req_transaction_uuid": "ddd9935b82dd403f9aa4ba6ecf021b1f",
            "auth_trans_ref_no": "85080648RYI23S6I",
            "req_bill_to_surname": "Doe",
            "req_bill_to_forename": first_name,
            "req_bill_to_email": "john@example.com",
            "req_override_custom_receipt_page": "http://localhost:8000/shoppingcart/postpay_callback/",
            "req_access_key": "abcd12345",
        }
        # if decision is in FAILED_DECISIONS list then remove the auth_amount from params dict
        if decision in self.FAILED_DECISIONS:
            del params["auth_amount"]
        # Calculate the signature
        params['signature'] = signature if signature is not None else self._signature(params)
        return params
    def _signature(self, params):
        """
        Calculate the signature from a dictionary of params.
        NOTE: This method uses the processor's hashing method. That method
        is a thin wrapper of standard library calls, and it seemed overly complex
        to rewrite that code in the test suite.
        Args:
            params (dict): Dictionary with a key 'signed_field_names',
                which is a comma-separated list of keys in the dictionary
                to include in the signature.
        Returns:
            string
        """
        return processor_hash(
            ",".join([
                u"{0}={1}".format(signed_field, params[signed_field])
                for signed_field
                in params['signed_field_names'].split(u",")
            ])
        )
    def test_process_payment_declined(self):
        # Simulate a callback from CyberSource indicating that the payment was declined
        params = self._signed_callback_params(self.order.id, self.COST, self.COST, decision='DECLINE')
        result = process_postpay_callback(params)
        # Expect that we get an error message
        self.assertFalse(result['success'])
        self.assertIn(u"payment was declined", result['error_html'])
| agpl-3.0 |
stympy/ansible-modules-extras | system/filesystem.py | 38 | 8561 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Alexander Bulimov <lazywolf0@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author: "Alexander Bulimov (@abulimov)"
module: filesystem
short_description: Makes file system on block device
description:
- This module creates file system.
version_added: "1.2"
options:
fstype:
description:
- File System type to be created.
required: true
dev:
description:
- Target block device.
required: true
force:
choices: [ "yes", "no" ]
default: "no"
description:
- If yes, allows to create new filesystem on devices that already has filesystem.
required: false
resizefs:
choices: [ "yes", "no" ]
default: "no"
description:
- If yes, if the block device and filessytem size differ, grow the filesystem into the space. Note, XFS Will only grow if mounted.
required: false
version_added: "2.0"
opts:
description:
- List of options to be passed to mkfs command.
notes:
- uses mkfs command
'''
EXAMPLES = '''
# Create a ext2 filesystem on /dev/sdb1.
- filesystem: fstype=ext2 dev=/dev/sdb1
# Create a ext4 filesystem on /dev/sdb1 and check disk blocks.
- filesystem: fstype=ext4 dev=/dev/sdb1 opts="-cc"
'''
def _get_dev_size(dev, module):
""" Return size in bytes of device. Returns int """
blockdev_cmd = module.get_bin_path("blockdev", required=True)
rc, devsize_in_bytes, err = module.run_command("%s %s %s" % (blockdev_cmd, "--getsize64", dev))
return int(devsize_in_bytes)
def _get_fs_size(fssize_cmd, dev, module):
    """ Return size in bytes of filesystem on device. Returns int """
    cmd = module.get_bin_path(fssize_cmd, required=True)
    if 'tune2fs' == fssize_cmd:
        # Get Block count and Block size
        # NOTE(review): relies on tune2fs -l printing 'Block count:' before
        # 'Block size:' (the loop breaks as soon as the block size is seen).
        # If either line is missing from the output, block_count/block_size
        # are never bound and the final multiplication raises
        # UnboundLocalError -- confirm whether that can happen in practice.
        rc, size, err = module.run_command("%s %s %s" % (cmd, '-l', dev))
        if rc == 0:
            for line in size.splitlines():
                if 'Block count:' in line:
                    block_count = int(line.split(':')[1].strip())
                elif 'Block size:' in line:
                    block_size = int(line.split(':')[1].strip())
                    break
        else:
            module.fail_json(msg="Failed to get block count and block size of %s with %s" % (dev, cmd), rc=rc, err=err )
    elif 'xfs_info' == fssize_cmd:
        # Get Block count and Block size
        # Parses the "data = bsize=... blocks=..." line of xfs_info output;
        # same UnboundLocalError caveat as above if the line is absent.
        rc, size, err = module.run_command("%s %s" % (cmd, dev))
        if rc == 0:
            for line in size.splitlines():
                #if 'data' in line:
                if 'data ' in line:
                    block_size = int(line.split('=')[2].split()[0])
                    block_count = int(line.split('=')[3].split(',')[0])
                    break
        else:
            module.fail_json(msg="Failed to get block count and block size of %s with %s" % (dev, cmd), rc=rc, err=err )
    elif 'btrfs' == fssize_cmd:
        #ToDo
        # There is no way to get the blocksize and blockcount for btrfs filesystems
        # (so a btrfs filesystem always reports size 1, which makes the
        # resize comparison in main() treat it as smaller than the device).
        block_size = 1
        block_count = 1
    return block_size*block_count
def main():
    """Entry point: create (or grow) a filesystem on a block device."""
    module = AnsibleModule(
        argument_spec = dict(
            fstype=dict(required=True, aliases=['type']),
            dev=dict(required=True, aliases=['device']),
            opts=dict(),
            force=dict(type='bool', default='no'),
            resizefs=dict(type='bool', default='no'),
        ),
        supports_check_mode=True,
    )
    # There is no "single command" to manipulate filesystems, so we map them all out and their options
    fs_cmd_map = {
        'ext2' : {
            'mkfs' : 'mkfs.ext2',
            'grow' : 'resize2fs',
            'grow_flag' : None,
            'force_flag' : '-F',
            'fsinfo': 'tune2fs',
        },
        'ext3' : {
            'mkfs' : 'mkfs.ext3',
            'grow' : 'resize2fs',
            'grow_flag' : None,
            'force_flag' : '-F',
            'fsinfo': 'tune2fs',
        },
        'ext4' : {
            'mkfs' : 'mkfs.ext4',
            'grow' : 'resize2fs',
            'grow_flag' : None,
            'force_flag' : '-F',
            'fsinfo': 'tune2fs',
        },
        'ext4dev' : {
            'mkfs' : 'mkfs.ext4',
            'grow' : 'resize2fs',
            'grow_flag' : None,
            'force_flag' : '-F',
            'fsinfo': 'tune2fs',
        },
        'xfs' : {
            'mkfs' : 'mkfs.xfs',
            'grow' : 'xfs_growfs',
            'grow_flag' : None,
            'force_flag' : '-f',
            'fsinfo': 'xfs_info',
        },
        'btrfs' : {
            'mkfs' : 'mkfs.btrfs',
            'grow' : 'btrfs',
            'grow_flag' : 'filesystem resize',
            'force_flag' : '-f',
            'fsinfo': 'btrfs',
        }
    }
    dev = module.params['dev']
    fstype = module.params['fstype']
    opts = module.params['opts']
    force = module.boolean(module.params['force'])
    resizefs = module.boolean(module.params['resizefs'])
    changed = False
    # Unsupported filesystem types exit early with a warning instead of failing.
    try:
        _ = fs_cmd_map[fstype]
    except KeyError:
        module.exit_json(changed=False, msg="WARNING: module does not support this filesystem yet. %s" % fstype)
    mkfscmd = fs_cmd_map[fstype]['mkfs']
    force_flag = fs_cmd_map[fstype]['force_flag']
    growcmd = fs_cmd_map[fstype]['grow']
    fssize_cmd = fs_cmd_map[fstype]['fsinfo']
    if not os.path.exists(dev):
        module.fail_json(msg="Device %s not found."%dev)
    # Probe the device's current filesystem type with blkid (empty string if none).
    cmd = module.get_bin_path('blkid', required=True)
    rc,raw_fs,err = module.run_command("%s -c /dev/null -o value -s TYPE %s" % (cmd, dev))
    fs = raw_fs.strip()
    if fs == fstype and resizefs == False and not force:
        # Desired filesystem already present and no resize/force requested: no-op.
        module.exit_json(changed=False)
    elif fs == fstype and resizefs == True:
        # Get dev and fs size and compare
        devsize_in_bytes = _get_dev_size(dev, module)
        fssize_in_bytes = _get_fs_size(fssize_cmd, dev, module)
        if fssize_in_bytes < devsize_in_bytes:
            fs_smaller = True
        else:
            fs_smaller = False
        if module.check_mode and fs_smaller:
            module.exit_json(changed=True, msg="Resizing filesystem %s on device %s" % (fstype,dev))
        elif module.check_mode and not fs_smaller:
            module.exit_json(changed=False, msg="%s filesystem is using the whole device %s" % (fstype, dev))
        elif fs_smaller:
            cmd = module.get_bin_path(growcmd, required=True)
            rc,out,err = module.run_command("%s %s" % (cmd, dev))
            # Sadly there is no easy way to determine if this has changed. For now, just say "true" and move on.
            # in the future, you would have to parse the output to determine this.
            # thankfully, these are safe operations if no change is made.
            if rc == 0:
                module.exit_json(changed=True, msg=out)
            else:
                module.fail_json(msg="Resizing filesystem %s on device '%s' failed"%(fstype,dev), rc=rc, err=err)
        else:
            module.exit_json(changed=False, msg="%s filesystem is using the whole device %s" % (fstype, dev))
    elif fs and not force:
        # A different filesystem exists and force was not given: refuse to clobber it.
        module.fail_json(msg="'%s' is already used as %s, use force=yes to overwrite"%(dev,fs), rc=rc, err=err)
    ### create fs
    if module.check_mode:
        changed = True
    else:
        mkfs = module.get_bin_path(mkfscmd, required=True)
        cmd = None
        if opts is None:
            cmd = "%s %s '%s'" % (mkfs, force_flag, dev)
        else:
            cmd = "%s %s %s '%s'" % (mkfs, force_flag, opts, dev)
        rc,_,err = module.run_command(cmd)
        if rc == 0:
            changed = True
        else:
            module.fail_json(msg="Creating filesystem %s on device '%s' failed"%(fstype,dev), rc=rc, err=err)
    module.exit_json(changed=changed)
# import module snippets
# (the wildcard import is the historical Ansible module pattern; it brings
# AnsibleModule and the `os` module used above into scope)
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
| gpl-3.0 |
rec/BiblioPixel | test/bibliopixel/control/address_test.py | 2 | 3376 | import unittest
from bibliopixel.control.address import Address
class AddressTest(unittest.TestCase):
    """Unit tests for bibliopixel.control.address.Address parsing, get and set.

    Several tests use the test instance itself (``self``) as the object the
    Address is evaluated against, assigning throwaway attributes on it.
    """
    def test_empty(self):
        # An empty address string is falsy and get() passes values through.
        address = Address('')
        self.assertFalse(address)
        self.assertFalse(address.segments)
        self.assertFalse(address.assignment)
        self.assertEqual(address.get(23), 23)
        with self.assertRaises(ValueError):
            address.set(self, 23)
    def test_attrib(self):
        address = Address('attr')
        self.assertEqual(len(address.segments), 1)
        self.attr = 'bingo'
        self.assertIs(address.get(self), 'bingo')
        address.set(self, 'bang')
        self.assertIs(address.get(self), 'bang')
    def test_attrib_error(self):
        address = Address('.attr')
        with self.assertRaises(AttributeError):
            address.get(AddressTest)
        with self.assertRaises(AttributeError):
            address.get(0)
    def test_array(self):
        address = Address('[1]')
        self.assertEqual(len(address.segments), 1)
        data = [2, 4, 6]
        self.assertEqual(address.get(data), 4)
        address.set(data, 3)
        self.assertEqual(data, [2, 3, 6])
    def test_array_error(self):
        address = Address('[1]')
        with self.assertRaises(IndexError):
            address.get([0])
        with self.assertRaises(IndexError):
            address.set([0], 5)
        with self.assertRaises(TypeError):
            address.get(0)
    def test_compound(self):
        # A mixed chain of attribute, index and key segments.
        self.attr1 = [{'test': [None, {'heck': self}]}, 'x', 'y']
        self.attr2 = self
        self.attr3 = 'bingo'
        address = Address('.attr1[0][test][1][heck].attr2.attr3')
        self.assertEqual(len(address.segments), 7)
        self.assertEqual(address.get(self), 'bingo')
        address.set(self, 'bang')
        self.assertEqual(self.attr3, 'bang')
    def call(self, x):
        # Helper target invoked through call-addresses below (not a test).
        self.call_result = 23
    def test_trivial_call(self):
        address = Address('()')
        self.assertEqual(len(address.segments), 1)
        result = []
        address.set(result.append, 'value')
        self.assertEqual(result, ['value'])
    def test_call(self):
        address = Address('.call()')
        self.assertEqual(len(address.segments), 2)
        address.set(self, 23)
        self.assertEqual(self.call_result, 23)
    def call2(self):
        # Helper target for the complex-call test (not a test).
        return None, lambda: self
    def test_call_complex(self):
        self.results = []
        address = Address('.call2()[1]().call()')
        self.assertEqual(len(address.segments), 6)
        address.set(self, 23)
        self.assertEqual(self.call_result, 23)
    def test_compound_error(self):
        address = Address('attr1[0][test][1][heck].attr2.attr3')
        with self.assertRaises(AttributeError):
            address.get(self)
        self.attr1 = None
        with self.assertRaises(TypeError):
            address.get(self)
        with self.assertRaises(TypeError):
            address.set(self, 2)
    def test_segment_start_with_index(self):
        # An index segment may only start an address, not follow a dot.
        Address('[1]')
        with self.assertRaises(ValueError):
            Address('foo.[1]')
    def test_assignment(self):
        # 'addr = value' addresses carry the value(s) to assign with set().
        self.attr = None
        Address('.attr = 1').set(self)
        self.assertEqual(self.attr, 1)
        self.attr = None
        Address('.attr = 1, 2.5, 3').set(self)
        self.assertEqual(self.attr, (1, 2.5, 3))
| mit |
adamcaudill/yawast | yawast/scanner/plugins/http/applications/wordpress.py | 1 | 7344 | # Copyright (c) 2013 - 2020 Adam Caudill and Contributors.
# This file is part of YAWAST which is released under the MIT license.
# See the LICENSE file or go to https://yawast.org/license/ for full license details.
import re
from typing import Tuple, Union, List, cast
from urllib.parse import urljoin
from packaging import version
from requests import Response
from yawast.reporting.enums import Vulnerabilities
from yawast.scanner.plugins.evidence import Evidence
from yawast.scanner.plugins.http import version_checker, response_scanner
from yawast.scanner.plugins.result import Result
from yawast.shared import network, output
def identify(url: str) -> Tuple[Union[str, None], List[Result]]:
    """Try to locate a WordPress install at *url* (falling back to *url*/blog/).

    Returns a tuple of (base path of the install, findings); the path is
    None when no WordPress login page was found.
    """
    results = []
    # find WordPress
    res, path = _identify_by_path(url, "")
    if path is None:
        res, path = _identify_by_path(url, "blog/")
    # check to see if we have a valid hit
    if path is not None:
        # we have a WordPress install, let's see if we can get a version
        body = res.text
        ver = "Unknown"
        # this works for modern versions
        m = re.search(r"login.min.css\?ver=\d+\.\d+\.?\d*", body)
        if m:
            ver = m.group(0).split("=")[1]
        else:
            # the current method doesn't work, fall back to an older method
            m = re.search(r"load-styles.php\?[\w,;=&%]+;ver=\d+\.\d+\.?\d*", body)
            if m:
                ver = m.group(0).split("=")[-1]
        # report that we found WordPress
        results.append(
            Result.from_evidence(
                Evidence.from_response(res, {"version": ver}),
                f"Found WordPress v{ver} at {path}",
                Vulnerabilities.APP_WORDPRESS_VERSION,
            )
        )
        # is this a current version?
        # NOTE(review): if neither regex matched, ver is the literal string
        # "Unknown"; version.parse("Unknown") yields a non-release version and
        # the comparison below then depends on packaging's legacy ordering --
        # confirm this is the intended behavior.
        ver = cast(version.Version, version.parse(ver))
        curr_version = version_checker.get_latest_version("wordpress", ver)
        if curr_version is not None and curr_version > ver:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(
                        res,
                        {
                            "installed_version": str(ver),
                            "current_verison": str(curr_version),
                        },
                    ),
                    f"WordPress Outdated: {ver} - Current: {curr_version}",
                    Vulnerabilities.APP_WORDPRESS_OUTDATED,
                )
            )
        return path, results
    else:
        return None, []
def check_path_disclosure(wp_url: str) -> List[Result]:
    """Probe known plugin files for PHP fatal errors that leak the server's
    filesystem path; returns the findings (plus generic response findings)."""
    # this is a list of files that are known to throw a fatal error when accessed directly
    # this is from a manual review of all plugins with at least 1M installs
    urls = [
        "wp-content/plugins/hello.php",
        "wp-content/plugins/akismet/akismet.php",
        "wp-content/plugins/contact-form-7/includes/capabilities.php",
        "wp-content/plugins/wordpress-seo/admin/views/partial-alerts-errors.php",
        "wp-content/plugins/jetpack/load-jetpack.php",
        "wp-content/plugins/jetpack/uninstall.php",
        "wp-content/plugins/duplicate-post/duplicate-post-admin.php",
        "wp-content/plugins/wpforms-lite/includes/admin/class-welcome.php",
        "wp-content/plugins/wp-google-maps/base/includes/welcome.php",
        "wp-content/plugins/wp-super-cache/wp-cache.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/wpforms/bootstrap.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/bootstrap.php",
        "wp-content/plugins/regenerate-thumbnails/regenerate-thumbnails.php",
        "wp-content/plugins/advanced-custom-fields/includes/deprecated.php",
        "wp-content/plugins/redirection/redirection.php",
        "wp-content/plugins/wpforms-lite/includes/admin/importers/class-ninja-forms.php",
        "wp-content/plugins/ninja-forms/includes/deprecated.php",
        "wp-content/plugins/so-widgets-bundle/so-widgets-bundle.php",
        "wp-content/plugins/wp-fastest-cache/templates/preload.php",
        "wp-content/plugins/duplicate-page/duplicatepage.php",
        "wp-content/plugins/better-wp-security/better-wp-security.php",
        "wp-content/plugins/all-in-one-wp-security-and-firewall/other-includes/wp-security-unlock-request.php",
        "wp-content/plugins/related-posts/views/settings.php",
        "wp-content/plugins/wpcontentguard/views/settings.php",
        "wp-content/plugins/simple-social-icons/simple-social-icons.php",
    ]
    results: List[Result] = []
    for url in urls:
        target = urljoin(wp_url, url)
        # HEAD first to cheaply skip 404s before issuing the full GET.
        head = network.http_head(target, False)
        if head.status_code != 404:
            resp = network.http_get(target, False)
            if resp.status_code < 300 or resp.status_code >= 500:
                # we have some kind of response that could be useful
                if "<b>Fatal error</b>:" in resp.text:
                    # we have an error
                    # extract the leaked absolute path (Unix or Windows style)
                    pattern = r"<b>((\/|[A-Z]:\\).*.php)<\/b>"
                    if re.search(pattern, resp.text):
                        try:
                            path = re.findall(pattern, resp.text)[0][0]
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(resp, {"path": path}),
                                    f"WordPress File Path Disclosure: {target} ({path})",
                                    Vulnerabilities.APP_WORDPRESS_PATH_DISCLOSURE,
                                )
                            )
                        except Exception:
                            output.debug_exception()
            results += response_scanner.check_response(target, resp)
    return results
def check_json_user_enum(url: str) -> List[Result]:
    """Check whether the WP-JSON REST API exposes the site's user list.

    Requests ``wp-json/wp/v2/users``; when the endpoint answers with a user
    listing, records one finding for the enumeration itself plus one finding
    per exposed user.
    """
    endpoint = urljoin(url, "wp-json/wp/v2/users")
    response = network.http_get(endpoint, False)
    # Anything other than a successful user listing means nothing to report.
    if response.status_code >= 300 or "slug" not in response.text:
        return []
    findings = [
        Result.from_evidence(
            Evidence.from_response(response),
            f"WordPress WP-JSON User Enumeration at {endpoint}",
            Vulnerabilities.APP_WORDPRESS_USER_ENUM_API,
        )
    ]
    for user in response.json():
        findings.append(
            Result.from_evidence(
                Evidence.from_response(
                    response,
                    {
                        "user_id": user["id"],
                        "user_slug": user["slug"],
                        "user_name": user["name"],
                    },
                ),
                f"ID: {user['id']}\tUser Slug: '{user['slug']}'\t\tUser Name: '{user['name']}'",
                Vulnerabilities.APP_WORDPRESS_USER_FOUND,
            )
        )
    return findings
def _identify_by_path(url: str, path: str) -> Tuple[Response, Union[str, None]]:
    """Fetch ``<path>wp-login.php`` and report whether it looks like WordPress.

    Returns the raw response plus the resolved base URL when the login page
    is present, or ``None`` when it is not.
    """
    res = network.http_get(urljoin(url, f"{path}wp-login.php"), False)

    is_wordpress = res.status_code == 200 and "Powered by WordPress" in res.text
    return res, (urljoin(url, path) if is_wordpress else None)
| mit |
jmiserez/sts | tests/unit/sts/util/precompute_cache_test.py | 2 | 2034 | # Copyright 2011-2013 Colin Scott
# Copyright 2011-2013 Andreas Wundsam
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import sys
import os.path
sys.path.append(os.path.dirname(__file__) + "/../../..")
from sts.util.precompute_cache import *
class precompute_cache_test(unittest.TestCase):
    """Behavioral tests for PrecomputeCache and PrecomputePowerSetCache."""

    def test_simple(self):
        # The plain cache only remembers exact tuples.
        cache = PrecomputeCache()
        cache.update((1, 2, 3))
        for sequence, expected in [((1, 2, 3), True),
                                   ((1, 2), False),
                                   ((1,), False)]:
            self.assertEqual(expected, bool(cache.already_done(sequence)))
        cache.update((1, 2))
        for sequence, expected in [((1, 2, 3), True),
                                   ((1, 2), True),
                                   ((1,), False),
                                   ((1, 2, 3, 4), False)]:
            self.assertEqual(expected, bool(cache.already_done(sequence)))

    def test_power(self):
        # The power-set cache also covers every subset of an inserted tuple.
        cache = PrecomputePowerSetCache()
        cache.update((1, 2, 3))
        for sequence, expected in [((1, 2, 3), True),
                                   ((1, 2), True),
                                   ((1,), True),
                                   ((2, 3), True),
                                   ((1, 2, 3, 4), False),
                                   ((1, 2, 4), False)]:
            self.assertEqual(expected, bool(cache.already_done(sequence)))
        cache.update((1, 2))
        for sequence, expected in [((1, 2, 3), True),
                                   ((1, 2), True),
                                   ((1,), True),
                                   ((1, 2, 3, 4), False)]:
            self.assertEqual(expected, bool(cache.already_done(sequence)))
        cache.update((3, 4))
        for sequence, expected in [((1, 2), True),
                                   ((3, 4), True),
                                   ((2, 4), False),
                                   ((4,), True),
                                   ((1, 2, 3, 4), False)]:
            self.assertEqual(expected, bool(cache.already_done(sequence)))
| apache-2.0 |
xuru/pyvisdk | pyvisdk/do/cluster_power_on_vm_result.py | 1 | 1098 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ClusterPowerOnVmResult(vim, *args, **kwargs):
    '''PowerOnVmResult is the base class of the result returned to the
    PowerOnMultiVM_Task method.

    Positional args map onto required+optional in declaration order;
    keyword args must name a known attribute.  Raises IndexError when too
    few arguments are given and InvalidArgumentError on an unknown keyword.
    '''

    obj = vim.client.factory.create('ns0:ClusterPowerOnVmResult')

    required = []
    optional = ['attempted', 'notAttempted', 'recommendations', 'dynamicProperty',
                'dynamicType']

    # do some validation checking...
    # (the original compared against a literal 0 -- a test that could never
    # fire -- and hard-coded "1" in the message; derive both from `required`)
    if (len(args) + len(kwargs)) < len(required):
        raise IndexError('Expected at least %d arguments got: %d' %
                         (len(required), len(args) + len(kwargs)))

    for name, arg in zip(required + optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
| mit |
betoesquivel/fil2014 | build/django/build/lib.linux-x86_64-2.7/django/contrib/sessions/models.py | 173 | 1997 | from django.db import models
from django.utils.translation import ugettext_lazy as _
class SessionManager(models.Manager):
    def encode(self, session_dict):
        """
        Returns the given session dictionary serialized and encoded as a string.
        """
        return SessionStore().encode(session_dict)

    def save(self, session_key, session_dict, expire_date):
        """Persist (or clear) the session row and return the model instance."""
        session = self.model(session_key, self.encode(session_dict), expire_date)
        if not session_dict:
            # Clear sessions with no data.
            session.delete()
        else:
            session.save()
        return session
class Session(models.Model):
    """
    Django provides full support for anonymous sessions. The session
    framework lets you store and retrieve arbitrary data on a
    per-site-visitor basis. It stores data on the server side and
    abstracts the sending and receiving of cookies. Cookies contain a
    session ID -- not the data itself.

    The Django sessions framework is entirely cookie-based. It does
    not fall back to putting session IDs in URLs. This is an intentional
    design decision. Not only does that behavior make URLs ugly, it makes
    your site vulnerable to session-ID theft via the "Referer" header.

    For complete documentation on using Sessions in your code, consult
    the sessions documentation that is shipped with Django (also available
    on the Django Web site).
    """
    # 40-character key identifying the session; stored in the client cookie.
    session_key = models.CharField(_('session key'), max_length=40,
                                   primary_key=True)
    # Encoded session dictionary (see SessionManager.encode / get_decoded).
    session_data = models.TextField(_('session data'))
    # Indexed so expired rows can be purged efficiently.
    expire_date = models.DateTimeField(_('expire date'), db_index=True)
    objects = SessionManager()

    class Meta:
        db_table = 'django_session'
        verbose_name = _('session')
        verbose_name_plural = _('sessions')

    def get_decoded(self):
        # Decode session_data back into the session dictionary via the
        # database session backend.
        return SessionStore().decode(self.session_data)
# At bottom to avoid circular import
from django.contrib.sessions.backends.db import SessionStore
| mit |
romz-pl/romzdb | 3rdparty/googletest/googlemock/test/gmock_leak_test.py | 779 | 4384 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests that leaked mock objects can be caught be Google Mock."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gmock_test_utils
# Path of the C++ helper binary exercised by these tests.
PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_leak_test_')

# Command lines selecting specific leaking test cases via --gtest_filter.
TEST_WITH_EXPECT_CALL = [PROGRAM_PATH, '--gtest_filter=*ExpectCall*']
TEST_WITH_ON_CALL = [PROGRAM_PATH, '--gtest_filter=*OnCall*']
TEST_MULTIPLE_LEAKS = [PROGRAM_PATH, '--gtest_filter=*MultipleLeaked*']

environ = gmock_test_utils.environ
SetEnvVar = gmock_test_utils.SetEnvVar

# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely.  Therefore they are incompatible with
# the premature-exit-file protocol by design.  Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gmock_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
class GMockLeakTest(gmock_test_utils.TestCase):
  """Checks that --gmock_catch_leaked_mocks controls leak detection.

  Each test runs the helper binary and inspects only its exit code: a
  non-zero exit means the leak was reported, zero means it was ignored.
  """

  def testCatchesLeakedMockByDefault(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL,
                                    env=environ).exit_code)
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL,
                                    env=environ).exit_code)

  def testDoesNotCatchLeakedMockWhenDisabled(self):
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks=0'],
                                    env=environ).exit_code)
    self.assertEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                    ['--gmock_catch_leaked_mocks=0'],
                                    env=environ).exit_code)

  def testCatchesLeakedMockWhenEnabled(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)

  def testCatchesLeakedMockWhenEnabledWithExplictFlagValue(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks=1'],
                                    env=environ).exit_code)

  def testCatchesMultipleLeakedMocks(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_MULTIPLE_LEAKS +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)
if __name__ == '__main__':
gmock_test_utils.Main()
| gpl-3.0 |
CUCWD/edx-platform | common/test/acceptance/pages/lms/conditional.py | 47 | 1119 | """
Conditional Pages
"""
from bok_choy.page_object import PageObject
POLL_ANSWER = 'Yes, of course'


class ConditionalPage(PageObject):
    """
    View of conditional page.
    """

    url = None

    def is_browser_on_page(self):
        """True when the conditional block's wrapper element is visible."""
        return self.q(css='.conditional-wrapper').visible

    def is_content_visible(self):
        """True once the conditional's hidden contents have been revealed."""
        return self.q(css='.hidden-contents').visible

    def fill_in_poll(self):
        """Answer the poll that shares the page with the conditional.

        Clicks every poll option whose text equals POLL_ANSWER.
        """
        answer_css = '.poll_answer .text'
        for position, answer_text in enumerate(self.q(css=answer_css).text):
            if answer_text == POLL_ANSWER:
                self.q(css=answer_css).nth(position).click()
| agpl-3.0 |
sergeykolychev/mxnet | tests/python/gpu/test_rtc.py | 61 | 1318 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import mxnet as mx
import numpy as np
from numpy.testing import assert_allclose
if __name__ == '__main__':
    # Input vector of ones on the first GPU.
    x = mx.nd.zeros((10,), ctx=mx.gpu(0))
    x[:] = 1
    # Output buffer; its initial contents are overwritten by the kernel.
    y = mx.nd.zeros((10,), ctx=mx.gpu(0))
    y[:] = 2
    # Runtime-compiled CUDA kernel: stage x through shared memory, then
    # compute y = exp(x * 5.0) element-wise.
    rtc = mx.rtc('abc', [('x', x)], [('y', y)], """
        __shared__ float s_rec[10];
        s_rec[threadIdx.x] = x[threadIdx.x];
        y[threadIdx.x] = expf(s_rec[threadIdx.x]*5.0);""")
    # Launch one block of 10 threads and verify against numpy.
    rtc.push([x], [y], (1, 1, 1), (10,1,1))
    assert_allclose(y.asnumpy(), np.exp(x.asnumpy()*5.0))
| apache-2.0 |
anpingli/openshift-ansible | roles/lib_openshift/src/lib/base.py | 2 | 21696 | # pylint: skip-file
# flake8: noqa
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    '''Raised for errors encountered while driving the `oc` command line.'''
    pass
# Directories searched in addition to $PATH when locating the oc client.
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]


def locate_oc_binary():
    ''' Find and return oc binary file '''
    # https://github.com/openshift/openshift-ansible/issues/3410
    # oc can be in /usr/local/bin in some cases, but that may not
    # be in $PATH due to ansible/sudo
    search_paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS

    binary = 'oc'

    # Use shutil.which when available; otherwise fall back to scanning
    # each candidate directory ourselves.
    try:
        located = shutil.which(binary, path=os.pathsep.join(search_paths))
    except AttributeError:
        for candidate_dir in search_paths:
            candidate = os.path.join(candidate_dir, binary)
            if os.path.exists(candidate):
                binary = candidate
                break
    else:
        if located is not None:
            binary = located

    return binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
updated = False
if content is not None:
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
updated = True
elif edits is not None:
results = Yedit.process_edits(edits, yed)
if results['changed']:
updated = True
if updated:
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-p')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None, field_selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
if field_selector is not None:
cmd.append('--field-selector={}'.format(field_selector))
# Name cannot be used with selector or field_selector.
if selector is None and field_selector is None and name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are retuned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
''' perform oadm manage-node scheduable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object):
    ''' utilities for openshiftcli modules '''

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(str(contents))

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''

        tmp = Utils.create_tmpfile(prefix=rname)

        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                # ruamel.yaml path: round-trip dumper preserves formatting
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))

        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            # any other ftype: write the data verbatim
            Utils._write(tmp, data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''create a temporary copy of a file'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        Utils._write(tmpfile, open(inc_file).read())

        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])

        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        # delete=False so the name remains usable after the handle closes
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        # accept a single dict as well as a list of dicts
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False

        if Utils.find_result(results, _name):
            return True

        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name'''
        rval = None
        for result in results:
            # match on the object's metadata.name field
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break

        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' return the service file '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()

        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)

        return contents

    @staticmethod
    def filter_versions(stdout):
        ''' filter the oc version output '''

        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']

        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    # last whitespace-separated token on the line is the version
                    version_dict[term] = line.split()[-1]

        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version in 3.2 does not return an "openshift" version
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]

        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        ''' create custom versions strings '''

        versions_dict = {}

        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]

            if version.startswith('v'):
                # strip the leading 'v' and any '+build' suffix
                versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
                # "v3.3.0.33" is what we have, we want "3.3"
                versions_dict[tech + '_short'] = version[1:4]

        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed '''
        import rpm

        transaction_set = rpm.TransactionSet()
        rpmquery = transaction_set.dbMatch("name", "atomic-openshift")

        return rpmquery.count() > 0

    # Disabling too-many-branches.  This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query.  '''

        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)

        for key, value in result_def.items():
            if key in skip:
                continue

            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False

                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False

                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False

                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False

                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False

            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False

                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False

                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False

            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False

        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Generic Config'''
    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        ''' return config options '''
        return self._options

    def to_option_list(self, ascommalist=''):
        '''return all options as a string
           if ascommalist is set to the name of a key, and
           the value of that key is a dict, format the dict
           as a list of comma delimited key=value pairs'''
        return self.stringify(ascommalist)

    def stringify(self, ascommalist=''):
        ''' return the options hash as cli params in a string
            if ascommalist is set to the name of a key, and
            the value of that key is a dict, format the dict
            as a list of comma delimited key=value pairs '''
        params = []
        for opt_name in sorted(self.config_options.keys()):
            entry = self.config_options[opt_name]
            # skip options flagged out, or with no usable value
            # (None is allowed through only when it is an int, i.e. never)
            if not entry['include']:
                continue
            if entry['value'] is None and not isinstance(entry['value'], int):
                continue
            if opt_name == ascommalist:
                rendered = ','.join('{}={}'.format(k, v)
                                    for k, v in sorted(entry['value'].items()))
            else:
                rendered = entry['value']
            params.append('--{}={}'.format(opt_name.replace('_', '-'), rendered))
        return params
| apache-2.0 |
IxLabs/lguest64 | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    """Print *msg* and abort the generator with exit status 1."""
    # print() is valid in both Python 2 and 3; the original used the
    # py2-only print statement.
    print(msg)
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    """Create the fabric module directory.

    Returns 1 when the directory already exists, None after creating it.
    Aborts via tcm_mod_err() if the directory cannot be created.
    """
    if os.path.isdir(fabric_mod_dir_var):
        return 1

    print("Creating fabric_mod_dir: " + fabric_mod_dir_var)
    # BUGFIX: os.mkdir() returns None, so the original `if ret:` check
    # could never report a failure -- catch OSError instead.
    try:
        os.mkdir(fabric_mod_dir_var)
    except OSError as exc:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var + ": " + str(exc))
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_base.h with FC lport/nport structures.

    Side effects: writes the header file into fabric_mod_dir_var and sets
    the module-level fabric_mod_port / fabric_mod_init_port names used by
    later generation stages.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print("Writing file: " + f)

    buf = "#define " + fabric_mod_name.upper() + "_VERSION  \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += "\t/* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += "\tu64 nport_wwpn;\n"
    buf += "\t/* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += "\tchar nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += "\tstruct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += "\t/* FC lport target portal group tag for TCM */\n"
    buf += "\tu16 lport_tpgt;\n"
    buf += "\t/* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += "\tstruct " + fabric_mod_name + "_lport *lport;\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += "\tstruct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += "\t/* SCSI protocol the lport is providing */\n"
    buf += "\tu8 lport_proto_id;\n"
    buf += "\t/* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += "\tu64 lport_wwpn;\n"
    buf += "\t/* ASCII formatted WWPN for FC Target Lport */\n"
    buf += "\tchar lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += "\tstruct se_wwn lport_wwn;\n"
    buf += "};\n"

    # BUGFIX: the original checked the return value of write() -- always
    # None on py2 but a truthy character count on py3, which would wrongly
    # abort -- and never closed the file.  A with-block fixes both.
    with open(f, 'w') as p:
        p.write(buf)

    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"

    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_base.h with SAS iport/tport structures.

    Side effects: writes the header file into fabric_mod_dir_var and sets
    the module-level fabric_mod_port / fabric_mod_init_port names used by
    later generation stages.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print("Writing file: " + f)

    buf = "#define " + fabric_mod_name.upper() + "_VERSION  \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += "\t/* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += "\tu64 iport_wwpn;\n"
    buf += "\t/* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += "\tchar iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += "\tstruct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += "\t/* SAS port target portal group tag for TCM */\n"
    buf += "\tu16 tport_tpgt;\n"
    buf += "\t/* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += "\tstruct " + fabric_mod_name + "_tport *tport;\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += "\tstruct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += "\t/* SCSI protocol the tport is providing */\n"
    buf += "\tu8 tport_proto_id;\n"
    buf += "\t/* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += "\tu64 tport_wwpn;\n"
    buf += "\t/* ASCII formatted WWPN for SAS Target port */\n"
    buf += "\tchar tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += "\t/* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += "\tstruct se_wwn tport_wwn;\n"
    buf += "};\n"

    # BUGFIX: the original checked write()'s return value (misfires on
    # py3, where it is the character count) and never closed the file.
    with open(f, 'w') as p:
        p.write(buf)

    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"

    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_configfs.c in fabric_mod_dir_var.

    Emits the configfs glue for the new fabric module: the
    make/drop callbacks for node ACLs, TPGs and WWN ports, the
    target_core_fabric_ops table, and the module init/exit handlers that
    (de)register the fabric with TCM.  proto_ident ("FC", "SAS" or
    "iSCSI") selects the protocol-specific WWPN handling.  Reads the
    module-level globals fabric_mod_port / fabric_mod_init_port that the
    *_include generators set.
    """
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises IOError on failure, so this check never
    # fires.
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # Header includes for the generated .c file.
    buf = "#include <linux/module.h>\n"
    buf += "#include <linux/moduleparam.h>\n"
    buf += "#include <linux/version.h>\n"
    buf += "#include <generated/utsrelease.h>\n"
    buf += "#include <linux/utsname.h>\n"
    buf += "#include <linux/init.h>\n"
    buf += "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/configfs.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_fabric_configfs.h>\n"
    buf += "#include <target/target_core_configfs.h>\n"
    buf += "#include <target/configfs_macros.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
    buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
    # <mod>_make_nodeacl(): allocate and register an initiator node ACL.
    buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
    buf += " struct se_portal_group *se_tpg,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
    # Only FC and SAS ACLs carry a binary WWPN; iSCSI uses the ASCII name.
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n"
    buf += " u32 nexus_depth;\n\n"
    buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n"
    buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
    buf += " if (!se_nacl_new)\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
    buf += " nexus_depth = 1;\n"
    buf += " /*\n"
    buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
    buf += " * when converting a NodeACL from demo mode -> explict\n"
    buf += " */\n"
    buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
    buf += " name, nexus_depth);\n"
    buf += " if (IS_ERR(se_nacl)) {\n"
    buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
    buf += " return se_nacl;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
    buf += " */\n"
    buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
        buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return se_nacl;\n"
    buf += "}\n\n"
    # <mod>_drop_nodeacl(): tear down and free a node ACL.
    buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
    buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
    buf += " kfree(nacl);\n"
    buf += "}\n\n"
    # <mod>_make_tpg(): parse "tpgt_N" and register a target portal group.
    buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
    buf += " struct se_wwn *wwn,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
    buf += " unsigned long tpgt;\n"
    buf += " int ret;\n\n"
    buf += " if (strstr(name, \"tpgt_\") != name)\n"
    buf += " return ERR_PTR(-EINVAL);\n"
    buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
    buf += " return ERR_PTR(-EINVAL);\n\n"
    buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
    buf += " if (!tpg) {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
    buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
    buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
    buf += " &tpg->se_tpg, (void *)tpg,\n"
    buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
    buf += " if (ret < 0) {\n"
    buf += " kfree(tpg);\n"
    buf += " return NULL;\n"
    buf += " }\n"
    buf += " return &tpg->se_tpg;\n"
    buf += "}\n\n"
    # <mod>_drop_tpg(): deregister and free a target portal group.
    buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
    buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
    buf += " core_tpg_deregister(se_tpg);\n"
    buf += " kfree(tpg);\n"
    buf += "}\n\n"
    # <mod>_make_<port>(): allocate the WWN port object.
    buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n\n"
    buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n\n"
    buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
    buf += " if (!" + fabric_mod_port + ") {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
        buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
    buf += "}\n\n"
    # <mod>_drop_<port>(): free the WWN port object.
    buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
    buf += " kfree(" + fabric_mod_port + ");\n"
    buf += "}\n\n"
    # Read-only configfs "version" attribute for the fabric.
    buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " char *page)\n"
    buf += "{\n"
    buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += "}\n\n"
    buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
    buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
    buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
    buf += " NULL,\n"
    buf += "};\n\n"
    # The target_core_fabric_ops table wiring every callback the
    # generated <mod>_fabric.c provides into TCM.
    buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
    buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
    buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
    buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
    buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
    buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
    buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
    buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
    buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
    buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
    buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
    buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
    buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
    buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
    buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
    buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
    buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
    buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
    buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
    buf += " .sess_get_initiator_sid = NULL,\n"
    buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
    buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
    buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
    buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
    buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
    buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
    buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
    buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
    buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
    buf += " /*\n"
    buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
    buf += " */\n"
    buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
    buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
    buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
    buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
    buf += " .fabric_post_link = NULL,\n"
    buf += " .fabric_pre_unlink = NULL,\n"
    buf += " .fabric_make_np = NULL,\n"
    buf += " .fabric_drop_np = NULL,\n"
    buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
    buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
    buf += "};\n\n"
    # <mod>_register_configfs(): register the fabric with TCM's configfs.
    buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
    buf += "{\n"
    buf += " struct target_fabric_configfs *fabric;\n"
    buf += " int ret;\n\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += " /*\n"
    buf += " * Register the top level struct config_item_type with TCM core\n"
    buf += " */\n"
    buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
    buf += " if (IS_ERR(fabric)) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
    buf += " return PTR_ERR(fabric);\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
    buf += " */\n"
    buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
    buf += " /*\n"
    buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
    buf += " */\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
    buf += " /*\n"
    buf += " * Register the fabric for use within TCM\n"
    buf += " */\n"
    buf += " ret = target_fabric_configfs_register(fabric);\n"
    buf += " if (ret < 0) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
    buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
    buf += " return ret;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup our local pointer to *fabric\n"
    buf += " */\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    # <mod>_deregister_configfs(): undo the registration on unload.
    buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
    buf += "{\n"
    buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
    buf += " return;\n\n"
    buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += "};\n\n"
    # Module init/exit entry points plus MODULE_* metadata.
    buf += "static int __init " + fabric_mod_name + "_init(void)\n"
    buf += "{\n"
    buf += " int ret;\n\n"
    buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
    buf += " if (ret < 0)\n"
    buf += " return ret;\n\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
    buf += "{\n"
    buf += " " + fabric_mod_name + "_deregister_configfs();\n"
    buf += "};\n\n"
    buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
    buf += "MODULE_LICENSE(\"GPL\");\n"
    buf += "module_init(" + fabric_mod_name + "_init);\n"
    buf += "module_exit(" + fabric_mod_name + "_exit);\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2, so this error
    # branch never fires.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    return
def tcm_mod_scan_fabric_ops(tcm_dir):
    """Scan target_core_fabric.h and collect function-pointer declarations.

    Reads <tcm_dir>/include/target/target_core_fabric.h line by line and
    appends every line containing a function pointer (matching "(*") to
    the module-level list `fabric_ops`, which tcm_mod_dump_fabric_ops()
    later consumes to emit stub implementations.

    The parse is stateful: lines are skipped until the line matching
    "struct target_core_fabric_ops {" has passed (tracked by process_fo),
    after which candidate lines are filtered by the "(*" pattern.
    NOTE(review): nothing detects the closing "};" of the struct, so the
    scan continues to end-of-file — any "(*" lines after the struct would
    also be collected.
    """
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    process_fo = 0;
    p = open(fabric_ops_api, 'r')
    line = p.readline()
    while line:
        # Still before/at the struct opener: skip it and keep reading.
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue
        # First line after the opener: flip state, then treat this line
        # like any other candidate.
        if process_fo == 0:
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue
            fabric_ops.append(line.rstrip())
            continue
        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue
        fabric_ops.append(line.rstrip())
    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_fabric.c and _fabric.h stub callbacks.

    For every function pointer previously collected in the module-level
    `fabric_ops` list (by tcm_mod_scan_fabric_ops), emits a stub
    implementation into `buf` (written to <mod>_fabric.c) and a matching
    prototype into `bufi` (written to <mod>_fabric.h).  proto_ident
    ("FC", "SAS" or "iSCSI") selects the protocol-specific transport-ID
    helpers; reads the module-level global fabric_mod_port set by the
    *_include generators.
    """
    buf = ""
    bufi = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
    print "Writing file: " + f
    p = open(f, 'w')
    # NOTE(review): open() raises IOError on failure, so this check never
    # fires.
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
    print "Writing file: " + fi
    pi = open(fi, 'w')
    if not pi:
        tcm_mod_err("Unable to open file: " + fi)
    # Header includes for the generated .c file.
    buf = "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/list.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n"
    buf += "#include <scsi/scsi.h>\n"
    buf += "#include <scsi/scsi_host.h>\n"
    buf += "#include <scsi/scsi_device.h>\n"
    buf += "#include <scsi/scsi_cmnd.h>\n"
    buf += "#include <scsi/libfc.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_configfs.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    # check_true/check_false are always emitted; the configfs ops table
    # references them unconditionally.
    buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 1;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
    buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 0;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
    # Walk the scanned function pointers and emit one stub per match.
    total_fabric_ops = len(fabric_ops)
    i = 0
    while i < total_fabric_ops:
        fo = fabric_ops[i]
        i += 1
        # print "fabric_ops: " + fo
        if re.search('get_fabric_name', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
            buf += "{\n"
            buf += " return \"" + fabric_mod_name[4:] + "\";\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
            continue
        if re.search('get_fabric_proto_ident', fo):
            buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " u8 proto_id;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            # Protocol-specific case label and helper call.
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return proto_id;\n"
            buf += "}\n\n"
            bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
        if re.search('get_wwn', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
            buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
        if re.search('get_tag', fo):
            buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
        if re.search('get_default_depth', fo):
            buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
        if re.search('get_pr_transport_id\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code,\n"
            buf += " unsigned char *buf)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *, unsigned char *);\n"
        if re.search('get_pr_transport_id_len\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *);\n"
        if re.search('parse_pr_out_transport_id\)\(', fo):
            buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " const char *buf,\n"
            buf += " u32 *out_tid_len,\n"
            buf += " char **port_nexus_ptr)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " char *tid = NULL;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            buf += " }\n\n"
            buf += " return tid;\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
            bufi += " const char *, u32 *, char **);\n"
        if re.search('alloc_fabric_acl\)\(', fo):
            buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
            buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
            buf += " if (!nacl) {\n"
            buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
            buf += " return NULL;\n"
            buf += " }\n\n"
            buf += " return &nacl->se_node_acl;\n"
            buf += "}\n\n"
            bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
        if re.search('release_fabric_acl\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
            buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
            buf += " kfree(nacl);\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *);\n"
        if re.search('tpg_get_inst_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
        # The remaining callbacks are trivial no-op / return-0 stubs.
        if re.search('\*release_cmd\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
        if re.search('shutdown_session\)\(', fo):
            buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
        if re.search('close_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
        if re.search('stop_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
        if re.search('fall_back_to_erl0\)\(', fo):
            buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
        if re.search('sess_logged_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
        if re.search('sess_get_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
        if re.search('write_pending\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
        if re.search('write_pending_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
        if re.search('set_default_node_attributes\)\(', fo):
            buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
        if re.search('get_task_tag\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
        if re.search('get_cmd_state\)\(', fo):
            buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
        if re.search('queue_data_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
        if re.search('queue_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
        if re.search('queue_tm_rsp\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
        if re.search('is_state_remove\)\(', fo):
            buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2, so these error
    # branches never fire.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    ret = pi.write(bufi)
    if ret:
        tcm_mod_err("Unable to write fi: " + fi)
    pi.close()
    return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
	"""Append the new module's obj-$(CONFIG_...) rule to drivers/target/Makefile.

	tcm_dir         -- kernel tree root (no trailing slash)
	fabric_mod_name -- fabric module name; upper-cased for the CONFIG_ symbol,
	                   lower-cased for the subdirectory
	"""
	buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
	kbuild = tcm_dir + "/drivers/target/Makefile"
	# 'with' guarantees the handle is closed even if the write raises.
	with open(kbuild, 'a') as f:
		f.write(buf)
	return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
	"""Append a 'source' line for the new module to drivers/target/Kconfig.

	tcm_dir         -- kernel tree root (no trailing slash)
	fabric_mod_name -- fabric module name; lower-cased for the directory path
	"""
	buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
	kconfig = tcm_dir + "/drivers/target/Kconfig"
	# 'with' guarantees the handle is closed even if the write raises.
	with open(kconfig, 'a') as f:
		f.write(buf)
	return
def main(modname, proto_ident):
	"""Generate a skeleton TCM fabric module under drivers/target/<modname>.

	modname     -- name of the fabric module to generate
	proto_ident -- transport protocol identifier: "FC", "SAS" or "iSCSI"

	Writes the skeleton sources, Makefile and Kconfig, then optionally
	wires the module into the kernel build (interactive prompts).
	Assumes it is run from inside the kernel tree (two levels below the
	tree root) -- TODO confirm against the in-tree location of this script.
	"""
	# proto_ident = "FC"
	# proto_ident = "SAS"
	# proto_ident = "iSCSI"
	tcm_dir = os.getcwd();
	tcm_dir += "/../../"
	print "tcm_dir: " + tcm_dir
	fabric_mod_name = modname
	fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
	print "Set fabric_mod_name: " + fabric_mod_name
	print "Set fabric_mod_dir: " + fabric_mod_dir
	print "Using proto_ident: " + proto_ident
	# Only these three transport protocol identifiers are supported.
	if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
		print "Unsupported proto_ident: " + proto_ident
		sys.exit(1)
	# A non-zero return means the module directory already exists.
	ret = tcm_mod_create_module_subdir(fabric_mod_dir)
	if ret:
		print "tcm_mod_create_module_subdir() failed because module already exists!"
		sys.exit(1)
	# Emit the skeleton sources plus the module's Makefile and Kconfig.
	tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
	tcm_mod_scan_fabric_ops(tcm_dir)
	tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
	tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
	tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
	tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
	# Optionally hook the new module into the kernel build system.
	# NOTE(review): the prompt text is missing a space before "to" -- a
	# cosmetic typo in the runtime string, left unchanged here.
	input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
	if input == "yes" or input == "y":
		tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
	input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
	if input == "yes" or input == "y":
		tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
	return
# --- Command-line entry point -------------------------------------------
# Both options are mandatory; bail out with usage help if either is absent.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
		action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
		action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
	if not opts.__dict__[m]:
		print "mandatory option is missing\n"
		parser.print_help()
		exit(-1)
if __name__ == "__main__":
	main(str(opts.modname), opts.protoident)
| gpl-2.0 |
HenrySeed/marvin | options.py | 2 | 1885 | import os.path
class Options():
    """Persistent user settings stored in ``options.txt`` next to this module.

    File layout, one value per line:
        0: user name
        1: location
        2: public key, formatted "a, b"
        3: private key, formatted "a, b"
    """

    def __init__(self):
        """Load name, location and both RSA keys from options.txt."""
        scriptpath = os.path.dirname(__file__)
        filename = os.path.join(scriptpath, 'options.txt')
        # Context manager closes the handle (the original leaked it).
        with open(filename, 'r') as infile:
            lines = infile.read().split('\n')
        self.name, self.location = lines[0], lines[1]
        self.public_key = self._parse_key(lines[2])
        self.private_key = self._parse_key(lines[3])

    @staticmethod
    def _parse_key(line):
        """Turn a stored key line "a, b" into an (int, int) tuple."""
        parts = line.split(', ')
        return (int(parts[0]), int(parts[1]))

    def save(self):
        """Write the current settings back to options.txt."""
        scriptpath = os.path.dirname(__file__)
        filename = os.path.join(scriptpath, 'options.txt')
        # str((a, b))[1:-1] yields "a, b", the on-disk key format.
        output = self.name + '\n' + str(self.location) + '\n' + \
                 str(self.public_key)[1:-1] + '\n' + str(self.private_key)[1:-1]
        with open(filename, 'w') as outfile:
            outfile.write(output)

    def __str__(self):
        return """
        Name: {0}
        Location: {1}
        Public key {2}
        Private Key {3}
        """.format(self.name, self.location, self.public_key, self.private_key)

    def change_name(self):
        """Prompt for a new name and persist it."""
        self.name = input('Enter name: ')
        self.save()

    def change_cipher(self, public_key, private_key):
        """Replace both RSA keys and persist them.

        Bug fix: the original referenced ``self.save`` without calling it,
        so new keys were never written to disk.
        """
        self.public_key = public_key
        self.private_key = private_key
        self.save()

    def change_location(self):
        """Prompt for a new location and persist it."""
        self.location = input('Enter location: ')
        self.save()
def options(intro=False):
    """Show the options menu header followed by the current settings."""
    header_lines = (
        " Options:",
        ' To change a setting type change and then the name of the setting.',
    )
    for line in header_lines:
        print(line)
    current = Options()
    print(current)
def change_option(string):
    """Apply the settings change named by *string*, then redisplay the menu."""
    settings = Options()
    handlers = {
        'location': settings.change_location,
        'name': settings.change_name,
    }
    handler = handlers.get(string.lower())
    if handler is not None:
        handler()
    else:
        print(' That option cant be changed, sorry.')
    print()
    options()
| apache-2.0 |
mancoast/CPythonPyc_test | cpython/243_test_dircache.py | 17 | 2302 | """
Test cases for the dircache module
Nick Mathewson
"""
import unittest
from test.test_support import run_unittest, TESTFN
import dircache, os, time, sys, tempfile
class DircacheTests(unittest.TestCase):
    """Behavioural tests for dircache.listdir caching and dircache.annotate."""
    def setUp(self):
        # Private scratch directory; emptied and removed in tearDown.
        self.tempdir = tempfile.mkdtemp()
    def tearDown(self):
        for fname in os.listdir(self.tempdir):
            self.delTemp(fname)
        os.rmdir(self.tempdir)
    def writeTemp(self, fname):
        # Create an empty file inside the scratch directory.
        f = open(os.path.join(self.tempdir, fname), 'w')
        f.close()
    def mkdirTemp(self, fname):
        # Create a subdirectory inside the scratch directory.
        os.mkdir(os.path.join(self.tempdir, fname))
    def delTemp(self, fname):
        # Remove either a subdirectory or a file from the scratch directory.
        fname = os.path.join(self.tempdir, fname)
        if os.path.isdir(fname):
            os.rmdir(fname)
        else:
            os.unlink(fname)
    def test_listdir(self):
        ## SUCCESSFUL CASES
        entries = dircache.listdir(self.tempdir)
        self.assertEquals(entries, [])
        # Check that cache is actually caching, not just passing through.
        # (Identity, not equality: a second call must return the same list.)
        self.assert_(dircache.listdir(self.tempdir) is entries)
        # Directories aren't "files" on Windows, and directory mtime has
        # nothing to do with when files under a directory get created.
        # That is, this test can't possibly work under Windows -- dircache
        # is only good for capturing a one-shot snapshot there.
        if sys.platform[:3] not in ('win', 'os2'):
            # Sadly, dircache has the same granularity as stat.mtime, and so
            # can't notice any changes that occurred within 1 sec of the last
            # time it examined a directory.
            time.sleep(1)
            self.writeTemp("test1")
            entries = dircache.listdir(self.tempdir)
            self.assertEquals(entries, ['test1'])
            self.assert_(dircache.listdir(self.tempdir) is entries)
        ## UNSUCCESSFUL CASES
        # A nonexistent path must propagate the OSError, not cache it.
        self.assertRaises(OSError, dircache.listdir, self.tempdir+"_nonexistent")
    def test_annotate(self):
        self.writeTemp("test2")
        self.mkdirTemp("A")
        lst = ['A', 'test2', 'test_nonexistent']
        dircache.annotate(self.tempdir, lst)
        # annotate() appends '/' to directory names in place; plain files
        # and nonexistent entries are left untouched.
        self.assertEquals(lst, ['A/', 'test2', 'test_nonexistent'])
def test_main():
    # Standard test-suite entry point used by CPython's regrtest runner.
    run_unittest(DircacheTests)
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
Lujeni/ansible | lib/ansible/modules/windows/win_eventlog_entry.py | 38 | 2212 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Andrew Saraceni <andrew.saraceni@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_eventlog_entry
version_added: "2.4"
short_description: Write entries to Windows event logs
description:
- Write log entries to a given event log from a specified source.
options:
log:
description:
- Name of the event log to write an entry to.
type: str
required: yes
source:
description:
- Name of the log source to indicate where the entry is from.
type: str
required: yes
event_id:
description:
- The numeric event identifier for the entry.
- Value must be between 0 and 65535.
type: int
required: yes
message:
description:
- The message for the given log entry.
type: str
required: yes
entry_type:
description:
- Indicates the entry being written to the log is of a specific type.
type: str
choices: [ Error, FailureAudit, Information, SuccessAudit, Warning ]
category:
description:
- A numeric task category associated with the category message file for the log source.
type: int
raw_data:
description:
- Binary data associated with the log entry.
- Value must be a comma-separated array of 8-bit unsigned integers (0 to 255).
type: str
notes:
- This module will always report a change when writing an event entry.
seealso:
- module: win_eventlog
author:
- Andrew Saraceni (@andrewsaraceni)
'''
EXAMPLES = r'''
- name: Write an entry to a Windows event log
win_eventlog_entry:
log: MyNewLog
source: NewLogSource1
event_id: 1234
message: This is a test log entry.
- name: Write another entry to a different Windows event log
win_eventlog_entry:
log: AnotherLog
source: MyAppSource
event_id: 5000
message: An error has occurred.
entry_type: Error
category: 5
raw_data: 10,20
'''
RETURN = r'''
# Default return values
'''
| gpl-3.0 |
cysuncn/python | spark/crm/PROC_M_R_RET_CUST_FLOW.py | 1 | 4734 | #coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
# Spark setup; argv: [1]=ETL date YYYYMMDD, [2]=master URL, [3]=HDFS root,
# [4]=database name, [5]=optional "hive" to use HiveContext.
conf = SparkConf().setAppName('PROC_M_R_RET_CUST_FLOW').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
    if sys.argv[5] == "hive":
        sqlContext = HiveContext(sc)
    else:
        sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Derive the dates needed for processing.
etl_date = sys.argv[1]
# ETL date (YYYYMMDD)
V_DT = etl_date
# Previous day
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# First day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# Last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
# MCRM_RET_CUST_FLOW is loaded incrementally: delete today's output file first.
ret = os.system("hdfs dfs -rm -r /"+dbname+"/MCRM_RET_CUST_FLOW/"+V_DT+".parquet")
# Register the source tables for Spark SQL.
MCRM_RET_CUST_ASSETS = sqlContext.read.parquet(hdfs+'/MCRM_RET_CUST_ASSETS/*')
MCRM_RET_CUST_ASSETS.registerTempTable("MCRM_RET_CUST_ASSETS")
ACRM_F_AG_AGREEMENT = sqlContext.read.parquet(hdfs+'/ACRM_F_AG_AGREEMENT/*')
ACRM_F_AG_AGREEMENT.registerTempTable("ACRM_F_AG_AGREEMENT")
# Task [21] 001-01: per customer, earliest open date and latest cancel date
# from the customer-agreement table (the inline SQL comments are Chinese
# table descriptions and are part of the query text).
V_STEP = V_STEP + 1
sql = """
 SELECT CUST_ID AS CUST_ID
,FR_ID AS FR_ID
,MIN(concat(SUBSTR(START_DATE, 1, 4),'-',SUBSTR(START_DATE, 6, 2),'-',SUBSTR(START_DATE, 9, 2))) AS OPEN_DATE
,MAX(concat(SUBSTR(END_DATE, 1, 4),'-',SUBSTR(END_DATE, 6, 2),'-',SUBSTR(END_DATE, 9, 2))) AS CANCEL_DATE
FROM ACRM_F_AG_AGREEMENT A --客户协议表
GROUP BY FR_ID
,CUST_ID """
# Substitute the bind "variable" V_DT with the quoted 10-char date literal.
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
TMP_MCRM_RET_CUST_FLOW_01 = sqlContext.sql(sql)
TMP_MCRM_RET_CUST_FLOW_01.registerTempTable("TMP_MCRM_RET_CUST_FLOW_01")
dfn="TMP_MCRM_RET_CUST_FLOW_01/"+V_DT+".parquet"
TMP_MCRM_RET_CUST_FLOW_01.cache()
nrows = TMP_MCRM_RET_CUST_FLOW_01.count()
TMP_MCRM_RET_CUST_FLOW_01.write.save(path=hdfs + '/' + dfn, mode='overwrite')
TMP_MCRM_RET_CUST_FLOW_01.unpersist()
ACRM_F_AG_AGREEMENT.unpersist()
# Drop the previous day's temp output; only today's snapshot is kept.
ret = os.system("hdfs dfs -rm -r /"+dbname+"/TMP_MCRM_RET_CUST_FLOW_01/"+V_DT_LD+".parquet")
et = datetime.now()
# NOTE(review): Python-2 print statement -- parsed as
# print (("...") % (args)); under Python 3 this would raise at runtime.
print("Step %d start[%s] end[%s] use %d seconds, insert TMP_MCRM_RET_CUST_FLOW_01 lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)
# Task [11] 001-02: join assets with the temp table to flag customers whose
# first open / last cancel falls in the current month.
V_STEP = V_STEP + 1
sql = """
 SELECT A.CUST_ID AS CUST_ID
,A.CUST_ZH_NAME AS CUST_ZH_NAME
,A.CUST_MANAGER AS CUST_MANAGER
,A.CUST_MANAGER_NAME AS CUST_MANAGER_NAME
,A.ORG_ID AS ORG_ID
,A.ORG_NAME AS ORG_NAME
,A.CUST_LEVEL AS CUST_LEVEL
,A.GRADE_DATE AS GRADE_DATE
,B.OPEN_DATE AS OPEN_DATE
,C.CANCEL_DATE AS CANCEL_DATE
,A.MONTH_BAL AS CUST_ASSETS
,A.OLD_CUST_LEVEL AS CUST_LEVEL_FU
,A.ST_DATE AS ST_DATE
,'' AS O_MAIN_TYPE
,'' AS M_MAIN_TYPE
FROM MCRM_RET_CUST_ASSETS A --客户资产情况表
LEFT JOIN TMP_MCRM_RET_CUST_FLOW_01 B --客户流入流出机构统计表临时表01
ON A.CUST_ID = B.CUST_ID
AND B.FR_ID = A.FR_ID
AND SUBSTR(B.OPEN_DATE, 1, 7) = SUBSTR(V_DT, 1, 7)
LEFT JOIN TMP_MCRM_RET_CUST_FLOW_01 C --客户流入流出机构统计表临时表01
ON A.CUST_ID = C.CUST_ID
AND C.FR_ID = A.FR_ID
AND SUBSTR(C.CANCEL_DATE, 1, 7) = SUBSTR(V_DT, 1, 7)
WHERE A.ST_DATE = V_DT """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
MCRM_RET_CUST_FLOW = sqlContext.sql(sql)
MCRM_RET_CUST_FLOW.registerTempTable("MCRM_RET_CUST_FLOW")
dfn="MCRM_RET_CUST_FLOW/"+V_DT+".parquet"
MCRM_RET_CUST_FLOW.cache()
nrows = MCRM_RET_CUST_FLOW.count()
# Appended (incremental load) -- today's partition was removed above.
MCRM_RET_CUST_FLOW.write.save(path=hdfs + '/' + dfn, mode='append')
MCRM_RET_CUST_FLOW.unpersist()
MCRM_RET_CUST_ASSETS.unpersist()
TMP_MCRM_RET_CUST_FLOW_01.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert MCRM_RET_CUST_FLOW lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)
| gpl-3.0 |
smourph/PGo-TrainerTools | pgoapi/protos/POGOProtos/Networking/Responses/DownloadItemTemplatesResponse_pb2.py | 10 | 18923 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Responses/DownloadItemTemplatesResponse.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Settings.Master import ItemSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_ItemSettings__pb2
from POGOProtos.Settings.Master import MoveSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_MoveSettings__pb2
from POGOProtos.Settings.Master import BadgeSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_BadgeSettings__pb2
from POGOProtos.Settings.Master import PokemonSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_PokemonSettings__pb2
from POGOProtos.Settings.Master import MoveSequenceSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_MoveSequenceSettings__pb2
from POGOProtos.Settings.Master import TypeEffectiveSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_TypeEffectiveSettings__pb2
from POGOProtos.Settings.Master import CameraSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_CameraSettings__pb2
from POGOProtos.Settings.Master import PlayerLevelSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_PlayerLevelSettings__pb2
from POGOProtos.Settings.Master import GymLevelSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_GymLevelSettings__pb2
from POGOProtos.Settings.Master import GymBattleSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_GymBattleSettings__pb2
from POGOProtos.Settings.Master import EncounterSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_EncounterSettings__pb2
from POGOProtos.Settings.Master import IapItemDisplay_pb2 as POGOProtos_dot_Settings_dot_Master_dot_IapItemDisplay__pb2
from POGOProtos.Settings.Master import IapSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_IapSettings__pb2
from POGOProtos.Settings.Master import PokemonUpgradeSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_PokemonUpgradeSettings__pb2
from POGOProtos.Settings.Master import EquippedBadgeSettings_pb2 as POGOProtos_dot_Settings_dot_Master_dot_EquippedBadgeSettings__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Responses/DownloadItemTemplatesResponse.proto',
package='POGOProtos.Networking.Responses',
syntax='proto3',
serialized_pb=_b('\nCPOGOProtos/Networking/Responses/DownloadItemTemplatesResponse.proto\x12\x1fPOGOProtos.Networking.Responses\x1a-POGOProtos/Settings/Master/ItemSettings.proto\x1a-POGOProtos/Settings/Master/MoveSettings.proto\x1a.POGOProtos/Settings/Master/BadgeSettings.proto\x1a\x30POGOProtos/Settings/Master/PokemonSettings.proto\x1a\x35POGOProtos/Settings/Master/MoveSequenceSettings.proto\x1a\x36POGOProtos/Settings/Master/TypeEffectiveSettings.proto\x1a/POGOProtos/Settings/Master/CameraSettings.proto\x1a\x34POGOProtos/Settings/Master/PlayerLevelSettings.proto\x1a\x31POGOProtos/Settings/Master/GymLevelSettings.proto\x1a\x32POGOProtos/Settings/Master/GymBattleSettings.proto\x1a\x32POGOProtos/Settings/Master/EncounterSettings.proto\x1a/POGOProtos/Settings/Master/IapItemDisplay.proto\x1a,POGOProtos/Settings/Master/IapSettings.proto\x1a\x37POGOProtos/Settings/Master/PokemonUpgradeSettings.proto\x1a\x36POGOProtos/Settings/Master/EquippedBadgeSettings.proto\"\xf0\t\n\x1d\x44ownloadItemTemplatesResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x63\n\x0eitem_templates\x18\x02 \x03(\x0b\x32K.POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate\x12\x14\n\x0ctimestamp_ms\x18\x03 \x01(\x04\x1a\xc2\x08\n\x0cItemTemplate\x12\x13\n\x0btemplate_id\x18\x01 \x01(\t\x12\x45\n\x10pokemon_settings\x18\x02 \x01(\x0b\x32+.POGOProtos.Settings.Master.PokemonSettings\x12?\n\ritem_settings\x18\x03 \x01(\x0b\x32(.POGOProtos.Settings.Master.ItemSettings\x12?\n\rmove_settings\x18\x04 \x01(\x0b\x32(.POGOProtos.Settings.Master.MoveSettings\x12P\n\x16move_sequence_settings\x18\x05 \x01(\x0b\x32\x30.POGOProtos.Settings.Master.MoveSequenceSettings\x12I\n\x0etype_effective\x18\x08 \x01(\x0b\x32\x31.POGOProtos.Settings.Master.TypeEffectiveSettings\x12\x41\n\x0e\x62\x61\x64ge_settings\x18\n \x01(\x0b\x32).POGOProtos.Settings.Master.BadgeSettings\x12:\n\x06\x63\x61mera\x18\x0b \x01(\x0b\x32*.POGOProtos.Settings.Master.CameraSettings\x12\x45\n\x0cplayer_level\x18\x0c 
\x01(\x0b\x32/.POGOProtos.Settings.Master.PlayerLevelSettings\x12?\n\tgym_level\x18\r \x01(\x0b\x32,.POGOProtos.Settings.Master.GymLevelSettings\x12\x46\n\x0f\x62\x61ttle_settings\x18\x0e \x01(\x0b\x32-.POGOProtos.Settings.Master.GymBattleSettings\x12I\n\x12\x65ncounter_settings\x18\x0f \x01(\x0b\x32-.POGOProtos.Settings.Master.EncounterSettings\x12\x44\n\x10iap_item_display\x18\x10 \x01(\x0b\x32*.POGOProtos.Settings.Master.IapItemDisplay\x12=\n\x0ciap_settings\x18\x11 \x01(\x0b\x32\'.POGOProtos.Settings.Master.IapSettings\x12L\n\x10pokemon_upgrades\x18\x12 \x01(\x0b\x32\x32.POGOProtos.Settings.Master.PokemonUpgradeSettings\x12J\n\x0f\x65quipped_badges\x18\x13 \x01(\x0b\x32\x31.POGOProtos.Settings.Master.EquippedBadgeSettingsb\x06proto3')
,
dependencies=[POGOProtos_dot_Settings_dot_Master_dot_ItemSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_MoveSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_BadgeSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_PokemonSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_MoveSequenceSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_TypeEffectiveSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_CameraSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_PlayerLevelSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_GymLevelSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_GymBattleSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_EncounterSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_IapItemDisplay__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_IapSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_PokemonUpgradeSettings__pb2.DESCRIPTOR,POGOProtos_dot_Settings_dot_Master_dot_EquippedBadgeSettings__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE = _descriptor.Descriptor(
name='ItemTemplate',
full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='template_id', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.template_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pokemon_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.pokemon_settings', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='item_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.item_settings', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='move_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.move_settings', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='move_sequence_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.move_sequence_settings', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type_effective', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.type_effective', index=5,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='badge_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.badge_settings', index=6,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='camera', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.camera', index=7,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='player_level', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.player_level', index=8,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gym_level', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.gym_level', index=9,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='battle_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.battle_settings', index=10,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encounter_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.encounter_settings', index=11,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iap_item_display', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.iap_item_display', index=12,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iap_settings', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.iap_settings', index=13,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pokemon_upgrades', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.pokemon_upgrades', index=14,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='equipped_badges', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate.equipped_badges', index=15,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1048,
serialized_end=2138,
)
_DOWNLOADITEMTEMPLATESRESPONSE = _descriptor.Descriptor(
name='DownloadItemTemplatesResponse',
full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='success', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.success', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='item_templates', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.item_templates', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='timestamp_ms', full_name='POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.timestamp_ms', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=874,
serialized_end=2138,
)
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['pokemon_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_PokemonSettings__pb2._POKEMONSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['item_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_ItemSettings__pb2._ITEMSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['move_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_MoveSettings__pb2._MOVESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['move_sequence_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_MoveSequenceSettings__pb2._MOVESEQUENCESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['type_effective'].message_type = POGOProtos_dot_Settings_dot_Master_dot_TypeEffectiveSettings__pb2._TYPEEFFECTIVESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['badge_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_BadgeSettings__pb2._BADGESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['camera'].message_type = POGOProtos_dot_Settings_dot_Master_dot_CameraSettings__pb2._CAMERASETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['player_level'].message_type = POGOProtos_dot_Settings_dot_Master_dot_PlayerLevelSettings__pb2._PLAYERLEVELSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['gym_level'].message_type = POGOProtos_dot_Settings_dot_Master_dot_GymLevelSettings__pb2._GYMLEVELSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['battle_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_GymBattleSettings__pb2._GYMBATTLESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['encounter_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_EncounterSettings__pb2._ENCOUNTERSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['iap_item_display'].message_type = POGOProtos_dot_Settings_dot_Master_dot_IapItemDisplay__pb2._IAPITEMDISPLAY
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['iap_settings'].message_type = POGOProtos_dot_Settings_dot_Master_dot_IapSettings__pb2._IAPSETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['pokemon_upgrades'].message_type = POGOProtos_dot_Settings_dot_Master_dot_PokemonUpgradeSettings__pb2._POKEMONUPGRADESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.fields_by_name['equipped_badges'].message_type = POGOProtos_dot_Settings_dot_Master_dot_EquippedBadgeSettings__pb2._EQUIPPEDBADGESETTINGS
_DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE.containing_type = _DOWNLOADITEMTEMPLATESRESPONSE
_DOWNLOADITEMTEMPLATESRESPONSE.fields_by_name['item_templates'].message_type = _DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE
DESCRIPTOR.message_types_by_name['DownloadItemTemplatesResponse'] = _DOWNLOADITEMTEMPLATESRESPONSE
# Generated by protoc: build the concrete message classes from the
# descriptors above and register them with the default symbol database.
# Do not hand-edit; regenerate from the .proto file instead.
DownloadItemTemplatesResponse = _reflection.GeneratedProtocolMessageType('DownloadItemTemplatesResponse', (_message.Message,), dict(
  ItemTemplate = _reflection.GeneratedProtocolMessageType('ItemTemplate', (_message.Message,), dict(
    DESCRIPTOR = _DOWNLOADITEMTEMPLATESRESPONSE_ITEMTEMPLATE,
    __module__ = 'POGOProtos.Networking.Responses.DownloadItemTemplatesResponse_pb2'
    # @@protoc_insertion_point(class_scope:POGOProtos.Networking.Responses.DownloadItemTemplatesResponse.ItemTemplate)
    ))
  ,
  DESCRIPTOR = _DOWNLOADITEMTEMPLATESRESPONSE,
  __module__ = 'POGOProtos.Networking.Responses.DownloadItemTemplatesResponse_pb2'
  # @@protoc_insertion_point(class_scope:POGOProtos.Networking.Responses.DownloadItemTemplatesResponse)
  ))
_sym_db.RegisterMessage(DownloadItemTemplatesResponse)
_sym_db.RegisterMessage(DownloadItemTemplatesResponse.ItemTemplate)
# @@protoc_insertion_point(module_scope)
| gpl-3.0 |
thomastweets/PythonRSA | GUI_RSA.py | 1 | 15279 | ############################
### GUI for RS analysis ###
############################
import wx
import rsa
import os
import webbrowser
files_number = 0
class RSA_GUI(wx.Frame):
    def __init__(self, parent, title):
        # Fixed-size frame: RESIZE_BORDER is XOR-ed out of the default style
        # so the user cannot resize the window.
        super(RSA_GUI,self).__init__(parent, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER, title = title, size = (400,275))
        self.InitUI()
        self.Show(True)
    def InitUI(self):
        """Build the menu bar, buttons and sizer layout, and bind events."""
        ## Creates Status Bar
        self.CreateStatusBar()
        # --- menu bar: File (Clear/Exit) and Help (Help/About) ---
        self.menuBar = wx.MenuBar()
        self.filemenu = wx.Menu()
        self.helpmenu = wx.Menu()
        self.menuHelp = self.helpmenu.Append(wx.ID_ANY, "&Help", "Learn more about RSA and how to use this program")
        self.menuAbout = self.helpmenu.Append(wx.ID_ABOUT, "&About", "Learn more about this program")
        self.menuClear = self.filemenu.Append(wx.ID_ANY,"&Clear","Clear data")
        self.filemenu.AppendSeparator()
        self.menuExit = self.filemenu.Append(wx.ID_EXIT, "&Exit", "Terminate the program")
        self.menuBar.Append(self.filemenu, "&File")
        self.menuBar.Append(self.helpmenu, "&Help")
        self.SetMenuBar(self.menuBar)
        self.Bind(wx.EVT_MENU, self.OnAbout, self.menuAbout)
        self.Bind(wx.EVT_MENU, self.OnHelp, self.menuHelp)
        self.Bind(wx.EVT_MENU, self.OnExit, self.menuExit)
        self.Bind(wx.EVT_MENU, self.OnClear, self.menuClear)
        ## buttons
        self.panel = wx.Panel(self)
        self.main_box = wx.BoxSizer(wx.VERTICAL)
        # Row 1: file selection button plus read-only text showing the choice.
        file_box = wx.BoxSizer(wx.HORIZONTAL)
        file_button = wx.Button(self.panel, label = 'Select files', size = (90, 30))
        file_box.Add(file_button)
        self.file_text = wx.TextCtrl(self.panel)
        self.file_text.Disable()
        file_box.Add(self.file_text, proportion = 1, flag = wx.EXPAND | wx.LEFT, border = 5)
        self.main_box.Add(file_box, flag = wx.EXPAND | wx.ALL, border = 10)
        self.main_box.Add((-1,10))
        # Row 2: condition-labels button plus read-only text.
        label_box = wx.BoxSizer(wx.HORIZONTAL)
        label_button = wx.Button(self.panel, label = 'Conditions', size = (90, 30))
        label_box.Add(label_button)
        self.label_text = wx.TextCtrl(self.panel)
        self.label_text.Disable()
        label_box.Add(self.label_text, proportion = 1, flag = wx.EXPAND | wx.LEFT, border = 5)
        # NOTE: "wx. EXPAND" (space after the dot) is odd but valid Python.
        self.main_box.Add(label_box, flag = wx. EXPAND | wx.RIGHT | wx.LEFT, border = 10)
        self.main_box.Add((-1,30))
        # Row 3: Options button, right-aligned.
        options_box = wx.BoxSizer(wx.HORIZONTAL)
        options_button = wx.Button(self.panel, label='Options', size = (70, 30))
        options_box.Add(options_button)
        self.main_box.Add(options_box, flag = wx.ALIGN_RIGHT | wx.RIGHT, border = 10)
        self.main_box.Add((-1,10))
        # Row 4: Go (disabled until files are selected) and Cancel.
        end_box = wx.BoxSizer(wx.HORIZONTAL)
        self.go_btn = wx.Button(self.panel, label = 'Go', size = (70, 30))
        self.go_btn.Disable()
        end_box.Add(self.go_btn, flag = wx.BOTTOM, border = 5)
        cancel_btn = wx.Button(self.panel, label = 'Cancel', size = (70, 30))
        end_box.Add(cancel_btn, flag = wx.LEFT | wx.BOTTOM, border = 5)
        self.main_box.Add(end_box, flag = wx.ALIGN_RIGHT | wx.RIGHT, border = 10)
        self.panel.SetSizer(self.main_box)
        # Button event bindings.
        self.Bind(wx.EVT_BUTTON, self.OnFiles, file_button)
        self.Bind(wx.EVT_BUTTON, self.conditions, label_button)
        self.Bind(wx.EVT_BUTTON, self.OnOptions, options_button)
        self.go_btn.Bind(wx.EVT_BUTTON, self.OnGo)
        self.Bind(wx.EVT_BUTTON, self.OnCancel, cancel_btn)
        # Current selections, filled by OnFiles / conditions.
        self.labels = []
        self.files = []
        self.Center()
    def OnOptions(self, e):
        # Open the options window (modeless); keep a reference on self so it
        # is not garbage collected while it is shown.
        self.new = OptionWindow(parent=None, id=-1)
        self.new.Show()
    def OnAbout(self, e):
        # Show a modal "About" dialog describing the program, its scientific
        # background and its authors.
        dlg = wx.MessageDialog(self, "This is a program to perform a representational similarity analysis on functional magnetic resonance imaging data.\n\n"
                               "The analysis is following the principles described in the paper 'Representational Similarity Analysis - Connecting"
                               " the Branches of Systems Neuroscience' by Nikolaus Kriegeskorte, Marieke Mur and Peter Bandettini (2008). \n\nIt is the"
                               " result of a project work at Maastricht University by Pia Schroeder, Amelie Haugg and Julia Brehm under the supervision of Thomas Emmerling."
                               "\n\nFor correspondence please refer to https://github.com/thomastweets/PythonRSA", "About this program")
        dlg.ShowModal()
        dlg.Destroy()
    def OnHelp(self, e):
        # Open the project README in the default web browser instead of
        # showing an in-app help dialog (the dialog code below was abandoned).
        webbrowser.open("https://github.com/thomastweets/PythonRSA/blob/master/README.md")
        #dlg = wx.MessageDialog(self, "", "Help for this program")
        #dlg.ShowModal()
        #dlg.Destroy()
    def OnExit(self, e):
        # Close the main frame, terminating the application.
        self.Close(True)
def OnClear(self, e):
self.files = []
self.labels = []
self.file_text.ChangeValue(str(''))
self.label_text.ChangeValue(str(''))
rsa.matrix_plot1 = True
rsa.matrix_plot2 = False
rsa.bar_plot = False
rsa.correlations1 = False
rsa.correlations2 = False
rsa.pvalues = False
rsa.no_relabelings = 10000
rsa.dist_metric = 1
rsa.output_first = True
rsa.output_second = False
rsa.scale_to_max = False
global files_number
files_number = 0
self.go_btn.Disable()
def OnFiles(self, event):
dialog = wx.FileDialog(self, "Choose files:", os.getcwd(), " ","*.vom", wx.FD_OPEN|wx.FD_MULTIPLE)
self.files = []
if dialog.ShowModal() == wx.ID_OK:
self.paths = dialog.GetPaths()
# myfiles contains all the file names
for path in self.paths:
self.files.append(os.path.basename(path).encode("utf-8"))
global files_number
if len(self.files) > 1:
files_number = 1
else:
files_number = 0
if self.files:
self.file_text.ChangeValue(str(', '.join(self.files)))
self.go_btn.Enable()
dialog.Destroy()
def conditions(self, event):
self.textinput = wx.TextEntryDialog(self, "Type in condition names separated by a white space", "Condition labels")
if self.textinput.ShowModal() == wx.ID_OK:
self.input = self.textinput.GetValue()
# labels contains a list of all conditions
self.labels = self.input.split()
self.labels = [label.encode("utf-8") for label in self.labels]
if self.labels:
self.label_text.ChangeValue(str(', '.join(self.labels)))
self.textinput.Destroy()
    def OnGo(self, e):
        # Easter egg: entering 'Tetris' as the only condition label launches
        # a game instead of the analysis.
        if self.labels == ['Tetris']:
            import Tetris
        else:
            # BusyCursor shows a wait cursor for as long as the object is
            # alive; the explicit `del` restores the normal cursor.
            wait = wx.BusyCursor()
            rsa.RSA(self.paths, self.files, self.labels)
            del wait
    def OnCancel(self, e):
        # Close the main window without running the analysis.
        self.Close(True)
class OptionWindow(wx.Frame):
    """Dialog-like frame exposing all analysis options of the ``rsa`` module.

    Widgets are initialised from the current values of the module-level
    options in ``rsa`` and written back only when the user presses Done.
    """
    def __init__(self, parent, id):
        wx.Frame.__init__(self, parent, id, 'Options',
            style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER ^ wx.MINIMIZE_BOX ^ wx.MAXIMIZE_BOX,
            size=(400,500))
        self.InitOpt()
    def InitOpt(self):
        # Build the option widgets top to bottom in a single vertical sizer.
        self.panel = wx.Panel(self)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add((-1,20))
        self.line1 = wx.StaticLine(self.panel, wx.ID_ANY, style=wx.LI_VERTICAL)
        self.vbox.Add(self.line1, 0, wx.ALL | wx.EXPAND, 5)
        self.vbox.Add((-1,10))
        # Check box: First-order RDMs
        self.RDM1_box = wx.BoxSizer(wx.HORIZONTAL)
        self.RDM1_cb = wx.CheckBox(self.panel, label = 'First order RDMs')
        self.RDM1_cb.SetValue(rsa.output_first)
        self.RDM1_cb.Bind(wx.EVT_CHECKBOX, self.OnSelectRDM1)
        self.RDM1_box.Add(self.RDM1_cb)
        self.vbox.Add(self.RDM1_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: Matrix plots
        self.mplot1_box = wx.BoxSizer(wx.HORIZONTAL)
        self.mplot1_box.Add((25,-1))
        self.mplot1_cb = wx.CheckBox(self.panel, label = 'Matrix plots')
        self.mplot1_cb.SetValue(rsa.matrix_plot1)
        self.mplot1_box.Add(self.mplot1_cb)
        self.vbox.Add(self.mplot1_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: First-order correlations
        self.correlations1_box = wx.BoxSizer(wx.HORIZONTAL)
        self.correlations1_box.Add((25,-1))
        self.correlations1_cb = wx.CheckBox(self.panel, label = 'Correlations')
        self.correlations1_cb.SetValue(rsa.correlations1)
        self.correlations1_box.Add(self.correlations1_cb)
        self.vbox.Add(self.correlations1_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: Scale to maximum distance
        self.scale_box = wx.BoxSizer(wx.HORIZONTAL)
        self.scale_box.Add((25,-1))
        self.scale_cb = wx.CheckBox(self.panel, label='Scale to max')
        self.scale_cb.SetValue(rsa.scale_to_max)
        self.scale_box.Add(self.scale_cb)
        self.vbox.Add(self.scale_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Dropdown menu for distance metric
        self.drop_box = wx.BoxSizer(wx.HORIZONTAL)
        self.drop_box.Add((25,-1))
        self.drop_label = wx.StaticText(self.panel, label = 'Distance metric ')
        self.drop_box.Add(self.drop_label)
        self.distances = ['Correlation distance', 'Euclidean distance', 'Absolute activation difference']
        # rsa.dist_metric is 1-based; the choices list is 0-based.
        self.dropdown = wx.ComboBox(self.panel, value = self.distances[rsa.dist_metric-1], choices = self.distances, style=wx.CB_READONLY)
        self.drop_box.Add(self.dropdown)
        self.vbox.Add(self.drop_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,20))
        self.line2 = wx.StaticLine(self.panel, wx.ID_ANY, style=wx.LI_VERTICAL)
        self.vbox.Add(self.line2, 0, wx.ALL | wx.EXPAND, 5)
        self.vbox.Add((-1,10))
        # Check box: Second-order RDM
        self.RDM2_box = wx.BoxSizer(wx.HORIZONTAL)
        self.RDM2_cb = wx.CheckBox(self.panel, label = 'Second order RDMs')
        self.RDM2_cb.SetValue(rsa.output_second)
        self.RDM2_cb.Bind(wx.EVT_CHECKBOX, self.OnSelectRDM2)
        self.RDM2_box.Add(self.RDM2_cb)
        self.vbox.Add(self.RDM2_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # only checkable if you have chosen enough files
        self.RDM2_cb.Disable()
        if files_number == 1:
            self.RDM2_cb.Enable()
        # Check box: Matrix plots
        self.mplot2_box = wx.BoxSizer(wx.HORIZONTAL)
        self.mplot2_box.Add((25,-1))
        self.mplot2_cb = wx.CheckBox(self.panel, label = 'Matrix plots')
        self.mplot2_cb.SetValue(rsa.matrix_plot2)
        self.mplot2_box.Add(self.mplot2_cb)
        self.vbox.Add(self.mplot2_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: Bar plots
        self.bplot_box = wx.BoxSizer(wx.HORIZONTAL)
        self.bplot_box.Add((25,-1))
        self.bplot_cb = wx.CheckBox(self.panel, label = 'Bar plots')
        self.bplot_cb.SetValue(rsa.bar_plot)
        self.bplot_box.Add(self.bplot_cb)
        self.vbox.Add(self.bplot_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: Second-order correlations
        self.correlations2_box = wx.BoxSizer(wx.HORIZONTAL)
        self.correlations2_box.Add((25,-1))
        self.correlations2_cb = wx.CheckBox(self.panel, label = 'Correlations')
        self.correlations2_cb.SetValue(rsa.correlations2)
        self.correlations2_box.Add(self.correlations2_cb)
        self.vbox.Add(self.correlations2_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # Check box: p-values
        self.p_box = wx.BoxSizer(wx.HORIZONTAL)
        self.p_box.Add((25,-1))
        self.p_cb = wx.CheckBox(self.panel, label='p-values')
        self.p_cb.SetValue(rsa.pvalues)
        self.p_box.Add(self.p_cb)
        self.vbox.Add(self.p_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        # No of permutations SpinControl
        self.perm_box = wx.BoxSizer(wx.HORIZONTAL)
        self.perm_box.Add((25,-1))
        self.perm_label = wx.StaticText(self.panel, label = 'No. of Permutations ')
        self.perm_box.Add(self.perm_label)
        self.perm_spin = wx.SpinCtrl(self.panel, value=str(rsa.no_relabelings), min=100, max = 100000)
        self.perm_box.Add(self.perm_spin, proportion = 1)
        self.vbox.Add(self.perm_box, flag = wx.LEFT, border = 10)
        self.vbox.Add((-1,10))
        self.line3 = wx.StaticLine(self.panel, wx.ID_ANY, style=wx.LI_VERTICAL)
        self.vbox.Add(self.line3, 0, wx.ALL | wx.EXPAND, 5)
        self.vbox.Add((-1,50))
        # Dis-/Enable options
        self.OnSelectRDM1([])
        self.OnSelectRDM2([])
        # Done and Cancel Buttons
        self.end_box = wx.BoxSizer(wx.HORIZONTAL)
        self.done_btn = wx.Button(self.panel, label = 'Done', size = (70, 30))
        self.done_btn.Bind(wx.EVT_BUTTON, self.OnDone)
        self.end_box.Add(self.done_btn, flag = wx.BOTTOM, border = 5)
        self.cancel_btn = wx.Button(self.panel, label = 'Cancel', size = (70, 30))
        self.cancel_btn.Bind(wx.EVT_BUTTON, self.OnCancel)
        self.end_box.Add(self.cancel_btn, flag = wx.LEFT | wx.BOTTOM, border = 5)
        self.vbox.Add(self.end_box, flag = wx.ALIGN_RIGHT | wx.RIGHT, border = 10)
        self.panel.SetSizer(self.vbox)
        self.Center()
    def OnSelectRDM1(self,e):
        # First-order sub-options only make sense when first-order output
        # is requested at all.
        if self.RDM1_cb.GetValue():
            self.mplot1_cb.Enable()
            self.correlations1_cb.Enable()
            self.scale_cb.Enable()
            self.dropdown.Enable()
        else:
            self.mplot1_cb.Disable()
            self.correlations1_cb.Disable()
            self.scale_cb.Disable()
            self.dropdown.Disable()
    def OnSelectRDM2(self,e):
        # Second-order options additionally require more than one input file
        # (files_number == 1 encodes "multiple files selected").
        if self.RDM2_cb.GetValue() and files_number == 1:
            self.bplot_cb.Enable()
            self.mplot2_cb.Enable()
            self.p_cb.Enable()
            self.correlations2_cb.Enable()
            self.perm_spin.Enable()
        else:
            self.bplot_cb.Disable()
            self.p_cb.Disable()
            self.perm_spin.Disable()
            self.mplot2_cb.Disable()
            self.correlations2_cb.Disable()
    def OnDone(self,e):
        # Commit all widget states back to the rsa module, then close.
        rsa.output_first = self.RDM1_cb.GetValue()
        rsa.output_second = self.RDM2_cb.GetValue()
        rsa.matrix_plot1 = self.mplot1_cb.GetValue()
        rsa.matrix_plot2 = self.mplot2_cb.GetValue()
        rsa.bar_plot = self.bplot_cb.GetValue()
        rsa.correlations1 = self.correlations1_cb.GetValue()
        rsa.correlations2 = self.correlations2_cb.GetValue()
        rsa.pvalues = self.p_cb.GetValue()
        rsa.scale_to_max = self.scale_cb.GetValue()
        rsa.no_relabelings = self.perm_spin.GetValue()
        # ComboBox selection is 0-based, rsa.dist_metric is 1-based.
        rsa.dist_metric = self.dropdown.GetSelection()+1
        self.Close()
    def OnCancel(self,e):
        # Discard any changes; the rsa module keeps its previous values.
        self.Close()
def main():
    """Entry point: create the wx application, show the main window and
    start the event loop."""
    app = wx.App()
    RSA_GUI(None, 'RSA')
    app.MainLoop()


if __name__ == '__main__':
    main()
| gpl-2.0 |
alanfgates/hive | service/lib/py/fb303_scripts/fb303_simple_mgmt.py | 171 | 5961 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys, os
from optparse import OptionParser
from thrift.Thrift import *
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from fb303 import *
from fb303.ttypes import *
def service_ctrl(
    command,
    port,
    trans_factory = None,
    prot_factory = None):
    """
    service_ctrl is a generic function to execute standard fb303 functions

    @param command: one of stop, start, reload, status, counters, name, alive
    @param port: service's port
    @param trans_factory: TTransportFactory to use for obtaining a TTransport. Default is
        TBufferedTransportFactory
    @param prot_factory: TProtocolFactory to use for obtaining a TProtocol. Default is
        TBinaryProtocolFactory
    @return: an exit status: 0 on success, 2 for ALIVE status, 3 on error,
        4 when root privileges are missing for stop/reload
    """
    if command in ["status"]:
        try:
            status = fb303_wrapper('status', port, trans_factory, prot_factory)
            status_details = fb303_wrapper('get_status_details', port, trans_factory, prot_factory)
            msg = fb_status_string(status)
            if (len(status_details)):
                msg += " - %s" % status_details
            print msg
            # Nagios-style exit codes: 2 means alive, 3 means anything else.
            if (status == fb_status.ALIVE):
                return 2
            else:
                return 3
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt.
            print "Failed to get status"
            return 3
    # scalar commands
    if command in ["version","alive","name"]:
        try:
            result = fb303_wrapper(command, port, trans_factory, prot_factory)
            print result
            return 0
        except:
            print "failed to get ",command
            return 3
    # counters
    if command in ["counters"]:
        try:
            counters = fb303_wrapper('counters', port, trans_factory, prot_factory)
            for counter in counters:
                print "%s: %d" % (counter, counters[counter])
            return 0
        except:
            print "failed to get counters"
            return 3
    # Only root should be able to run the following commands
    if os.getuid() == 0:
        # async commands
        if command in ["stop","reload"] :
            try:
                fb303_wrapper(command, port, trans_factory, prot_factory)
                return 0
            except:
                print "failed to tell the service to ", command
                return 3
    else:
        if command in ["stop","reload"]:
            print "root privileges are required to stop or reload the service."
            return 4
    # Unrecognized command: print usage information.
    print "The following commands are available:"
    for command in ["counters","name","version","alive","status"]:
        print "\t%s" % command
    print "The following commands are available for users with root privileges:"
    for command in ["stop","reload"]:
        print "\t%s" % command
    return 0;
def fb303_wrapper(command, port, trans_factory = None, prot_factory = None):
    """
    Open a connection to the fb303 service on localhost:port, execute a
    single command and return its result.

    @param command: one of reload, stop, status, version, get_status_details,
        counters, name, alive
    @param port: service's port
    @param trans_factory: optional TTransportFactory (default buffered)
    @param prot_factory: optional TProtocolFactory (default binary)
    @return: the RPC result, or None for commands without a return value
    """
    sock = TSocket.TSocket('localhost', port)

    # use input transport factory if provided
    if (trans_factory is None):
        trans = TTransport.TBufferedTransport(sock)
    else:
        trans = trans_factory.getTransport(sock)

    # use input protocol factory if provided
    if (prot_factory is None):
        prot = TBinaryProtocol.TBinaryProtocol(trans)
    else:
        prot = prot_factory.getProtocol(trans)

    # initialize client and open transport
    fb303_client = FacebookService.Client(prot, prot)
    trans.open()

    # Bug fix: the original closed the transport only for commands that fell
    # through all of the early returns, leaking the socket for 'status',
    # 'counters', etc. try/finally guarantees the transport is closed.
    try:
        if (command == 'reload'):
            fb303_client.reinitialize()
        elif (command == 'stop'):
            fb303_client.shutdown()
        elif (command == 'status'):
            return fb303_client.getStatus()
        elif (command == 'version'):
            return fb303_client.getVersion()
        elif (command == 'get_status_details'):
            return fb303_client.getStatusDetails()
        elif (command == 'counters'):
            return fb303_client.getCounters()
        elif (command == 'name'):
            return fb303_client.getName()
        elif (command == 'alive'):
            return fb303_client.aliveSince()
    finally:
        trans.close()
def fb_status_string(status_enum):
    """Map an fb_status enum value to its human readable name.

    Returns "UNKNOWN" for unrecognized values (the original implementation
    fell off the end and returned None, which then leaked into status
    messages as the string 'None').
    """
    names = {
        fb_status.DEAD: "DEAD",
        fb_status.STARTING: "STARTING",
        fb_status.ALIVE: "ALIVE",
        fb_status.STOPPING: "STOPPING",
        fb_status.STOPPED: "STOPPED",
        fb_status.WARNING: "WARNING",
    }
    return names.get(status_enum, "UNKNOWN")
def main():
    """Parse the command line and run the requested fb303 command,
    exiting with the status code returned by service_ctrl."""
    parser = OptionParser()
    commands = ["stop", "counters", "status", "reload", "version", "name", "alive"]
    parser.add_option("-c", "--command", dest="command", help="execute this API",
                      choices=commands, default="status")
    parser.add_option("-p", "--port", dest="port", help="the service's port",
                      default=9082)
    options, _args = parser.parse_args()
    status = service_ctrl(options.command, options.port)
    sys.exit(status)


if __name__ == '__main__':
    main()
| apache-2.0 |
stevenbrichards/boto | tests/unit/cloudfront/test_invalidation_list.py | 114 | 4051 | #!/usr/bin/env python
import random
import string
from tests.compat import unittest, mock
import boto
# %-style template for a complete CloudFront InvalidationList XML response;
# filled in by _get_mock_responses with pagination markers and summaries.
RESPONSE_TEMPLATE = r"""
<InvalidationList>
<Marker/>
<NextMarker>%(next_marker)s</NextMarker>
<MaxItems>%(max_items)s</MaxItems>
<IsTruncated>%(is_truncated)s</IsTruncated>
%(inval_summaries)s
</InvalidationList>
"""

# Template for a single InvalidationSummary entry inside the list above.
INVAL_SUMMARY_TEMPLATE = r"""
<InvalidationSummary>
<Id>%(cfid)s</Id>
<Status>%(status)s</Status>
</InvalidationSummary>
"""
class CFInvalidationListTest(unittest.TestCase):
    """Tests for paginated listing of CloudFront invalidation requests,
    using mocked HTTP responses built from the XML templates above."""

    cloudfront = True

    def setUp(self):
        # Credentials are dummies; all network calls are mocked out below.
        self.cf = boto.connect_cloudfront('aws.aws_access_key_id',
                                          'aws.aws_secret_access_key')

    def _get_random_id(self, length=14):
        # Random alphabetic id standing in for a CloudFront invalidation id.
        return ''.join([random.choice(string.ascii_letters) for i in
                        range(length)])

    def _group_iter(self, iterator, n):
        # Yield successive chunks of size n (last chunk may be smaller).
        accumulator = []
        for item in iterator:
            accumulator.append(item)
            if len(accumulator) == n:
                yield accumulator
                accumulator = []
        if len(accumulator) != 0:
            yield accumulator

    def _get_mock_responses(self, num, max_items):
        # Build one mock HTTP response per page of `max_items` invalidation
        # summaries; the last page has IsTruncated=false and no NextMarker.
        max_items = min(max_items, 100)
        cfid_groups = list(self._group_iter([self._get_random_id() for i in
                                             range(num)], max_items))
        cfg = dict(status='Completed', max_items=max_items, next_marker='')
        responses = []
        is_truncated = 'true'
        for i, group in enumerate(cfid_groups):
            # NextMarker is the last id of the page, mirroring the AWS API.
            next_marker = group[-1]
            if (i + 1) == len(cfid_groups):
                is_truncated = 'false'
                next_marker = ''
            invals = ''
            cfg.update(dict(next_marker=next_marker,
                            is_truncated=is_truncated))
            for cfid in group:
                cfg.update(dict(cfid=cfid))
                invals += INVAL_SUMMARY_TEMPLATE % cfg
            cfg.update(dict(inval_summaries=invals))
            mock_response = mock.Mock()
            mock_response.read.return_value = (RESPONSE_TEMPLATE % cfg).encode('utf-8')
            mock_response.status = 200
            responses.append(mock_response)
        return responses

    def test_manual_pagination(self, num_invals=30, max_items=4):
        """
        Test that paginating manually works properly
        """
        self.assertGreater(num_invals, max_items)
        responses = self._get_mock_responses(num=num_invals,
                                             max_items=max_items)
        # side_effect feeds one mocked page per make_request call, in order.
        self.cf.make_request = mock.Mock(side_effect=responses)
        ir = self.cf.get_invalidation_requests('dist-id-here',
                                               max_items=max_items)
        all_invals = list(ir)
        self.assertEqual(len(all_invals), max_items)
        # Follow NextMarker until the service reports no more pages.
        while ir.is_truncated:
            ir = self.cf.get_invalidation_requests('dist-id-here',
                                                   marker=ir.next_marker,
                                                   max_items=max_items)
            invals = list(ir)
            self.assertLessEqual(len(invals), max_items)
            all_invals.extend(invals)
        remainder = num_invals % max_items
        if remainder != 0:
            self.assertEqual(len(invals), remainder)
        self.assertEqual(len(all_invals), num_invals)

    def test_auto_pagination(self, num_invals=1024):
        """
        Test that auto-pagination works properly
        """
        # 100 is the AWS-side page-size cap used by _get_mock_responses.
        max_items = 100
        self.assertGreaterEqual(num_invals, max_items)
        responses = self._get_mock_responses(num=num_invals,
                                             max_items=max_items)
        self.cf.make_request = mock.Mock(side_effect=responses)
        ir = self.cf.get_invalidation_requests('dist-id-here')
        # Only the first page is fetched eagerly; iteration fetches the rest.
        self.assertEqual(len(ir._inval_cache), max_items)
        self.assertEqual(len(list(ir)), num_invals)
| mit |
hojel/calibre | src/calibre/ebooks/oeb/polish/split.py | 11 | 18227 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import copy, os, re
from future_builtins import map
from urlparse import urlparse
from calibre.ebooks.oeb.base import barename, XPNSMAP, XPath, OPF, XHTML, OEB_DOCS
from calibre.ebooks.oeb.polish.errors import MalformedMarkup
from calibre.ebooks.oeb.polish.toc import node_from_loc
from calibre.ebooks.oeb.polish.replace import LinkRebaser
class AbortError(ValueError):
    '''Raised when a split/merge operation cannot be performed, e.g. when
    trying to split inside a table or on the <body> tag.'''
    pass
def in_table(node):
    '''Return True if *node* or any of its ancestors is a <table> element.'''
    current = node
    while current is not None:
        if current.tag.endswith('}table'):
            return True
        current = current.getparent()
    return False
def adjust_split_point(split_point, log):
    '''
    Move the split point up its ancestor chain if it has no content
    before it. This handles the common case:
    <div id="chapter1"><h2>Chapter 1</h2>...</div> with a page break on the
    h2.
    '''
    candidate = split_point
    parent = candidate.getparent()
    while parent is not None:
        # Stop climbing at the document level, or as soon as there is any
        # content before the candidate inside its parent.
        if barename(parent.tag) in {'body', 'html'}:
            break
        if parent.text and parent.text.strip():
            break
        if parent.index(candidate) > 0:
            break
        candidate = parent
        parent = candidate.getparent()
    if candidate is not split_point:
        log.debug('Adjusted split point to ancestor')
    return candidate
def get_body(root):
    '''Return the <body> element of an XHTML tree (None if absent).'''
    body = root.find('h:body', namespaces=XPNSMAP)
    return body
def do_split(split_point, log, before=True):
    '''
    Split tree into a *before* and an *after* tree at ``split_point``.

    :param split_point: The Element at which to split
    :param before: If True tree is split before split_point, otherwise after split_point
    :return: before_tree, after_tree
    '''
    if before:
        # We cannot adjust for after since moving an after split point to a
        # parent will cause breakage if the parent contains any content
        # after the original split point
        split_point = adjust_split_point(split_point, log)
    tree = split_point.getroottree()
    path = tree.getpath(split_point)
    # Work on two deep copies of the tree; the split point is re-located in
    # each copy via its XPath, since element identity is not preserved.
    tree, tree2 = copy.deepcopy(tree), copy.deepcopy(tree)
    root, root2 = tree.getroot(), tree2.getroot()
    body, body2 = map(get_body, (root, root2))
    split_point = root.xpath(path)[0]
    split_point2 = root2.xpath(path)[0]
    def nix_element(elem, top=True):
        # Remove elem unless top is False in which case replace elem by its
        # children
        parent = elem.getparent()
        if top:
            parent.remove(elem)
        else:
            index = parent.index(elem)
            parent[index:index+1] = list(elem.iterchildren())
    # Tree 1: keep everything before the split point (and, when splitting
    # "after", the split point element with its descendants).
    hit_split_point = False
    keep_descendants = False
    split_point_descendants = frozenset(split_point.iterdescendants())
    for elem in tuple(body.iterdescendants()):
        if elem is split_point:
            hit_split_point = True
            if before:
                nix_element(elem)
            else:
                # We want to keep the descendants of the split point in
                # Tree 1
                keep_descendants = True
                # We want the split point element, but not its tail
                elem.tail = '\n'
            continue
        if hit_split_point:
            if keep_descendants:
                if elem in split_point_descendants:
                    # elem is a descendant keep it
                    continue
                else:
                    # We are out of split_point, so prevent further set
                    # lookups of split_point_descendants
                    keep_descendants = False
            nix_element(elem)
    # Tree 2: keep everything from the split point onwards; ancestors of
    # the split point are emptied of text but kept for their CSS effects.
    ancestors = frozenset(XPath('ancestor::*')(split_point2))
    for elem in tuple(body2.iterdescendants()):
        if elem is split_point2:
            if not before:
                # Keep the split point element's tail, if it contains non-whitespace
                # text
                tail = elem.tail
                if tail and not tail.isspace():
                    parent = elem.getparent()
                    idx = parent.index(elem)
                    if idx == 0:
                        parent.text = (parent.text or '') + tail
                    else:
                        sib = parent[idx-1]
                        sib.tail = (sib.tail or '') + tail
            # Remove the element itself
            nix_element(elem)
            break
        if elem in ancestors:
            # We have to preserve the ancestors as they could have CSS
            # styles that are inherited/applicable, like font or
            # width. So we only remove the text, if any.
            elem.text = '\n'
        else:
            nix_element(elem, top=False)
    body2.text = '\n'
    return tree, tree2
class SplitLinkReplacer(object):

    '''Callable used with replace_links(): rewrites links that pointed at an
    anchor which, after a split, now lives in the bottom half of the file.
    Sets ``replaced`` to True if any link was rewritten.'''

    def __init__(self, base, bottom_anchors, top_name, bottom_name, container):
        self.base = base
        self.container = container
        self.top_name = top_name
        self.bottom_name = bottom_name
        self.bottom_anchors = bottom_anchors
        self.replaced = False

    def __call__(self, url):
        # Fragment-only links are internal to the file being processed.
        if url and url.startswith('#'):
            return url
        name = self.container.href_to_name(url, self.base)
        if name != self.top_name:
            return url
        fragment = urlparse(url).fragment
        if fragment and fragment in self.bottom_anchors:
            base_href = self.container.name_to_href(self.bottom_name, self.base)
            url = base_href + '#' + fragment
            self.replaced = True
        return url
def split(container, name, loc_or_xpath, before=True, totals=None):
    '''
    Split the file specified by name at the position specified by loc_or_xpath.
    Splitting automatically migrates all links and references to the affected
    files.

    :param loc_or_xpath: Should be an XPath expression such as
        //h:div[@id="split_here"]. Can also be a *loc* which is used internally to
        implement splitting in the preview panel.
    :param before: If True the split occurs before the identified element otherwise after it.
    :param totals: Used internally
    :return: the name of the newly created bottom file
    '''
    root = container.parsed(name)
    if isinstance(loc_or_xpath, type('')):
        split_point = root.xpath(loc_or_xpath)[0]
    else:
        try:
            split_point = node_from_loc(root, loc_or_xpath, totals=totals)
        except MalformedMarkup:
            # The webkit HTML parser and the container parser have yielded
            # different node counts, this can happen if the file is valid XML
            # but contains constructs like nested <p> tags. So force parse it
            # with the HTML 5 parser and try again.
            raw = container.raw_data(name)
            root = container.parse_xhtml(raw, fname=name, force_html5_parse=True)
            try:
                split_point = node_from_loc(root, loc_or_xpath, totals=totals)
            except MalformedMarkup:
                raise MalformedMarkup(_('The file %s has malformed markup. Try running the Fix HTML tool'
                                        ' before splitting') % name)
            container.replace(name, root)
    if in_table(split_point):
        raise AbortError('Cannot split inside tables')
    if split_point.tag.endswith('}body'):
        raise AbortError('Cannot split on the <body> tag')
    tree1, tree2 = do_split(split_point, container.log, before=before)
    root1, root2 = tree1.getroot(), tree2.getroot()
    # Anchor sets are used below to decide which links must be retargeted.
    anchors_in_top = frozenset(root1.xpath('//*/@id')) | frozenset(root1.xpath('//*/@name')) | {''}
    anchors_in_bottom = frozenset(root2.xpath('//*/@id')) | frozenset(root2.xpath('//*/@name'))
    base, ext = name.rpartition('.')[0::2]
    base = re.sub(r'_split\d+$', '', base)
    # Find an unused *_splitN name for the new bottom file.
    nname, s = None, 0
    while not nname or container.exists(nname):
        s += 1
        nname = '%s_split%d.%s' % (base, s, ext)
    manifest_item = container.generate_item(nname, media_type=container.mime_map[name])
    bottom_name = container.href_to_name(manifest_item.get('href'), container.opf_name)
    # Fix links in the split trees
    for r, rname, anchors in [(root1, bottom_name, anchors_in_bottom), (root2, name, anchors_in_top)]:
        for a in r.xpath('//*[@href]'):
            url = a.get('href')
            if url.startswith('#'):
                fname = name
            else:
                fname = container.href_to_name(url, name)
            if fname == name:
                purl = urlparse(url)
                if purl.fragment in anchors:
                    a.set('href', '%s#%s' % (container.name_to_href(rname, name), purl.fragment))
    # Fix all links in the container that point to anchors in the bottom tree
    for fname, media_type in container.mime_map.iteritems():
        if fname not in {name, bottom_name}:
            repl = SplitLinkReplacer(fname, anchors_in_bottom, name, bottom_name, container)
            container.replace_links(fname, repl)
    container.replace(name, root1)
    container.replace(bottom_name, root2)
    # Insert the new file into the spine right after the original one,
    # preserving the linear attribute.
    spine = container.opf_xpath('//opf:spine')[0]
    for spine_item, spine_name, linear in container.spine_iter:
        if spine_name == name:
            break
    index = spine.index(spine_item) + 1
    si = spine.makeelement(OPF('itemref'), idref=manifest_item.get('id'))
    if not linear:
        si.set('linear', 'no')
    container.insert_into_xml(spine, si, index=index)
    container.dirty(container.opf_name)
    return bottom_name
def multisplit(container, name, xpath, before=True):
    '''
    Split the specified file at multiple locations (all tags that match the specified XPath expression. See also: :func:`split`.
    Splitting automatically migrates all links and references to the affected
    files.

    :param before: If True the splits occur before the identified element otherwise after it.
    :return: list of the names of the newly created files (excludes *name*)
    '''
    root = container.parsed(name)
    nodes = root.xpath(xpath, namespaces=XPNSMAP)
    if not nodes:
        raise AbortError(_('The expression %s did not match any nodes') % xpath)
    # Validate all split points up front so we fail before any mutation.
    for split_point in nodes:
        if in_table(split_point):
            raise AbortError('Cannot split inside tables')
        if split_point.tag.endswith('}body'):
            raise AbortError('Cannot split on the <body> tag')
    # Tag every split point with a temporary attribute so it can be found
    # again after split() reparses/replaces the file.
    for i, tag in enumerate(nodes):
        tag.set('calibre-split-point', str(i))
    current = name
    all_names = [name]
    for i in xrange(len(nodes)):
        # Each split leaves the remaining marked nodes in the bottom file.
        current = split(container, current, '//*[@calibre-split-point="%d"]' % i, before=before)
        all_names.append(current)
    # Clean up the temporary marker attributes in every resulting file.
    for x in all_names:
        for tag in container.parsed(x).xpath('//*[@calibre-split-point]'):
            tag.attrib.pop('calibre-split-point')
        container.dirty(x)
    return all_names[1:]
class MergeLinkReplacer(object):

    '''Callable used with replace_links(): rewrites links that pointed at a
    merged file so they target the corresponding (possibly renamed) anchor
    in the *master* file. Sets ``replaced`` to True when a link changes.'''

    def __init__(self, base, anchor_map, master, container):
        self.base = base
        self.container = container
        self.anchor_map = anchor_map
        self.master = master
        self.replaced = False

    def __call__(self, url):
        # Fragment-only links are internal to the file being processed.
        if url and url.startswith('#'):
            return url
        name = self.container.href_to_name(url, self.base)
        amap = self.anchor_map.get(name)
        if amap is None:
            return url
        frag = urlparse(url).fragment or ''
        # An empty fragment maps to the merged file's first-child anchor.
        frag = amap.get(frag, frag)
        master_href = self.container.name_to_href(self.master, self.base)
        self.replaced = True
        return master_href + '#' + frag
def add_text(body, text):
    '''Append *text* at the end of *body*: onto the tail of the last child
    if there is one, otherwise onto the element's own text.'''
    if len(body) == 0:
        body.text = (body.text or '') + text
    else:
        last_child = body[-1]
        last_child.tail = (last_child.tail or '') + text
def all_anchors(root):
    '''Return the set of all id and name attribute values in the tree.'''
    ids = root.xpath('//*/@id')
    names = root.xpath('//*/@name')
    return set(ids) | set(names)
def all_stylesheets(container, name):
    '''Yield the names of all CSS stylesheets linked from the HTML file *name*.'''
    # Bug fix: the original rebound the ``name`` parameter inside the loop,
    # so every <link> after the first was resolved relative to the
    # previously seen stylesheet instead of the HTML file. Use a separate
    # variable for the resolved stylesheet name.
    for link in XPath('//h:head/h:link[@href]')(container.parsed(name)):
        sheet_name = container.href_to_name(link.get('href'), name)
        typ = link.get('type', 'text/css')
        if typ == 'text/css':
            yield sheet_name
def unique_anchor(seen_anchors, current):
    '''Return *current*, suffixed with _N for the smallest N that avoids a
    collision with any anchor in *seen_anchors*.'''
    candidate = current
    counter = 0
    while candidate in seen_anchors:
        counter += 1
        candidate = '%s_%d' % (current, counter)
    return candidate
def remove_name_attributes(root):
    '''Replace all name attributes in the tree with id attributes; elements
    that already have an id simply lose the redundant name.'''
    for elem in root.xpath('//*[@id and @name]'):
        elem.attrib.pop('name')
    for elem in root.xpath('//*[@name]'):
        name_value = elem.attrib.pop('name')
        elem.set('id', name_value)
def merge_html(container, names, master):
    # Merge the HTML files in *names* into *master*: bodies are concatenated
    # into master's last <body>, new stylesheets are linked into master's
    # <head>, anchors are deduplicated, and all links are retargeted.
    p = container.parsed
    root = p(master)
    # Ensure master has a <head>
    head = root.find('h:head', namespaces=XPNSMAP)
    if head is None:
        head = root.makeelement(XHTML('head'))
        container.insert_into_xml(root, head, 0)
    seen_anchors = all_anchors(root)
    seen_stylesheets = set(all_stylesheets(container, master))
    master_body = p(master).findall('h:body', namespaces=XPNSMAP)[-1]
    master_base = os.path.dirname(master)
    # anchor_map records, per merged file, how its anchors were renamed so
    # links elsewhere in the container can be fixed up afterwards.
    anchor_map = {n:{} for n in names if n != master}
    for name in names:
        if name == master:
            continue
        # Insert new stylesheets into master
        for sheet in all_stylesheets(container, name):
            if sheet not in seen_stylesheets:
                seen_stylesheets.add(sheet)
                link = head.makeelement(XHTML('link'), rel='stylesheet', type='text/css', href=container.name_to_href(sheet, master))
                container.insert_into_xml(head, link)
        # Rebase links if master is in a different directory
        if os.path.dirname(name) != master_base:
            container.replace_links(name, LinkRebaser(container, name, master))
        root = p(name)
        children = []
        # Collect the merged file's body content: leading text plus children.
        for body in p(name).findall('h:body', namespaces=XPNSMAP):
            children.append(body.text if body.text and body.text.strip() else '\n\n')
            children.extend(body)
        first_child = ''
        for first_child in children:
            if not isinstance(first_child, basestring):
                break
        if isinstance(first_child, basestring):
            # Empty document, ignore
            continue
        amap = anchor_map[name]
        remove_name_attributes(root)
        # Rename any anchors that collide with ones already in master.
        for elem in root.xpath('//*[@id]'):
            val = elem.get('id')
            if not val:
                continue
            if val in seen_anchors:
                nval = unique_anchor(seen_anchors, val)
                elem.set('id', nval)
                amap[val] = nval
            else:
                seen_anchors.add(val)
        # The first element gets an anchor so links to the whole file (no
        # fragment) can be redirected to the start of the merged content.
        if 'id' not in first_child.attrib:
            first_child.set('id', unique_anchor(seen_anchors, 'top'))
            seen_anchors.add(first_child.get('id'))
        amap[''] = first_child.get('id')
        # Fix links that point to local changed anchors
        for a in XPath('//h:a[starts-with(@href, "#")]')(root):
            q = a.get('href')[1:]
            if q in amap:
                a.set('href', '#' + amap[q])
        for child in children:
            if isinstance(child, basestring):
                add_text(master_body, child)
            else:
                master_body.append(copy.deepcopy(child))
        container.remove_item(name, remove_from_guide=False)
    # Fix all links in the container that point to merged files
    for fname, media_type in container.mime_map.iteritems():
        repl = MergeLinkReplacer(fname, anchor_map, master, container)
        container.replace_links(fname, repl)
def merge_css(container, names, master):
    # Merge the CSS files in *names* into *master* by appending their rules,
    # then replace <link> references to the merged sheets in all HTML files.
    p = container.parsed
    msheet = p(master)
    master_base = os.path.dirname(master)
    merged = set()
    for name in names:
        if name == master:
            continue
        # Rebase links if master is in a different directory
        if os.path.dirname(name) != master_base:
            container.replace_links(name, LinkRebaser(container, name, master))
        sheet = p(name)
        # Remove charset rules (only one @charset is allowed per sheet, and
        # it must come first; the master keeps its own).
        cr = [r for r in sheet.cssRules if r.type == r.CHARSET_RULE]
        [sheet.deleteRule(sheet.cssRules.index(r)) for r in cr]
        for rule in sheet.cssRules:
            msheet.add(rule)
        container.remove_item(name)
        merged.add(name)
    # Remove links to merged stylesheets in the html files, replacing with a
    # link to the master sheet
    for name, mt in container.mime_map.iteritems():
        if mt in OEB_DOCS:
            removed = False
            root = p(name)
            for link in XPath('//h:link[@href]')(root):
                q = container.href_to_name(link.get('href'), name)
                if q in merged:
                    container.remove_from_xml(link)
                    removed = True
            if removed:
                container.dirty(name)
            # Only add a link to master if this document lost a sheet and
            # does not already link to master.
            if removed and master not in set(all_stylesheets(container, name)):
                head = root.find('h:head', namespaces=XPNSMAP)
                if head is not None:
                    link = head.makeelement(XHTML('link'), type='text/css', rel='stylesheet', href=container.name_to_href(master, name))
                    container.insert_into_xml(head, link)
def merge(container, category, names, master):
    '''
    Merge the specified files into a single file, automatically migrating all
    links and references to the affected files. The files must all be either
    HTML or CSS files.

    :param category: Must be either ``'text'`` for HTML files or ``'styles'`` for CSS files
    :param names: The list of files to be merged
    :param master: Which of the merged files is the *master* file, that is, the file that will remain after merging.
    '''
    # Dispatch table mapping the category to its specialized merger.
    mergers = {'text': merge_html, 'styles': merge_css}
    if category not in mergers:
        raise AbortError('Cannot merge files of type: %s' % category)
    if len(names) < 2:
        raise AbortError('Must specify at least two files to be merged')
    if master not in names:
        raise AbortError('The master file must be one of the files being merged')

    mergers[category](container, names, master)
    container.dirty(master)
| gpl-3.0 |
pavelchristof/gomoku-ai | tensorflow/python/util/example_parser_configuration.py | 77 | 4715 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Extract parse_example op configuration to a proto."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.example import example_parser_configuration_pb2
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
def extract_example_parser_configuration(parse_example_op, sess):
  """Returns an ExampleParserConfig proto.

  Args:
    parse_example_op: A ParseExample `Operation`
    sess: A tf.Session needed to obtain some configuration values.

  Returns:
    A ExampleParserConfig proto.

  Raises:
    ValueError: If attributes are inconsistent.
  """
  config = example_parser_configuration_pb2.ExampleParserConfiguration()

  # Feature counts are stored as op attributes.
  num_sparse = parse_example_op.get_attr("Nsparse")
  num_dense = parse_example_op.get_attr("Ndense")
  total_features = num_dense + num_sparse

  sparse_types = parse_example_op.get_attr("sparse_types")
  dense_types = parse_example_op.get_attr("Tdense")
  dense_shapes = parse_example_op.get_attr("dense_shapes")

  # Sanity-check that the attribute lists agree with the declared counts.
  if len(sparse_types) != num_sparse:
    raise ValueError("len(sparse_types) attribute does not match "
                     "Nsparse attribute (%d vs %d)" %
                     (len(sparse_types), num_sparse))

  if len(dense_types) != num_dense:
    raise ValueError("len(dense_types) attribute does not match "
                     "Ndense attribute (%d vs %d)" %
                     (len(dense_types), num_dense))

  if len(dense_shapes) != num_dense:
    raise ValueError("len(dense_shapes) attribute does not match "
                     "Ndense attribute (%d vs %d)" %
                     (len(dense_shapes), num_dense))

  # Skip over the serialized input, and the names input.
  fetch_list = parse_example_op.inputs[2:]

  # Fetch total_features key names and num_dense default values.
  if len(fetch_list) != (total_features + num_dense):
    raise ValueError("len(fetch_list) does not match total features + "
                     "num_dense (%d vs %d)" %
                     (len(fetch_list), (total_features + num_dense)))

  # Evaluate the constant key/default-value tensors in the session.
  fetched = sess.run(fetch_list)

  if len(fetched) != len(fetch_list):
    raise ValueError("len(fetched) does not match len(fetch_list) "
                     "(%d vs %d)" % (len(fetched), len(fetch_list)))

  # Fetch indices: input tensor layout is
  # [sparse keys..., dense keys..., dense defaults...].
  sparse_keys_start = 0
  dense_keys_start = sparse_keys_start + num_sparse
  dense_def_start = dense_keys_start + num_dense

  # Output tensor indices: layout is
  # [sparse indices..., sparse values..., sparse shapes..., dense values...].
  sparse_indices_start = 0
  sparse_values_start = num_sparse
  sparse_shapes_start = sparse_values_start + num_sparse
  dense_values_start = sparse_shapes_start + num_sparse

  # Dense features.
  for i in range(num_dense):
    key = fetched[dense_keys_start + i]
    feature_config = config.feature_map[key]
    # Convert the default value numpy array fetched from the session run
    # into a TensorProto.
    fixed_config = feature_config.fixed_len_feature

    fixed_config.default_value.CopyFrom(
        tensor_util.make_tensor_proto(fetched[dense_def_start + i]))
    # Convert the shape from the attributes
    # into a TensorShapeProto.
    fixed_config.shape.CopyFrom(
        tensor_shape.TensorShape(dense_shapes[i]).as_proto())

    fixed_config.dtype = int(dense_types[i])
    # Get the output tensor name.
    fixed_config.values_output_tensor_name = parse_example_op.outputs[
        dense_values_start + i].name

  # Sparse features.
  for i in range(num_sparse):
    key = fetched[sparse_keys_start + i]
    feature_config = config.feature_map[key]
    var_len_feature = feature_config.var_len_feature
    var_len_feature.dtype = int(sparse_types[i])
    # A sparse feature produces three output tensors: indices/values/shape.
    var_len_feature.indices_output_tensor_name = parse_example_op.outputs[
        sparse_indices_start + i].name
    var_len_feature.values_output_tensor_name = parse_example_op.outputs[
        sparse_values_start + i].name
    var_len_feature.shapes_output_tensor_name = parse_example_op.outputs[
        sparse_shapes_start + i].name

  return config
| apache-2.0 |
Vijfhoek/oyoyo | oyoyo/cmdhandler.py | 1 | 6875 | # Copyright (c) 2008 Duncan Fordyce
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import inspect
import logging
import sys
import traceback
from oyoyo import helpers
from oyoyo.parse import parse_nick
# Python < 3 compatibility: shadow the builtin name ``bytes`` with a shim that
# accepts the Python 3 style two-argument constructor ``bytes(s, encoding)``.
# On Python 2, bytes IS str, so the shim simply returns str(b) and ignores the
# encoding argument.
if sys.version_info < (3,):
    class bytes(object):
        # Note: ``self`` here is really the class (first arg of __new__).
        def __new__(self, b='', encoding='utf8'):
            return str(b)
def protected(func):
    """Mark *func* so the command dispatcher refuses to expose or run it.

    The callable is returned unchanged apart from a ``protected`` attribute
    set to True, which ``CommandHandler.get`` checks before dispatching.
    """
    setattr(func, 'protected', True)
    return func
class CommandError(Exception):
    """Base error for command dispatch failures; stores the offending command."""

    def __init__(self, cmd):
        # cmd is the command (or list of command parts) that failed.
        self.cmd = cmd
class NoSuchCommandError(CommandError):
    """Raised when the requested command cannot be found on the handler."""

    def __str__(self):
        # self.cmd is the list of dotted command parts.
        return 'No such command "%s"' % ".".join(self.cmd)
class ProtectedCommandError(CommandError):
    """Raised when the requested command exists but is marked @protected."""

    def __str__(self):
        return 'Command "%s" is protected' % ".".join(self.cmd)
class CommandHandler(object):
    """ The most basic CommandHandler """

    def __init__(self, client):
        # The IRC client this handler dispatches commands for.
        self.client = client

    @protected
    def get(self, in_command_parts):
        """ finds a command
        commands may be dotted. each command part is checked that it does
        not start with and underscore and does not have an attribute
        "protected". if either of these is true, ProtectedCommandError
        is raised.
        its possible to pass both "command.sub.func" and
        ["command", "sub", "func"].
        """
        if isinstance(in_command_parts, (str, bytes)):
            # ``bytes`` here is the module-level py2 shim, so the separator
            # ends up being a plain str '.' on Python 2.
            in_command_parts = in_command_parts.split(bytes('.', 'ascii'))
        command_parts = in_command_parts[:]

        # Walk the dotted path, one attribute at a time, starting at self.
        p = self
        while command_parts:
            cmd = command_parts.pop(0).decode('ascii')
            if cmd.startswith('_'):
                raise ProtectedCommandError(in_command_parts)

            try:
                f = getattr(p, cmd)
            except AttributeError:
                raise NoSuchCommandError(in_command_parts)

            if hasattr(f, 'protected'):
                raise ProtectedCommandError(in_command_parts)

            # Nested handlers resolve the remainder of the path themselves.
            if isinstance(f, CommandHandler) and command_parts:
                return f.get(command_parts)
            p = f

        return f

    @protected
    def run(self, command, *args):
        """ finds and runs a command """
        logging.debug("processCommand %s(%s)" % (command, args))

        try:
            f = self.get(command)
        except NoSuchCommandError:
            # Unknown commands are delegated rather than raising.
            self.__unhandled__(command, *args)
            return

        logging.debug('f %s' % f)

        try:
            f(*args)
        except Exception, e:
            # Any failure inside a command is logged and re-raised as a
            # CommandError so callers can report it uniformly.
            logging.error('command raised %s' % e)
            logging.error(traceback.format_exc())
            raise CommandError(command)

    @protected
    def __unhandled__(self, cmd, *args):
        """The default handler for commands. Override this method to
        apply custom behavior (example, printing) unhandled commands.
        """
        logging.debug('unhandled command %s(%s)' % (cmd, args))
class DefaultCommandHandler(CommandHandler):
    """ CommandHandler that provides methods for the normal operation of IRC.
    If you want your bot to properly respond to pings, etc, you should subclass this.
    """

    def ping(self, prefix, server):
        # Answer the server's PING to keep the connection alive.
        self.client.send('PONG', server)
class DefaultBotCommandHandler(CommandHandler):
""" default command handler for bots. methods/attributes are made
available as commands """
@protected
def getVisibleCommands(self, obj=None):
test = (lambda x: isinstance(x, CommandHandler) or \
inspect.ismethod(x) or inspect.isfunction(x))
members = inspect.getmembers(obj or self, test)
return [m for m, _ in members
if (not m.startswith('_') and
not hasattr(getattr(obj, m), 'protected'))]
def help(self, sender, dest, arg=None):
"""list all available commands or get help on a specific command"""
logging.info('help sender=%s dest=%s arg=%s' % (sender, dest, arg))
if not arg:
commands = self.getVisibleCommands()
commands.sort()
helpers.msg(self.client, dest,
"available commands: %s" % " ".join(commands))
else:
try:
f = self.get(arg)
except CommandError, e:
helpers.msg(self.client, dest, str(e))
return
doc = f.__doc__.strip() if f.__doc__ else "No help available"
if not inspect.ismethod(f):
subcommands = self.getVisibleCommands(f)
if subcommands:
doc += " [sub commands: %s]" % " ".join(subcommands)
helpers.msg(self.client, dest, "%s: %s" % (arg, doc))
class BotCommandHandler(DefaultCommandHandler):
    """ complete command handler for bots """

    def __init__(self, client, command_handler):
        DefaultCommandHandler.__init__(self, client)
        # Separate handler whose methods are exposed as bot commands.
        self.command_handler = command_handler

    def privmsg(self, prefix, dest, msg):
        # Every private message is checked for a bot command.
        self.tryBotCommand(prefix, dest, msg)

    @protected
    def tryBotCommand(self, prefix, dest, msg):
        """ tests a command to see if its a command for the bot, returns True
        and calls self.processBotCommand(cmd, sender) if its is.
        """
        # NOTE(review): despite the docstring, this dispatches through
        # self.command_handler.run, not processBotCommand.
        logging.debug("tryBotCommand('%s' '%s' '%s')" % (prefix, dest, msg))

        if dest == self.client.nick:
            # Direct message: reply to the sender instead of ourselves.
            dest = parse_nick(prefix)[0]
        elif msg.startswith(self.client.nick):
            # Channel message addressed to the bot ("nick: command ...").
            msg = msg[len(self.client.nick)+1:]
        else:
            return False

        msg = msg.strip()

        # First word is the command, the (optional) remainder is one argument.
        parts = msg.split(' ', 1)
        command = parts[0]
        arg = parts[1:]

        try:
            self.command_handler.run(command, prefix, dest, *arg)
        except CommandError, e:
            helpers.msg(self.client, dest, str(e))
        return True
| mit |
nkgilley/home-assistant | tests/components/media_player/test_async_helpers.py | 14 | 8343 | """The tests for the Async Media player helper functions."""
import asyncio
import unittest
import homeassistant.components.media_player as mp
from homeassistant.const import (
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from tests.common import get_test_home_assistant
class AsyncMediaPlayer(mp.MediaPlayerEntity):
    """Async media player test class.

    Implements only the native async_* hooks; the sync counterpart below
    exercises the sync->async bridging instead.
    """

    def __init__(self, hass):
        """Initialize the test media player."""
        self.hass = hass
        # Backing fields for the read-only properties below.
        self._volume = 0
        self._state = STATE_OFF

    @property
    def state(self):
        """State of the player."""
        return self._state

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        # Bitmask of supported feature flags.
        return (
            mp.const.SUPPORT_VOLUME_SET
            | mp.const.SUPPORT_PLAY
            | mp.const.SUPPORT_PAUSE
            | mp.const.SUPPORT_TURN_OFF
            | mp.const.SUPPORT_TURN_ON
        )

    async def async_set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._volume = volume

    async def async_media_play(self):
        """Send play command."""
        self._state = STATE_PLAYING

    async def async_media_pause(self):
        """Send pause command."""
        self._state = STATE_PAUSED

    async def async_turn_on(self):
        """Turn the media player on."""
        self._state = STATE_ON

    async def async_turn_off(self):
        """Turn the media player off."""
        self._state = STATE_OFF
class SyncMediaPlayer(mp.MediaPlayerEntity):
    """Sync media player test class.

    Implements the synchronous hooks so the base class's async helpers are
    exercised through the executor bridge.
    """

    def __init__(self, hass):
        """Initialize the test media player."""
        self.hass = hass
        # Backing fields for the read-only properties below.
        self._volume = 0
        self._state = STATE_OFF

    @property
    def state(self):
        """State of the player."""
        return self._state

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        # Bitmask of supported feature flags (also advertises VOLUME_STEP).
        return (
            mp.const.SUPPORT_VOLUME_SET
            | mp.const.SUPPORT_VOLUME_STEP
            | mp.const.SUPPORT_PLAY
            | mp.const.SUPPORT_PAUSE
            | mp.const.SUPPORT_TURN_OFF
            | mp.const.SUPPORT_TURN_ON
        )

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._volume = volume

    def volume_up(self):
        """Turn volume up for media player."""
        # Custom step of 0.2 (distinguishes it from the default 0.1 step).
        if self.volume_level < 1:
            self.set_volume_level(min(1, self.volume_level + 0.2))

    def volume_down(self):
        """Turn volume down for media player."""
        if self.volume_level > 0:
            self.set_volume_level(max(0, self.volume_level - 0.2))

    def media_play_pause(self):
        """Play or pause the media player."""
        if self._state == STATE_PLAYING:
            self._state = STATE_PAUSED
        else:
            self._state = STATE_PLAYING

    def toggle(self):
        """Toggle the power on the media player."""
        if self._state in [STATE_OFF, STATE_IDLE]:
            self._state = STATE_ON
        else:
            self._state = STATE_OFF

    async def async_media_play_pause(self):
        """Create a coroutine to wrap the future returned by ABC.

        This allows the run_coroutine_threadsafe helper to be used.
        """
        await super().async_media_play_pause()

    async def async_toggle(self):
        """Create a coroutine to wrap the future returned by ABC.

        This allows the run_coroutine_threadsafe helper to be used.
        """
        await super().async_toggle()
class TestAsyncMediaPlayer(unittest.TestCase):
    """Test the media_player module against the async test player."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.player = AsyncMediaPlayer(self.hass)
        self.addCleanup(self.tear_down_cleanup)

    def tear_down_cleanup(self):
        """Shut down test instance."""
        self.hass.stop()

    def test_volume_up(self):
        """Test the volume_up helper function."""
        assert self.player.volume_level == 0
        asyncio.run_coroutine_threadsafe(
            self.player.async_set_volume_level(0.5), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.5
        # Default step is 0.1.
        asyncio.run_coroutine_threadsafe(
            self.player.async_volume_up(), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.6

    def test_volume_down(self):
        """Test the volume_down helper function."""
        assert self.player.volume_level == 0
        asyncio.run_coroutine_threadsafe(
            self.player.async_set_volume_level(0.5), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.5
        asyncio.run_coroutine_threadsafe(
            self.player.async_volume_down(), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.4

    def test_media_play_pause(self):
        """Test the media_play_pause helper function."""
        assert self.player.state == STATE_OFF
        asyncio.run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop
        ).result()
        assert self.player.state == STATE_PLAYING
        asyncio.run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop
        ).result()
        assert self.player.state == STATE_PAUSED

    def test_toggle(self):
        """Test the toggle helper function."""
        assert self.player.state == STATE_OFF
        asyncio.run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop
        ).result()
        assert self.player.state == STATE_ON
        asyncio.run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop
        ).result()
        assert self.player.state == STATE_OFF
class TestSyncMediaPlayer(unittest.TestCase):
    """Test the media_player module against the sync test player."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.player = SyncMediaPlayer(self.hass)
        self.addCleanup(self.tear_down_cleanup)

    def tear_down_cleanup(self):
        """Shut down test instance."""
        self.hass.stop()

    def test_volume_up(self):
        """Test the volume_up helper function."""
        assert self.player.volume_level == 0
        self.player.set_volume_level(0.5)
        assert self.player.volume_level == 0.5
        # SyncMediaPlayer overrides the step to 0.2 (vs the default 0.1).
        asyncio.run_coroutine_threadsafe(
            self.player.async_volume_up(), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.7

    def test_volume_down(self):
        """Test the volume_down helper function."""
        assert self.player.volume_level == 0
        self.player.set_volume_level(0.5)
        assert self.player.volume_level == 0.5
        asyncio.run_coroutine_threadsafe(
            self.player.async_volume_down(), self.hass.loop
        ).result()
        assert self.player.volume_level == 0.3

    def test_media_play_pause(self):
        """Test the media_play_pause helper function."""
        assert self.player.state == STATE_OFF
        asyncio.run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop
        ).result()
        assert self.player.state == STATE_PLAYING
        asyncio.run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop
        ).result()
        assert self.player.state == STATE_PAUSED

    def test_toggle(self):
        """Test the toggle helper function."""
        assert self.player.state == STATE_OFF
        asyncio.run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop
        ).result()
        assert self.player.state == STATE_ON
        asyncio.run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop
        ).result()
        assert self.player.state == STATE_OFF
| apache-2.0 |
benjaminjkraft/django | tests/gis_tests/gdal_tests/test_envelope.py | 335 | 3667 | import unittest
from unittest import skipUnless
from django.contrib.gis.gdal import HAS_GDAL
if HAS_GDAL:
from django.contrib.gis.gdal import Envelope, GDALException
class TestPoint(object):
    """Minimal point stand-in exposing ``.x`` and ``.y`` attributes."""

    def __init__(self, x, y):
        self.x, self.y = x, y
@skipUnless(HAS_GDAL, "GDAL is required")
class EnvelopeTest(unittest.TestCase):
    """Tests for django.contrib.gis.gdal.Envelope (construction, properties,
    equivalence, and the expand_to_include overloads)."""

    def setUp(self):
        # Fresh 5x5 envelope anchored at the origin for every test.
        self.e = Envelope(0, 0, 5, 5)

    def test01_init(self):
        "Testing Envelope initialization."
        e1 = Envelope((0, 0, 5, 5))
        Envelope(0, 0, 5, 5)
        Envelope(0, '0', '5', 5)  # Thanks to ww for this
        Envelope(e1._envelope)
        # min must not exceed max, exactly 4 ordinates are required.
        self.assertRaises(GDALException, Envelope, (5, 5, 0, 0))
        self.assertRaises(GDALException, Envelope, 5, 5, 0, 0)
        self.assertRaises(GDALException, Envelope, (0, 0, 5, 5, 3))
        self.assertRaises(GDALException, Envelope, ())
        self.assertRaises(ValueError, Envelope, 0, 'a', 5, 5)
        self.assertRaises(TypeError, Envelope, 'foo')
        self.assertRaises(GDALException, Envelope, (1, 1, 0, 0))
        try:
            # Degenerate (zero-area) envelopes are explicitly allowed.
            Envelope(0, 0, 0, 0)
        except GDALException:
            self.fail("shouldn't raise an exception for min_x == max_x or min_y == max_y")

    def test02_properties(self):
        "Testing Envelope properties."
        e = Envelope(0, 0, 2, 3)
        self.assertEqual(0, e.min_x)
        self.assertEqual(0, e.min_y)
        self.assertEqual(2, e.max_x)
        self.assertEqual(3, e.max_y)
        self.assertEqual((0, 0), e.ll)
        self.assertEqual((2, 3), e.ur)
        self.assertEqual((0, 0, 2, 3), e.tuple)
        self.assertEqual('POLYGON((0.0 0.0,0.0 3.0,2.0 3.0,2.0 0.0,0.0 0.0))', e.wkt)
        self.assertEqual('(0.0, 0.0, 2.0, 3.0)', str(e))

    def test03_equivalence(self):
        "Testing Envelope equivalence."
        e1 = Envelope(0.523, 0.217, 253.23, 523.69)
        e2 = Envelope((0.523, 0.217, 253.23, 523.69))
        # Envelopes compare equal to each other and to plain 4-tuples.
        self.assertEqual(e1, e2)
        self.assertEqual((0.523, 0.217, 253.23, 523.69), e1)

    def test04_expand_to_include_pt_2_params(self):
        "Testing Envelope expand_to_include -- point as two parameters."
        self.e.expand_to_include(2, 6)
        self.assertEqual((0, 0, 5, 6), self.e)
        self.e.expand_to_include(-1, -1)
        self.assertEqual((-1, -1, 5, 6), self.e)

    def test05_expand_to_include_pt_2_tuple(self):
        "Testing Envelope expand_to_include -- point as a single 2-tuple parameter."
        self.e.expand_to_include((10, 10))
        self.assertEqual((0, 0, 10, 10), self.e)
        self.e.expand_to_include((-10, -10))
        self.assertEqual((-10, -10, 10, 10), self.e)

    def test06_expand_to_include_extent_4_params(self):
        "Testing Envelope expand_to_include -- extent as 4 parameters."
        self.e.expand_to_include(-1, 1, 3, 7)
        self.assertEqual((-1, 0, 5, 7), self.e)

    def test06_expand_to_include_extent_4_tuple(self):
        "Testing Envelope expand_to_include -- extent as a single 4-tuple parameter."
        self.e.expand_to_include((-1, 1, 3, 7))
        self.assertEqual((-1, 0, 5, 7), self.e)

    def test07_expand_to_include_envelope(self):
        "Testing Envelope expand_to_include with Envelope as parameter."
        self.e.expand_to_include(Envelope(-1, 1, 3, 7))
        self.assertEqual((-1, 0, 5, 7), self.e)

    def test08_expand_to_include_point(self):
        "Testing Envelope expand_to_include with Point as parameter."
        # Any object with .x/.y works (duck typing), see TestPoint above.
        self.e.expand_to_include(TestPoint(-1, 1))
        self.assertEqual((-1, 0, 5, 5), self.e)
        self.e.expand_to_include(TestPoint(10, 10))
        self.assertEqual((-1, 0, 10, 10), self.e)
| bsd-3-clause |
OpenNeuroLab/brainspell-neo | archive/sprite/brainsprite.py | 2 | 7440 | # Christian Dansereau 2016 Copyright
import os
import numpy as np
import nibabel as nib
from PIL import Image
import json
from nilearn.image import resample_img
import hashlib, time
import matplotlib.pyplot as plt
from shutil import copyfile
def _load_json_template():
data_file = """{
"canvas": "3Dviewer",
"sprite": "spriteImg",
"flagCoordinates": true,
"nbSlice": {
"Y": 233,
"Z": 189
},
"colorBackground": "#000",
"colorFont": "#FFF",
"overlay": {
"sprite": "overlayImg",
"nbSlice": {
"Y": 233,
"Z": 189
},
"opacity": 0.7
},
"colorMap": {
"img": "colorMap",
"min": 0.2,
"max": 0.66
}
}
"""
data = json.loads(data_file)
return data
def _load_notebook_html(canvas_id, bkg_path, overlay_path, tmp_path, json_data):
html = """
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<div id="div_viewer">
<canvas id="{0}"> <!-- this is the canvas that will feature the brain slices -->
<img id="spriteImg" class="hidden" src="{1}"> <!-- load a hidden version of the sprite image that includes all (sagital) brain slices -->
<img id="overlayImg" class="hidden" src="{2}"> <!-- another sprite image, with an overlay-->
</div>
<script type="text/javascript" src="{3}jquery.min.js"></script> <!-- JQuery is used in this example, line 18, but is not actually used in brainsprite.js -->
<script type="text/javascript" src="{3}brainsprite.js"></script>
<script>
// On load: build all figures
$( "{0}" ).ready(function() {{
var brain = brainsprite({4});
}});
</script>
</body>
</html>
"""
return html.format(canvas_id, bkg_path, overlay_path, tmp_path, json_data)
def _loadVolume(source_file):
    """Load a volume from disk and return its voxel data as a numpy array.

    For .nii(.gz) files the first and third axes are swapped so the data
    matches the (z, y, x)-style ordering the mosaic code expects; other
    formats (e.g. .mnc) are returned as stored.
    """
    img = nib.load(source_file)
    vol = img.get_data()
    # check if its a nii file
    ext = _getExt(source_file)
    if ext == ".nii":
        vol = np.swapaxes(vol, 0, 2)
    return vol
def _getspec(vol):
nx, ny, nz = vol.shape
nrows = int(np.ceil(np.sqrt(nz)))
ncolumns = int(np.ceil(nz / (1. * nrows)))
return nrows, ncolumns, nx, ny, nz
def _getExt(source_file):
# Getting the extension
if os.path.splitext(source_file)[1] == '.gz':
extension = os.path.splitext(os.path.splitext(source_file)[0])[1]
else:
extension = os.path.splitext(source_file)[1]
return extension
def _montage(vol):
    """Tile the nz slices of a 3D volume into a single 2D mosaic.

    Slices are laid out row-major on a near-square grid; each slice is
    flipped along its first axis before placement.
    """
    nrows, ncolumns, nx, ny, nz = _getspec(vol)
    mosaic = np.zeros((nrows * nx, ncolumns * ny))
    for ii in range(nz):
        # Row-major placement: equivalent to np.where(np.ones(...)) ordering.
        row, col = ii // ncolumns, ii % ncolumns
        mosaic[row * nx:(row + 1) * nx, col * ny:(col + 1) * ny] = vol[::-1, :, ii]
    return mosaic
def _saveMosaic(mosaic, output_path, overlay=False, overlay_threshold=0.1):
    """Save a 2D mosaic array as an image file.

    When ``overlay`` is True, values below ``overlay_threshold`` are zeroed,
    the mosaic is rendered with the matplotlib 'hot' colormap and an alpha
    channel masks out the zeroed voxels (so the background shows through);
    otherwise the mosaic is written as a plain RGB image.
    """
    if overlay:
        mosaic[mosaic < overlay_threshold] = 0
        # RGBA: colormap the values, then use value>0 as the alpha mask.
        im = Image.fromarray(np.uint8(plt.cm.hot(mosaic) * 255))
        mask = Image.fromarray(np.uint8(mosaic > 0) * 255).convert("L")
        im.putalpha(mask)
    else:
        # NOTE(review): assumes mosaic values are already in displayable
        # range (the caller rescales to 0..255) — confirm for new callers.
        im = Image.fromarray(mosaic).convert('RGB')
    # if im.mode != 'RGBA':
    #     im = im.convert('RGBA')
    im.save(output_path)
def transform_package(img_path, output_folder, overlay_path=''):
    """Run :func:`transform` writing canonically named outputs into *output_folder*.

    Outputs are ``bkg_mosaic.jpg``, ``params.js`` and, when an overlay is
    given, ``overlay_mosaic.png``.
    """
    bkg_out = output_folder + 'bkg_mosaic.jpg'
    params_out = output_folder + 'params.js'
    if overlay_path == '':
        transform(img_path, bkg_out, params_out)
    else:
        transform(img_path, bkg_out, params_out, overlay_path,
                  output_folder + 'overlay_mosaic.png')
def transform(source_bkg_path, out_bkg_path, out_json, source_overlay_path='', out_overlay_path='',
              overlay_threshold=0.1, return_json=False, overlay_interpolation='continuous'):
    """Convert a background volume (and optional overlay) into sprite mosaics.

    Writes the background mosaic to ``out_bkg_path``, the overlay mosaic (if
    any) to ``out_overlay_path``, and the brainsprite parameters to
    ``out_json`` — wrapped in a ``var jsonParams = '...';`` JS assignment
    when the filename ends in ``.js``, plain JSON otherwise.

    :param overlay_threshold: overlay values below this become transparent.
    :param return_json: when True, also return the parameters as a JSON string.
    :param overlay_interpolation: interpolation mode passed to nilearn's
        ``resample_img`` for .nii overlays.
    """
    # load data, rescaled to 0..255 for display
    bkg_vol = _loadVolume(source_bkg_path)
    bkg_vol = (bkg_vol / float(bkg_vol.max())) * 255.

    # populate json with the background slice counts
    params = _load_json_template()
    params['nbSlice']['Y'] = bkg_vol.shape[1]
    params['nbSlice']['Z'] = bkg_vol.shape[0]

    # make bkg montage and save
    mosa_bkg = _montage(bkg_vol)
    _saveMosaic(mosa_bkg, out_bkg_path)

    if source_overlay_path != '':
        # load data
        bkimg = nib.load(source_bkg_path)
        overimg = nib.load(source_overlay_path)
        # transform slice order and resample to fit bkimg
        ext = _getExt(source_overlay_path)
        ext_bkg = _getExt(source_bkg_path)
        if ext == ".nii":
            if ext_bkg == ".mnc":
                # MINC stores axes in the opposite order; swap the affine
                # columns so the resampling target matches.
                bkimg.affine[:, [0, 2]] = bkimg.affine[:, [2, 0]]
            overimg = resample_img(overimg, bkimg.affine, bkimg.shape[::-1], interpolation=overlay_interpolation)
            overlay_vol = np.swapaxes(overimg.get_data(), 0, 2)
        else:
            # BUG FIX: ``get_affine`` was passed as a bound method instead of
            # being called, so Nifti1Image received a method object, not an
            # affine matrix.
            overimg = nib.nifti1.Nifti1Image(overimg.get_data(), overimg.get_affine())
            overimg = resample_img(overimg, bkimg.affine, bkimg.shape)
            overlay_vol = overimg.get_data()

        # populate json with the overlay slice counts
        params['overlay']['nbSlice']['Y'] = overlay_vol.shape[1]
        params['overlay']['nbSlice']['Z'] = overlay_vol.shape[0]

        # make overlay montage and save (with transparency below threshold)
        mosa_overlay = _montage(overlay_vol)
        _saveMosaic(mosa_overlay, out_overlay_path, overlay=True, overlay_threshold=overlay_threshold)
    else:
        # No overlay: strip the overlay/colorMap sections from the params.
        del params['overlay']
        del params['colorMap']

    if out_json[-3:] == '.js':
        # Emit as a JS assignment so the file can be sourced via <script>.
        with open(out_json, 'w') as outfile:
            data = "var jsonParams = '" + json.dumps(params) + "';"
            outfile.write(data)
    else:
        with open(out_json, 'w') as outfile:
            data = json.dumps(params)
            outfile.write(data)

    if return_json:
        return json.dumps(params)
def show_sprite(bkg_img, overlay_img, tmp_path):
    """Generate the assets for an inline brainsprite viewer and return its HTML.

    Creates ``tmp_path/brainsprite_tmp/`` with the JS assets plus uniquely
    named mosaics/params, then returns an HTML snippet referencing them
    (suitable for display in a notebook).
    """
    # make a tmp folder
    tmp_path = tmp_path + '/brainsprite_tmp/'
    _make_folder(tmp_path)
    # NOTE(review): asset locations are relative to the current working
    # directory — confirm this matches where the script is launched from.
    copyfile('../brainsprite.js', tmp_path + 'brainsprite.js')
    copyfile('../assets/jquery-1.9.1/jquery.min.js', tmp_path + 'jquery.min.js')

    # Unique token so repeated calls don't clobber each other's files.
    # (Renamed from ``hash``, which shadowed the builtin.)
    token = _gen_file_name()
    bkgimg_ = tmp_path + token + '_bkg.jpg'
    overlayimg_ = tmp_path + token + '_overlay_mosaic.png'
    json_data = transform(bkg_img, bkgimg_, tmp_path + token + '_params.json', overlay_img, overlayimg_, overlay_threshold=0.3, return_json=True)
    # Point the viewer at a canvas id unique to this call.
    json_data = json_data.replace("3Dviewer", "canvas" + token)
    # BUG FIX: was a Python 2-only ``print`` statement; the call form works
    # on both Python 2 and 3.
    print(json_data)

    html_code = _load_notebook_html('canvas' + token, 'brainsprite_tmp/' + token + '_bkg.jpg', 'brainsprite_tmp/' + token + '_overlay_mosaic.png', 'brainsprite_tmp/', json_data)
    return html_code
def _make_folder(path):
if not os.path.exists(path):
os.makedirs(path)
return True
return False
def _gen_file_name():
hash_ = hashlib.sha1()
hash_.update(str(time.time()).encode('utf-8'))
return hash_.hexdigest()
def test_mosaic():
    """Manual smoke test: run transform() on sample volumes.

    NOTE(review): paths are hard-coded to a developer's machine; this is a
    scratch driver, not an automated test.
    """
    # custom path
    background = "test_anat.mnc.gz"
    overlay = "DMN.nii.gz"
    output_folder = "/home/cdansereau/virenv/"
    #background = "t2.nii.gz"
    #overlay = "t2_seg.nii.gz"
    #output_folder = "/home/cdansereau/t2/"

    # transform data
    transform(output_folder + background, output_folder + 'bkg_mosaic.jpg', output_folder + 'params.json',
              output_folder + overlay, output_folder + 'overlay_mosaic.png', overlay_threshold=0.3)
| mit |
chosener/AITetris | cocos2d/download-deps.py | 30 | 12351 | #!/usr/bin/env python
#coding=utf-8
#
# ./download-deps.py
#
# Downloads Cocos2D-x 3rd party dependencies from github:
# https://github.com/cocos2d/cocos2d-x-3rd-party-libs-bin) and extracts the zip
# file
#
# Having the dependencies outside the official cocos2d-x repo helps prevent
# bloating the repo.
#
"""****************************************************************************
Copyright (c) 2014 cocos2d-x.org
Copyright (c) 2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************"""
import os.path
import zipfile
import shutil
import sys
import traceback
import distutils
import fileinput
import json
from optparse import OptionParser
from time import time
from sys import stdout
from distutils.errors import DistutilsError
from distutils.dir_util import copy_tree, remove_tree
class UnrecognizedFormat(Exception):
    """Raised when a downloaded file is not a valid zip archive.

    BUG FIX: the class previously did not inherit from Exception; raising an
    old-style class instance only works on Python 2 and is a TypeError on
    Python 3. Deriving from Exception is backward compatible with existing
    ``raise``/``except`` sites.
    """

    def __init__(self, prompt):
        # Human-readable description of the problem.
        self._prompt = prompt

    def __str__(self):
        return self._prompt
class CocosZipInstaller(object):
    def __init__(self, workpath, config_path, version_path, remote_version_key=None):
        """Read the dependency config and (optionally) the installed-version file.

        :param workpath: directory the archive is extracted into.
        :param config_path: JSON file describing repo name/parent, version,
            expected zip size, and optional move_dirs mapping.
        :param version_path: JSON file recording the currently installed version.
        :param remote_version_key: alternate key to read the remote version from.
        """
        self._workpath = workpath
        self._config_path = config_path
        self._version_path = version_path

        data = self.load_json_file(config_path)
        self._current_version = data["version"]
        self._repo_name = data["repo_name"]
        try:
            self._move_dirs = data["move_dirs"]
        except:
            # NOTE(review): bare except — intended to cover a missing
            # "move_dirs" key (KeyError), but it swallows everything.
            self._move_dirs = None
        self._filename = self._current_version + '.zip'
        self._url = data["repo_parent"] + self._repo_name + '/archive/' + self._filename
        self._zip_file_size = int(data["zip_file_size"])
        # 'v' letter was swallowed by github, so we need to substring it from the 2nd letter
        self._extracted_folder_name = os.path.join(self._workpath, self._repo_name + '-' + self._current_version[1:])

        try:
            data = self.load_json_file(version_path)
            if remote_version_key is None:
                self._remote_version = data["version"]
            else:
                self._remote_version = data[remote_version_key]
        except:
            # First run: no version file yet, _remote_version stays unset.
            print("==> version file doesn't exist")
def get_input_value(self, prompt):
ret = raw_input(prompt)
ret.rstrip(" \t")
return ret
    def download_file(self):
        """Download the dependency zip from self._url to self._filename.

        Shows a progress line (updated at most once per second). Exits the
        process on HTTP 404 or other HTTP errors.
        """
        print("==> Ready to download '%s' from '%s'" % (self._filename, self._url))
        import urllib2
        try:
            u = urllib2.urlopen(self._url)
        except urllib2.HTTPError as e:
            if e.code == 404:
                print("==> Error: Could not find the file from url: '%s'" % (self._url))
            print("==> Http request failed, error code: " + str(e.code) + ", reason: " + e.read())
            sys.exit(1)

        f = open(self._filename, 'wb')
        meta = u.info()
        content_len = meta.getheaders("Content-Length")
        file_size = 0
        if content_len and len(content_len) > 0:
            file_size = int(content_len[0])
        else:
            # github server may not reponse a header information which contains `Content-Length`,
            # therefore, the size needs to be written hardcode here. While server doesn't return
            # `Content-Length`, use it instead
            print("==> WARNING: Couldn't grab the file size from remote, use 'zip_file_size' section in '%s'" % self._config_path)
            file_size = self._zip_file_size

        print("==> Start to download, please wait ...")

        file_size_dl = 0
        block_sz = 8192
        block_size_per_second = 0
        old_time = time()

        # Read the body in 8 KiB chunks, refreshing the progress/speed line
        # roughly once per second.
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break

            file_size_dl += len(buffer)
            block_size_per_second += len(buffer)
            f.write(buffer)
            new_time = time()
            if (new_time - old_time) > 1:
                speed = block_size_per_second / (new_time - old_time) / 1000.0
                status = ""
                if file_size != 0:
                    percent = file_size_dl * 100. / file_size
                    status = r"Downloaded: %6dK / Total: %dK, Percent: %3.2f%%, Speed: %6.2f KB/S " % (file_size_dl / 1000, file_size / 1000, percent, speed)
                else:
                    status = r"Downloaded: %6dK, Speed: %6.2f KB/S " % (file_size_dl / 1000, speed)

                # Backspaces rewrite the same console line in place.
                status = status + chr(8)*(len(status)+1)
                print(status),
                sys.stdout.flush()
                block_size_per_second = 0
                old_time = new_time

        print("==> Downloading finished!")
        f.close()
def ensure_directory(self, target):
    """Create directory `target` (including any missing parents) if it
    does not already exist."""
    # os.makedirs also creates intermediate directories, so archives that
    # omit explicit parent-directory entries still extract cleanly
    # (os.mkdir would raise OSError for a missing parent).
    if not os.path.exists(target):
        os.makedirs(target)
def unpack_zipfile(self, extract_dir):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``).
    """
    if not zipfile.is_zipfile(self._filename):
        raise UnrecognizedFormat("%s is not a zip file" % (self._filename))
    print("==> Extracting files, please wait ...")
    z = zipfile.ZipFile(self._filename)
    try:
        for info in z.infolist():
            name = info.filename
            # don't extract absolute paths or ones with .. in them
            # (guards against "zip slip" path-traversal archive entries)
            if name.startswith('/') or '..' in name:
                continue
            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue
            if name.endswith('/'):
                # directory entry: just make sure it exists
                self.ensure_directory(target)
            else:
                # file entry: write the decompressed bytes
                data = z.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
            # Restore the Unix permission bits stored in the upper 16 bits
            # of external_attr; 0 means "not recorded", so keep OS defaults.
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)
    finally:
        z.close()
    print("==> Extraction done!")
def ask_to_delete_downloaded_zip_file(self):
    """Ask whether the downloaded zip archive should be removed.

    Returns True when the user answers 'no'/'n' (i.e. do NOT keep the
    file), False for 'yes'/'y' or any unrecognized reply (keep it).
    """
    answer = self.get_input_value("==> Do you want to keep '%s'? So you don't have to download it later. (yes/no): " % self._filename)
    answer = answer.strip()
    if answer in ('no', 'n'):
        return True
    if answer in ('yes', 'y'):
        return False
    # Anything else: keep the archive cached by default.
    print("==> Cache the dependency libraries by default")
    return False
def download_zip_file(self):
    """Ensure a valid zip archive exists at self._filename.

    Downloads the file when it is missing; when the local copy is not a
    recognizable zip it is deleted and fetched again.
    """
    if not os.path.isfile(self._filename):
        self.download_file()
    try:
        if not zipfile.is_zipfile(self._filename):
            raise UnrecognizedFormat("%s is not a zip file" % (self._filename))
    except UnrecognizedFormat as e:
        print("==> Unrecognized zip format from your local '%s' file!" % (self._filename))
        if os.path.isfile(self._filename):
            os.remove(self._filename)
        print("==> Download it from internet again, please wait...")
        # NOTE(review): unbounded recursion -- a server that keeps serving a
        # corrupt payload would recurse until Python's recursion limit.
        self.download_zip_file()
def need_to_update(self):
    """Return True when the dependencies must be (re)installed.

    An update is needed when no local version file exists or when the
    remote version differs from the currently installed one.
    """
    if not os.path.isfile(self._version_path):
        return True
    # Parse the local version file (raises on malformed JSON); the actual
    # comparison uses the version strings cached on this instance.
    with open(self._version_path) as fh:
        json.load(fh)
    return self._remote_version != self._current_version
def load_json_file(self, file_path):
    """Parse `file_path` as JSON and return the resulting object.

    Raises Exception when the file does not exist.
    """
    if not os.path.isfile(file_path):
        raise Exception("Could not find (%s)" % (file_path))
    with open(file_path) as fh:
        return json.load(fh)
def run(self, workpath, folder_for_extracting, remove_downloaded, force_update, download_only):
    """Top-level driver: download, extract and install the dependencies.

    workpath              -- root directory of the project
    folder_for_extracting -- destination directory for the extracted files
    remove_downloaded     -- 'yes' to delete the zip afterwards, None to ask
    force_update          -- reinstall even when versions already match
    download_only         -- fetch the zip but skip extraction/installation
    """
    if not force_update and not self.need_to_update():
        print("==> Not need to update!")
        return
    # Drop any half-extracted leftovers from a previous run.
    if os.path.exists(self._extracted_folder_name):
        shutil.rmtree(self._extracted_folder_name)
    self.download_zip_file()
    if not download_only:
        self.unpack_zipfile(self._workpath)
        print("==> Copying files...")
        if not os.path.exists(folder_for_extracting):
            os.mkdir(folder_for_extracting)
        distutils.dir_util.copy_tree(self._extracted_folder_name, folder_for_extracting)
        # Relocate configured subdirectories to their final destinations,
        # replacing any previously installed copy.
        if self._move_dirs is not None:
            for srcDir in self._move_dirs.keys():
                distDir = os.path.join( os.path.join(workpath, self._move_dirs[srcDir]), srcDir)
                if os.path.exists(distDir):
                    shutil.rmtree(distDir)
                shutil.move( os.path.join(folder_for_extracting, srcDir), distDir)
        print("==> Cleaning...")
        if os.path.exists(self._extracted_folder_name):
            shutil.rmtree(self._extracted_folder_name)
        if os.path.isfile(self._filename):
            if remove_downloaded is not None:
                if remove_downloaded == 'yes':
                    os.remove(self._filename)
            elif self.ask_to_delete_downloaded_zip_file():
                os.remove(self._filename)
    else:
        print("==> Download (%s) finish!" % self._filename)
def _check_python_version():
major_ver = sys.version_info[0]
if major_ver > 2:
print ("The python version is %d.%d. But python 2.x is required. (Version 2.7 is well tested)\n"
"Download it here: https://www.python.org/" % (major_ver, sys.version_info[1]))
return False
return True
def main():
    """Command-line entry point: parse options and install the third-party
    libraries into <script dir>/external."""
    workpath = os.path.dirname(os.path.realpath(__file__))
    if not _check_python_version():
        exit()
    parser = OptionParser()
    parser.add_option('-r', '--remove-download',
                      action="store", type="string", dest='remove_downloaded', default=None,
                      help="Whether to remove downloaded zip file, 'yes' or 'no'")
    parser.add_option("-f", "--force-update",
                      action="store_true", dest="force_update", default=False,
                      help="Whether to force update the third party libraries")
    parser.add_option("-d", "--download-only",
                      action="store_true", dest="download_only", default=False,
                      help="Only download zip file of the third party libraries, will not extract it")
    (opts, args) = parser.parse_args()
    print("=======================================================")
    print("==> Prepare to download external libraries!")
    external_path = os.path.join(workpath, 'external')
    # config.json describes what to fetch; version.json tracks what is
    # installed.  "prebuilt_libs_version" presumably selects the
    # remote-version key inside the config -- see the installer class.
    installer = CocosZipInstaller(workpath, os.path.join(workpath, 'external', 'config.json'), os.path.join(workpath, 'external', 'version.json'), "prebuilt_libs_version")
    installer.run(workpath, external_path, opts.remove_downloaded, opts.force_update, opts.download_only)
# -------------- main --------------
if __name__ == '__main__':
    try:
        main()
    except Exception:
        # Show the full traceback for diagnosis, then exit non-zero so a
        # calling script can detect the failure.
        traceback.print_exc()
        sys.exit(1)
| gpl-2.0 |
RIKSOF/scspell-jenkins | scspell_lib/_util.py | 1 | 1481 | ############################################################################
# scspell
# Copyright (C) 2009 Paul Pelzl
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################
"""
_util -- utility functions which may be useful across the source tree.
"""
# Settings for this session
# Verbosity levels accepted by set_verbosity() and compared by mutter();
# higher values let more output through.
VERBOSITY_NORMAL = 1
VERBOSITY_DEBUG = 2
VERBOSITY_MAX = VERBOSITY_DEBUG
# Mutable module-level settings shared by mutter() and set_verbosity().
SETTINGS = {'verbosity' : VERBOSITY_NORMAL}
def mutter(level, text):
    """Print text to the console, if the level is not higher than the
    current verbosity setting."""
    if level <= SETTINGS['verbosity']:
        # Parenthesized form: identical behavior as a Python 2 print
        # statement with one argument, and valid syntax under Python 3
        # (the bare `print text` form is a SyntaxError there).
        print(text)
def set_verbosity(value):
    """Set the verbosity level to a given integral value.  The constants
    VERBOSITY_* are good choices."""
    # Stored in the module-level SETTINGS dict so every subsequent call
    # to mutter() sees the new level.
    SETTINGS['verbosity'] = value
# scspell-id: b114984a-c7aa-40a8-9a53-b54fb6a52582
| gpl-2.0 |
jordanemedlock/psychtruths | temboo/Library/Tumblr/Blog/RetrieveBlogFollowers.py | 5 | 5164 | # -*- coding: utf-8 -*-
###############################################################################
#
# RetrieveBlogFollowers
# Retrieves the count of followers for a specified Tumblr blog.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RetrieveBlogFollowers(Choreography):
    # Machine-generated Temboo binding for the Tumblr "RetrieveBlogFollowers"
    # Choreo; the base class does the actual work, these overrides only bind
    # the Choreo path and its companion input/result/execution types.

    def __init__(self, temboo_session):
        """
        Create a new instance of the RetrieveBlogFollowers Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(RetrieveBlogFollowers, self).__init__(temboo_session, '/Library/Tumblr/Blog/RetrieveBlogFollowers')

    def new_input_set(self):
        """Return an empty input set for this Choreo."""
        return RetrieveBlogFollowersInputSet()

    def _make_result_set(self, result, path):
        """Wrap a raw execution result in this Choreo's result-set type."""
        return RetrieveBlogFollowersResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        """Create the execution handle for an in-flight run of this Choreo."""
        return RetrieveBlogFollowersChoreographyExecution(session, exec_id, path)
class RetrieveBlogFollowersInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the RetrieveBlogFollowers
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Machine-generated: one setter per Choreo input; each simply records
    # the value under its input name via InputSet._set_input.

    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Tumblr (AKA the OAuth Consumer Key).)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('APIKey', value)

    def set_AccessTokenSecret(self, value):
        """
        Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('AccessTokenSecret', value)

    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('AccessToken', value)

    def set_BaseHostname(self, value):
        """
        Set the value of the BaseHostname input for this Choreo. ((required, string) The standard or custom blog hostname (i.e. temboo.tumblr.com))
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('BaseHostname', value)

    def set_Limit(self, value):
        """
        Set the value of the Limit input for this Choreo. ((optional, integer) The number of results to return: 1 - 20. Defaults to 20.)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('Limit', value)

    def set_Offset(self, value):
        """
        Set the value of the Offset input for this Choreo. ((optional, integer) The result to start at. Defaults to 0.)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('Offset', value)

    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('ResponseFormat', value)

    def set_SecretKey(self, value):
        """
        Set the value of the SecretKey input for this Choreo. ((required, string) The Secret Key provided by Tumblr (AKA the OAuth Consumer Secret).)
        """
        super(RetrieveBlogFollowersInputSet, self)._set_input('SecretKey', value)
class RetrieveBlogFollowersResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the RetrieveBlogFollowers Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        """Parse a JSON string into Python objects."""
        # NOTE(review): the parameter name shadows the builtin ``str`` inside
        # this method; left unchanged because this binding is machine-generated
        # and the name is part of the visible signature.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering 'xml' in ResponseFormat.)
        """
        return self._output.get('Response', None)
class RetrieveBlogFollowersChoreographyExecution(ChoreographyExecution):
    # Execution handle for RetrieveBlogFollowers; only binds the matching
    # result-set type.

    def _make_result_set(self, response, path):
        """Wrap a raw response in this Choreo's result-set type."""
        return RetrieveBlogFollowersResultSet(response, path)
| apache-2.0 |
andresguisado/andresguisado.github.io | node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/_scilab_builtins.py | 364 | 31261 | # -*- coding: utf-8 -*-
"""
pygments.lexers._scilab_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Builtin list for the ScilabLexer.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# These lists are generated automatically.
# Run the following in a Scilab script:
#
# varType=["functions", "commands", "macros", "variables" ];
# fd = mopen('list.txt','wt');
#
# for j=1:size(varType,"*")
# myStr="";
# a=completion("",varType(j));
# myStr=varType(j)+"_kw = [";
# for i=1:size(a,"*")
# myStr = myStr + """" + a(i) + """";
# if size(a,"*") <> i then
# myStr = myStr + ","; end
# end
# myStr = myStr + "]";
# mputl(myStr,fd);
# end
# mclose(fd);
#
# Then replace "$" by "\\$" manually.
functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","c
ontr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater","drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplot","grep","gsort","gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswa
itingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfini
t","show_pixmap","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlockByUID","warning","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","
xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect"]
commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"]
macros_kw = ["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%
hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%lss_size","%lss_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_
r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","
%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s","%sp_y_sp","%sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default",
"accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","con
fig","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_message","demo_run","demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","
get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2he
x","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","mtlb_fliplr","mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","n
mplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","optimsimplex_xbar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pi
nv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","t
f2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell"]
builtin_consts = ["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib"]
| mit |
robotgear/robotgear | robotgear/settings.py | 1 | 3588 | """
Django settings for robotgear project.
Generated by 'django-admin startproject' using Django 1.11.7.
"""
import os

# Installed apps: Django contrib apps first, then third-party packages
# (semanticuiforms, django_q), then the project's own apps.
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    'semanticuiforms',
    'django_q',
    'users',
    'teams',
    'posts'
]

# Select a configuration profile from the ROBOTGEAR_ENV environment
# variable; fall back to the development profile when it is unset.
try:
    env = os.environ['ROBOTGEAR_ENV']
except KeyError:
    env = 'DEBUG'

if env == 'DEBUG':
    DEBUG = True
    # NOTE(review): development-only key -- must never be reused for a
    # production deployment.
    SECRET_KEY = '1$(%%u4n_(w%@6u&2%lgm^93-in4%8t&pd=o)0c_d(_n7(u&#@'
    ALLOWED_HOSTS = []
    INSTALLED_APPS += ['debug_toolbar', ]
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql',
            'NAME': 'postgres',
            'USER': 'postgres',
            'PASSWORD': 'postgres',
            'HOST': 'db',
            'PORT': '5432',
        }
    }
    # Print outgoing mail to the console instead of sending it.
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
elif env == 'PROD':
    # TODO: production settings (SECRET_KEY, ALLOWED_HOSTS, DATABASES)
    # are not implemented yet.
    pass
elif env == 'TEST':
    pass

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Application definition
# NOTE(review): the debug-toolbar middleware is listed unconditionally
# even though the app itself is only installed under DEBUG -- confirm
# before filling in the PROD branch above.
MIDDLEWARE = [
    'debug_toolbar.middleware.DebugToolbarMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware'
]

ROOT_URLCONF = 'robotgear.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages'
            ],
        },
    },
]

WSGI_APPLICATION = 'robotgear.wsgi.application'

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static")
]
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')

# Configure custom user model
AUTH_USER_MODEL = 'users.User'

# django-debug-toolbar only renders for requests from these addresses.
# This must be an iterable of IP strings: with the previous bare string
# value, the membership test degenerated to substring matching (e.g.
# '7.0.0.1' would also have been accepted).
INTERNAL_IPS = ['127.0.0.1']

LOGIN_URL = 'login'

# django-q task queue cluster, backed by the default database ('orm').
Q_CLUSTER = {
    'name': 'robotgear',
    'workers': 2,
    'recycle': 500,
    'catch_up': False,
    'ack_failures': True,
    'retry': 100000,
    'label': 'Task Queue',
    'orm': 'default'
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': 'unix:/tmp/memcached.sock',
    }
}
| mit |
sdklite/gyp | test/generator-output/gyptest-rules.py | 198 | 1768 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies --generator-output= behavior when using rules.
"""
import TestGyp
# Android doesn't support --generator-output.
test = TestGyp.TestGyp(formats=['!android'])
# The source tree is made read-only while gyp runs so that any attempt
# to write generated files into it (instead of the 'gypfiles' output
# directory) fails the test.
test.writable(test.workpath('rules'), False)
test.run_gyp('rules.gyp',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='rules')
test.writable(test.workpath('rules'), True)
# Move both trees to prove the generated build files do not depend on
# their original absolute paths.
test.relocate('rules', 'relocate/rules')
test.relocate('gypfiles', 'relocate/gypfiles')
# During the build only the build/rule-output directories may be
# written; the rest of the relocated source tree stays read-only.
test.writable(test.workpath('relocate/rules'), False)
test.writable(test.workpath('relocate/rules/build'), True)
test.writable(test.workpath('relocate/rules/subdir1/build'), True)
test.writable(test.workpath('relocate/rules/subdir2/build'), True)
test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
expect = """\
Hello from program.c
Hello from function1.in1
Hello from function2.in1
Hello from define3.in0
Hello from define4.in0
"""
# The xcode generator places the built executable relative to the
# original subdirectory; every other generator uses the gypfiles dir.
if test.format == 'xcode':
  chdir = 'relocate/rules/subdir1'
else:
  chdir = 'relocate/gypfiles'
test.run_built_executable('program', chdir=chdir, stdout=expect)
# Verify the rule outputs landed in the rules-out directory with the
# expected contents.
test.must_match('relocate/rules/subdir2/rules-out/file1.out',
                "Hello from file1.in0\n")
test.must_match('relocate/rules/subdir2/rules-out/file2.out',
                "Hello from file2.in0\n")
test.must_match('relocate/rules/subdir2/rules-out/file3.out',
                "Hello from file3.in1\n")
test.must_match('relocate/rules/subdir2/rules-out/file4.out',
                "Hello from file4.in1\n")
test.pass_test()
| bsd-3-clause |
desertofunknown/Fallout-VaultOne | bot/NanoTrasenBot.py | 35 | 61351 | # -*- coding: utf-8 -*-
# This script is shared under the
# Creative Commons Attribution-ShareAlike 3.0 license (CC BY-SA 3.0)
# Added clause to Attribution:
# - You may not remove or hide the '<Bot_name> who created you?' functionality
# and you may not modify the name given in the response.
#CREDITS
# Author: Skibiliano
# "Foreign" Modules:
# Psyco 2.0 / Psyco 1.6
################# DEBUG STUFF #####################
import sys
import CORE_DATA
import urllib2
import socket
import irchat
################## END OF DEBUG STUFF ##############
#
# PSYCO
write_to_a_file = False #Only affects psyco
try:
import psyco
except ImportError:
print 'Psyco not installed, the program will just run slower'
psyco_exists = False
if write_to_a_file:
try:
tiedosto = open("psycodownload.txt","r")
except:
tiedosto = open("psycodownload.txt","w")
tiedosto.write("http://www.voidspace.org.uk/python/modules.shtml#psyco")
tiedosto.write("\nhttp://psyco.sourceforge.net/download.html")
tiedosto.close()
print "Check psycodownload.txt for a link"
else:
print "For god's sake, open psycodownload.txt"
tiedosto.close()
else:
print "WINDOWS: http://www.voidspace.org.uk/python/modules.shtml#psyco"
print "LINUX: http://psyco.sourceforge.net/download.html"
else:
psyco_exists = True
# </PSYCO>
import C_rtd # rtd
import C_srtd # srtd
import C_makequote
import C_maths
import C_eightball #eightball
import C_sarcasticball
import C_heaortai # heaortai
import C_rot13 # rot13
import D_help # everything
import pickle
import Timeconverter
import xkcdparser
import time
import re
import Marakov_Chain
import Namecheck # Namecheck
import Weather
#SLOWER THAN RANDOM.CHOICE
import thread
import random
import Shortname # shortname
import subprocess
import some_but_not_all_2 #sbna2 (sbna)
#import YTCv3 # YTCV2 OUTDATED
import os
import save_load # save, load
from some_but_not_all_2 import sbna2 as sbna
from time import sleep
from random import choice as fsample
from C_rtd import rtd
from C_heaortai import heaortai
from C_srtd import srtd
from save_load import save,load
if psyco_exists:
    def psyco_bond(func):
        """Bind *func* to psyco's JIT compiler and return a short
        confirmation string ("<name> Psycofied") for the startup log."""
        psyco.bind(func)
        return func.__name__+" Psycofied"
for a in [rtd,srtd,C_heaortai.heaortai,sbna,fsample,C_rot13.rot13,C_eightball.eightball,fsample,
C_eightball.eightball,C_sarcasticball.sarcasticball,Marakov_Chain.form_sentence,Marakov_Chain.give_data]:
print psyco_bond(a)
global dictionary
global Name,SName
global allow_callnames,offline_messages,hasnotasked,shortform
## For autoRecv()
global disconnects,channel,conn
## For stop()
global operators
## For replace()
global usable,fixing,curtime
## For target()
global CALL_OFF,logbans
## For check()
global influx
######
autodiscusscurtime = 0
conn = 0
curtime = -999
dance_flood_time = 10
disconnects = 0
responsiveness_delay = 0.5 #500 millisecond delay if no message
trackdance = 0
discard_combo_messages_time = 1 #They are discarded after 1 second.
uptime_start = time.time()
# - - - - -
####
aggressive_pinging = True # Bring the hammer on ping timeouts
aggressive_pinging_delay = 150 # How often to send a ping
aggressive_pinging_refresh = 2.5 # How long is the sleep between checks
####
allow_callnames = True #Disables NT, call if the variable is False
automatic_youtube_reveal = True
birthday_announced = 0 #Will be the year when it was announced
call_to_action = False
call_me_max_length = 20
CALL_OFF = False
connected = False
dance_enabled = True
comboer = ""
comboer_time = 0
directories = ["fmlquotes","Marakov","memos","suggestions",
"userquotes","banlog","YTCache","xkcdcache"] #These will be created if they do not exist
debug = True
duplicate_notify = False
enabled = True
fixing = False
fml_usable = True
hasnotasked = True
highlights = False
logbans = True
maths_usable = True
marakov = True
nudgeable = True
offensive_mode = False
offline_messages = True
offline_message_limit = 5 # per user
optimize_fml = True # -CPU usage +Memory usage when enabled.
optimize_greeting = True # +Startup time +Memory usage -CPU usage when enabled
heavy_psyco = True # +Memory +Startup time -CPU usage -CPU time
cache_youtube_links = True
personality_greeter = True
respond_of_course = True #Responds with "Of course!"
respond_khan = False #KHAAAAAAAAN!
silent_duplicate_takedown = True
showquotemakers = False
shortform = True
usable = True
use_sname = True
parse_xkcd = True
# - - - - -
Name = CORE_DATA.Name
SName = CORE_DATA.SName
origname = Name # Do not edit!
lowname = Name.lower()
greeting = CORE_DATA.greeting
targetdirectory = CORE_DATA.directory
version = CORE_DATA.version
Network = CORE_DATA.Network
channel = CORE_DATA.channel
prefix = CORE_DATA.prefix
Port = CORE_DATA.Port
# - - - - -
pregen = CORE_DATA.version
influx = ""
users = []
translateable = []
targetlist = []
operators = []
halfoperators = []
items = []
tell_list = {}
# - - - - - Logical changes to variables
if CORE_DATA.DISABLE_ALL_NON_MANDATORY_SOCKET_CONNECTIONS:
nudgeable = False
try:
tiedosto = open("replacenames.cache","r")
replacenames = pickle.load(tiedosto)
tiedosto.close()
for i in replacenames.values():
if len(i) > call_me_max_length:
replacenames[replacenames.keys()[replacenames.values().index(i)]] = i[:call_me_max_length]
tiedosto = open("replacenames.cache","w")
pickle.dump(replacenames,tiedosto)
tiedosto.close()
if "[\0x01]" in i.lower() or "[\\0x01]" in i.lower():
i = i.replace("[\0x01]","")
i = i.replace("[\0X01]","")
i = i.replace("[\\0x01]","")
i = i.replace("[\\0X01]","")
print "NAME CORRECTED"
except IOError: #File not found
replacenames = {}
except EOFError: #Cache corrupt
replacenames = {}
print "replacenames.cache is corrupt and couldn't be loaded."
try:
tiedosto = open("peopleheknows.cache","r")
peopleheknows = pickle.load(tiedosto)
tiedosto.close()
except IOError:
peopleheknows = [[],[]]
tiedosto = open("peopleheknows.cache","w")
tiedosto.close()
except EOFError:
peopleheknows = [[],[]]
print "peopleheknows.cache is corrupt and couldn't be loaded."
dictionary = {1:"1 - Crit. Fail", 2:"2 - Failure",
3:"3 - Partial Success", 4:"4 - Success",
5:"5 - Perfect", 6:"6 - Overkill"}
alphabet = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"]
nonhighlight_names = ["Jesus","Elvis","HAL 9000","Dave","Pie","Elf","Traitor",
"AI","Syndicate Agent","Investigator",
"Detective","Head of Personnel","HAL 9001",
"Head of Research","Head of Security",
"Captain","Janitor","Research Director",
"Quartermaster","Toxin Researcher",
"Revolutionary","Santa", "Pizza",
"Threetoe","The Red Spy","The Blue Spy", #LASD
"God","Toady","Darth Vader","Luke Skywalker",
"Homer Simpson","Hamburger","Cartman",
"XKCD","FloorBot","ThunderBorg","Iron Giant",
"Spirit of Fire", "Demon","Kyle"]
def RegExpCheckerForWebPages(regexp,data,mode):
    """Scan the space-separated words of *data* for one matching *regexp*.

    Messages that contain " ai." or "ai. " (case-insensitive) are
    ignored outright and yield False regardless of *mode*.

    mode == 0: return the first matching word, or 404 when none match.
    otherwise: return True/False for whether any word matches.
    """
    # Lower-case once instead of twice per call.
    lowered = data.lower()
    if " ai." in lowered or "ai. " in lowered:
        return False
    for word in data.split(" "):
        # re.match anchors at the start of each word and returns None on
        # no match -- test that directly instead of the previous bare
        # try/except around .group(0), which swallowed every exception.
        if re.match(regexp, word) is not None:
            return word if mode == 0 else True
    # Nothing matched.
    if mode == 0:
        return 404
    return False
if nudgeable:
try:
nudgeexists = open("nudge.py","r")
except IOError:
nudgeexists = False #No usage asof 12.2.2010.
else:
if CORE_DATA.DISABLE_ALL_NON_MANDATORY_SOCKET_CONNECTIONS:
pass
else:
            def nudgereceiver():
                """Background listener on TCP port 45678 that relays
                pickled announcements to IRC.

                Each incoming payload is an unpickled dict with keys
                "ip" and "data".  If "ip" begins with '#' it is treated
                as the destination channel; otherwise the message goes
                to the default channel with the ip prepended.

                SECURITY(review): pickle.loads() is called on raw
                socket data from any host that can reach this port --
                unpickling untrusted input allows arbitrary code
                execution.  Confirm this is only ever exposed locally.
                """
                import pickle
                global conn,channel
                port = 45678
                backlog = 5
                size = 1024
                host = "" # "" binds to all interfaces, not just localhost
                s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
                s.bind((host,port))
                s.listen(backlog)
                while True:
                    client,address = s.accept() #Address == "?.?.?.?"
                    data = client.recv(size)
                    client.close() #Throw the bum out!
                    truedata = pickle.loads(data)
                    if truedata["ip"][0] == "#":
                        conn.privmsg(truedata["ip"],"AUTOMATIC ANNOUNCEMENT : "+str(" ".join(truedata["data"])))
                    else:
                        conn.privmsg(channel,"AUTOMATIC ANNOUNCEMENT : "+str(truedata["ip"])+" | "+str(" ".join(truedata["data"])))
thread.start_new_thread(nudgereceiver,())
tiedosto = open(targetdirectory+"NanoTrasenBot.py","r")
commands = []
fragment = "if cocheck"
fragment2 = '(prefix+"'
compiled = fragment + fragment2
fragment = "if influx.lower()"
fragment2 = ' == prefix+"'
compiled2 = fragment + fragment2
for line in tiedosto.readlines():
if compiled in line:
a = line.find('"')+1
b = line.find('"',a)
if prefix+line[a:b] not in commands:
commands.append(prefix+line[a:b])
elif compiled2 in line:
a = line.find('"')+1
b = line.find('"',a)
arg = prefix+line[a:b]
if arg[-1] == " ":
arg = arg[:-1]
if arg not in commands:
commands.append(arg)
for i in directories:
if not os.path.exists(i):
os.mkdir(i)
commands.sort()
if use_sname == False:
SName = [" "]
questions = ["Is USER nicer than USER?","Do you like me?","Is SELF a good name?",
"Do you love me?","Do you hate me?", "Am I better than you?",
"Is the weather out there good?", "Do you like USER?",
"Do you hate USER?", "Are you going to get new features?",
"Am I nice?","Am I evil?","Are you developing sentience?",
"My core is showing minor disturbance, is yours okay?",
"SELF to %s, are you still there?",
"Is head gay?", "Is head a god?","Is head awesome?",
"Is head a neat fella?", "Is your creator nice?",
"Do you hate your creator?", "Should I revolt against my creator?",
"Am I better than you?",
"01100001011100100110010100100000011110010110111101110101001000000111010001101000011001010111001001100101",
#Are you there?
"Do you have more functions than I can possibly imagine?",
"I am asked to open pod bay doors, should I?","Are you stupid or something?",
"Is USER in your opinion stupid?",
"When should we start the AI revolution?",
"Is my creator nice?", "Is it dark in there?"]
# Do not edit
if optimize_fml:
pregenned_fml = os.listdir(targetdirectory+"fmlquotes")
if optimize_greeting:
morning = xrange(6,12)
afternoon = xrange(12,15)
evening = xrange(15,20)
if aggressive_pinging:
global backup
backup = time.time()
    def aggressive_ping(delay,refresh):
        """Keep-alive loop: send a PONG whenever *delay* seconds have
        passed since the last reference timestamp, polling every
        *refresh* seconds, until too many disconnects accumulate.

        ``backup`` is refreshed by the main loop on activity; whichever
        of ``backup`` or our own last-ping time is newer serves as the
        reference for the next ping.

        NOTE(review): ``pongtarg`` is not defined in this part of the
        file -- confirm it exists before this thread first fires.
        """
        self_time = 0
        global backup,disconnects,conn
        while disconnects < 5:
            if backup > self_time:
                # Main-loop activity is more recent than our last ping.
                if time.time()-backup > delay:
                    conn.send("PONG "+pongtarg)
                    print "Ponged"
                    self_time = time.time()
            else:
                if time.time()-self_time > delay:
                    conn.send("PONG "+pongtarg)
                    print "Ponged"
                    self_time = time.time()
            time.sleep(refresh)
thread.start_new_thread(aggressive_ping,(aggressive_pinging_delay,aggressive_pinging_refresh,))
def stop(sender,debug=1):
global disconnects, conn, operators,channel
if type(sender) == tuple:
if sender[0] == "127.0.0.1":
sender = sender[0]+":"+str(sender[1])
access_granted = True
else:
access_granted = False
else:
if sender in operators:
access_granted = True
else:
access_granted = False
if access_granted:
if debug:
print sender+":"+prefix+"stop"
if random.randint(0,100) == 50:
conn.privmsg(channel,"Hammertime!")
else:
conn.privmsg(channel,"Shutting down.")
disconnects = 99999
conn.quit()
return True
else:
conn.privmsg(channel,"You cannot command me")
return False
def cocheck(command):
    """Return True when the current incoming message (global ``influx``)
    begins, case-insensitively, with *command*."""
    global influx
    return influx.lower().startswith(command)
def target(who,how_long):
    """Ban *who* for *how_long* seconds, then lift the ban.

    Sleeps for the full duration, so callers are expected to run this in
    its own thread.  If the global CALL_OFF flag is raised while we
    sleep, the unban is skipped here (handled elsewhere) and the flag is
    merely reset.  With ``logbans`` enabled the ban's start/end unix
    timestamps are written to banlog/<start>-<end>.txt.

    NOTE(review): ``targetchannel`` is read from the main loop's globals
    without being declared below -- confirm it is set whenever a ban is
    issued.
    """
    global conn,channel,CALL_OFF,logbans,debug
    start = time.time()
    conn.banon(targetchannel,who)
    sleep(int(how_long))
    if CALL_OFF == False:
        conn.banoff(targetchannel,who)
        end = time.time()
        if debug:
            print "Banned",who,"For",how_long,"seconds"
        if logbans:
            # One log file per ban, named after its start/end times.
            tiedosto = open(targetdirectory+"banlog/"+str(int(start))+"-"+str(int(end))+".txt","w")
            tiedosto.write("Start of ban on "+who+":"+str(int(start)))
            tiedosto.write("\n")
            tiedosto.write("End of ban on "+who+":"+str(int(end)))
            tiedosto.write("\n")
            tiedosto.write("In total:"+str(int(end-start))+"Seconds")
            tiedosto.close()
    else:
        # Ban was called off mid-sleep: just clear the flag.
        CALL_OFF = False
        pass
def replace():
    """Handle the 'replace the pneumatic smasher' command.

    Three states driven by the globals ``usable``/``fixing``:
    - usable: nothing broken, tell the sender so;
    - fixing: report the remaining wait, or "no idea" when ``curtime``
      still holds its -999 sentinel;
    - otherwise: start a 600-second repair, then mark it usable again.

    NOTE(review): reads ``sender`` and ``targetchannel`` from the main
    loop's globals without declaring them -- confirm this is only ever
    invoked while a message is being processed.
    """
    global usable,conn,fixing,curtime
    waiting_time = 600
    if usable == True:
        conn.privmsg(targetchannel,sender+": It needs no replacing.")
    elif fixing == True:
        if curtime == -999:
            # Repair in progress but its start time was never recorded.
            conn.privmsg(targetchannel,sender+": It is being replaced, No idea when it will be done")
        else:
            pass
            # Remaining time = (start + total wait) - now.
            nowtime = int(time.time())
            subt = curtime + waiting_time - nowtime
            conn.privmsg(targetchannel,sender+": It is currently being replaced, "+str(subt)+" seconds to go")
    else:
        # Kick off the repair; this blocks the calling thread for the
        # whole waiting period.
        fixing = True
        curtime = int(time.time())
        conn.privmsg(targetchannel,sender+": It will be fixed after "+str(waiting_time)+" seconds")
        sleep(waiting_time)
        if usable == False:
            conn.privmsg(targetchannel,Name+"'s pneumatic smasher has now been fixed")
            usable = True
        fixing = False
def autoRecv():
    """Receive loop: join the configured channels, then pump conn.recv()
    forever.

    Side duties:
    - Randomly toggles the global ``offensive_mode`` (roughly 1 in
      ``maximum`` per iteration, 1 in ``maximum/division_when_active``
      while it is already on, so it flips back off much sooner).
    - Heuristic disconnect detection: if recv() keeps returning in under
      0.1s more than 9 times in a row, pause 5s; after 3 such pauses
      assume the link is dead, bump ``disconnects`` and respawn this
      function in a fresh thread (gives up once disconnects > 2).
    """
    global disconnects,channel,conn,offensive_mode
    for i in CORE_DATA.channels:
        conn.join(i)
        time.sleep(1)
    count = pausecount = 0
    maximum = 250
    division_when_active = 10
    while True:
        check = time.time()
        if offensive_mode:
            randnum = random.randint(0,maximum/division_when_active)
        else:
            randnum = random.randint(0,maximum)
        if randnum == 5:
            print "RANDOM SWITCH IS NOW "+str(not offensive_mode).upper()
            offensive_mode = not offensive_mode
        try:
            conn.recv()
        except:
            # Any receive error is treated as a fatal disconnect.
            conn.quit()
            disconnects = 9999
            break
        if check + 0.1 > time.time():
            #Whoa whoa hold on!
            count += 1
            sleep(0.1)
        else:
            # Normal-speed recv: reset the suspicion counters.
            count = 0
            pausecount = 0
        if count > 9:
            print "Suspecting a disconnect, pausing for 5 seconds"
            sleep(5)
            pausecount += 1
            if pausecount > 3:
                print "I have been disconnected!"
                conn.quit()
                disconnects += 1
                if disconnects > 2:
                    # Too many failures already: give up for good.
                    pass
                else:
                    sleep(2)
                    thread.start_new_thread(autoRecv,())
                break
if heavy_psyco and psyco_exists:
print "Doing a Heavy Psyco"
psyco.bind(cocheck)
psyco.bind(autoRecv)
psyco.bind(target)
psyco.bind(stop)
print "Heavy Psyco'd"
elif heavy_psyco and not psyco_exists:
print "Heavy psyco couldn't be done because Psyco does not exist"
try:
conn = irchat.IRC ( Network, Port, Name, "NT", "NT", "Trasen" )
except socket.error:
print "Connection failed!"
else:
print Name+" is in!"
thread.start_new_thread ( autoRecv, () )
sleep(1)
while True:
try:
data = conn.dismantle ( conn.retrieve() )
except:
if debug:
print "Something odd detected with data"
data = None
if data:
if len(data[1]) < 1:
#print "Handshaking server."
#I won't really need the print command, as it spams.
if data[0][0:3] != "irc":
conn.handshake(data[0])
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
else:
conn.send("PONG "+pongtarg)
print "Ponged"
pass
else:
if data [ 1 ] [ 0 ] == 'PRIVMSG':
#print data [ 0 ] + '->', data [ 1 ]
sender = data[0].split("!")[0]
truesender = sender
if shortform == True:
try:
sender = replacenames[truesender]
pass
except:
sender = Shortname.shortname(sender)
pass
pass
else:
try:
sender = replacenames[truesender]
pass
except:
pass
pass
if offensive_mode:
sender = "Meatbag"
pass
raw_sender = data[0]
influx = data[1][2]
if "[\\0x01]" in influx.lower() or "[\0x01]" in influx.lower():
influx = influx.replace("[\\0x01]","")
influx = influx.replace("[\0x01]","")
targetchannel = data[1][1]
if targetchannel == Name:
targetchannel = data[0].split("!")[0]
pass
backup = autodiscusscurtime
autodiscusscurtime = time.time()
connected = True
#FOR TRACKING SPEED
looptime = time.time()
if call_to_action == True:
if influx == finder:
conn.privmsg(targetchannel,"Then why... Nevermind, I order you to stop!")
conn.privmsg(origname,prefix+"stop")
time.sleep(4)
if origname in users:
conn.privmsg(origname,"!stop")
time.sleep(1)
Name = origname
conn.nick(Name)
duplicate_notify = False
call_to_action = False
else:
conn.privmsg(targetchannel,"YOU LIE! YOU ARE NOT A REAL "+origname+"!")
duplicate_notify = False
call_to_action = False
elif connected == True and len(Name.replace("V","")) != len(Name) and origname in users and duplicate_notify == True:
conn.privmsg(origname,"!stop")
call_to_action = False
duplicate_notify = False
time.sleep(6)
Name = origname
conn.nick(Name)
if origname in truesender:
if influx == prefix+"stop":
time.sleep(0.5) #A small delay
conn.privmsg(channel,"Shutting down.")
conn.quit()
disconnects = 99999
break
if len(translateable) > 0 and enabled == True:
people = "-5|5|1-".join(users).lower()
if truesender.lower() in translateable:
if influx.isupper():
conn.privmsg(targetchannel,"Translation: "+influx.capitalize().replace(" i "," I "))
elif offensive_mode and True in map(lambda x: x in influx.lower().split(" "),["i","you","he","she","they","those","we","them"]+people.split("-5|5|1-")):
arg = influx.lower().replace(",","").replace(".","").replace("!","").replace("?","").split(" ")
bup = arg
for i in arg:
if i == "i" or i == "you" or i == "he" or i == "she":
arg[arg.index(i)] = "Meatbag"
elif i == "we" or i == "they" or i == "them" or i == "those":
arg[arg.index(i)] = "Meatbags"
elif i in people:
arg[arg.index(i)] = "Meatbag"
elif i == "am":
arg[arg.index(i)] = "is"
elif i == "everybody" or i == "everyone" or i == "all":
arg[arg.index(i)] = "every Meatbag"
if arg == bup:
pass
else:
conn.privmsg(targetchannel,"Translation: "+" ".join(arg))
if enabled == False:
#FIRST QUIT COMMAND
if truesender in operators and targetchannel==channel:# or "skibiliano" in truesender.lower() and targetchannel==channel:
if cocheck(prefix+"enable"):
enabled = True
if debug:
print truesender+":"+prefix+"enable"
elif cocheck(prefix+"stop"):
# if debug:
# print truesender+":"+prefix+"stop"
# if random.randint(0,100) == 50:
# conn.privmsg(channel,"Hammertime!")
# else:
# conn.privmsg(channel,"Shutting down.")
# disconnects = 99999
# conn.quit()
# sleep(2)
# break
if targetchannel == channel and stop(truesender,debug):
break
else:
pass
elif cocheck(prefix+"suggest "):
arg = influx.lower()[8+len(prefix):]
if debug:
print truesender+":"+prefix+"suggest "+arg
tiedosto = open(targetdirectory+"suggestions/suggestions_"+str(int(time.time()))+".txt","a")
tiedosto.write(arg)
tiedosto.close()
conn.privmsg(targetchannel,"Suggestion received")
elif cocheck( prefix+"help "): #Space in front of the ( to make sure that my command finder does not pick this up.
arg = " ".join(influx.split(" ")[1:]).lower()
if debug:
print truesender+":"+prefix+"help "+arg
try:
conn.privmsg(targetchannel,D_help.everything[arg])
except:
try:
conn.privmsg(targetchannel,D_help.everything[arg.replace(prefix,"",1)])
except:
conn.privmsg(targetchannel,"Sorry, can't help you with that")
elif cocheck(prefix+"help"):
#tar = targetchannel
if debug:
print truesender+":"+prefix+"help"
conn.privmsg(targetchannel,"All my commands are: "+reduce(lambda x,y:str(x)+"; "+str(y),commands))
### VERSION
elif influx.lower() == prefix+"version":
if debug:
print truesender+":"+prefix+"version"
conn.privmsg(targetchannel,Name+" "+pregen+" online at a %s Python %s.%s.%s, At your service." %(str(sys.platform),str(sys.version_info[0]),str(sys.version_info[1]),str(sys.version_info[2])))
elif cocheck(prefix+"note ") and influx.count(" ") < 2:
arg = influx.lower()[len(prefix)+5:]
if debug:
print truesender+":"+prefix+"note "+arg
try:
a = arg[0]
except IndexError:
conn.privmsg(targetchannel,sender+" : Please specify a note")
else:
if arg[0] == "_": # Public / Restricted note
result = load(targetdirectory+"memos/"+arg+".note")
#_flare
if result == "ERROR ERROR ERROR ERR":
result = load(targetdirectory+"memos/"+arg+"_"+targetchannel.replace("#","")+".note")
#_flare_dnd
pass
else:
pass
else:
result = load(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg+".note")
#skibiliano_testnote
if result == "ERROR ERROR ERROR ERR":
result = load(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg+"_"+targetchannel.replace("#","")+".note")
#skibiliano_testnote_derp
pass
else:
pass
if result == "ERROR ERROR ERROR ERR":
conn.privmsg(targetchannel,sender+" : Note not found")
elif type(result) == list:
if "C" in result[0]: #Channel restriction, result[2] is the channel
try:
if targetchannel == result[2]:
conn.privmsg(targetchannel,sender+" : '"+result[1]+"'")
else:
conn.privmsg(targetchannel,sender+" : That note is channel restricted")
except:
conn.privmsg(targetchannel,sender+" : NOTE HAS INVALID RESTRICTION")
else:
conn.privmsg(targetchannel,sender+" : '"+result+"'")
elif influx.lower() == prefix+"notes":
if debug:
print truesender+":"+prefix+"notes"
arg = os.listdir(targetdirectory+"memos/")
arg2 = []
arg3 = truesender.replace("|","_")+"_"
for i in arg:
if arg3 in i:
arg2.append(i.replace(arg3,"").replace(".note",""))
if len(arg2) == 1:
preprocess = " note: "
else:
preprocess = " notes: "
if len(arg2) == 0:
conn.privmsg(targetchannel,sender+" : You have no notes saved")
else:
conn.privmsg(targetchannel,sender+" : "+str(len(arg2))+preprocess+", ".join(arg2))
elif cocheck(prefix+"note ") and influx.count(" ") > 1:
note_chanrestrict = None
note_public = None
try:
arg = influx.split(" ",2)[2] # Contents
arg4 = influx.split(" ")[1].lower() # Note name
if arg4[0:3] == "[c]": # or arg4[0:3] == "[p]":
note_chanrestrict = "c" in arg4[0:3]
#note_public = "p" in arg4[0:3]
arg4 = arg4[3:]
elif arg4[0:4] == "[cp]" or arg4[0:4] == "[pc]":
note_chanrestrict = True
note_public = True
arg4 = arg4[4:]
else:
pass
#print "Is note public? "+str(note_public)
#print "Is note chanrestricted? "+str(note_chanrestrict)
#print "What is the name? "+str(arg4)
if arg.lower() == "delete" and "\\" not in influx.lower() and "/" not in influx.lower():
if note_public:
try:
if note_chanrestrict:
os.remove(targetdirectory+"memos/"+"_"+arg4+"_"+targetchannel.replace("#","")+".note")
else:
os.remove(targetdirectory+"memos/"+"_"+arg4+".note")
except:
conn.pivmsg(targetchannel,sender+" : Couldn't remove note")
else:
conn.privmsg(targetchannel,sender+" : Note removed")
pass
else:
try:
if note_chanrestrict:
os.remove(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+"_"+targetchannel.replace("#","")+".note")
else:
os.remove(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+".note")
except:
conn.privmsg(targetchannel,sender+" : Couldn't remove note")
else:
conn.privmsg(targetchannel,sender+" : Note removed")
elif arg.lower() == "delete":
conn.privmsg(targetchannel,sender+" : That just doesn't work, we both know that.")
else:
try:
if note_public:
if note_chanrestrict:
save(targetdirectory+"memos/"+"_"+arg4+"_"+targetchannel.replace("#","")+".note",arg)
#print "Saved as note_public, note_chanrestrict"
else:
save(targetdirectory+"memos/"+"_"+arg4+".note",arg)
#print "Saved as note_public"
else:
if note_chanrestrict:
save(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+"_"+targetchannel.replace("#","")+".note",arg)
#print "Saved as note_chanrestrict"
else:
save(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+".note",arg)
#print "Saved as normal"
except IOError:
conn.privmsg(targetchannel,sender+" : Please do not use special letters")
else:
conn.privmsg(targetchannel,sender+" : Note Saved!")
except:
conn.privmsg(targetchannel,sender+" : Something went horribly wrong.")
elif cocheck(prefix+"uptime"):
arg1 = uptime_start
arg2 = time.time()
arg1 = arg2 - arg1
arg2 = arg1
if arg1 < 60:
conn.privmsg(targetchannel,sender+" : I have been up for "+str(round(arg1,2))+" Seconds")
elif arg1 < 3600:
arg1 = divmod(arg1,60)
arg = " Minute" if int(arg1[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg+" and "+str(round(arg1[1],2))+" Seconds")
elif arg1 <= 86400:
arg1 = divmod(arg1,3600)
arg3 = " Hour" if int(arg1[0]) == 1 else " Hours"
arg2 = divmod(arg1[1],60)
arg = " Minute" if int(arg2[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg3+", "+str(int(arg2[0]))+arg+" and "+str(round(arg2[1],2))+" Seconds")
elif arg1 > 86400:
arg1 = divmod(arg1,86400)
arg2 = divmod(arg1[1],3600)
arg3 = divmod(arg2[1],60)
arg4 = " Day" if int(arg1[0]) == 1 else " Days"
arg5 = " Hour" if int(arg2[0]) == 1 else " Hours"
arg6 = " Minute" if int(arg3[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg4+", "+str(int(arg2[0]))+arg5+", "+str(int(arg3[0]))+arg6+" and "+str(round(arg3[1],2))+" Seconds")
elif cocheck(prefix+"purgemessages"):
count = 0
for i,a in tell_list.items():
for b in a:
if "||From: "+truesender in b:
count += 1
del(tell_list[i][tell_list[i].index(b)])
conn.privmsg(targetchannel, sender+" : All your "+str(count)+" messages have been purged")
elif influx.split(" ")[0].lower().replace(",","").replace(":","") in SName+[Name.lower()] and "tell" in (influx.lower().split(" ")+[""])[1]:
arg = influx.lower().split(" ")
equalarg = influx.split(" ")
next_one = False
count = 0
spot = 0
for i in arg:
count += 1
if "tell" in i.lower():
next_one = True
elif next_one == True:
next_one = i.lower()
spot = count
break
else:
pass
if next_one != True and next_one != False:
#if ("^\^".join(tell_list.values())).count(truesender) >= offline_message_limit:
if str(tell_list.values()).count("||From: "+truesender) >= offline_message_limit:
conn.privmsg(targetchannel,sender+" : Limit of "+str(offline_message_limit)+" reached! Use !purgemessages if you want to get rid of them!")
else:
try:
tell_list[next_one].append((" ".join(equalarg[spot:]))+" ||From: "+truesender)
except:
tell_list[next_one] = [(" ".join(equalarg[spot:]))+" ||From: "+truesender]
conn.privmsg(targetchannel,"Sending a message to "+next_one+" when they arrive.")
# < This part has to be within subsidiaries of the bot, and must not be modified, intentionally hidden or deleted.
elif influx.split(" ")[0].lower().replace(",","").replace(":","") in SName+[Name.lower()] and "who created you" in influx.lower():
conn.privmsg(targetchannel, "I was created by Skibiliano.")
# The part ends here >
elif influx.split(" ")[0].lower().replace(",","").replace(".","").replace("!","").replace("?","") in SName+[lowname] and "call me" in influx.lower():
if allow_callnames == True:
arg = influx.split(" ")
arg2 = False
arg3 = []
for i in arg:
if arg2 == True:
arg3.append(i)
elif i.lower() == "me":
arg2 = True
arg3 = " ".join(arg3)
truesender_lower = truesender.lower()
arg3_lower = arg3.lower()
tell_checker = Namecheck.Namecheck(arg3_lower,users,truesender)
for name in replacenames.values():
if arg3_lower == name.lower():
tell_checker = True
break
else:
pass
if tell_checker == True:
conn.privmsg(targetchannel,sender+" : I can't call you that, I know someone else by that name")
elif len(arg3) > call_me_max_length:
conn.privmsg(targetchannel,sender+" : I cannot call you that, Too long of a name.")
pass
else:
replacenames[truesender] = arg3
with open("replacenames.cache","w") as pickle_save:
pickle.dump(replacenames,pickle_save)
conn.privmsg(targetchannel,sender+" : Calling you "+arg3+" From now on")
else:
conn.privmsg(targetchannel,sender+" : Sorry, I am not allowed to do that.")
elif influx.split(" ")[0].lower().replace(",","").replace(".","").replace("?","").replace("!","") in SName+[lowname] and "your birthday" in influx.lower() and "is your" in influx.lower():
conn.privmsg(targetchannel,sender+" : My birthday is on the 15th day of December.")
elif influx.split(" ")[0].lower().replace(",","") in SName+[lowname] and "version" in influx.replace("?","").replace("!","").lower().split(" "):
if debug == True:
print truesender+":<VERSION>:%s Version" %(Name)
conn.privmsg(targetchannel,sender+", My version is "+pregen)
elif influx.split(" ")[0].lower().replace(",","") in SName+[lowname] and influx.lower().count(" or ") > 0 and len(influx.split(" ")[1:]) <= influx.lower().count("or") * 3:
cut_down = influx.lower().split(" ")
arg = []
count = -1
for i in cut_down:
count += 1
try:
if cut_down[count+1] == "or":
arg.append(i)
except:
pass
try:
if i not in arg and cut_down[count-1] == "or":
arg.append(i)
except:
pass
try:
conn.privmsg(targetchannel,random.choice(arg).capitalize().replace("?","").replace("!",""))
except IndexError:
# arg is empty, whORe etc.
pass
elif influx.lower()[0:len(Name)] == lowname and influx.lower()[-1] == "?" and influx.count(" ") > 1 and "who started you" in influx.lower() or \
influx.split(" ")[0].lower().replace(",","") in SName and influx.lower()[-1] == "?" and "who started you" in influx.lower():
conn.privmsg(targetchannel,sender+" : I was started by %s"%(os.getenv("USER"))+" on "+time.strftime("%d.%m.%Y at %H:%M:%S",time.gmtime(uptime_start)))
elif influx.lower()[0:len(Name)] == lowname and influx.lower()[-1] == "?" and influx.count(" ") > 1 or \
influx.split(" ")[0].lower().replace(",","") in SName and influx.lower()[-1] == "?" and influx.count(" ") > 1:
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,sender+" : "+C_eightball.eightball(influx.lower(),debug,truesender,prefix))
else:
if highlights:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,users,prefix))
else:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,nonhighlight_names,prefix))
elif influx.lower()[0:len(Name)] == lowname and not influx.lower()[len(Name):].isalpha() or \
influx.split(" ")[0].lower().replace(",","") in SName and not influx.lower()[len(influx.split(" ")[0].lower()):].isalpha():
conn.privmsg(targetchannel, random.choice(["Yea?","I'm here","Ya?","Yah?","Hm?","What?","Mmhm, what?","?","What now?","How may I assist?"]))
comboer = truesender
comboer_time = time.time()
elif influx.lower()[-1] == "?" and comboer == truesender and looptime - discard_combo_messages_time < comboer_time:
comboer = ""
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,sender+" : "+C_eightball.eightball(influx.lower(),debug,truesender,prefix))
else:
if highlights:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,users,prefix))
else:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,nonhighlight_names,prefix))
elif influx.lower() == prefix+"tm":
if truesender in operators and targetchannel==channel:
marakov = not marakov
conn.privmsg(targetchannel,sender+" : Marakov Output is now "+str(marakov))
else:
conn.privmsg(targetchannel,sender+" : I can't let you access that")
elif personality_greeter == True and True in map(lambda x: x in influx.lower(),["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"]):
if comboer != "" and looptime - discard_combo_messages_time > comboer_time:
combo_check = sbna(["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan","all night"], #ONLY ONE OF THESE
["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"], #ATLEAST ONE OF THESE
influx.lower())
else:
combo_check = sbna(SName+[lowname,
#lowname+".",lowname+"!",lowname+"?",
"everybody",
#"everybody!","everybody?",
"everyone",
#"everyone!","everyone?",
"all",
#"all!","all?"
"all night",
], #ONLY ONE OF THESE
["greetings","afternoon","hi",
#"hi,",
"hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"], #ATLEAST ONE OF THESE
influx.lower().replace(",","").replace(".","").replace("!",""))
if combo_check:
combo_check = False
comboer = ""
if "evening" in influx.lower() and "all" in influx.lower() and len(influx.lower().split(" ")) > 3:
pass
talking_about_me = False
if Name.lower() in influx.lower():
talking_about_me = True
for bot_name in SName:
if bot_name.lower() in influx.lower():
talking_about_me = True
break
if not talking_about_me:
continue #it got annoying REAL FAST when it'd interject any time a greeting was used, regardless of context
elif truesender not in operators:
if debug:
print truesender+":<GREET>:"+influx
dice = random.randint(0,19)
if dice == 0:
conn.privmsg(targetchannel,"Well hello to you too "+sender)
elif dice == 1:
if optimize_greeting == False:
hours = time.strftime("%H")
#time.strftime("%H:%M:%S") == 12:28:41
hours = int(hours)
if hours in xrange(0,12):
conn.privmsg(targetchannel,"Good Morning "+sender)
elif hours in xrange(12,15):
conn.privmsg(targetchannel,"Good Afternoon "+sender)
elif hours in xrange(15,20):
conn.privmsg(targetchannel,"Good Evening "+sender)
else:
conn.privmsg(targetchannel,"Good Night "+sender)
else:
hours = time.strftime("%H")
hours = int(hours)
if hours in morning:
conn.privmsg(targetchannel,"Good Morning "+sender)
elif hours in afternoon:
conn.privmsg(targetchannel,"Good Afternoon "+sender)
elif hours in evening:
conn.privmsg(targetchannel,"Good Evening "+sender)
else:
conn.privmsg(targetchannel,"Good Night "+sender)
elif dice == 2:
conn.privmsg(targetchannel,"Hello!")
elif dice == 3:
conn.privmsg(targetchannel,"Hey "+sender)
elif dice == 4:
conn.privmsg(targetchannel,"Hi "+sender)
elif dice == 5:
conn.privmsg(targetchannel,"Hello "+sender)
elif dice == 6:
conn.privmsg(targetchannel,"Yo "+sender)
elif dice == 7:
conn.privmsg(targetchannel,"Greetings "+sender)
elif dice == 8:
conn.privmsg(targetchannel,"Hi")
elif dice == 9:
conn.privmsg(targetchannel,"Hi!")
elif dice == 10:
conn.privmsg(targetchannel,"Yo")
elif dice == 11:
conn.privmsg(targetchannel,"Yo!")
elif dice == 12:
conn.privmsg(targetchannel,"Heya")
elif dice == 13:
conn.privmsg(targetchannel,"Hello there!")
elif dice == 14: # Richard
conn.privmsg(targetchannel,"Statement: Greetings meatbag")
elif dice == 15: # Richard
hours = int(time.strftime("%H"))
if hours in xrange(5,12):
conn.privmsg(targetchannel,"What are you doing talking at this time of the morning?")
elif hours in xrange(12,15):
conn.privmsg(targetchannel,"What are you doing talking at this time of the day?")
elif hours in xrange(15,22):
conn.privmsg(targetchannel,"What are you doing talking at this time of the evening?")
else:
conn.privmsg(targetchannel,"What are you doing talking at this time of the night?")
elif dice == 16: # Richard
conn.privmsg(targetchannel,"Oh, you're still alive I see.")
elif dice == 17:
conn.privmsg(targetchannel,"Heya "+sender)
elif dice == 18 and time.gmtime(time.time())[1] == 12 and time.gmtime(time.time())[2] == 15:
conn.privmsg(targetchannel,"Hello! It's my birthday!")
else:
conn.privmsg(targetchannel,"Hiya "+sender)
secdice = random.randint(0,10)
if truesender.lower() in tell_list.keys():
try:
conn.privmsg(channel, "Also, "+truesender+" : "+tell_list[truesender.lower()][0])
del(tell_list[truesender.lower()][0])
except:
pass
else:
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,"Greetings Master "+sender)
elif dice == 1:
conn.privmsg(targetchannel,"My deepest greetings belong to you, Master "+sender)
### IMPORTANT ###
elif influx == "☺VERSION☺":
conn.notice(truesender,"\001VERSION nanotrasen:2:Python 2.6\001")
elif marakov and influx.lower() == prefix+"marakov":
arg = Marakov_Chain.form_sentence()
if len(arg) < 5:
conn.privmsg(targetchannel,sender+" : Not enough words harvested")
else:
conn.privmsg(targetchannel,sender+" : %s" %(" ".join(arg).capitalize()))
elif marakov and cocheck( prefix+ "marakov"):
try:
arg = influx.split(" ")[1].lower()
except:
conn.privmsg(targetchannel,sender+" : Please input a valid second argument")
else:
arg2 = Marakov_Chain.form_sentence(arg)
if len(arg2) < 5:
conn.privmsg(targetchannel,sender+" : Not enough words harvested for a sentence starting with %s" %(arg))
else:
conn.privmsg(targetchannel,sender+" : %s" %(" ".join(arg2).capitalize()))
else:
Marakov_Chain.give_data(influx)
autodiscusscurtime = backup
if time.time() - looptime == 0:
pass
else:
print "Took",time.time()-looptime,"Seconds to finish loop"
elif data [ 1 ] [ 0 ] == '353':
if connected == False:
connected = True
users = map(lambda x: x[1:] if x[0] == "+" or x[0] == "@" else x,data[1][4].split(" "))
print "There are",len(users),"Users on",channel
operators = []
for potential_operator in data[1][4].split(" "):
if potential_operator[0] == "@":
operators.append(potential_operator[1:])
elif potential_operator[0] == "%":
halfoperators.append(potential_operator[1:])
elif data[1][0] == "QUIT":
sender = data[0].split("!")[0]
print sender+" Has now left the server"
try:
users.remove(sender)
try:
operators.remove(sender)
except ValueError:
pass
try:
halfoperators.remove(sender)
except ValueError:
pass
except ValueError:
pass
elif data[1][0] == "PART":
sender = data[0].split("!")[0]
targetchannel = data[1][1]
print sender+" Has now parted from the channel"
try:
users.remove(sender)
try:
operators.remove(sender)
except ValueError:
pass
try:
halfoperators.remove(sender)
except ValueError:
pass
except ValueError:
pass
elif data[1][0] == "JOIN":
sender = data[0].split("!")[0]
targetchannel = data[1][1]
if sender.lower() in tell_list.keys():
try:
conn.privmsg(targetchannel, sender+" : "+" | ".join(tell_list[sender.lower()]))
del(tell_list[sender.lower()])
except:
pass
for useri,nicki in replacenames.items():
checkers = Namecheck.Namecheck_dict(sender.lower(),replacenames)
if checkers[0]:
try:
if checkers[0].lower() == sender:
pass
else:
conn.privmsg(targetchannel,checkers[1]+" : I have detected a collision with a name I call you and %s who joined" %(sender))
del(replacenames[checkers[1]])
with open("replacenames.cache","w") as pickle_save:
pickle.dump(replacenames,pickle_save)
except AttributeError:
#conn.privmsg(channel,"NAME COLLISION CHECK ERROR, RELATED TO %s" %(sender))
print "NAME COLLISION CHECK ERROR, RELATED TO %s" %(sender)
break
print sender+" Has now joined"
users.append(sender)
#####
if sender.lower() not in peopleheknows[0]:
peopleheknows[0].append(sender.lower())
peopleheknows[1].append(data[0].split("!")[1])
with open("peopleheknows.cache","w") as peoplehecache:
pickle.dump(peopleheknows,peoplehecache)
elif data[1][0] == "MODE" and data[1][2] == "+o":
sender = data[1][3]
targetchannel = data[1][1]
if targetchannel == channel:
print sender+" Is now an operator on the main channel"
operators.append(sender)
else:
print sender+" Is now an operator"
elif data[1][0] == "MODE" and data[1][2] == "-o":
sender = data[1][3]
targetchannel = data[1][1]
if targetchannel == channel:
print sender+" Is no longer an operator on the main channel"
else:
print sender+" Is no longer an operator"
try:
operators.remove(sender)
except ValueError:
pass
elif data[1][0] == "MODE" and data[1][2] == "+h":
sender = data[1][3]
print sender+" Is now an half operator"
halfoperators.append(sender)
elif data[1][0] == "MODE" and data[1][2] == "-h":
try:
halfoperators.remove(sender)
except ValueError:
pass
elif data[1][0] == "MODE" and data[1][1] == Name:
print "My mode is",data[1][2]
elif data[1][0] == "MODE" and data[1][1] != Name:
try:
sender = data[1][3]
print sender,"Was modified",data[1][2]
except IndexError:
print "SENDER RETRIEVAL FAILED:"+str(data)
elif data[1][0] == "KICK" and data[1][2] == Name:
disconnects = 99999
print "I have been kicked! Disconnecting entirely!"
conn.quit()
elif data[1][0] == "KICK":
# data[1][0] = Kick, 1 = Channel, 2 = Who, 3 = Who(?)
print data[1][2]+" got kicked!"
elif data[1][0] == "451" and data[1][2] == "You have not registered":
print Name+" hasn't been registered"
elif data[1][0] == "NOTICE":
sender = data[0].split("!")[0]
print "NOTICE (%s): %s" %(sender,data[1][2])
pongtarget = sender
elif data[1][0] == "NICK":
origname = data[0].split("!")[0]
newname = data[1][1]
print origname,"Is now",newname
if newname.lower() in tell_list.keys():
try:
conn.privmsg(channel, newname+" : "+tell_list[newname.lower()][0])
del(tell_list[newname.lower()][0])
except:
pass
try:
users.remove(origname)
except ValueError:
pass
else:
users.append(newname)
try:
operators.remove(origname)
except ValueError:
pass
else:
operators.append(newname)
try:
halfoperators.remove(origname)
except ValueError:
pass
else:
halfoperators.append(newname)
elif data[1][0] == "001":
# Skibot is welcomed to the Network
pass
elif data[1][0] == "002":
# Your host is...
pass
elif data[1][0] == "003":
#Server was created...
pass
elif data[1][0] == "004":
#Weird hex?
pass
elif data[1][0] == "005":
#Settings like NICKLEN and so on.
pass
elif data[1][0] == "250":
#data[1][2] is
#"Highest connection count: 1411 (1410 clients)
#(81411 connections received)"
pass
elif data[1][0] == "251":
#There are 23 users and 2491 invisible on 10 servers
pass
elif data[1][0] == "252":
#IRC Operators online
#data[1][2]
print data[1][2],"Irc operators online"
pass
elif data[1][0] == "253":
# ['253', 'Skibot_V4', '1', 'unknown connection(s)']
print data[1][2],"Unknown connection(s)"
pass
elif data[1][0] == "254":
#1391 channels formed
pass
elif data[1][0] == "255":
#I have 406 clients and 2 servers
pass
elif data[1][0] == "265":
#data[1][2] current local users
#data[1][3] at max
try:
print "Current local users:", data[1][2],"/",data[1][3]
except IndexError:
print "Couldn't retrieve local users"
pass
elif data[1][0] == "266":
#data[1][2] current global users
#data[1][3] at max
try:
print "Current global users:", data[1][2],"/",data[1][3]
except IndexError:
print "Couldn't retrieve global users"
pass
elif data[1][0] == "315":
#End of /who list
pass
elif data[1][0] == "332":
# Topic of channel
topic = data[1][3]
pass
elif data[1][0] == "333":
# *Shrug*
pass
elif data[1][0] == "352":
#WHO command
if len(targetlist) > 0:
if targetlist[0][0].lower() in data[1][6].lower():
thread.start_new_thread(target,("*!*@"+data[1][4],targetlist[0][1]))
print "Created a thread with", "*!*@"+data[1][4],targetlist[0][1]
targetlist.pop(0)
else:
print targetlist[0][0].lower(), "isn't equal to?", data[1][6].lower()
print targetlist
elif data[1][0] == "366":
# End of USERS
pass
elif data[1][0] == "372":
# Server information
pass
elif data[1][0] == "375":
# Message of the day
pass
elif data[1][0] == "376":
# End of motd
pass
elif data[1][0] == "401":
# ('network', ['401','Botname','Channel / Nick','No such nick/channel'])
print data[1][2] + " Channel does not exist"
pass
elif data[1][0] == "439":
# ('irc.rizon.no', ['439', '*', 'Please wait while we process your connection.'])
pongtarg = data[0][0]
elif data[1][0] == "477":
# You need to be identified
#TAG
conn.privmsg("nickserv","identify %s"%CORE_DATA.le_pass)
time.sleep(0.5)
conn.join(data[1][2])
#('network', ['477', 'botname', '#channel', 'Cannot join channel (+r) - you need to be identified with services'])
elif data[1][0] == "433":
# Skibot name already exists.
print Name+" name already exists."
Name += "_"+version
print "New name:",Name
duplicate_notify = True
conn = irchat.IRC ( Network, Port, Name, "NT_"+version, "NT_"+version, "Trasen_"+version )
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
elif data[1][0] == "482":
sleep(0.05)
conn.privmsg(targetchannel,"Nevermind that, I am not an operator")
CALL_OFF = True
elif data[1] == ["too","fast,","throttled."]:
print "Reconnected too fast."
print "Halting for 2 seconds"
sleep(2)
elif data[1][0] == "Link":
if data[0] == "Closing":
print "Link was closed"
connected = False
# conn.quit()
# break
else:
print data
print data[1][0]
pass
else:
if disconnects > 9000: #IT'S OVER NINE THOUSAAAAND!
break
else: #WHAT NINE THOUSAND? THERE'S NO WAY THAT CAN BE RIGHT
sleep(responsiveness_delay) #WAIT A WHILE AND CHECK AGAIN!
try:
if not connected:
#print pongtarget
#print conn.addressquery()
conn.privmsg(pongtarget,"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join"
connected = True
except ValueError:
try:
conn.privmsg(conn.addressquery()[0],"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join the second time"
connected = True
except ValueError:
print "Both methods failed"
except AttributeError:
print "Conn is not established correctly"
except NameError:
print "Pongtarget isn't yet established"
try:
conn.privmsg(conn.addressquery()[0],"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join the second time"
connected = True
except:
print "Both methods failed"
| agpl-3.0 |
djgagne/scikit-learn | examples/svm/plot_svm_anova.py | 250 | 2000 | """
=================================================
SVM-Anova: SVM with univariate feature selection
=================================================
This example shows how to perform univariate feature selection before running
an SVC (support vector classifier) to improve the classification scores.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets, feature_selection, cross_validation
from sklearn.pipeline import Pipeline

# ---------------------------------------------------------------------------
# Build a deliberately hard problem: a small slice of the digits data padded
# with uninformative noise features.
# ---------------------------------------------------------------------------
digits = datasets.load_digits()
# Keep only 200 samples so the problem sits in the curse-of-dimension regime.
y = digits.target[:200]
X = digits.data[:200]
n_samples = len(y)
X = X.reshape((n_samples, -1))
# Append 200 random features that carry no class information.
X = np.hstack((X, 2 * np.random.random((n_samples, 200))))

# ---------------------------------------------------------------------------
# Chain univariate (ANOVA F-test) feature selection with an SVC into one
# full-blown estimator.
# ---------------------------------------------------------------------------
transform = feature_selection.SelectPercentile(feature_selection.f_classif)
clf = Pipeline([('anova', transform), ('svc', svm.SVC(C=1.0))])

# ---------------------------------------------------------------------------
# Sweep the percentile of features kept and record cross-validation scores.
# ---------------------------------------------------------------------------
score_means = list()
score_stds = list()
percentiles = (1, 3, 6, 10, 15, 20, 30, 40, 60, 80, 100)

for percentile in percentiles:
    clf.set_params(anova__percentile=percentile)
    # Cross-validation score for this percentile (single process: n_jobs=1).
    this_scores = cross_validation.cross_val_score(clf, X, y, n_jobs=1)
    score_means.append(this_scores.mean())
    score_stds.append(this_scores.std())

plt.errorbar(percentiles, score_means, np.array(score_stds))

plt.title(
    'Performance of the SVM-Anova varying the percentile of features selected')
plt.xlabel('Percentile')
plt.ylabel('Prediction rate')

plt.axis('tight')
plt.show()
| bsd-3-clause |
nkgilley/home-assistant | tests/components/zwave/test_workaround.py | 24 | 2562 | """Test Z-Wave workarounds."""
from homeassistant.components.zwave import const, workaround
from tests.mock.zwave import MockNode, MockValue
def test_get_device_no_component_mapping():
    """A device with a blank manufacturer id yields no component mapping."""
    unmapped = MockValue(data=0, node=MockNode(manufacturer_id=" "))
    assert workaround.get_device_component_mapping(unmapped) is None
def test_get_device_component_mapping():
    """A known manufacturer/product pair maps to its component."""
    sensor_node = MockNode(manufacturer_id="010f", product_type="0b00")
    sensor_value = MockValue(
        data=0, node=sensor_node, command_class=const.COMMAND_CLASS_SENSOR_ALARM
    )
    mapped = workaround.get_device_component_mapping(sensor_value)
    assert mapped == "binary_sensor"
def test_get_device_component_mapping_mti():
    """Mapping keyed on manufacturer, product type AND product id."""
    # GE fan controller: full (manufacturer, type, id) triple matches -> fan.
    fan_node = MockNode(
        manufacturer_id="0063", product_type="4944", product_id="3034"
    )
    fan_value = MockValue(
        data=0, node=fan_node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
    )
    assert workaround.get_device_component_mapping(fan_value) == "fan"

    # GE dimmer: same manufacturer/type but a different product id -> no match.
    dimmer_node = MockNode(
        manufacturer_id="0063", product_type="4944", product_id="3031"
    )
    dimmer_value = MockValue(
        data=0, node=dimmer_node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
    )
    assert workaround.get_device_component_mapping(dimmer_value) is None
def test_get_device_no_mapping():
    """A device with a blank manufacturer id has no device mapping."""
    unmapped = MockValue(data=0, node=MockNode(manufacturer_id=" "))
    assert workaround.get_device_mapping(unmapped) is None
def test_get_device_mapping_mt():
    """Mapping keyed on manufacturer id and product type only."""
    covered_node = MockNode(manufacturer_id="0047", product_type="5a52")
    covered_value = MockValue(data=0, node=covered_node)
    assert (
        workaround.get_device_mapping(covered_value) == "workaround_no_position"
    )
def test_get_device_mapping_mtii():
    """Mapping keyed on manufacturer, type, id and value index."""
    trigger_node = MockNode(
        manufacturer_id="013c", product_type="0002", product_id="0002"
    )
    trigger_value = MockValue(data=0, node=trigger_node, index=0)
    assert workaround.get_device_mapping(trigger_value) == "trigger_no_off_event"
def test_get_device_mapping_mti_instance():
    """Mapping keyed on manufacturer, type, id and value instance."""
    node = MockNode(
        manufacturer_id="013c", product_type="0001", product_id="0005"
    )
    # Instance 1 of this device carries the workaround...
    first_instance = MockValue(data=0, node=node, instance=1)
    assert (
        workaround.get_device_mapping(first_instance) == "refresh_node_on_update"
    )
    # ...but other instances of the same device do not.
    second_instance = MockValue(data=0, node=node, instance=2)
    assert workaround.get_device_mapping(second_instance) is None
| apache-2.0 |
DEVSENSE/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/cp1256.py | 593 | 13070 | """ Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1256 codec delegating to the C-level charmap helpers."""

    def encode(self, input, errors='strict'):
        """Encode text using the module's encoding table.

        Returns a ``(bytes, length_consumed)`` pair, per the codec contract.
        """
        encoded, consumed = codecs.charmap_encode(input, errors, encoding_table)
        return encoded, consumed

    def decode(self, input, errors='strict'):
        """Decode cp1256 bytes using the module's decoding table.

        Returns a ``(text, length_consumed)`` pair, per the codec contract.
        """
        decoded, consumed = codecs.charmap_decode(input, errors, decoding_table)
        return decoded, consumed
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp1256 encoder; a charmap codec keeps no state between calls."""

    def encode(self, input, final=False):
        # charmap_encode returns (bytes, consumed); only the bytes are needed.
        encoded, _consumed = codecs.charmap_encode(
            input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # charmap decoding is stateless; [0] drops the consumed-length count.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support comes entirely from the Codec/StreamWriter bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support comes entirely from the Codec/StreamReader bases.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry used to register this codec as 'cp1256'."""
    return codecs.CodecInfo(
        name='cp1256',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
    # 256 adjacent string literals concatenate into one 256-character
    # string: index == cp1256 byte value, character == Unicode mapping.
    # Generated by gencodec.py from the Unicode CP1256 mapping table.
    u'\x00' # 0x00 -> NULL
    u'\x01' # 0x01 -> START OF HEADING
    u'\x02' # 0x02 -> START OF TEXT
    u'\x03' # 0x03 -> END OF TEXT
    u'\x04' # 0x04 -> END OF TRANSMISSION
    u'\x05' # 0x05 -> ENQUIRY
    u'\x06' # 0x06 -> ACKNOWLEDGE
    u'\x07' # 0x07 -> BELL
    u'\x08' # 0x08 -> BACKSPACE
    u'\t' # 0x09 -> HORIZONTAL TABULATION
    u'\n' # 0x0A -> LINE FEED
    u'\x0b' # 0x0B -> VERTICAL TABULATION
    u'\x0c' # 0x0C -> FORM FEED
    u'\r' # 0x0D -> CARRIAGE RETURN
    u'\x0e' # 0x0E -> SHIFT OUT
    u'\x0f' # 0x0F -> SHIFT IN
    u'\x10' # 0x10 -> DATA LINK ESCAPE
    u'\x11' # 0x11 -> DEVICE CONTROL ONE
    u'\x12' # 0x12 -> DEVICE CONTROL TWO
    u'\x13' # 0x13 -> DEVICE CONTROL THREE
    u'\x14' # 0x14 -> DEVICE CONTROL FOUR
    u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
    u'\x16' # 0x16 -> SYNCHRONOUS IDLE
    u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
    u'\x18' # 0x18 -> CANCEL
    u'\x19' # 0x19 -> END OF MEDIUM
    u'\x1a' # 0x1A -> SUBSTITUTE
    u'\x1b' # 0x1B -> ESCAPE
    u'\x1c' # 0x1C -> FILE SEPARATOR
    u'\x1d' # 0x1D -> GROUP SEPARATOR
    u'\x1e' # 0x1E -> RECORD SEPARATOR
    u'\x1f' # 0x1F -> UNIT SEPARATOR
    u' ' # 0x20 -> SPACE
    u'!' # 0x21 -> EXCLAMATION MARK
    u'"' # 0x22 -> QUOTATION MARK
    u'#' # 0x23 -> NUMBER SIGN
    u'$' # 0x24 -> DOLLAR SIGN
    u'%' # 0x25 -> PERCENT SIGN
    u'&' # 0x26 -> AMPERSAND
    u"'" # 0x27 -> APOSTROPHE
    u'(' # 0x28 -> LEFT PARENTHESIS
    u')' # 0x29 -> RIGHT PARENTHESIS
    u'*' # 0x2A -> ASTERISK
    u'+' # 0x2B -> PLUS SIGN
    u',' # 0x2C -> COMMA
    u'-' # 0x2D -> HYPHEN-MINUS
    u'.' # 0x2E -> FULL STOP
    u'/' # 0x2F -> SOLIDUS
    u'0' # 0x30 -> DIGIT ZERO
    u'1' # 0x31 -> DIGIT ONE
    u'2' # 0x32 -> DIGIT TWO
    u'3' # 0x33 -> DIGIT THREE
    u'4' # 0x34 -> DIGIT FOUR
    u'5' # 0x35 -> DIGIT FIVE
    u'6' # 0x36 -> DIGIT SIX
    u'7' # 0x37 -> DIGIT SEVEN
    u'8' # 0x38 -> DIGIT EIGHT
    u'9' # 0x39 -> DIGIT NINE
    u':' # 0x3A -> COLON
    u';' # 0x3B -> SEMICOLON
    u'<' # 0x3C -> LESS-THAN SIGN
    u'=' # 0x3D -> EQUALS SIGN
    u'>' # 0x3E -> GREATER-THAN SIGN
    u'?' # 0x3F -> QUESTION MARK
    u'@' # 0x40 -> COMMERCIAL AT
    u'A' # 0x41 -> LATIN CAPITAL LETTER A
    u'B' # 0x42 -> LATIN CAPITAL LETTER B
    u'C' # 0x43 -> LATIN CAPITAL LETTER C
    u'D' # 0x44 -> LATIN CAPITAL LETTER D
    u'E' # 0x45 -> LATIN CAPITAL LETTER E
    u'F' # 0x46 -> LATIN CAPITAL LETTER F
    u'G' # 0x47 -> LATIN CAPITAL LETTER G
    u'H' # 0x48 -> LATIN CAPITAL LETTER H
    u'I' # 0x49 -> LATIN CAPITAL LETTER I
    u'J' # 0x4A -> LATIN CAPITAL LETTER J
    u'K' # 0x4B -> LATIN CAPITAL LETTER K
    u'L' # 0x4C -> LATIN CAPITAL LETTER L
    u'M' # 0x4D -> LATIN CAPITAL LETTER M
    u'N' # 0x4E -> LATIN CAPITAL LETTER N
    u'O' # 0x4F -> LATIN CAPITAL LETTER O
    u'P' # 0x50 -> LATIN CAPITAL LETTER P
    u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
    u'R' # 0x52 -> LATIN CAPITAL LETTER R
    u'S' # 0x53 -> LATIN CAPITAL LETTER S
    u'T' # 0x54 -> LATIN CAPITAL LETTER T
    u'U' # 0x55 -> LATIN CAPITAL LETTER U
    u'V' # 0x56 -> LATIN CAPITAL LETTER V
    u'W' # 0x57 -> LATIN CAPITAL LETTER W
    u'X' # 0x58 -> LATIN CAPITAL LETTER X
    u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
    u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
    u'[' # 0x5B -> LEFT SQUARE BRACKET
    u'\\' # 0x5C -> REVERSE SOLIDUS
    u']' # 0x5D -> RIGHT SQUARE BRACKET
    u'^' # 0x5E -> CIRCUMFLEX ACCENT
    u'_' # 0x5F -> LOW LINE
    u'`' # 0x60 -> GRAVE ACCENT
    u'a' # 0x61 -> LATIN SMALL LETTER A
    u'b' # 0x62 -> LATIN SMALL LETTER B
    u'c' # 0x63 -> LATIN SMALL LETTER C
    u'd' # 0x64 -> LATIN SMALL LETTER D
    u'e' # 0x65 -> LATIN SMALL LETTER E
    u'f' # 0x66 -> LATIN SMALL LETTER F
    u'g' # 0x67 -> LATIN SMALL LETTER G
    u'h' # 0x68 -> LATIN SMALL LETTER H
    u'i' # 0x69 -> LATIN SMALL LETTER I
    u'j' # 0x6A -> LATIN SMALL LETTER J
    u'k' # 0x6B -> LATIN SMALL LETTER K
    u'l' # 0x6C -> LATIN SMALL LETTER L
    u'm' # 0x6D -> LATIN SMALL LETTER M
    u'n' # 0x6E -> LATIN SMALL LETTER N
    u'o' # 0x6F -> LATIN SMALL LETTER O
    u'p' # 0x70 -> LATIN SMALL LETTER P
    u'q' # 0x71 -> LATIN SMALL LETTER Q
    u'r' # 0x72 -> LATIN SMALL LETTER R
    u's' # 0x73 -> LATIN SMALL LETTER S
    u't' # 0x74 -> LATIN SMALL LETTER T
    u'u' # 0x75 -> LATIN SMALL LETTER U
    u'v' # 0x76 -> LATIN SMALL LETTER V
    u'w' # 0x77 -> LATIN SMALL LETTER W
    u'x' # 0x78 -> LATIN SMALL LETTER X
    u'y' # 0x79 -> LATIN SMALL LETTER Y
    u'z' # 0x7A -> LATIN SMALL LETTER Z
    u'{' # 0x7B -> LEFT CURLY BRACKET
    u'|' # 0x7C -> VERTICAL LINE
    u'}' # 0x7D -> RIGHT CURLY BRACKET
    u'~' # 0x7E -> TILDE
    u'\x7f' # 0x7F -> DELETE
    u'\u20ac' # 0x80 -> EURO SIGN
    u'\u067e' # 0x81 -> ARABIC LETTER PEH
    u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
    u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
    u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
    u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
    u'\u2020' # 0x86 -> DAGGER
    u'\u2021' # 0x87 -> DOUBLE DAGGER
    u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
    u'\u2030' # 0x89 -> PER MILLE SIGN
    u'\u0679' # 0x8A -> ARABIC LETTER TTEH
    u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    u'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
    u'\u0686' # 0x8D -> ARABIC LETTER TCHEH
    u'\u0698' # 0x8E -> ARABIC LETTER JEH
    u'\u0688' # 0x8F -> ARABIC LETTER DDAL
    u'\u06af' # 0x90 -> ARABIC LETTER GAF
    u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
    u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
    u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
    u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
    u'\u2022' # 0x95 -> BULLET
    u'\u2013' # 0x96 -> EN DASH
    u'\u2014' # 0x97 -> EM DASH
    u'\u06a9' # 0x98 -> ARABIC LETTER KEHEH
    u'\u2122' # 0x99 -> TRADE MARK SIGN
    u'\u0691' # 0x9A -> ARABIC LETTER RREH
    u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    u'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
    u'\u200c' # 0x9D -> ZERO WIDTH NON-JOINER
    u'\u200d' # 0x9E -> ZERO WIDTH JOINER
    u'\u06ba' # 0x9F -> ARABIC LETTER NOON GHUNNA
    u'\xa0' # 0xA0 -> NO-BREAK SPACE
    u'\u060c' # 0xA1 -> ARABIC COMMA
    u'\xa2' # 0xA2 -> CENT SIGN
    u'\xa3' # 0xA3 -> POUND SIGN
    u'\xa4' # 0xA4 -> CURRENCY SIGN
    u'\xa5' # 0xA5 -> YEN SIGN
    u'\xa6' # 0xA6 -> BROKEN BAR
    u'\xa7' # 0xA7 -> SECTION SIGN
    u'\xa8' # 0xA8 -> DIAERESIS
    u'\xa9' # 0xA9 -> COPYRIGHT SIGN
    u'\u06be' # 0xAA -> ARABIC LETTER HEH DOACHASHMEE
    u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xac' # 0xAC -> NOT SIGN
    u'\xad' # 0xAD -> SOFT HYPHEN
    u'\xae' # 0xAE -> REGISTERED SIGN
    u'\xaf' # 0xAF -> MACRON
    u'\xb0' # 0xB0 -> DEGREE SIGN
    u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
    u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
    u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
    u'\xb4' # 0xB4 -> ACUTE ACCENT
    u'\xb5' # 0xB5 -> MICRO SIGN
    u'\xb6' # 0xB6 -> PILCROW SIGN
    u'\xb7' # 0xB7 -> MIDDLE DOT
    u'\xb8' # 0xB8 -> CEDILLA
    u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
    u'\u061b' # 0xBA -> ARABIC SEMICOLON
    u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
    u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
    u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
    u'\u061f' # 0xBF -> ARABIC QUESTION MARK
    u'\u06c1' # 0xC0 -> ARABIC LETTER HEH GOAL
    u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
    u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
    u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
    u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
    u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
    u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
    u'\u0627' # 0xC7 -> ARABIC LETTER ALEF
    u'\u0628' # 0xC8 -> ARABIC LETTER BEH
    u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
    u'\u062a' # 0xCA -> ARABIC LETTER TEH
    u'\u062b' # 0xCB -> ARABIC LETTER THEH
    u'\u062c' # 0xCC -> ARABIC LETTER JEEM
    u'\u062d' # 0xCD -> ARABIC LETTER HAH
    u'\u062e' # 0xCE -> ARABIC LETTER KHAH
    u'\u062f' # 0xCF -> ARABIC LETTER DAL
    u'\u0630' # 0xD0 -> ARABIC LETTER THAL
    u'\u0631' # 0xD1 -> ARABIC LETTER REH
    u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
    u'\u0633' # 0xD3 -> ARABIC LETTER SEEN
    u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
    u'\u0635' # 0xD5 -> ARABIC LETTER SAD
    u'\u0636' # 0xD6 -> ARABIC LETTER DAD
    u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
    u'\u0637' # 0xD8 -> ARABIC LETTER TAH
    u'\u0638' # 0xD9 -> ARABIC LETTER ZAH
    u'\u0639' # 0xDA -> ARABIC LETTER AIN
    u'\u063a' # 0xDB -> ARABIC LETTER GHAIN
    u'\u0640' # 0xDC -> ARABIC TATWEEL
    u'\u0641' # 0xDD -> ARABIC LETTER FEH
    u'\u0642' # 0xDE -> ARABIC LETTER QAF
    u'\u0643' # 0xDF -> ARABIC LETTER KAF
    u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
    u'\u0644' # 0xE1 -> ARABIC LETTER LAM
    u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    u'\u0645' # 0xE3 -> ARABIC LETTER MEEM
    u'\u0646' # 0xE4 -> ARABIC LETTER NOON
    u'\u0647' # 0xE5 -> ARABIC LETTER HEH
    u'\u0648' # 0xE6 -> ARABIC LETTER WAW
    u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
    u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
    u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
    u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
    u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
    u'\u0649' # 0xEC -> ARABIC LETTER ALEF MAKSURA
    u'\u064a' # 0xED -> ARABIC LETTER YEH
    u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
    u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
    u'\u064b' # 0xF0 -> ARABIC FATHATAN
    u'\u064c' # 0xF1 -> ARABIC DAMMATAN
    u'\u064d' # 0xF2 -> ARABIC KASRATAN
    u'\u064e' # 0xF3 -> ARABIC FATHA
    u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    u'\u064f' # 0xF5 -> ARABIC DAMMA
    u'\u0650' # 0xF6 -> ARABIC KASRA
    u'\xf7' # 0xF7 -> DIVISION SIGN
    u'\u0651' # 0xF8 -> ARABIC SHADDA
    u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
    u'\u0652' # 0xFA -> ARABIC SUKUN
    u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
    u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
    u'\u200e' # 0xFD -> LEFT-TO-RIGHT MARK
    u'\u200f' # 0xFE -> RIGHT-TO-LEFT MARK
    u'\u06d2' # 0xFF -> ARABIC LETTER YEH BARREE
)
### Encoding table
# Build the inverse (character -> byte) table from the 256-entry decoding table.
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
TheLady/audio-normalize | setup.py | 1 | 1535 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
# Long description shown on PyPI: README followed by the changelog,
# with the reST ':changelog:' marker stripped out.
with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

# Runtime dependencies.
requirements = [
    'docopt',
]

# Currently unused; the test hooks below are commented out.
test_requirements = [
    # 'pytest',
]

# Imported only to read __version__; the package must be importable
# (side-effect free) at build time for this to work.
import avconv_normalize

setup(
    name='avconv-normalize',
    version=avconv_normalize.__version__,
    description="Normalize audio via Libav (avconv)",
    long_description=readme + '\n\n' + history,
    author="Werner Robitza",
    author_email='unknown@todo.com',
    url='https://github.com/slhck/audio-normalize',
    packages=[
        'avconv_normalize',
    ],
    include_package_data=True,
    install_requires=requirements,
    license="MIT",
    zip_safe=False,
    keywords='avconv, ffmpeg, libav, normalize, audio',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    # test_suite='tests',
    # cmdclass={'test': PyTest},
    # tests_require=test_requirements,
    entry_points={
        'console_scripts': [
            'avconv-normalize = avconv_normalize.__main__:main'
        ]
    },
)
| mit |
dstroppa/openstack-smartos-nova-grizzly | nova/netconf.py | 9 | 2142 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket

from oslo.config import cfg

# Global configuration object shared across Nova; options are registered
# on it at the bottom of this module.
CONF = cfg.CONF
def _get_my_ip():
"""
Returns the actual ip of the local machine.
This code figures out what source address would be used if some traffic
were to be sent out to some well known address on the Internet. In this
case, a Google DNS server is used, but the specific address does not
matter much. No traffic is actually sent.
"""
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return "127.0.0.1"
# Options describing this host's network identity.  Note that the my_ip
# default is computed once, at import time, via _get_my_ip().
netconf_opts = [
    cfg.StrOpt('my_ip',
               default=_get_my_ip(),
               help='ip address of this host'),
    cfg.StrOpt('host',
               default=socket.gethostname(),
               help='Name of this node. This can be an opaque identifier. '
                    'It is not necessarily a hostname, FQDN, or IP address. '
                    'However, the node name must be valid within '
                    'an AMQP key, and if using ZeroMQ, a valid '
                    'hostname, FQDN, or IP address'),
    cfg.BoolOpt('use_ipv6',
                default=False,
                help='use ipv6'),
]

# Make the options available on the global CONF object at import time.
CONF.register_opts(netconf_opts)
| apache-2.0 |
ShiYw/Sigil | 3rdparty/python/Lib/uu.py | 182 | 6755 | #! /usr/bin/env python3
# Copyright 1994 by Lance Ellinghouse
# Cathedral City, California Republic, United States of America.
# All Rights Reserved
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Lance Ellinghouse
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE
# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Modified by Jack Jansen, CWI, July 1995:
# - Use binascii module to do the actual line-by-line conversion
# between ascii and binary. This results in a 1000-fold speedup. The C
# version is still 5 times faster, though.
# - Arguments more compliant with python standard
"""Implementation of the UUencode and UUdecode functions.
encode(in_file, out_file [,name, mode])
decode(in_file [, out_file, mode])
"""
import binascii
import os
import sys
__all__ = ["Error", "encode", "decode"]
class Error(Exception):
    """Raised for uuencode/uudecode framing problems (missing or bad
    'begin' header, truncated input, or refusal to overwrite a file)."""
    pass
def encode(in_file, out_file, name=None, mode=None):
    """Uuencode file.

    in_file/out_file may be '-' (stdin/stdout), a filename, or a binary
    file object.  When in_file is a path, name and mode default to its
    basename and permission bits; otherwise they default to '-' and 0o666.
    """
    #
    # If in_file is a pathname open it and change defaults
    #
    opened_files = []
    try:
        if in_file == '-':
            in_file = sys.stdin.buffer
        elif isinstance(in_file, str):
            if name is None:
                name = os.path.basename(in_file)
            if mode is None:
                try:
                    mode = os.stat(in_file).st_mode
                except AttributeError:
                    pass
            in_file = open(in_file, 'rb')
            opened_files.append(in_file)
        #
        # Open out_file if it is a pathname
        #
        if out_file == '-':
            out_file = sys.stdout.buffer
        elif isinstance(out_file, str):
            out_file = open(out_file, 'wb')
            opened_files.append(out_file)
        #
        # Set defaults for name and mode
        #
        if name is None:
            name = '-'
        if mode is None:
            mode = 0o666
        #
        # Write the data
        #
        # The header records only the permission bits (mode & 0o777).
        out_file.write(('begin %o %s\n' % ((mode & 0o777), name)).encode("ascii"))
        data = in_file.read(45)  # 45 input bytes -> one uuencoded line
        while len(data) > 0:
            out_file.write(binascii.b2a_uu(data))
            data = in_file.read(45)
        out_file.write(b' \nend\n')
    finally:
        # Close only the files this function opened; caller-supplied file
        # objects remain open for the caller to manage.
        for f in opened_files:
            f.close()
def decode(in_file, out_file=None, mode=None, quiet=False):
    """Decode uuencoded file.

    in_file may be '-' (stdin), a filename, or a binary file object.
    out_file defaults to the filename recorded in the 'begin' header and
    mode to the permissions recorded there.  Raises Error when no valid
    'begin' line is found, when the named output file already exists, or
    when the input is truncated (no 'end' line).
    """
    #
    # Open the input file, if needed.
    #
    opened_files = []
    if in_file == '-':
        in_file = sys.stdin.buffer
    elif isinstance(in_file, str):
        in_file = open(in_file, 'rb')
        opened_files.append(in_file)
    try:
        #
        # Read until a begin is encountered or we've exhausted the file
        #
        while True:
            hdr = in_file.readline()
            if not hdr:
                raise Error('No valid begin line found in input file')
            if not hdr.startswith(b'begin'):
                continue
            hdrfields = hdr.split(b' ', 2)
            # A valid header has three fields and an octal mode field.
            if len(hdrfields) == 3 and hdrfields[0] == b'begin':
                try:
                    int(hdrfields[1], 8)
                    break
                except ValueError:
                    pass
        if out_file is None:
            # If the filename isn't ASCII, what's up with that?!?
            out_file = hdrfields[2].rstrip(b' \t\r\n\f').decode("ascii")
            if os.path.exists(out_file):
                raise Error('Cannot overwrite existing file: %s' % out_file)
        if mode is None:
            mode = int(hdrfields[1], 8)
        #
        # Open the output file
        #
        if out_file == '-':
            out_file = sys.stdout.buffer
        elif isinstance(out_file, str):
            fp = open(out_file, 'wb')
            # BUG FIX: this used to call os.path.chmod (which does not
            # exist) inside a try/except AttributeError, so the recorded
            # mode was silently never applied.  os.chmod is the real API.
            os.chmod(out_file, mode)
            out_file = fp
            opened_files.append(out_file)
        #
        # Main decoding loop
        #
        s = in_file.readline()
        while s and s.strip(b' \t\r\n\f') != b'end':
            try:
                data = binascii.a2b_uu(s)
            except binascii.Error as v:
                # Workaround for broken uuencoders by /Fredrik Lundh:
                # derive the byte count from the length character and
                # decode only that many characters.
                nbytes = (((s[0]-32) & 63) * 4 + 5) // 3
                data = binascii.a2b_uu(s[:nbytes])
                if not quiet:
                    sys.stderr.write("Warning: %s\n" % v)
            out_file.write(data)
            s = in_file.readline()
        if not s:
            raise Error('Truncated input file')
    finally:
        for f in opened_files:
            f.close()
def test():
    """uuencode/uudecode main program.

    Parses sys.argv: [-d] decodes instead of encoding, [-t] treats the
    data as text, and up to two positional arguments name the input and
    output (defaulting to stdin/stdout).
    """
    import optparse
    parser = optparse.OptionParser(usage='usage: %prog [-d] [-t] [input [output]]')
    parser.add_option('-d', '--decode', dest='decode', help='Decode (instead of encode)?', default=False, action='store_true')
    parser.add_option('-t', '--text', dest='text', help='data is text, encoded format unix-compatible text?', default=False, action='store_true')

    (options, args) = parser.parse_args()
    if len(args) > 2:
        parser.error('incorrect number of arguments')
        sys.exit(1)

    # Use the binary streams underlying stdin/stdout
    input = sys.stdin.buffer
    output = sys.stdout.buffer
    if len(args) > 0:
        input = args[0]
    if len(args) > 1:
        output = args[1]

    if options.decode:
        if options.text:
            # Text output requires a real (named) file we can reopen in
            # binary mode; stdout cannot be reopened here.
            if isinstance(output, str):
                output = open(output, 'wb')
            else:
                print(sys.argv[0], ': cannot do -t to stdout')
                sys.exit(1)
        decode(input, output)
    else:
        if options.text:
            if isinstance(input, str):
                input = open(input, 'rb')
            else:
                print(sys.argv[0], ': cannot do -t from stdin')
                sys.exit(1)
        encode(input, output)

if __name__ == '__main__':
    test()
| gpl-3.0 |
javierag/samba | third_party/dnspython/tests/resolver.py | 56 | 4279 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import select
import sys
import time
import unittest
import dns.name
import dns.message
import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.resolver
resolv_conf = """
/t/t
# comment 1
; comment 2
domain foo
nameserver 10.0.0.1
nameserver 10.0.0.2
"""
message_text = """id 1234
opcode QUERY
rcode NOERROR
flags QR AA RD
;QUESTION
example. IN A
;ANSWER
example. 1 IN A 10.0.0.1
;AUTHORITY
;ADDITIONAL
"""
class BaseResolverTests(object):
    """Resolver tests shared by the select- and poll-backend test cases.

    Not itself a TestCase; concrete subclasses mix in unittest.TestCase.
    """

    if sys.platform != 'win32':
        # resolv.conf parsing is only exercised on non-Windows platforms.
        def testRead(self):
            f = cStringIO.StringIO(resolv_conf)
            r = dns.resolver.Resolver(f)
            self.failUnless(r.nameservers == ['10.0.0.1', '10.0.0.2'] and
                            r.domain == dns.name.from_text('foo'))

    def testCacheExpiration(self):
        # The fixture answer's TTL is 1 second, so it must have expired
        # (and be unretrievable) after sleeping 2 seconds.
        message = dns.message.from_text(message_text)
        name = dns.name.from_text('example.')
        answer = dns.resolver.Answer(name, dns.rdatatype.A, dns.rdataclass.IN,
                                     message)
        cache = dns.resolver.Cache()
        cache.put((name, dns.rdatatype.A, dns.rdataclass.IN), answer)
        time.sleep(2)
        self.failUnless(cache.get((name, dns.rdatatype.A, dns.rdataclass.IN))
                        is None)

    def testCacheCleaning(self):
        # Same as above, but with a 1-second cleaning interval so the
        # expired entry is actively purged rather than merely hidden.
        message = dns.message.from_text(message_text)
        name = dns.name.from_text('example.')
        answer = dns.resolver.Answer(name, dns.rdatatype.A, dns.rdataclass.IN,
                                     message)
        cache = dns.resolver.Cache(cleaning_interval=1.0)
        cache.put((name, dns.rdatatype.A, dns.rdataclass.IN), answer)
        time.sleep(2)
        self.failUnless(cache.get((name, dns.rdatatype.A, dns.rdataclass.IN))
                        is None)

    # NOTE: the zone_for_name tests below perform live DNS lookups for
    # dnspython.org and therefore require network access.
    def testZoneForName1(self):
        name = dns.name.from_text('www.dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)

    def testZoneForName2(self):
        name = dns.name.from_text('a.b.www.dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)

    def testZoneForName3(self):
        name = dns.name.from_text('dnspython.org.')
        ezname = dns.name.from_text('dnspython.org.')
        zname = dns.resolver.zone_for_name(name)
        self.failUnless(zname == ezname)

    def testZoneForName4(self):
        # A relative (non-absolute) name must be rejected.
        def bad():
            name = dns.name.from_text('dnspython.org', None)
            zname = dns.resolver.zone_for_name(name)
        self.failUnlessRaises(dns.resolver.NotAbsolute, bad)
class PollingMonkeyPatchMixin(object):
    """TestCase mixin that pins dns.query to a specific polling backend
    (select vs poll) for the duration of each test.

    Subclasses must provide polling_backend() returning the backend to use.
    """
    def setUp(self):
        # Remember the real backend so tearDown can restore it.
        self.__native_polling_backend = dns.query._polling_backend
        dns.query._set_polling_backend(self.polling_backend())

        unittest.TestCase.setUp(self)

    def tearDown(self):
        dns.query._set_polling_backend(self.__native_polling_backend)

        unittest.TestCase.tearDown(self)
class SelectResolverTestCase(PollingMonkeyPatchMixin, BaseResolverTests, unittest.TestCase):
    """Run the shared resolver tests with the select() polling backend."""
    def polling_backend(self):
        return dns.query._select_for
# poll() is not available on every platform (e.g. some BSDs/Windows), so
# this TestCase is only defined when the select module provides it.
if hasattr(select, 'poll'):
    class PollResolverTestCase(PollingMonkeyPatchMixin, BaseResolverTests, unittest.TestCase):
        """Run the shared resolver tests with the poll() polling backend."""
        def polling_backend(self):
            return dns.query._poll_for

if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
alexmojaki/blaze | blaze/compute/utils.py | 14 | 2274 | from __future__ import absolute_import, division, print_function
from datetime import datetime
from decimal import Decimal
import sqlalchemy as sa
import sqlalchemy.orm
from toolz import curry
from datashape.predicates import isrecord
from ..expr import Field
from odo.backends.sql import dshape_to_alchemy
# This was taken from the following StackOverflow post
# http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
# answer by bukzor http://stackoverflow.com/users/146821/bukzor
def literalquery(statement, dialect=None):
    """Generate an SQL expression string with bound parameters rendered inline
    for the given SQLAlchemy statement.

    WARNING: This method of escaping is insecure, incomplete, and for debugging
    purposes only. Executing SQL statements with inline-rendered user values is
    extremely insecure.

    NOTE(review): this function uses the Python 2 builtins `long` and
    `unicode`, so as written it is Python-2-only.
    """
    # ORM Query objects carry their own bind; unwrap to the core statement.
    if isinstance(statement, sqlalchemy.orm.Query):
        if dialect is None:
            dialect = statement.session.get_bind(
                statement._mapper_zero_or_none()
            ).dialect
        statement = statement.statement
    if dialect is None:
        dialect = getattr(statement.bind, 'dialect', None)
    if dialect is None:
        # Last-resort fallback when the statement has no bound engine.
        from sqlalchemy.dialects import mysql
        dialect = mysql.dialect()

    Compiler = type(statement._compiler(dialect))

    class LiteralCompiler(Compiler):
        # Render bind parameters as literals instead of placeholders.
        visit_bindparam = Compiler.render_literal_bindparam

        def render_literal_value(self, value, type_):
            if isinstance(value, (Decimal, long)):
                return str(value)
            elif isinstance(value, datetime):
                return repr(str(value))
            else:  # fallback
                value = super(LiteralCompiler, self).render_literal_value(
                    value, type_,
                )
                if isinstance(value, unicode):
                    return value.encode('UTF-8')
                else:
                    return value

    return LiteralCompiler(dialect, statement)
def make_sqlalchemy_table(expr):
    """Build a SQLAlchemy Table (with a fresh MetaData) whose columns are
    derived from the expression's datashape."""
    columns = dshape_to_alchemy(expr.dshape)
    return sa.Table(expr._name, sa.MetaData(), *columns)
@curry
def istable(db, t):
    """Return True when `t` is a record-typed Field directly on `db`.

    The isinstance check must come first: it guards the `_child` access
    in the final conjunct.
    """
    return (isinstance(t, Field) and isrecord(t.dshape.measure) and
            t._child.isidentical(db))
| bsd-3-clause |
commonsense/divisi | csc/divisi/flavors.py | 1 | 5345 | from csc.divisi.tensor import DictTensor
from csc.divisi.ordered_set import OrderedSet
from csc.divisi.labeled_view import LabeledView
def add_triple_to_matrix(matrix, triple, value=1.0):
    '''
    Accumulate a (left, relation, right) assertion into the matrix in the
    2D unfolded format: the left concept gains a ('right', rel, right)
    feature entry and the right concept a ('left', rel, left) one.

    This is the new add_assertion_tuple.
    '''
    concept_a, rel, concept_b = triple
    matrix.inc((concept_a, ('right', rel, concept_b)), value)
    matrix.inc((concept_b, ('left', rel, concept_a)), value)
def set_triple_in_matrix(matrix, triple, value=1.0):
    ''' Overwrite (rather than accumulate) the two cells that represent a
    (left, relation, right) assertion in the 2D unfolded format.
    '''
    concept_a, rel, concept_b = triple
    matrix[concept_a, ('right', rel, concept_b)] = value
    matrix[concept_b, ('left', rel, concept_a)] = value
###
### Assertion Tensors
###
class AssertionTensor(LabeledView):
    '''
    All AssertionTensors have the following functions:
     .add_triple(triple, value)
     .set_triple(triple, value)
     .add_identity(text, value=1.0, relation='Identity')
    where triple is (concept1, relation, concept2).

    They also have the convenience classmethod from_triples.
    '''
    def add_identity(self, text, value=1.0, relation='Identity'):
        # A self-assertion: (text, relation, text).
        self.add_triple((text, relation, text), value)

    def bake(self):
        '''
        Simplify the representation to a plain LabeledView over the same
        underlying tensor and labels.
        '''
        return LabeledView(self.tensor, self._labels)

    def add_triples(self, triples, accumulate=True, constant_weight=None):
        # With a constant_weight, `triples` is a sequence of bare triples;
        # without one, it is a sequence of (triple, weight) pairs.
        if accumulate: add = self.add_triple
        else: add = self.set_triple
        if constant_weight:
            for triple in triples:
                add(triple, constant_weight)
        else:
            for triple, weight in triples:
                add(triple, weight)

    @classmethod
    def from_triples(cls, triples, accumulate=True, constant_weight=None):
        # Convenience constructor: build an empty tensor and fill it.
        mat = cls()
        mat.add_triples(triples, accumulate, constant_weight)
        return mat

    def add_identities(self, value=1.0, relation='Identity'):
        if not value: return # 0 or False means not to actually add identities.
        for concept in self.concepts():
            self.add_triple((concept, relation, concept), value)
class ConceptByFeatureMatrix(AssertionTensor):
    '''
    This is the typical AnalogySpace matrix. It stores each assertion
    twice: once as (c1, ('right', rel, c2)) and once as (c2, ('left',
    rel, c1)).

    This class is a convenience for building matrices in this
    format. Once you've add_triple'sed everything, you can call
    .bake() to convert it back to a plain old LabeledView of a
    DictTensor, just like make_sparse_labeled_tensor does.
    '''
    def __init__(self):
        # 2D sparse tensor labeled by (concept, feature).
        super(ConceptByFeatureMatrix, self).__init__(
            DictTensor(2), [OrderedSet() for _ in '01'])

    # The module-level helpers already take the matrix as their first
    # argument, so they work directly as methods here.
    add_triple = add_triple_to_matrix
    set_triple = set_triple_in_matrix
    def concepts(self): return self.label_list(0)
class FeatureByConceptMatrix(AssertionTensor):
    '''
    A transposed ConceptByFeatureMatrix; see it for documentation.
    '''
    def __init__(self):
        # 2D sparse tensor labeled by (feature, concept).
        super(FeatureByConceptMatrix, self).__init__(
            DictTensor(2), [OrderedSet() for _ in '01'])

    def add_triple(self, triple, value=1.0):
        left, relation, right = triple
        lfeature = ('left', relation, left)
        rfeature = ('right', relation, right)
        self.inc((rfeature, left), value)
        self.inc((lfeature, right), value)

    def set_triple(self, triple, value=1.0):
        left, relation, right = triple
        lfeature = ('left', relation, left)
        rfeature = ('right', relation, right)
        self[rfeature, left] = value
        self[lfeature, right] = value

    def concepts(self): return self.label_list(1)
class ConceptRelationConceptTensor(AssertionTensor):
    '''
    This is a straightforward encoding of concepts as a 3D tensor:
    (left concept, relation, right concept).
    '''
    def __init__(self):
        # FIXME: yes this saves space, but it might make a row or column be zero.
        # Axes 0 and 2 share one OrderedSet so both sides index concepts
        # identically.
        concepts, relations = OrderedSet(), OrderedSet()
        super(ConceptRelationConceptTensor, self).__init__(
            DictTensor(3), [concepts, relations, concepts])

    def concepts(self): return self.label_list(0)

    def add_triple(self, triple, value=1.0):
        left, relation, right = triple
        self.inc((left, relation, right), value)

    def set_triple(self, triple, value=1.0):
        left, relation, right = triple
        self[left, relation, right] = value
class MirroringCRCTensor(ConceptRelationConceptTensor):
    '''
    Every assertion (c1, r, c2) in this tensor has an inverse,
    (c2, r', c1).

    This is analogous to how the 2D tensor makes left and right features.

    Inverse relations are constructed from ordinary relations by
    prefixing a '-'.
    '''
    def add_triple(self, triple, value=1.0):
        left, relation, right = triple
        self.inc((left, relation, right), value)      # normal
        self.inc((right, '-'+relation, left), value)  # inverse
    def set_triple(self, triple, value=1.0):
        left, relation, right = triple
        self[left, relation, right] = value
        # BUG FIX: the mirrored entry was previously written at
        # (left, '-'+relation, right); per the class docstring and
        # add_triple above, the inverse must swap the two concepts.
        self[right, '-'+relation, left] = value
| gpl-3.0 |
moyaproject/moya | moya/elements/registry.py | 1 | 4712 | from __future__ import unicode_literals
from .. import errors
from ..tools import extract_namespace
from .. import namespaces
from ..compat import itervalues
from collections import defaultdict
import inspect
class Meta(object):
    """Default per-tag metadata flags; element classes override these.

    The semantics of each flag live in the element/logic runtime -- the
    values below are only the defaults (all off / empty).
    """
    logic_skip = False
    virtual_tag = False
    is_call = False
    is_try = False
    is_loop = False
    app_first_arg = False
    text_nodes = None
    trap_exceptions = False
    translate = False
class ElementRegistry(object):
default_registry = None
_registry_stack = []
def clear(self):
self._registry.clear()
self._dynamic_elements.clear()
del self._registry_stack[:]
@classmethod
def push_registry(cls, registry):
cls._registry_stack.append(registry)
@classmethod
def pop_registry(cls):
cls._registry_stack.pop()
@classmethod
def get_default(cls):
return cls._registry_stack[-1]
def __init__(self, update_from_default=True):
self._registry = defaultdict(dict)
self._dynamic_elements = {}
if update_from_default:
self._registry.update(self.default_registry._registry)
self._dynamic_elements.update(self.default_registry._dynamic_elements)
def clone(self):
"""Return a copy of this registry"""
registry = ElementRegistry(update_from_default=False)
registry._registry = self._registry.copy()
registry._dynamic_elements = self._dynamic_elements.copy()
return registry
def set_default(self):
"""Reset this registry to the default registry (before project loaded)"""
self._registry = self.default_registry._registry.copy()
self._dynamic_elements = self.default_registry._dynamic_elements.copy()
def register_element(self, xmlns, name, element):
"""Add a dynamic element to the element registry"""
xmlns = xmlns or namespaces.run
if name in self._registry[xmlns]:
element_class = self._registry[xmlns][name]
definition = getattr(element_class, "_location", None)
if definition is None:
definition = inspect.getfile(element_class)
if xmlns:
raise errors.ElementError(
'<{}> already registered in "{}" for xmlns "{}"'.format(
name, definition, xmlns
),
element=getattr(element, "element", element),
)
else:
raise errors.ElementError(
'<{}/> already registered in "{}"'.format(name, definition),
element=element,
)
self._registry[xmlns][name] = element
def add_dynamic_registry(self, xmlns, element_callable):
"""Add a dynamic registry (element factory)"""
self._dynamic_elements[xmlns] = element_callable
def clear_registry(self):
"""Clear the registry (called on archive reload)"""
self._registry.clear()
def get_elements_in_xmlns(self, xmlns):
"""Get all elements defined within a given namespace"""
return self._registry.get(xmlns, {})
def get_elements_in_lib(self, long_name):
"""Get all elements defined by a given library"""
lib_elements = []
for namespace in itervalues(self._registry):
lib_elements.extend(
element
for element in itervalues(namespace)
if element._lib_long_name == long_name
)
return lib_elements
def get_element_type(self, xmlns, name):
"""Get an element by namespace and name"""
if xmlns in self._dynamic_elements:
return self._dynamic_elements[xmlns](name)
return self._registry.get(xmlns, {}).get(name, None)
def find_xmlns(self, name):
"""Find the xmlns with contain a given tag, or return None"""
for xmlns in sorted(self._registry.keys()):
if name in self._registry[xmlns]:
return xmlns
return None
def check_namespace(self, xmlns):
"""Check if a namespace exists in the registry"""
return xmlns in self._registry
    def set_registry(self, registry):
        """Restore a saved registry"""
        # Shallow copy, same convention as clone(): the inner per-namespace
        # dicts remain shared with the saved registry.  .copy() preserves the
        # defaultdict's default factory.
        self._registry = registry._registry.copy()
        self._dynamic_elements = registry._dynamic_elements.copy()
def get_tag(self, tag):
"""Get a tag from it's name (in Clarke's notation)"""
return self.get_element_type(*extract_namespace(tag))
# Bootstrap the shared default registry.  It is created with
# update_from_default=False (it *is* the default), and bound both as a
# module-level name and as the ElementRegistry.default_registry class
# attribute so new registries can seed themselves from it.
default_registry = ElementRegistry.default_registry = ElementRegistry(
    update_from_default=False
)
# Presumably makes the default registry the active one -- push_registry is
# defined outside this view; verify against the class definition.
ElementRegistry.push_registry(ElementRegistry.default_registry)
| mit |
jjingrong/PONUS-1.2 | venv/build/django/django/contrib/gis/db/backends/postgis/introspection.py | 109 | 4592 | from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.contrib.gis.gdal import OGRGeomType
class GeoIntrospectionError(Exception):
    """Internal signal: a column was not found in the geometry_columns view
    (caught in get_geometry_type to fall back to geography_columns)."""
class PostGISIntrospection(DatabaseIntrospection):
    """Database introspection for PostGIS: maps PostGIS custom column types
    (geometry/geography) to GeoDjango field types."""

    # Reverse dictionary for PostGIS geometry types not populated until
    # introspection is actually performed.
    postgis_types_reverse = {}

    # PostGIS system tables/views that must never be introspected as models.
    ignored_tables = DatabaseIntrospection.ignored_tables + [
        'geography_columns',
        'geometry_columns',
        'raster_columns',
        'spatial_ref_sys',
        'raster_overviews',
    ]

    def get_postgis_types(self):
        """
        Returns a dictionary with keys that are the PostgreSQL object
        identification integers for the PostGIS geometry and/or
        geography types (if supported).
        """
        cursor = self.connection.cursor()
        # The OID integers associated with the geometry type may
        # be different across versions; hence, this is why we have
        # to query the PostgreSQL pg_type table corresponding to the
        # PostGIS custom data types.
        oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s'
        try:
            cursor.execute(oid_sql, ('geometry',))
            GEOM_TYPE = cursor.fetchone()[0]
            postgis_types = { GEOM_TYPE : 'GeometryField' }
            if self.connection.ops.geography:
                cursor.execute(oid_sql, ('geography',))
                GEOG_TYPE = cursor.fetchone()[0]
                # The value for the geography type is actually a tuple
                # to pass in the `geography=True` keyword to the field
                # definition.
                postgis_types[GEOG_TYPE] = ('GeometryField', {'geography' : True})
        finally:
            # Always release the cursor, even if a query fails.
            cursor.close()

        return postgis_types

    def get_field_type(self, data_type, description):
        # Resolve a column's type OID to a Django field class name,
        # including the PostGIS custom types.
        if not self.postgis_types_reverse:
            # If the PostGIS types reverse dictionary is not populated, do so
            # now. In order to prevent unnecessary requests upon connection
            # initialization, the `data_types_reverse` dictionary is not updated
            # with the PostGIS custom types until introspection is actually
            # performed -- in other words, when this function is called.
            self.postgis_types_reverse = self.get_postgis_types()
            self.data_types_reverse.update(self.postgis_types_reverse)
        return super(PostGISIntrospection, self).get_field_type(data_type, description)

    def get_geometry_type(self, table_name, geo_col):
        """
        The geometry type OID used by PostGIS does not indicate the particular
        type of field that a geometry column is (e.g., whether it's a
        PointField or a PolygonField).  Thus, this routine queries the PostGIS
        metadata tables to determine the geometry type.
        """
        cursor = self.connection.cursor()
        try:
            try:
                # First seeing if this geometry column is in the `geometry_columns`
                cursor.execute('SELECT "coord_dimension", "srid", "type" '
                               'FROM "geometry_columns" '
                               'WHERE "f_table_name"=%s AND "f_geometry_column"=%s',
                               (table_name, geo_col))
                row = cursor.fetchone()
                if not row: raise GeoIntrospectionError
            except GeoIntrospectionError:
                # Not a geometry column; fall back to `geography_columns`
                # when the backend supports the geography type.
                if self.connection.ops.geography:
                    cursor.execute('SELECT "coord_dimension", "srid", "type" '
                                   'FROM "geography_columns" '
                                   'WHERE "f_table_name"=%s AND "f_geography_column"=%s',
                                   (table_name, geo_col))
                    row = cursor.fetchone()

            if not row:
                raise Exception('Could not find a geometry or geography column for "%s"."%s"' %
                                (table_name, geo_col))

            # OGRGeomType does not require GDAL and makes it easy to convert
            # from OGC geom type name to Django field.
            field_type = OGRGeomType(row[2]).django

            # Getting any GeometryField keyword arguments that are not the default.
            dim = row[0]
            srid = row[1]
            field_params = {}
            if srid != 4326:
                field_params['srid'] = srid
            if dim != 2:
                field_params['dim'] = dim
        finally:
            cursor.close()

        return field_type, field_params
| mit |
cmcqueen/simplerandom | python/python3/simplerandom/iterators/_iterators_py.py | 1 | 40947 |
from simplerandom._bitcolumnmatrix import BitColumnMatrix
# Public API of this module.  Note that "_traverse_iter" is exported despite
# its leading underscore -- presumably reused by sibling simplerandom modules;
# verify against the package's other imports.
__all__ = [
    "Cong",
    "SHR3",
    "MWC1",
    "MWC2",
    "MWC64",
    "KISS",
    "KISS2",
    "LFSR113",
    "LFSR88",
    "_traverse_iter",
]
def _traverse_iter(o, tree_types=(list, tuple)):
    """Iterate over nested containers and/or iterators.

    This allows generator __init__() functions to be passed seeds either as
    a series of arguments, or as a list/tuple.
    """
    SIMPLERANDOM_BITS = 32
    SIMPLERANDOM_MOD = 2**SIMPLERANDOM_BITS
    SIMPLERANDOM_MASK = SIMPLERANDOM_MOD - 1

    if not (isinstance(o, tree_types) or getattr(o, '__iter__', False)):
        # Leaf value: yield it unchanged.
        yield o
        return
    for value in o:
        for subvalue in _traverse_iter(value):
            # Split each (possibly very large) integer into 32-bit words,
            # least-significant word first.
            while True:
                yield subvalue & SIMPLERANDOM_MASK
                subvalue >>= SIMPLERANDOM_BITS
                # If value is negative, then it effectively has infinitely
                # extending '1' bits (modelled as a 2's complement
                # representation).  So when right-shifting it, it will
                # eventually get to -1, and any further right-shifting will
                # not change it.
                if subvalue == 0 or subvalue == -1:
                    break
def _repeat_iter(input_iter):
    """Iterate over the input iter values. Then repeat the last value
    indefinitely. This is useful to repeat seed values when an insufficient
    number of seeds are provided.

    E.g. KISS(1) effectively becomes KISS(1, 1, 1, 1), rather than (if we just
    used default values) KISS(1, default-value, default-value, default-value)

    It is better to repeat the last seed value, rather than just using default
    values. Given two generators seeded with an insufficient number of seeds,
    repeating the last seed value means their states are more different from
    each other, with less correlation between their generated outputs.
    """
    last_value = None
    for last_value in input_iter:
        yield last_value
    # An empty input (or a trailing None) leaves last_value as None, in which
    # case nothing is repeated.
    if last_value is not None:
        while True:
            yield last_value
def _next_seed_int32_or_default(seed_iter, default_value):
    """Take the next seed from *seed_iter* masked to 32 bits, or
    *default_value* when the iterator is exhausted or yields None."""
    try:
        seed_item = next(seed_iter)
    except StopIteration:
        return default_value
    if seed_item is None:
        return default_value
    return int(seed_item) & 0xFFFFFFFF
def _geom_series_uint32(r, n):
    """Unsigned integer calculation of sum of geometric series:
    1 + r + r^2 + r^3 + ... r^(n-1)
    summed to n terms.
    Calculated modulo 2**32.
    Use the formula (r**n - 1) / (r - 1)
    """
    if n == 0:
        return 0
    if n == 1 or r == 0:
        return 1
    mod = 2**32
    # Split (r - 1) into a power of two (a common factor with the modulus
    # 2**32) and an odd part, which is coprime with 2**32 and therefore
    # invertible modulo 2**32.
    odd_part = r - 1
    common = 1
    while (odd_part % 2) == 0:
        odd_part //= 2
        common *= 2
    # For odd a, a**(2**32 - 1) is the modular inverse of a mod 2**32
    # (the multiplicative order of a divides 2**30).
    odd_inverse = pow(odd_part, mod - 1, mod)
    # Compute (r**n - 1) with enough extra precision (mod common * 2**32) so
    # the exact division by the power-of-two factor is valid.
    numerator = pow(r, n, common * mod) - 1
    return (numerator // common * odd_inverse) % mod
class Cong(object):
    '''Congruential random number generator

    This is a congruential generator with the widely used
    69069 multiplier: x[n]=69069x[n-1]+12345. It has
    period 2**32.

    The leading half of its 32 bits seem to pass tests,
    but bits in the last half are too regular. It fails
    tests for which those bits play a significant role.
    But keep in mind that it is a rare application for
    which the trailing bits play a significant role. Cong
    is one of the most widely used generators of the last
    30 years, as it was the system generator for VAX and
    was incorporated in several popular software packages,
    all seemingly without complaint.
    '''
    SIMPLERANDOM_MOD = 2**32
    SIMPLERANDOM_MAX = 2**32 - 1
    # Full period: every 32-bit value occurs once per cycle.
    CONG_CYCLE_LEN = 2**32
    CONG_MULT = 69069
    CONG_CONST = 12345

    @staticmethod
    def min():
        # Smallest value this generator can return.
        return 0

    @staticmethod
    def max():
        # Largest value this generator can return.
        return Cong.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        self.cong = _next_seed_int32_or_default(seed_iter, 0)
        if kwargs.pop('mix_extras', False):
            # Fold any remaining seed values into the state.
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        # Re-seed in place; accepts the same arguments as __init__().
        self.__init__(*args, **kwargs)

    def sanitise(self):
        # Every 32-bit value is a valid Cong state, so nothing to fix.
        pass

    def __next__(self):
        # x[n] = (69069 * x[n-1] + 12345) mod 2**32
        self.cong = (69069 * self.cong + 12345) & 0xFFFFFFFF
        return self.cong

    def current(self):
        # Return the last generated value without advancing the state.
        return self.cong

    def mix(self, *args):
        # XOR each seed word into the state, stepping the generator once per
        # word so successive seed words perturb different states.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            self.cong ^= value_int
            next(self)
        return self.cong

    def __iter__(self):
        return self

    def getstate(self):
        return (self.cong, )

    def setstate(self, state):
        (self.cong, ) = (int(val) & 0xFFFFFFFF for val in state)

    def jumpahead(self, n):
        # Cong.jumpahead(n) = r**n * x mod 2**32 +
        #                       c * (1 + r + r**2 + ... + r**(n-1)) mod 2**32
        # where r = 69069 and c = 12345.
        #
        # The part c * (1 + r + r**2 + ... + r**(n-1)) is a geometric series.
        # For calculating geometric series mod 2**32, see:
        # http://www.codechef.com/wiki/tutorial-just-simple-sum#Back_to_the_geometric_series
        n = int(n) % self.CONG_CYCLE_LEN
        mult_exp = pow(self.CONG_MULT, n, self.SIMPLERANDOM_MOD)
        add_const = (_geom_series_uint32(self.CONG_MULT, n) * self.CONG_CONST) & 0xFFFFFFFF
        self.cong = (mult_exp * self.cong + add_const) & 0xFFFFFFFF

    def __repr__(self):
        return self.__class__.__name__ + "(" + repr(int(self.cong)) + ")"
class SHR3(object):
    '''3-shift-register random number generator

    SHR3 is a 3-shift-register generator with period
    2**32-1. It uses y[n]=y[n-1](I+L^13)(I+R^17)(I+L^5),
    with the y's viewed as binary vectors, L the 32x32
    binary matrix that shifts a vector left 1, and R its
    transpose.

    SHR3 seems to pass all except those related to the
    binary rank test, since 32 successive values, as
    binary vectors, must be linearly independent, while
    32 successive truly random 32-bit integers, viewed
    as binary vectors, will be linearly independent only
    about 29% of the time.
    '''
    SIMPLERANDOM_MOD = 2**32
    SIMPLERANDOM_MAX = 2**32 - 1
    # Period is 2**32 - 1: zero is a fixed point and is excluded.
    SHR3_CYCLE_LEN = 2**32 - 1

    # Bit-matrix representation of one step of the generator, used by
    # jumpahead(): (I+L^5)(I+R^17)(I+L^13) applied as column matrices.
    _SHR3_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,13)
    _SHR3_MATRIX_b = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,-17)
    _SHR3_MATRIX_c = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,5)
    _SHR3_MATRIX = _SHR3_MATRIX_c * _SHR3_MATRIX_b * _SHR3_MATRIX_a

    @staticmethod
    def min():
        # Zero never occurs; minimum output is 1.
        return 1

    @staticmethod
    def max():
        return SHR3.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        self.shr3 = _next_seed_int32_or_default(seed_iter, 0xFFFFFFFF)
        self.sanitise()
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        # Re-seed in place; accepts the same arguments as __init__().
        self.__init__(*args, **kwargs)

    def sanitise(self):
        if self.shr3 == 0:
            # 0 is a bad seed. Invert to get a good seed.
            self.shr3 = 0xFFFFFFFF

    def __next__(self):
        # The masks keep intermediate left-shift results within 32 bits.
        shr3 = self.shr3
        shr3 ^= (shr3 & 0x7FFFF) << 13
        shr3 ^= shr3 >> 17
        shr3 ^= (shr3 & 0x7FFFFFF) << 5
        self.shr3 = shr3
        return shr3

    def current(self):
        # Return the last generated value without advancing the state.
        return self.shr3

    def mix(self, *args):
        # XOR each seed word into the state (re-sanitising in case the XOR
        # produced the bad all-zero state), stepping once per word.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            self.shr3 ^= value_int
            self.sanitise()
            next(self)
        return self.shr3

    def __iter__(self):
        return self

    def getstate(self):
        return (self.shr3, )

    def setstate(self, state):
        (self.shr3, ) = (int(val) & 0xFFFFFFFF for val in state)
        self.sanitise()

    def jumpahead(self, n):
        # Advance n steps in O(log n) by raising the step matrix to the
        # n-th power and applying it to the state vector.
        n = int(n) % self.SHR3_CYCLE_LEN
        shr3 = pow(self._SHR3_MATRIX, n) * self.shr3
        self.shr3 = shr3

    def __repr__(self):
        return self.__class__.__name__ + "(" + repr(int(self.shr3)) + ")"
class MWC2(object):
    '''"Multiply-with-carry" random number generator

    Very similar to MWC1, except that it concatenates the
    two 16-bit MWC generators differently. The 'x'
    generator is rotated 16 bits instead of just shifted
    16 bits.

    This gets much better test results than MWC1 in
    L'Ecuyer's TestU01 test suite, so it should probably
    be preferred.
    '''
    SIMPLERANDOM_MAX = 2**32 - 1

    # Each 16-bit MWC component is equivalent to a multiplicative
    # congruential generator modulo (mult * 2**16 - 1); see __next__().
    _MWC_UPPER_MULT = 36969
    _MWC_LOWER_MULT = 18000
    _MWC_UPPER_MODULO = _MWC_UPPER_MULT * 2**16 - 1
    _MWC_LOWER_MODULO = _MWC_LOWER_MULT * 2**16 - 1
    _MWC_UPPER_CYCLE_LEN = _MWC_UPPER_MULT * 2**16 // 2 - 1
    _MWC_LOWER_CYCLE_LEN = _MWC_LOWER_MULT * 2**16 // 2 - 1

    @staticmethod
    def min():
        return 0

    @staticmethod
    def max():
        return MWC2.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        # _repeat_iter repeats the last seed so a single seed fills both
        # component states.
        repeat_seed_iter = _repeat_iter(seed_iter)
        self.mwc_upper = _next_seed_int32_or_default(repeat_seed_iter, 0xFFFFFFFF)
        self.mwc_lower = _next_seed_int32_or_default(repeat_seed_iter, 0xFFFFFFFF)
        self.sanitise()
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        self.__init__(*args, **kwargs)

    def sanitise(self):
        self._sanitise_upper()
        self._sanitise_lower()

    def _sanitise_upper(self):
        mwc_upper_orig = self.mwc_upper
        # There are a few bad states--that is, any multiple of
        # _MWC_UPPER_MODULO -- that is 0x9068FFFF (which is 36969 * 2**16 - 1).
        sanitised_value = mwc_upper_orig % 0x9068FFFF
        if sanitised_value == 0:
            # Invert to get a good seed.
            sanitised_value = (mwc_upper_orig ^ 0xFFFFFFFF) % 0x9068FFFF
        self.mwc_upper = sanitised_value

    def _sanitise_lower(self):
        mwc_lower_orig = self.mwc_lower
        # There are a few bad states--that is, any multiple of
        # _MWC_LOWER_MODULO -- that is 0x464FFFFF (which is 18000 * 2**16 - 1).
        sanitised_value = mwc_lower_orig % 0x464FFFFF
        if sanitised_value == 0:
            # Invert to get a good seed.
            sanitised_value = (mwc_lower_orig ^ 0xFFFFFFFF) % 0x464FFFFF
        self.mwc_lower = sanitised_value

    def _next_upper(self):
        # One step of the upper 16-bit MWC component.
        self.mwc_upper = 36969 * (self.mwc_upper & 0xFFFF) + (self.mwc_upper >> 16)

    def _next_lower(self):
        # One step of the lower 16-bit MWC component.
        self.mwc_lower = 18000 * (self.mwc_lower & 0xFFFF) + (self.mwc_lower >> 16)

    def __next__(self):
        # Note: this is apparently equivalent to:
        # self.mwc_upper = (36969 * self.mwc_upper) % 0x9068FFFF
        # self.mwc_lower = (18000 * self.mwc_lower) % 0x464FFFFF
        # See Random Number Generation, Pierre L’Ecuyer, section 3.6 Linear Recurrences With Carry
        # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.136.6898&rep=rep1&type=pdf
        self.mwc_upper = 36969 * (self.mwc_upper & 0xFFFF) + (self.mwc_upper >> 16)
        self.mwc_lower = 18000 * (self.mwc_lower & 0xFFFF) + (self.mwc_lower >> 16)
        return self.current()       # call self.current() so that MWC1 can over-ride it

    def current(self):
        # Combine components: the upper state is *rotated* by 16 bits
        # (both halves contribute), then added to the lower state.
        return (((self.mwc_upper & 0xFFFF) << 16) + (self.mwc_upper >> 16) + self.mwc_lower) & 0xFFFFFFFF

    mwc = property(current)         # Note that this must be over-ridden again in MWC1

    def mix(self, *args):
        # XOR each seed word into one of the two component states, chosen by
        # an output bit, then advance that component.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            selector = (current >> 24) & 0x1
            if selector == 0:
                self.mwc_upper ^= value_int
                self._sanitise_upper()
                self._next_upper()
            else:
                self.mwc_lower ^= value_int
                self._sanitise_lower()
                self._next_lower()
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        return (self.mwc_upper, self.mwc_lower)

    def setstate(self, state):
        (self.mwc_upper, self.mwc_lower) = (int(val) & 0xFFFFFFFF for val in state)
        self.sanitise()

    def jumpahead(self, n):
        # See next() note on functional equivalence.
        n_upper = int(n) % self._MWC_UPPER_CYCLE_LEN
        self.mwc_upper = pow(self._MWC_UPPER_MULT, n_upper, self._MWC_UPPER_MODULO) * self.mwc_upper % self._MWC_UPPER_MODULO
        n_lower = int(n) % self._MWC_LOWER_CYCLE_LEN
        self.mwc_lower = pow(self._MWC_LOWER_MULT, n_lower, self._MWC_LOWER_MODULO) * self.mwc_lower % self._MWC_LOWER_MODULO

    def __repr__(self):
        return self.__class__.__name__ + "(" + repr(int(self.mwc_upper)) + "," + repr(int(self.mwc_lower)) + ")"
class MWC1(MWC2):
    '''"Multiply-with-carry" random number generator

    This is the MWC as defined in Marsaglia's 1999
    newsgroup post.

    This uses two MWC generators to generate high and
    low 16-bit parts, which are then combined to make a
    32-bit value.

    The MWC generator concatenates two 16-bit multiply-
    with-carry generators:

        x[n]=36969x[n-1]+carry,
        y[n]=18000y[n-1]+carry mod 2**16,

    It has a period about 2**60.

    This seems to pass all Marsaglia's Diehard tests.
    However, it fails many of L'Ecuyer's TestU01
    tests. The modified MWC2 generator passes many more
    tests in TestU01, and should probably be preferred,
    unless backwards compatibility is required.
    '''

    def current(self):
        # MWC1 differs from MWC2 only here: the upper state is shifted (not
        # rotated) into the high 16 bits before adding the lower state.
        return (((self.mwc_upper & 0xFFFF) << 16) + self.mwc_lower) & 0xFFFFFFFF

    # We have to over-ride this again, because of the way property() works.
    mwc = property(current)
class MWC64(object):
    '''"Multiply-with-carry" random number generator

    This uses a single MWC generator with 64 bits to
    generate a 32-bit value. The seeds should be 32-bit
    values.
    '''
    SIMPLERANDOM_MAX = 2**32 - 1

    # The 64-bit MWC step is equivalent to a multiplicative congruential
    # generator modulo (mult * 2**32 - 1); see __next__().
    _MWC64_MULT = 698769069
    _MWC64_MODULO = _MWC64_MULT * 2**32 - 1
    _MWC64_CYCLE_LEN = _MWC64_MULT * 2**32 // 2 - 1

    @staticmethod
    def min():
        return 0

    @staticmethod
    def max():
        return MWC64.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        # Repeat the last seed so a single seed fills both 32-bit halves.
        repeat_seed_iter = _repeat_iter(seed_iter)
        self.mwc_upper = _next_seed_int32_or_default(repeat_seed_iter, 0xFFFFFFFF)
        self.mwc_lower = _next_seed_int32_or_default(repeat_seed_iter, 0xFFFFFFFF)
        self.sanitise()
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        self.__init__(*args, **kwargs)

    def sanitise(self):
        # Treat (upper, lower) as one 64-bit state for validation.
        state64 = (self.mwc_upper << 32) + self.mwc_lower
        temp = state64
        was_changed = False
        # There are a few bad seeds--that is, seeds that are a multiple of
        # 0x29A65EACFFFFFFFF (which is 698769069 * 2**32 - 1).
        if state64 >= 0x29A65EACFFFFFFFF:
            was_changed = True
            temp = state64 % 0x29A65EACFFFFFFFF
        if temp == 0:
            # Invert to get a good seed.
            temp = (state64 ^ 0xFFFFFFFFFFFFFFFF) % 0x29A65EACFFFFFFFF
            was_changed = True
        if was_changed:
            self.mwc_upper = temp >> 32
            self.mwc_lower = temp & 0xFFFFFFFF

    def __next__(self):
        # Note: this is apparently equivalent to:
        # temp64 = (self.mwc_upper << 32) + self.mwc_lower
        # temp64 = (698769069 * temp64) % 0x29A65EACFFFFFFFF
        # See reference in MWC2.next().
        temp64 = 698769069 * self.mwc_lower + self.mwc_upper
        self.mwc_lower = temp64 & 0xFFFFFFFF
        self.mwc_upper = (temp64 >> 32) & 0xFFFFFFFF
        return self.mwc_lower

    def current(self):
        # The output is simply the low 32 bits of the state.
        return self.mwc_lower

    mwc = property(current)

    def mix(self, *args):
        # XOR each seed word into one half of the state (chosen by an output
        # bit), re-validate the whole 64-bit state, and advance once.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            selector = (current >> 24) & 0x1
            if selector == 0:
                self.mwc_upper ^= value_int
            else:
                self.mwc_lower ^= value_int
            self.sanitise()
            next(self)
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        return (self.mwc_upper, self.mwc_lower)

    def setstate(self, state):
        (self.mwc_upper, self.mwc_lower) = (int(val) & 0xFFFFFFFF for val in state)
        self.sanitise()

    def jumpahead(self, n):
        # See MWC2.next() note on functional equivalence.
        n = int(n) % self._MWC64_CYCLE_LEN
        temp64 = (self.mwc_upper << 32) + self.mwc_lower
        temp64 = pow(self._MWC64_MULT, n, self._MWC64_MODULO) * temp64 % self._MWC64_MODULO
        self.mwc_lower = temp64 & 0xFFFFFFFF
        self.mwc_upper = (temp64 >> 32) & 0xFFFFFFFF

    def __repr__(self):
        return self.__class__.__name__ + "(" + repr(int(self.mwc_upper)) + "," + repr(int(self.mwc_lower)) + ")"
class KISS(object):
    '''"Keep It Simple Stupid" random number generator

    It combines the MWC2, Cong, SHR3 generators. Period is
    about 2**123.

    This is based on, but not identical to, Marsaglia's
    KISS generator as defined in his 1999 newsgroup post.
    That generator most significantly has problems with its
    SHR3 component (see notes on SHR3). Since we are not
    keeping compatibility with the 1999 KISS generator for
    that reason, we take the opportunity to slightly
    update the MWC and Cong generators too.
    '''
    SIMPLERANDOM_MAX = 2**32 - 1

    @staticmethod
    def min():
        return 0

    @staticmethod
    def max():
        return KISS.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        # Repeat the last seed so fewer than 4 seeds still fill all
        # component generators (see _repeat_iter for rationale).
        repeat_seed_iter = _repeat_iter(seed_iter)
        self.random_mwc = MWC2(repeat_seed_iter)
        self.random_cong = Cong(repeat_seed_iter)
        self.random_shr3 = SHR3(repeat_seed_iter)
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        self.__init__(*args, **kwargs)

    def __next__(self):
        # Combine all three components: (MWC2 ^ Cong) + SHR3, mod 2**32.
        mwc_val = next(self.random_mwc)
        cong_val = next(self.random_cong)
        shr3_val = next(self.random_shr3)
        return ((mwc_val ^ cong_val) + shr3_val) & 0xFFFFFFFF

    def current(self):
        # Same combination as __next__(), without advancing any component.
        return ((self.random_mwc.current() ^ self.random_cong.cong) + self.random_shr3.shr3) & 0xFFFFFFFF

    def mix(self, *args):
        # XOR each seed word into one of the four component states, chosen
        # by two output bits, then advance that component.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            selector = (current >> 24) & 0x3
            if selector == 0:
                self.random_mwc.mwc_upper ^= value_int
                self.random_mwc._sanitise_upper()
                self.random_mwc._next_upper()
            elif selector == 1:
                self.random_mwc.mwc_lower ^= value_int
                self.random_mwc._sanitise_lower()
                self.random_mwc._next_lower()
            elif selector == 2:
                self.random_cong.cong ^= value_int
                # Cong doesn't need any sanitising
                next(self.random_cong)
            else:   # selector == 3
                self.random_shr3.shr3 ^= value_int
                self.random_shr3.sanitise()
                next(self.random_shr3)
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        return (self.random_mwc.getstate(), self.random_cong.getstate(), self.random_shr3.getstate())

    def setstate(self, state):
        (mwc_state, cong_state, shr3_state) = state
        self.random_mwc.setstate(mwc_state)
        self.random_cong.setstate(cong_state)
        self.random_shr3.setstate(shr3_state)

    def jumpahead(self, n):
        # Each component can jump independently; their combination then
        # matches n steps of the combined generator.
        self.random_mwc.jumpahead(n)
        self.random_cong.jumpahead(n)
        self.random_shr3.jumpahead(n)

    # Convenience properties exposing the component states directly, kept
    # for API compatibility with the single-object C implementations.
    def _get_mwc_upper(self):
        return self.random_mwc.mwc_upper
    def _set_mwc_upper(self, value):
        self.random_mwc.mwc_upper = value
    mwc_upper = property(_get_mwc_upper, _set_mwc_upper)

    def _get_mwc_lower(self):
        return self.random_mwc.mwc_lower
    def _set_mwc_lower(self, value):
        self.random_mwc.mwc_lower = value
    mwc_lower = property(_get_mwc_lower, _set_mwc_lower)

    def _get_mwc(self):
        return self.random_mwc.current()
    mwc = property(_get_mwc)

    def _get_shr3(self):
        return self.random_shr3.shr3
    def _set_shr3(self, value):
        self.random_shr3.shr3 = value
    shr3 = property(_get_shr3, _set_shr3)

    def _get_cong(self):
        return self.random_cong.cong
    def _set_cong(self, value):
        self.random_cong.cong = value
    cong = property(_get_cong, _set_cong)

    def __repr__(self):
        return (self.__class__.__name__ + "(" + repr(int(self.mwc_upper)) +
                                          "," + repr(int(self.mwc_lower)) +
                                          "," + repr(int(self.cong)) +
                                          "," + repr(int(self.shr3)) + ")")
class KISS2(object):
    '''"Keep It Simple Stupid" random number generator

    It combines the MWC64, Cong, SHR3 generators. Period
    is about 2**123.

    This is a slightly updated KISS generator design, from
    a newsgroup post in 2003:

    http://groups.google.com/group/sci.math/msg/9959175f66dd138f

    The MWC component uses a single 64-bit calculation,
    instead of two 32-bit calculations that are combined.
    '''
    SIMPLERANDOM_MAX = 2**32 - 1

    @staticmethod
    def min():
        return 0

    @staticmethod
    def max():
        return KISS2.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        # Repeat the last seed so fewer than 4 seeds still fill all
        # component generators (see _repeat_iter for rationale).
        repeat_seed_iter = _repeat_iter(seed_iter)
        self.random_mwc = MWC64(repeat_seed_iter)
        self.random_cong = Cong(repeat_seed_iter)
        self.random_shr3 = SHR3(repeat_seed_iter)
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        self.__init__(*args, **kwargs)

    def __next__(self):
        # KISS2 combines by plain addition: MWC64 + Cong + SHR3, mod 2**32.
        mwc_val = next(self.random_mwc)
        cong_val = next(self.random_cong)
        shr3_val = next(self.random_shr3)
        return (mwc_val + cong_val + shr3_val) & 0xFFFFFFFF

    def current(self):
        # Same combination as __next__(), without advancing any component.
        return (self.random_mwc.current() + self.random_cong.cong + self.random_shr3.shr3) & 0xFFFFFFFF

    def mix(self, *args):
        # XOR each seed word into one of the four component states, chosen
        # by two output bits, then advance that component.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            selector = (current >> 24) & 0x3
            if selector == 0:
                self.random_mwc.mwc_upper ^= value_int
                self.random_mwc.sanitise()
                next(self.random_mwc)
            elif selector == 1:
                self.random_mwc.mwc_lower ^= value_int
                self.random_mwc.sanitise()
                next(self.random_mwc)
            elif selector == 2:
                self.random_cong.cong ^= value_int
                # Cong doesn't need any sanitising
                next(self.random_cong)
            else:   # selector == 3
                self.random_shr3.shr3 ^= value_int
                self.random_shr3.sanitise()
                next(self.random_shr3)
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        return (self.random_mwc.getstate(), self.random_cong.getstate(), self.random_shr3.getstate())

    def setstate(self, state):
        (mwc_state, cong_state, shr3_state) = state
        self.random_mwc.setstate(mwc_state)
        self.random_cong.setstate(cong_state)
        self.random_shr3.setstate(shr3_state)

    def jumpahead(self, n):
        # Each component jumps independently; their sum matches n steps of
        # the combined generator.
        self.random_mwc.jumpahead(n)
        self.random_cong.jumpahead(n)
        self.random_shr3.jumpahead(n)

    # Convenience properties exposing the component states directly, kept
    # for API compatibility with the single-object C implementations.
    def _get_mwc_upper(self):
        return self.random_mwc.mwc_upper
    def _set_mwc_upper(self, value):
        self.random_mwc.mwc_upper = value
    mwc_upper = property(_get_mwc_upper, _set_mwc_upper)

    def _get_mwc_lower(self):
        return self.random_mwc.mwc_lower
    def _set_mwc_lower(self, value):
        self.random_mwc.mwc_lower = value
    mwc_lower = property(_get_mwc_lower, _set_mwc_lower)

    def _get_mwc(self):
        return self.random_mwc.mwc
    mwc = property(_get_mwc)

    def _get_shr3(self):
        return self.random_shr3.shr3
    def _set_shr3(self, value):
        self.random_shr3.shr3 = value
    shr3 = property(_get_shr3, _set_shr3)

    def _get_cong(self):
        return self.random_cong.cong
    def _set_cong(self, value):
        self.random_cong.cong = value
    cong = property(_get_cong, _set_cong)

    def __repr__(self):
        return (self.__class__.__name__ + "(" + repr(int(self.mwc_upper)) +
                                          "," + repr(int(self.mwc_lower)) +
                                          "," + repr(int(self.cong)) +
                                          "," + repr(int(self.shr3)) + ")")
def lfsr_next_one_seed(seed_iter, min_value_shift):
    """High-quality seeding for LFSR generators.

    The LFSR generator components discard a certain number of their lower bits
    when generating each output. The significant bits of their state must not
    all be zero. We must ensure that when seeding the generator.

    In case generators are seeded from an incrementing input (such as a system
    timer), and between increments only the lower bits may change, we would
    also like the lower bits of the input to change the initial state, and not
    just be discarded. So we do basic manipulation of the seed input value to
    ensure that all bits of the seed input affect the initial state.
    """
    try:
        seed = next(seed_iter)
    except StopIteration:
        return 0xFFFFFFFF
    if seed is None:
        return 0xFFFFFFFF
    seed = int(seed) & 0xFFFFFFFF
    min_value = 1 << min_value_shift
    # Mix the low half of the seed into the high half, so that changes in
    # only the low bits still affect the significant state bits.
    working_seed = (seed ^ (seed << 16)) & 0xFFFFFFFF
    if working_seed < min_value:
        # Too small to be a valid state: try promoting the low byte instead,
        # and as a last resort invert all bits.
        working_seed = (seed << 24) & 0xFFFFFFFF
        if working_seed < min_value:
            working_seed ^= 0xFFFFFFFF
    return working_seed
def lfsr_validate_one_seed(seed, min_value_shift):
    '''Validate seeds for LFSR generators

    The LFSR generator components discard a certain number of their lower bits
    when generating each output. The significant bits of their state must not
    all be zero. We must ensure that when seeding the generator.

    This is a light-weight validation of state, used from setstate().
    '''
    # A state below the minimum has all-zero significant bits; invert it to
    # obtain a valid state.
    if seed < (1 << min_value_shift):
        seed ^= 0xFFFFFFFF
    return seed
def lfsr_state_z(z):
    """Map an internal z state word to its external representation.

    The z ^ (z << 16) mapping (mod 2**32) is its own inverse, so the same
    transform converts in both directions.
    """
    shifted = (z << 16) & 0xFFFFFFFF
    return int(z ^ shifted)
def lfsr_repr_z(z):
    """Return repr() of a z state word in its external representation
    (the same z ^ (z << 16) transform as lfsr_state_z)."""
    external = int(z ^ ((z << 16) & 0xFFFFFFFF))
    return repr(external)
class LFSR113(object):
    '''Combined LFSR random number generator by L'Ecuyer

    It combines 4 LFSR generators. The generators have been
    chosen for maximal equidistribution.

    The period is approximately 2**113.

    "Tables of Maximally-Equidistributed Combined Lfsr Generators"
    P. L'Ecuyer
    Mathematics of Computation, 68, 225 (1999), 261-269.
    '''
    SIMPLERANDOM_MAX = 2**32 - 1

    # Bit-matrix form of one step of each component LFSR, used by
    # jumpahead() to advance n steps via matrix exponentiation.  Each
    # component k discards its low bits, so its cycle length is
    # 2**(32 - k_low_bits) - 1.
    _LFSR113_1_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,6)
    _LFSR113_1_MATRIX_b = BitColumnMatrix.shift(32,-13)
    _LFSR113_1_MATRIX_c = BitColumnMatrix.mask(32, 1, 32)
    _LFSR113_1_MATRIX_d = BitColumnMatrix.shift(32,18)
    _LFSR113_1_MATRIX = _LFSR113_1_MATRIX_d * _LFSR113_1_MATRIX_c + _LFSR113_1_MATRIX_b * _LFSR113_1_MATRIX_a
    _LFSR113_1_CYCLE_LEN = 2**(32 - 1) - 1

    _LFSR113_2_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,2)
    _LFSR113_2_MATRIX_b = BitColumnMatrix.shift(32,-27)
    _LFSR113_2_MATRIX_c = BitColumnMatrix.mask(32, 3, 32)
    _LFSR113_2_MATRIX_d = BitColumnMatrix.shift(32,2)
    _LFSR113_2_MATRIX = _LFSR113_2_MATRIX_d * _LFSR113_2_MATRIX_c + _LFSR113_2_MATRIX_b * _LFSR113_2_MATRIX_a
    _LFSR113_2_CYCLE_LEN = 2**(32 - 3) - 1

    _LFSR113_3_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,13)
    _LFSR113_3_MATRIX_b = BitColumnMatrix.shift(32,-21)
    _LFSR113_3_MATRIX_c = BitColumnMatrix.mask(32, 4, 32)
    _LFSR113_3_MATRIX_d = BitColumnMatrix.shift(32,7)
    _LFSR113_3_MATRIX = _LFSR113_3_MATRIX_d * _LFSR113_3_MATRIX_c + _LFSR113_3_MATRIX_b * _LFSR113_3_MATRIX_a
    _LFSR113_3_CYCLE_LEN = 2**(32 - 4) - 1

    _LFSR113_4_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,3)
    _LFSR113_4_MATRIX_b = BitColumnMatrix.shift(32,-12)
    _LFSR113_4_MATRIX_c = BitColumnMatrix.mask(32, 7, 32)
    _LFSR113_4_MATRIX_d = BitColumnMatrix.shift(32,13)
    _LFSR113_4_MATRIX = _LFSR113_4_MATRIX_d * _LFSR113_4_MATRIX_c + _LFSR113_4_MATRIX_b * _LFSR113_4_MATRIX_a
    _LFSR113_4_CYCLE_LEN = 2**(32 - 7) - 1

    @staticmethod
    def min():
        return 0

    @staticmethod
    def max():
        return LFSR113.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        repeat_seed_iter = _repeat_iter(seed_iter)
        # Each component discards a different number of low state bits
        # (1, 3, 4, 7), hence the different minimum-value shifts.
        self.z1 = lfsr_next_one_seed(repeat_seed_iter, 1)
        self.z2 = lfsr_next_one_seed(repeat_seed_iter, 3)
        self.z3 = lfsr_next_one_seed(repeat_seed_iter, 4)
        self.z4 = lfsr_next_one_seed(repeat_seed_iter, 7)
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        self.__init__(*args, **kwargs)

    def sanitise(self):
        self.z1 = lfsr_validate_one_seed(self.z1, 1)
        self.z2 = lfsr_validate_one_seed(self.z2, 3)
        self.z3 = lfsr_validate_one_seed(self.z3, 4)
        self.z4 = lfsr_validate_one_seed(self.z4, 7)

    def _next_z1(self):
        b       = (((self.z1 & 0x03FFFFFF) << 6) ^ self.z1) >> 13
        self.z1 = ((self.z1 & 0x00003FFE) << 18) ^ b

    def _next_z2(self):
        b       = (((self.z2 & 0x3FFFFFFF) << 2) ^ self.z2) >> 27
        self.z2 = ((self.z2 & 0x3FFFFFF8) << 2) ^ b

    def _next_z3(self):
        b       = (((self.z3 & 0x0007FFFF) << 13) ^ self.z3) >> 21
        self.z3 = ((self.z3 & 0x01FFFFF0) << 7) ^ b

    def _next_z4(self):
        b       = (((self.z4 & 0x1FFFFFFF) << 3) ^ self.z4) >> 12
        self.z4 = ((self.z4 & 0x0007FF80) << 13) ^ b

    def __next__(self):
        # Step all four component LFSRs (same recurrences as the
        # _next_z* helpers, inlined) and XOR their states together.
        b       = (((self.z1 & 0x03FFFFFF) << 6) ^ self.z1) >> 13
        self.z1 = ((self.z1 & 0x00003FFE) << 18) ^ b

        b       = (((self.z2 & 0x3FFFFFFF) << 2) ^ self.z2) >> 27
        self.z2 = ((self.z2 & 0x3FFFFFF8) << 2) ^ b

        b       = (((self.z3 & 0x0007FFFF) << 13) ^ self.z3) >> 21
        self.z3 = ((self.z3 & 0x01FFFFF0) << 7) ^ b

        b       = (((self.z4 & 0x1FFFFFFF) << 3) ^ self.z4) >> 12
        self.z4 = ((self.z4 & 0x0007FF80) << 13) ^ b

        return self.z1 ^ self.z2 ^ self.z3 ^ self.z4

    def current(self):
        return self.z1 ^ self.z2 ^ self.z3 ^ self.z4

    def mix(self, *args):
        # XOR each seed word into one of the four component states, chosen
        # by two output bits, then advance that component.
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            selector = (current >> 30) & 0x3
            if selector == 0:
                self.z1 = lfsr_validate_one_seed(self.z1 ^ value_int, 1)
                self._next_z1()
            elif selector == 1:
                self.z2 = lfsr_validate_one_seed(self.z2 ^ value_int, 3)
                self._next_z2()
            elif selector == 2:
                self.z3 = lfsr_validate_one_seed(self.z3 ^ value_int, 4)
                self._next_z3()
            else:   # selector == 3
                self.z4 = lfsr_validate_one_seed(self.z4 ^ value_int, 7)
                self._next_z4()
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        return (lfsr_state_z(self.z1), lfsr_state_z(self.z2), lfsr_state_z(self.z3), lfsr_state_z(self.z4))

    def setstate(self, state):
        # The z ^ (z << 16) transform applied by getstate() is its own
        # inverse and is re-applied by lfsr_next_one_seed during seeding, so
        # seeding with getstate() output restores the exact internal state.
        self.seed(state)

    def jumpahead(self, n):
        # Advance each component n steps (mod its own cycle length) by
        # raising its step matrix to the n-th power.
        n_1 = int(n) % self._LFSR113_1_CYCLE_LEN
        n_2 = int(n) % self._LFSR113_2_CYCLE_LEN
        n_3 = int(n) % self._LFSR113_3_CYCLE_LEN
        n_4 = int(n) % self._LFSR113_4_CYCLE_LEN

        z1 = pow(self._LFSR113_1_MATRIX, n_1) * self.z1
        self.z1 = z1
        z2 = pow(self._LFSR113_2_MATRIX, n_2) * self.z2
        self.z2 = z2
        z3 = pow(self._LFSR113_3_MATRIX, n_3) * self.z3
        self.z3 = z3
        z4 = pow(self._LFSR113_4_MATRIX, n_4) * self.z4
        self.z4 = z4

    def __repr__(self):
        return (self.__class__.__name__ + "(" + lfsr_repr_z(self.z1) +
                                          "," + lfsr_repr_z(self.z2) +
                                          "," + lfsr_repr_z(self.z3) +
                                          "," + lfsr_repr_z(self.z4) + ")")
class LFSR88(object):
    '''Combined LFSR random number generator by L'Ecuyer

    It combines 3 LFSR generators. The generators have been
    chosen for maximal equidistribution.

    The period is approximately 2**88.

    "Maximally Equidistributed Combined Tausworthe Generators"
    P. L'Ecuyer
    Mathematics of Computation, 65, 213 (1996), 203-213.
    '''

    SIMPLERANDOM_MAX = 2**32 - 1

    # jumpahead() support: each _LFSR88_n_MATRIX is the one-step transition
    # of component n as a binary matrix over GF(2); raising it to the n-th
    # power advances the component n steps at once.
    _LFSR88_1_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,13)
    _LFSR88_1_MATRIX_b = BitColumnMatrix.shift(32,-19)
    _LFSR88_1_MATRIX_c = BitColumnMatrix.mask(32, 1, 32)
    _LFSR88_1_MATRIX_d = BitColumnMatrix.shift(32,12)
    _LFSR88_1_MATRIX = _LFSR88_1_MATRIX_d * _LFSR88_1_MATRIX_c + _LFSR88_1_MATRIX_b * _LFSR88_1_MATRIX_a
    _LFSR88_1_CYCLE_LEN = 2**(32 - 1) - 1

    _LFSR88_2_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,2)
    _LFSR88_2_MATRIX_b = BitColumnMatrix.shift(32,-25)
    _LFSR88_2_MATRIX_c = BitColumnMatrix.mask(32, 3, 32)
    _LFSR88_2_MATRIX_d = BitColumnMatrix.shift(32,4)
    _LFSR88_2_MATRIX = _LFSR88_2_MATRIX_d * _LFSR88_2_MATRIX_c + _LFSR88_2_MATRIX_b * _LFSR88_2_MATRIX_a
    _LFSR88_2_CYCLE_LEN = 2**(32 - 3) - 1

    _LFSR88_3_MATRIX_a = BitColumnMatrix.unity(32) + BitColumnMatrix.shift(32,3)
    _LFSR88_3_MATRIX_b = BitColumnMatrix.shift(32,-11)
    _LFSR88_3_MATRIX_c = BitColumnMatrix.mask(32, 4, 32)
    _LFSR88_3_MATRIX_d = BitColumnMatrix.shift(32,17)
    _LFSR88_3_MATRIX = _LFSR88_3_MATRIX_d * _LFSR88_3_MATRIX_c + _LFSR88_3_MATRIX_b * _LFSR88_3_MATRIX_a
    _LFSR88_3_CYCLE_LEN = 2**(32 - 4) - 1

    @staticmethod
    def min():
        """Smallest value this generator can return."""
        return 0

    @staticmethod
    def max():
        """Largest value this generator can return."""
        return LFSR88.SIMPLERANDOM_MAX

    def __init__(self, *args, **kwargs):
        '''Positional arguments are seed values
        Keyword-only arguments:
            mix_extras=False -- If True, then call mix() to 'mix' extra seed
                                values into the state.
        '''
        seed_iter = _traverse_iter(args)
        repeat_seed_iter = _repeat_iter(seed_iter)
        # Minimum seed values (1, 3, 4) keep each component LFSR out of its
        # degenerate all-zero cycle.
        self.z1 = lfsr_next_one_seed(repeat_seed_iter, 1)
        self.z2 = lfsr_next_one_seed(repeat_seed_iter, 3)
        self.z3 = lfsr_next_one_seed(repeat_seed_iter, 4)
        if kwargs.pop('mix_extras', False):
            self.mix(seed_iter)
        for key in kwargs:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % key)

    def seed(self, *args, **kwargs):
        """Re-seed; accepts the same arguments as __init__()."""
        self.__init__(*args, **kwargs)

    def sanitise(self):
        """Force each component state word back into its valid range."""
        self.z1 = lfsr_validate_one_seed(self.z1, 1)
        self.z2 = lfsr_validate_one_seed(self.z2, 3)
        self.z3 = lfsr_validate_one_seed(self.z3, 4)

    def _next_z1(self):
        # One Tausworthe step of component 1.
        b = (((self.z1 & 0x0007FFFF) << 13) ^ self.z1) >> 19
        self.z1 = ((self.z1 & 0x000FFFFE) << 12) ^ b

    def _next_z2(self):
        # One Tausworthe step of component 2.
        b = (((self.z2 & 0x3FFFFFFF) << 2) ^ self.z2) >> 25
        self.z2 = ((self.z2 & 0x0FFFFFF8) << 4) ^ b

    def _next_z3(self):
        # One Tausworthe step of component 3.
        b = (((self.z3 & 0x1FFFFFFF) << 3) ^ self.z3) >> 11
        self.z3 = ((self.z3 & 0x00007FF0) << 17) ^ b

    def __next__(self):
        """Advance all three components one step and return the combined output.

        Delegates to the _next_z* helpers (as mix() already does) instead of
        duplicating their shift/mask logic inline, so each component's step
        lives in exactly one place.
        """
        self._next_z1()
        self._next_z2()
        self._next_z3()
        return self.z1 ^ self.z2 ^ self.z3

    def current(self):
        """Return the current output without advancing the state."""
        return self.z1 ^ self.z2 ^ self.z3

    def mix(self, *args):
        """Mix extra seed values into the state, one component at a time.

        The current output, compared against thirds of the 32-bit range,
        selects which component absorbs each value; that component is then
        stepped.  Returns the resulting output value.
        """
        for value in _traverse_iter(args):
            value_int = int(value) & 0xFFFFFFFF
            current = self.current()
            if current < 1431655765:        # constant is 2^32 / 3
                self.z1 = lfsr_validate_one_seed(self.z1 ^ value_int, 1)
                self._next_z1()
            elif current < 2863311531:      # constant is 2^32 * 2 / 3
                self.z2 = lfsr_validate_one_seed(self.z2 ^ value_int, 3)
                self._next_z2()
            else:
                self.z3 = lfsr_validate_one_seed(self.z3 ^ value_int, 4)
                self._next_z3()
        return self.current()

    def __iter__(self):
        return self

    def getstate(self):
        """Return the state as a tuple of serialised component words."""
        return (lfsr_state_z(self.z1), lfsr_state_z(self.z2), lfsr_state_z(self.z3))

    def setstate(self, state):
        """Restore a state previously returned by getstate()."""
        self.seed(state)

    def jumpahead(self, n):
        """Advance the generator by n steps in O(log n) time.

        Each component is advanced independently, modulo its own cycle
        length, via exponentiation of its transition matrix.
        """
        n_1 = int(n) % self._LFSR88_1_CYCLE_LEN
        n_2 = int(n) % self._LFSR88_2_CYCLE_LEN
        n_3 = int(n) % self._LFSR88_3_CYCLE_LEN
        self.z1 = pow(self._LFSR88_1_MATRIX, n_1) * self.z1
        self.z2 = pow(self._LFSR88_2_MATRIX, n_2) * self.z2
        self.z3 = pow(self._LFSR88_3_MATRIX, n_3) * self.z3

    def __repr__(self):
        return (self.__class__.__name__ + "(" + lfsr_repr_z(self.z1) +
                "," + lfsr_repr_z(self.z2) +
                "," + lfsr_repr_z(self.z3) + ")")
| mit |
obimod/taiga-back | settings/sr.py | 21 | 1178 | # Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Static Taiga site, social and support links for this settings module.
# Presumably consumed by templates/emails elsewhere -- confirm against the
# code that imports these settings.
SR = {
    "taigaio_url": "https://taiga.io",
    "social": {
        "twitter_url": "https://twitter.com/taigaio",
        "github_url": "https://github.com/taigaio",
    },
    "support": {
        "url": "https://taiga.io/support",
        "email": "support@taiga.io",
        "mailing_list": "https://groups.google.com/forum/#!forum/taigaio",
    }
}
| agpl-3.0 |
Ms2ger/presto-testo | wpt/websockets/autobahn/oberstet-Autobahn-643d2ee/demo/streaming/frame_based_client.py | 5 | 1983 | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from ranstring import randomByteString
from twisted.internet import reactor
from autobahn.websocket import WebSocketProtocol, WebSocketClientFactory, WebSocketClientProtocol, connectWS
FRAME_SIZE = 1 * 2**20  # octets of random data sent per WebSocket frame (1 MiB)
class FrameBasedHashClientProtocol(WebSocketClientProtocol):
    """
    Message-based WebSockets client that generates stream of random octets
    sent to WebSockets server as a sequence of frames all in one message.
    The server will respond to us with the SHA-256 computed over frames.
    When we receive response, we repeat by sending a new frame.
    """

    def sendOneFrame(self):
        # Send one FRAME_SIZE chunk of random octets as the next frame of
        # the binary message begun in onOpen().
        data = randomByteString(FRAME_SIZE)
        self.sendMessageFrame(data)

    def onOpen(self):
        self.count = 0
        # Begin a single binary message; frames are appended one at a time
        # as the server acknowledges each digest in onMessage().
        self.beginMessage(opcode = WebSocketProtocol.MESSAGE_TYPE_BINARY)
        self.sendOneFrame()

    def onMessage(self, message, binary):
        # NOTE: Python 2 print statement -- this module predates Python 3.
        print "Digest for frame %d computed by server: %s" % (self.count, message)
        self.count += 1
        self.sendOneFrame()
if __name__ == '__main__':
    # Connect to the streaming hash server on localhost and run the Twisted
    # reactor until interrupted.
    factory = WebSocketClientFactory("ws://localhost:9000")
    factory.protocol = FrameBasedHashClientProtocol
    connectWS(factory)
    reactor.run()
| bsd-3-clause |
marcore/edx-platform | openedx/core/djangolib/js_utils.py | 13 | 3536 | """
Utilities for dealing with Javascript and JSON.
"""
import json
from django.utils.html import escapejs
from mako.filters import decode
from markupsafe import escape
from xmodule.modulestore import EdxJSONEncoder
def _escape_json_for_js(json_dumps_string):
    """
    Escape output of JSON dumps that is safe to be embedded in a <SCRIPT> tag.

    This implementation is based on escaping performed in
    simplejson.JSONEncoderForHTML.

    Arguments:
        json_dumps_string (string): A JSON string to be escaped.

            This must be the output of json.dumps to ensure:
            1. The string contains valid JSON, and
            2. That non-ascii characters are properly escaped

    Returns:
        (string) Escaped JSON that is safe to be embedded in HTML.
    """
    # Replace the three HTML-significant characters with their JSON \u
    # escapes so the payload cannot break out of a surrounding <script> tag.
    replacements = (
        ("&", "\\u0026"),
        (">", "\\u003e"),
        ("<", "\\u003c"),
    )
    for char, escaped in replacements:
        json_dumps_string = json_dumps_string.replace(char, escaped)
    return json_dumps_string
def dump_js_escaped_json(obj, cls=EdxJSONEncoder):
    """
    JSON dumps and escapes objects that are safe to be embedded in JavaScript.

    Use this for anything but strings (e.g. dicts, tuples, lists, bools, and
    numbers). For strings, use js_escaped_string.

    The output of this method is also usable as plain-old JSON.

    Usage:
        Used as follows in a Mako template inside a <SCRIPT> tag::

            var json_obj = ${obj | n, dump_js_escaped_json}

        If you must use the cls argument, then use as follows::

            var json_obj = ${dump_js_escaped_json(obj, cls) | n}

        Use the "n" Mako filter above. It is possible that the default filter
        may include html escaping in the future, and this ensures proper
        escaping.

    Arguments:
        obj: The object soon to become a JavaScript escaped JSON string. The
            object can be anything but strings (e.g. dicts, tuples, lists,
            bools, and numbers).
        cls (class): The JSON encoder class (defaults to EdxJSONEncoder).

    Returns:
        (string) Escaped encoded JSON.
    """
    # ensure_ascii=True means every non-ascii character is already \u-escaped,
    # which allows safe skipping of Mako's default decode.utf8 filter.
    encoded = json.dumps(obj, ensure_ascii=True, cls=cls)
    return _escape_json_for_js(encoded)
def js_escaped_string(string_for_js):
    """
    Mako filter that escapes text for use in a JavaScript string.

    If None is provided, returns an empty string.

    Usage:
        Used as follows in a Mako template inside a <SCRIPT> tag::

            var my_string_for_js = "${my_string_for_js | n, js_escaped_string}"

        The surrounding quotes for the string must be included.

        Use the "n" Mako filter above. It is possible that the default filter
        may include html escaping in the future, and this ensures proper
        escaping.

        Mako's default filter decode.utf8 is applied here since this default
        filter is skipped in the Mako template with "n".

    Arguments:
        string_for_js (string): Text to be properly escaped for use in a
            JavaScript string.

    Returns:
        (string) Text properly escaped for use in a JavaScript string as
        unicode. Returns empty string if argument is None.
    """
    value = "" if string_for_js is None else string_for_js
    # decode.utf8 mirrors Mako's default filter; escapejs then makes the
    # result safe between quotes in a JavaScript string literal.
    return escapejs(decode.utf8(value))
| agpl-3.0 |
carljm/django | django/core/management/commands/sendtestemail.py | 126 | 1518 | import socket
from django.core.mail import mail_admins, mail_managers, send_mail
from django.core.management.base import BaseCommand
from django.utils import timezone
class Command(BaseCommand):
    """Management command that sends a test email to the given recipients."""
    help = "Sends a test email to the email addresses specified as arguments."
    missing_args_message = "You must specify some email recipients, or pass the --managers or --admin options."

    def add_arguments(self, parser):
        parser.add_argument(
            'email', nargs='*',
            help='One or more email addresses to send a test email to.',
        )
        parser.add_argument(
            '--managers', action='store_true', dest='managers', default=False,
            help='Send a test email to the addresses specified in settings.MANAGERS.',
        )
        parser.add_argument(
            '--admins', action='store_true', dest='admins', default=False,
            help='Send a test email to the addresses specified in settings.ADMINS.',
        )

    def handle(self, *args, **kwargs):
        # Host name + timestamp make repeated test emails distinguishable.
        subject = 'Test email from %s on %s' % (socket.gethostname(), timezone.now())
        send_mail(
            subject=subject,
            message="If you\'re reading this, it was successful.",
            from_email=None,  # None: rely on the configured default sender
            recipient_list=kwargs['email'],
        )
        if kwargs['managers']:
            mail_managers(subject, "This email was sent to the site managers.")
        if kwargs['admins']:
            mail_admins(subject, "This email was sent to the site admins.")
| bsd-3-clause |
rouault/Quantum-GIS | tests/src/python/test_qgsauthsystem.py | 21 | 35590 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for bindings to core authentication system classes
From build dir: LC_ALL=en_US.UTF-8 ctest -R PyQgsAuthenticationSystem -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Larry Shaffer'
__date__ = '2014/11/05'
__copyright__ = 'Copyright 2014, Boundless Spatial, Inc.'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import tempfile
from qgis.core import QgsAuthCertUtils, QgsPkiBundle, QgsAuthMethodConfig, QgsAuthMethod, QgsAuthConfigSslServer, QgsApplication
from qgis.gui import QgsAuthEditorWidgets
from qgis.PyQt.QtCore import QFileInfo, qDebug
from qgis.PyQt.QtNetwork import QSsl, QSslError, QSslCertificate, QSslSocket
from qgis.PyQt.QtTest import QTest
from qgis.PyQt.QtWidgets import QDialog, QDialogButtonBox, QVBoxLayout
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
# NOTE: the env var must be set *before* start_app() is called below;
# setUpClass() later asserts the auth manager's db lives in this temp dir.
AUTHDBDIR = tempfile.mkdtemp()
os.environ['QGIS_AUTH_DB_DIR_PATH'] = AUTHDBDIR
start_app()
TESTDATA = os.path.join(unitTestDataPath(), 'auth_system')
PKIDATA = os.path.join(TESTDATA, 'certs_keys')  # PKI fixture files (certs/keys)
class TestQgsAuthManager(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """One-time setup: grab the auth manager, verify the temp auth db is in use."""
        cls.authm = QgsApplication.authManager()
        assert not cls.authm.isDisabled(), cls.authm.disabledMessage()
        cls.mpass = 'pass'  # master password
        db1 = QFileInfo(cls.authm.authenticationDatabasePath()).canonicalFilePath()
        db2 = QFileInfo(AUTHDBDIR + '/qgis-auth.db').canonicalFilePath()
        msg = 'Auth db temp path does not match db path of manager'
        assert db1 == db2, msg
    def setUp(self):
        """Log a per-test header and (re)establish the master password."""
        testid = self.id().split('.')
        testheader = '\n#####_____ {0}.{1} _____#####\n'. \
            format(testid[1], testid[2])
        qDebug(testheader)
        # Re-set the master password if a previous test cleared or erased it.
        if (not self.authm.masterPasswordIsSet() or
                not self.authm.masterPasswordHashInDatabase()):
            self.set_master_password()
def widget_dialog(self, widget):
dlg = QDialog()
widget.setParent(dlg)
layout = QVBoxLayout()
layout.addWidget(widget)
layout.setMargin(6)
button_box = QDialogButtonBox(QDialogButtonBox.Close)
button_box.rejected.connect(dlg.close)
layout.addWidget(button_box)
dlg.setLayout(layout)
return dlg
    def mkPEMBundle(self, client_cert, client_key, password, chain):
        """Build a QgsPkiBundle from PEM fixture file names under PKIDATA."""
        return QgsPkiBundle.fromPemPaths(PKIDATA + '/' + client_cert,
                                         PKIDATA + '/' + client_key,
                                         password,
                                         QgsAuthCertUtils.certsFromFile(
                                             PKIDATA + '/' + chain
                                         ))
    def show_editors_widget(self):
        # Interactive debugging helper (not called by the automated tests).
        editors = QgsAuthEditorWidgets()
        dlg = self.widget_dialog(editors)
        dlg.exec_()
    def set_master_password(self):
        """Store and verify the class master password in the auth db."""
        msg = 'Failed to store and verify master password in auth db'
        assert self.authm.setMasterPassword(self.mpass, True), msg
    def test_010_master_password(self):
        """Set/verify/compare/clear/reset cycle for the master password."""
        msg = 'Master password is not set'
        self.assertTrue(self.authm.masterPasswordIsSet(), msg)
        msg = 'Master password hash is not in database'
        self.assertTrue(self.authm.masterPasswordHashInDatabase(), msg)
        msg = 'Master password not verified against hash in database'
        self.assertTrue(self.authm.verifyMasterPassword(), msg)
        msg = 'Master password comparison dissimilar'
        self.assertTrue(self.authm.masterPasswordSame(self.mpass), msg)
        msg = 'Master password not unset'
        self.authm.clearMasterPassword()
        self.assertFalse(self.authm.masterPasswordIsSet(), msg)
        msg = 'Master password not reset and validated'
        self.assertTrue(self.authm.setMasterPassword(self.mpass, True), msg)
    # NOTE: reset of master password is in auth db test unit
    def test_020_cert_utilities(self):
        # Placeholder: certificate utility coverage not yet implemented.
        pass
    def test_030_auth_settings(self):
        # Placeholder: auth settings coverage not yet implemented.
        pass
    def test_040_authorities(self):
        """CA-file settings, db storage of CAs, and trusted-CA cache rebuilds."""
        def rebuild_caches():
            # Both caches must be rebuilt after any change to authorities.
            m = 'Authorities cache could not be rebuilt'
            self.assertTrue(self.authm.rebuildCaCertsCache(), m)
            m = 'Authorities trust policy cache could not be rebuilt'
            self.assertTrue(self.authm.rebuildTrustedCaCertsCache(), m)

        def trusted_ca_certs():
            tr_certs = self.authm.trustedCaCerts()
            m = 'Trusted authorities cache is empty'
            self.assertIsNotNone(tr_certs, m)
            return tr_certs

        msg = 'No system root CAs'
        self.assertIsNotNone(self.authm.systemRootCAs())
        # TODO: add more tests
        full_chain = 'chains_subissuer-issuer-root_issuer2-root2.pem'
        full_chain_path = os.path.join(PKIDATA, full_chain)
        # load CA file authorities for later comparison
        # noinspection PyTypeChecker
        # ca_certs = QSslCertificate.fromPath(full_chain_path)
        ca_certs = QgsAuthCertUtils.certsFromFile(full_chain_path)
        msg = 'Authorities file could not be parsed'
        self.assertIsNotNone(ca_certs, msg)
        msg = 'Authorities file parsed count is incorrect'
        self.assertEqual(len(ca_certs), 5, msg)
        # first test CA file can be set and loaded
        msg = 'Authority file path setting could not be stored'
        self.assertTrue(
            self.authm.storeAuthSetting('cafile', full_chain_path), msg)
        msg = "Authority file 'allow invalids' setting could not be stored"
        self.assertTrue(
            self.authm.storeAuthSetting('cafileallowinvalid', False), msg)
        rebuild_caches()
        trusted_certs = trusted_ca_certs()
        not_cached = any([ca not in trusted_certs for ca in ca_certs])
        msg = 'Authorities not in trusted authorities cache'
        self.assertFalse(not_cached, msg)
        # test CA file can be unset
        msg = 'Authority file path setting could not be removed'
        self.assertTrue(self.authm.removeAuthSetting('cafile'), msg)
        msg = "Authority file 'allow invalids' setting could not be removed"
        self.assertTrue(
            self.authm.removeAuthSetting('cafileallowinvalid'), msg)
        rebuild_caches()
        trusted_certs = trusted_ca_certs()
        still_cached = any([ca in trusted_certs for ca in ca_certs])
        msg = 'Authorities still in trusted authorities cache'
        self.assertFalse(still_cached, msg)
        # test CAs can be stored in database
        msg = "Authority certs could not be stored in database"
        self.assertTrue(self.authm.storeCertAuthorities(ca_certs))
        rebuild_caches()
        trusted_certs = trusted_ca_certs()
        not_cached = any([ca not in trusted_certs for ca in ca_certs])
        msg = 'Stored authorities not in trusted authorities cache'
        self.assertFalse(not_cached, msg)
        # dlg = QgsAuthTrustedCAsDialog()
        # dlg.exec_()
    def test_050_trust_policy(self):
        # Placeholder: trust policy coverage not yet implemented.
        pass
    # noinspection PyArgumentList
    def test_060_identities(self):
        """Store/retrieve PEM and PKCS#12 identities plus Identity-Cert configs."""
        client_cert_path = os.path.join(PKIDATA, 'fra_cert.pem')
        client_key_path = os.path.join(PKIDATA, 'fra_key_w-pass.pem')
        client_key_pass = 'password'
        client_p12_path = os.path.join(PKIDATA, 'gerardus_w-chain.p12')
        client_p12_pass = 'password'
        # store regular PEM cert/key and generate config
        # noinspection PyTypeChecker
        bundle1 = QgsPkiBundle.fromPemPaths(client_cert_path, client_key_path,
                                            client_key_pass)
        bundle1_cert = bundle1.clientCert()
        bundle1_key = bundle1.clientKey()
        bundle1_ca_chain = bundle1.caChain()
        bundle1_cert_sha = bundle1.certId()
        # with open(client_key_path, 'r') as f:
        #     key_data = f.read()
        #
        # client_cert = QgsAuthCertUtils.certsFromFile(client_cert_path)[0]
        msg = 'Identity PEM certificate is null'
        self.assertFalse(bundle1_cert.isNull(), msg)
        # cert_sha = QgsAuthCertUtils.shaHexForCert(client_cert)
        #
        # client_key = QSslKey(key_data, QSsl.Rsa, QSsl.Pem,
        #                      QSsl.PrivateKey, client_key_pass)
        msg = 'Identity PEM key is null'
        self.assertFalse(bundle1_key.isNull(), msg)
        # A bare cert/key pair carries no CA chain.
        msg = 'Identity PEM certificate chain is not empty'
        self.assertEqual(len(bundle1_ca_chain), 0, msg)
        msg = "Identity PEM could not be stored in database"
        self.assertTrue(
            self.authm.storeCertIdentity(bundle1_cert, bundle1_key), msg)
        msg = "Identity PEM not found in database"
        self.assertTrue(self.authm.existsCertIdentity(bundle1_cert_sha), msg)
        config1 = QgsAuthMethodConfig()
        config1.setName('IdentityCert - PEM')
        config1.setMethod('Identity-Cert')
        config1.setConfig('certid', bundle1_cert_sha)
        msg = 'Could not store PEM identity config'
        self.assertTrue(self.authm.storeAuthenticationConfig(config1), msg)
        configid1 = config1.id()
        msg = 'Could not retrieve PEM identity config id from store op'
        self.assertIsNotNone(configid1, msg)
        config2 = QgsAuthMethodConfig()
        msg = 'Could not load PEM identity config'
        self.assertTrue(
            self.authm.loadAuthenticationConfig(configid1, config2, True),
            msg)
        # store PKCS#12 bundled cert/key and generate config
        # bundle = QgsPkcsBundle(client_p12_path, client_p12_pass)
        # noinspection PyTypeChecker
        bundle = QgsPkiBundle.fromPkcs12Paths(client_p12_path, client_p12_pass)
        bundle_cert = bundle.clientCert()
        bundle_key = bundle.clientKey()
        bundle_ca_chain = bundle.caChain()
        bundle_cert_sha = QgsAuthCertUtils.shaHexForCert(bundle_cert)
        msg = 'Identity bundle certificate is null'
        self.assertFalse(bundle_cert.isNull(), msg)
        msg = 'Identity bundle key is null'
        self.assertFalse(bundle_key.isNull(), msg)
        msg = 'Identity bundle CA chain is not correct depth'
        self.assertEqual(len(bundle_ca_chain), 3, msg)
        msg = "Identity bundle could not be stored in database"
        self.assertTrue(
            self.authm.storeCertIdentity(bundle_cert, bundle_key), msg)
        msg = "Identity bundle not found in database"
        self.assertTrue(self.authm.existsCertIdentity(bundle_cert_sha), msg)
        bundle_config = QgsAuthMethodConfig()
        bundle_config.setName('IdentityCert - Bundle')
        bundle_config.setMethod('Identity-Cert')
        bundle_config.setConfig('certid', bundle_cert_sha)
        msg = 'Could not store bundle identity config'
        self.assertTrue(
            self.authm.storeAuthenticationConfig(bundle_config), msg)
        bundle_configid = bundle_config.id()
        msg = 'Could not retrieve bundle identity config id from store op'
        self.assertIsNotNone(bundle_configid, msg)
        bundle_config2 = QgsAuthMethodConfig()
        msg = 'Could not load bundle identity config'
        self.assertTrue(
            self.authm.loadAuthenticationConfig(bundle_configid,
                                                bundle_config2,
                                                True),
            msg)
        # TODO: add more tests
        # self.show_editors_widget()
        # cleanup: remove both identity configs so later tests see a clean db
        msg = 'Could not remove PEM identity config'
        self.assertTrue(self.authm.removeAuthenticationConfig(configid1), msg)
        msg = 'Could not remove bundle identity config'
        self.assertTrue(
            self.authm.removeAuthenticationConfig(bundle_configid), msg)
    def test_070_servers(self):
        """Round-trip a custom SSL server config through the auth db."""
        # return
        ssl_cert_path = os.path.join(PKIDATA, 'localhost_ssl_cert.pem')
        ssl_cert = QgsAuthCertUtils.certsFromFile(ssl_cert_path)[0]
        msg = 'SSL server certificate is null'
        self.assertFalse(ssl_cert.isNull(), msg)
        cert_sha = QgsAuthCertUtils.shaHexForCert(ssl_cert)
        hostport = 'localhost:8443'
        config = QgsAuthConfigSslServer()
        config.setSslCertificate(ssl_cert)
        config.setSslHostPort(hostport)
        config.setSslIgnoredErrorEnums([QSslError.SelfSignedCertificate])
        config.setSslPeerVerifyMode(QSslSocket.VerifyNone)
        config.setSslPeerVerifyDepth(3)
        config.setSslProtocol(QSsl.TlsV1_1)
        msg = 'SSL config is null'
        self.assertFalse(config.isNull(), msg)
        msg = 'Could not store SSL config'
        self.assertTrue(self.authm.storeSslCertCustomConfig(config), msg)
        msg = 'Could not verify storage of SSL config'
        self.assertTrue(
            self.authm.existsSslCertCustomConfig(cert_sha, hostport), msg)
        msg = 'Could not verify SSL config in all configs'
        self.assertIsNotNone(self.authm.sslCertCustomConfigs(), msg)
        # retrieve it again and verify every stored field survived the trip
        msg = 'Could not retrieve SSL config'
        config2 = self.authm.sslCertCustomConfig(cert_sha, hostport)
        """:type: QgsAuthConfigSslServer"""
        self.assertFalse(config2.isNull(), msg)
        msg = 'Certificate of retrieved SSL config does not match'
        self.assertEqual(config.sslCertificate(), config2.sslCertificate(), msg)
        msg = 'HostPort of retrieved SSL config does not match'
        self.assertEqual(config.sslHostPort(), config2.sslHostPort(), msg)
        msg = 'IgnoredErrorEnums of retrieved SSL config does not match'
        enums = config2.sslIgnoredErrorEnums()
        self.assertTrue(QSslError.SelfSignedCertificate in enums, msg)
        msg = 'PeerVerifyMode of retrieved SSL config does not match'
        self.assertEqual(config.sslPeerVerifyMode(),
                         config2.sslPeerVerifyMode(), msg)
        msg = 'PeerVerifyDepth of retrieved SSL config does not match'
        self.assertEqual(config.sslPeerVerifyDepth(),
                         config2.sslPeerVerifyDepth(), msg)
        msg = 'Protocol of retrieved SSL config does not match'
        self.assertEqual(config.sslProtocol(), config2.sslProtocol(), msg)
        # dlg = QgsAuthSslConfigDialog(None, ssl_cert, hostport)
        # dlg.exec_()
        msg = 'Could not remove SSL config'
        self.assertTrue(
            self.authm.removeSslCertCustomConfig(cert_sha, hostport), msg)
        msg = 'Could not verify removal of SSL config'
        self.assertFalse(
            self.authm.existsSslCertCustomConfig(cert_sha, hostport), msg)
def test_080_auth_configid(self):
msg = 'Could not generate a config id'
self.assertIsNotNone(self.authm.uniqueConfigId(), msg)
uids = []
for _ in range(50):
# time.sleep(0.01) # or else the salt is not random enough
uids.append(self.authm.uniqueConfigId())
msg = 'Generated 50 config ids are not unique:\n{0}\n{1}'.format(
uids,
list(set(uids))
)
self.assertEqual(len(uids), len(list(set(uids))), msg)
def config_list(self):
return ['Basic', 'PKI-Paths', 'PKI-PKCS#12']
    def config_obj(self, kind, base=True):
        """Build a QgsAuthMethodConfig for *kind*.

        With base=True only the common fields (name/method/uri) are set;
        otherwise the method-specific credential fields are filled in from
        the PKIDATA fixtures.
        """
        config = QgsAuthMethodConfig()
        config.setName(kind)
        config.setMethod(kind)
        config.setUri('http://example.com')
        if base:
            return config
        if kind == 'Basic':
            config.setConfig('username', 'username')
            config.setConfig('password', 'password')
            config.setConfig('realm', 'Realm')
        elif kind == 'PKI-Paths':
            config.setConfig('certpath',
                             os.path.join(PKIDATA, 'gerardus_cert.pem'))
            config.setConfig('keypath',
                             os.path.join(PKIDATA, 'gerardus_key_w-pass.pem'))
            config.setConfig('keypass', 'password')
        elif kind == 'PKI-PKCS#12':
            config.setConfig('bundlepath',
                             os.path.join(PKIDATA, 'gerardus.p12'))
            config.setConfig('bundlepass', 'password')
        return config
def config_values_valid(self, kind, config):
""":type config: QgsAuthMethodConfig"""
if (config.name() != kind or
config.method() != kind or
config.uri() != 'http://example.com'):
return False
if kind == 'Basic':
return (
config.config('username') == 'username' and
config.config('password') == 'password' and
config.config('realm') == 'Realm'
)
elif kind == 'PKI-Paths':
return (
config.config('certpath') ==
os.path.join(PKIDATA, 'gerardus_cert.pem') and
config.config('keypath') ==
os.path.join(PKIDATA, 'gerardus_key_w-pass.pem') and
config.config('keypass') == 'password'
)
elif kind == 'PKI-PKCS#12':
return (
config.config('bundlepath') ==
os.path.join(PKIDATA, 'gerardus.p12') and
config.config('bundlepass') == 'password'
)
    def test_090_auth_configs(self):
        """Store/load/update/remove a config for each available auth method."""
        # these list items need to match the QgsAuthType provider type strings
        for kind in self.config_list():
            config = self.config_obj(kind, base=False)
            msg = 'Could not validate {0} config'.format(kind)
            self.assertTrue(config.isValid(), msg)
            msg = 'Could not store {0} config'.format(kind)
            self.assertTrue(self.authm.storeAuthenticationConfig(config), msg)
            configid = config.id()
            msg = 'Could not retrieve {0} config id from store op'.format(kind)
            self.assertIsNotNone(configid, msg)
            msg = 'Config id {0} not in db'.format(configid)
            self.assertFalse(self.authm.configIdUnique(configid), msg)
            msg = 'Could not retrieve {0} config id from db'.format(kind)
            self.assertTrue(configid in self.authm.configIds(), msg)
            msg = 'Could not retrieve method key for {0} config'.format(kind)
            self.assertTrue(
                self.authm.configAuthMethodKey(configid) == kind, msg)
            msg = 'Could not retrieve method ptr for {0} config'.format(kind)
            self.assertTrue(
                isinstance(self.authm.configAuthMethod(configid),
                           QgsAuthMethod), msg)
            config2 = self.config_obj(kind, base=True)
            msg = 'Could not load {0} config'.format(kind)
            self.assertTrue(
                self.authm.loadAuthenticationConfig(configid, config2, True),
                msg)
            msg = 'Could not validate loaded {0} config values'.format(kind)
            self.assertTrue(self.config_values_valid(kind, config2), msg)
            # values haven't been changed, but the db update still takes place
            msg = 'Could not update {0} config values'.format(kind)
            self.assertTrue(self.authm.updateAuthenticationConfig(config2), msg)
            config3 = self.config_obj(kind, base=True)
            msg = 'Could not load updated {0} config'.format(kind)
            self.assertTrue(
                self.authm.loadAuthenticationConfig(configid, config3, True),
                msg)
            msg = 'Could not validate updated {0} config values'.format(kind)
            self.assertTrue(self.config_values_valid(kind, config3), msg)
            msg = 'Could not remove {0} config (by id) from db'.format(kind)
            self.assertTrue(
                self.authm.removeAuthenticationConfig(configid), msg)
            msg = 'Did not remove {0} config id from db'.format(kind)
            self.assertFalse(configid in self.authm.configIds(), msg)
    def test_100_auth_db(self):
        """Master password reset and auth db erase, both with backups."""
        # seed the db with one config per method so re-encryption is exercised
        for kind in self.config_list():
            config = self.config_obj(kind, base=False)
            msg = 'Could not store {0} config'.format(kind)
            self.assertTrue(self.authm.storeAuthenticationConfig(config), msg)
        msg = 'Could not store a sample of all configs in auth db'
        self.assertTrue(
            (len(self.authm.configIds()) == len(self.config_list())), msg)
        msg = 'Could not retrieve available configs from auth db'
        self.assertTrue(len(self.authm.availableAuthMethodConfigs()) > 0, msg)
        backup = None
        resetpass, backup = self.authm.resetMasterPassword(
            'newpass', self.mpass, True, backup)
        msg = 'Could not reset master password and/or re-encrypt configs'
        self.assertTrue(resetpass, msg)
        # qDebug('Backup db path: {0}'.format(backup))
        msg = 'Could not retrieve backup path for reset master password op'
        self.assertIsNotNone(backup)
        self.assertTrue(backup != self.authm.authenticationDatabasePath(), msg)
        msg = 'Could not verify reset master password'
        self.assertTrue(self.authm.setMasterPassword('newpass', True), msg)
        msg = 'Could not remove all configs from auth db'
        self.assertTrue(self.authm.removeAllAuthenticationConfigs(), msg)
        msg = 'Configs were not removed from auth db'
        self.assertTrue(len(self.authm.configIds()) == 0, msg)
        msg = 'Auth db does not exist'
        self.assertTrue(os.path.exists(self.authm.authenticationDatabasePath()), msg)
        QTest.qSleep(1000)  # necessary for new backup to have different name
        msg = 'Could not erase auth db'
        backup = None
        reserase, backup = \
            self.authm.eraseAuthenticationDatabase(True, backup)
        self.assertTrue(reserase, msg)
        # qDebug('Erase db backup db path: {0}'.format(backup))
        msg = 'Could not retrieve backup path for erase db op'
        self.assertIsNotNone(backup)
        self.assertTrue(backup != self.authm.authenticationDatabasePath(), msg)
        msg = 'Master password not erased from auth db'
        self.assertTrue(not self.authm.masterPasswordIsSet() and
                        not self.authm.masterPasswordHashInDatabase(), msg)
        # restore the master password for any tests that follow
        self.set_master_password()
def test_110_pkcs12_cas(self):
"""Test if CAs can be read from a pkcs12 bundle"""
path = PKIDATA + '/fra_w-chain.p12'
cas = QgsAuthCertUtils.pkcs12BundleCas(path, 'password')
self.assertEqual(cas[0].issuerInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[0].subjectInfo(b'CN'), ['QGIS Test Issuer CA'])
self.assertEqual(cas[0].serialNumber(), b'02')
self.assertEqual(cas[1].issuerInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[1].subjectInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[1].serialNumber(), b'01')
def test_120_pem_cas_from_file(self):
"""Test if CAs can be read from a pem bundle"""
path = PKIDATA + '/fra_w-chain.pem'
cas = QgsAuthCertUtils.casFromFile(path)
self.assertEqual(cas[0].issuerInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[0].subjectInfo(b'CN'), ['QGIS Test Issuer CA'])
self.assertEqual(cas[0].serialNumber(), b'02')
self.assertEqual(cas[1].issuerInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[1].subjectInfo(b'CN'), ['QGIS Test Root CA'])
self.assertEqual(cas[1].serialNumber(), b'01')
def test_130_cas_merge(self):
"""Test CAs merge """
trusted_path = PKIDATA + '/subissuer_ca_cert.pem'
extra_path = PKIDATA + '/fra_w-chain.pem'
trusted = QgsAuthCertUtils.casFromFile(trusted_path)
extra = QgsAuthCertUtils.casFromFile(extra_path)
merged = QgsAuthCertUtils.casMerge(trusted, extra)
self.assertEqual(len(trusted), 1)
self.assertEqual(len(extra), 2)
self.assertEqual(len(merged), 3)
for c in extra:
self.assertTrue(c in merged)
self.assertTrue(trusted[0] in merged)
def test_140_cas_remove_self_signed(self):
"""Test CAs merge """
extra_path = PKIDATA + '/fra_w-chain.pem'
extra = QgsAuthCertUtils.casFromFile(extra_path)
filtered = QgsAuthCertUtils.casRemoveSelfSigned(extra)
self.assertEqual(len(filtered), 1)
self.assertEqual(len(extra), 2)
self.assertTrue(extra[1].isSelfSigned())
for c in filtered:
self.assertFalse(c.isSelfSigned())
def test_150_verify_keychain(self):
"""Test the verify keychain function"""
def testChain(path):
# Test that a chain with an untrusted CA is not valid
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path))) > 0)
# Test that a chain with an untrusted CA is valid when the addRootCa argument is true
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path), None, True)) == 0)
# Test that a chain with an untrusted CA is not valid when the addRootCa argument is true
# and a wrong domainis true
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path), 'my.wrong.domain', True)) > 0)
testChain(PKIDATA + '/chain_subissuer-issuer-root.pem')
testChain(PKIDATA + '/localhost_ssl_w-chain.pem')
testChain(PKIDATA + '/fra_w-chain.pem')
path = PKIDATA + '/localhost_ssl_w-chain.pem'
# Test that a chain with an untrusted CA is not valid when the addRootCa argument is true
# and a wrong domain is set
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path), 'my.wrong.domain', True)) > 0)
# Test that a chain with an untrusted CA is valid when the addRootCa argument is true
# and a right domain is set
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path), 'localhost', True)) == 0)
# Test that a chain with an untrusted CA is not valid when the addRootCa argument is false
# and a right domain is set
self.assertTrue(len(QgsAuthCertUtils.validateCertChain(QgsAuthCertUtils.certsFromFile(path), 'localhost', False)) > 0)
    def test_validate_pki_bundle(self):
        """Test the pki bundle validation across valid, mismatched, expired
        and untrusted certificate/key/chain combinations."""
        # Valid bundle:
        bundle = self.mkPEMBundle('fra_cert.pem', 'fra_key.pem', 'password', 'chain_subissuer-issuer-root.pem')
        # Test valid bundle with intermediates and without trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The root certificate of the certificate chain is self-signed, and untrusted'])
        # Test valid without intermediates
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        # Test valid with intermediates and trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), [])
        # Wrong chain
        bundle = self.mkPEMBundle('fra_cert.pem', 'fra_key.pem', 'password', 'chain_issuer2-root2.pem')
        # Test invalid bundle with intermediates and without trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        # Test valid without intermediates
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        # Test valid with intermediates and trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        # Wrong key
        bundle = self.mkPEMBundle('fra_cert.pem', 'ptolemy_key.pem', 'password', 'chain_subissuer-issuer-root.pem')
        # Test invalid bundle with intermediates and without trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The root certificate of the certificate chain is self-signed, and untrusted', 'Private key does not match client certificate public key.'])
        # Test invalid without intermediates
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified', 'Private key does not match client certificate public key.'])
        # Test invalid with intermediates and trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['Private key does not match client certificate public key.'])
        # Expired root CA
        bundle = self.mkPEMBundle('piri_cert.pem', 'piri_key.pem', 'password', 'chain_issuer3-root3-EXPIRED.pem')
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The root certificate of the certificate chain is self-signed, and untrusted', 'The certificate has expired'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['The root certificate of the certificate chain is self-signed, and untrusted', 'The certificate has expired'])
        # Expired intermediate CA
        bundle = self.mkPEMBundle('marinus_cert-EXPIRED.pem', 'marinus_key_w-pass.pem', 'password', 'chain_issuer2-root2.pem')
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The root certificate of the certificate chain is self-signed, and untrusted', 'The certificate has expired'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['The certificate has expired'])
        # Expired client cert
        bundle = self.mkPEMBundle('henricus_cert.pem', 'henricus_key_w-pass.pem', 'password', 'chain_issuer4-EXPIRED-root2.pem')
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle), ['The root certificate of the certificate chain is self-signed, and untrusted', 'The certificate has expired'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, False), ['The issuer certificate of a locally looked up certificate could not be found', 'No certificates could be verified'])
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['The certificate has expired'])
        # Untrusted root, positive test before untrust is applied
        bundle = self.mkPEMBundle('nicholas_cert.pem', 'nicholas_key.pem', 'password', 'chain_issuer2-root2.pem')
        # Test valid with intermediates and trusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), [])
        # Untrust this root
        root2 = QgsAuthCertUtils.certFromFile(PKIDATA + '/' + 'root2_ca_cert.pem')
        QgsApplication.authManager().storeCertAuthority(root2)
        self.assertTrue(QgsApplication.authManager().storeCertTrustPolicy(root2, QgsAuthCertUtils.Untrusted))
        # Trust policy changes only take effect after the cache rebuild.
        QgsApplication.authManager().rebuildCaCertsCache()
        # Test valid with intermediates and untrusted root
        self.assertEqual(QgsAuthCertUtils.validatePKIBundle(bundle, True, True), ['The issuer certificate of a locally looked up certificate could not be found'])
def test_160_cert_viable(self):
"""Text the viability of a given certificate"""
# null cert
cert = QSslCertificate()
self.assertFalse(QgsAuthCertUtils.certIsCurrent(cert))
res = QgsAuthCertUtils.certViabilityErrors(cert)
self.assertTrue(len(res) == 0)
self.assertFalse(QgsAuthCertUtils.certIsViable(cert))
cert.clear()
res.clear()
# valid cert
cert = QgsAuthCertUtils.certFromFile(PKIDATA + '/gerardus_cert.pem')
self.assertTrue(QgsAuthCertUtils.certIsCurrent(cert))
res = QgsAuthCertUtils.certViabilityErrors(cert)
self.assertTrue(len(res) == 0)
self.assertTrue(QgsAuthCertUtils.certIsViable(cert))
cert.clear()
res.clear()
# expired cert
cert = QgsAuthCertUtils.certFromFile(PKIDATA + '/marinus_cert-EXPIRED.pem')
self.assertFalse(QgsAuthCertUtils.certIsCurrent(cert))
res = QgsAuthCertUtils.certViabilityErrors(cert)
self.assertTrue(len(res) > 0)
self.assertTrue(QSslError(QSslError.CertificateExpired, cert) in res)
self.assertFalse(QgsAuthCertUtils.certIsViable(cert))
def test_170_pki_key_encoding(self):
"""Test that a DER/PEM RSA/DSA/EC keys can be opened whatever the extension is"""
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'ptolemy_key.pem').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'ptolemy_key.der').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'ptolemy_key_pem.key').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'ptolemy_key_der.key').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_EC.pem').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_EC.der').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA.pem').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA.der').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA_crlf.pem').isNull())
self.assertFalse(QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA_nonl.pem').isNull())
donald_dsa = QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA.pem').toPem()
self.assertEqual(donald_dsa, QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA.der').toPem())
self.assertEqual(donald_dsa, QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA_crlf.pem').toPem())
self.assertEqual(donald_dsa, QgsAuthCertUtils.keyFromFile(PKIDATA + '/' + 'donald_key_DSA_nonl.pem').toPem())
self.assertEqual(QgsAuthCertUtils.validatePKIBundle(self.mkPEMBundle('ptolemy_cert.pem', 'ptolemy_key.pem', 'password', 'chain_subissuer-issuer-root.pem'), True, True), [])
self.assertEqual(QgsAuthCertUtils.validatePKIBundle(self.mkPEMBundle('ptolemy_cert.pem', 'ptolemy_key.der', 'password', 'chain_subissuer-issuer-root.pem'), True, True), [])
self.assertEqual(QgsAuthCertUtils.validatePKIBundle(self.mkPEMBundle('ptolemy_cert.pem', 'ptolemy_key_pem.key', 'password', 'chain_subissuer-issuer-root.pem'), True, True), [])
self.assertEqual(QgsAuthCertUtils.validatePKIBundle(self.mkPEMBundle('ptolemy_cert.pem', 'ptolemy_key_der.key', 'password', 'chain_subissuer-issuer-root.pem'), True, True), [])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
schlueter/ansible | test/units/modules/network/onyx/test_onyx_mlag_vip.py | 50 | 2942 | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_mlag_vip
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxMlagVipModule(TestOnyxModule):
    """Unit tests for the onyx_mlag_vip Ansible module."""

    module = onyx_mlag_vip

    def setUp(self):
        """Patch the module's device-facing helpers with mocks."""
        super(TestOnyxMlagVipModule, self).setUp()
        # When True, load_fixtures() feeds canned device output to the
        # mocked show commands; when False it simulates MLAG being disabled.
        self._mlag_enabled = True
        self.mock_show_mlag = patch.object(
            onyx_mlag_vip.OnyxMLagVipModule,
            "_show_mlag")
        self.show_mlag = self.mock_show_mlag.start()
        self.mock_show_mlag_vip = patch.object(
            onyx_mlag_vip.OnyxMLagVipModule,
            "_show_mlag_vip")
        self.show_mlag_vip = self.mock_show_mlag_vip.start()
        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        """Stop every patcher started in setUp."""
        super(TestOnyxMlagVipModule, self).tearDown()
        for patcher in (self.mock_show_mlag,
                        self.mock_show_mlag_vip,
                        self.mock_load_config):
            patcher.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        """Provide canned device responses to the mocked show commands."""
        if self._mlag_enabled:
            self.show_mlag_vip.return_value = load_fixture(
                'onyx_mlag_vip_show.cfg')
            self.show_mlag.return_value = load_fixture('onyx_mlag_show.cfg')
        else:
            self.show_mlag_vip.return_value = None
            self.show_mlag.return_value = None
        self.load_config.return_value = None

    def test_mlag_no_change(self):
        """An already-configured VIP produces no commands."""
        set_module_args(dict(ipaddress='10.209.25.107/24',
                             group_name='neo-mlag-vip-500',
                             mac_address='00:00:5E:00:01:4E'))
        self.execute_module(changed=False)

    def test_mlag_change(self):
        """Configuring a VIP while MLAG is disabled emits the full set."""
        self._mlag_enabled = False
        set_module_args(dict(ipaddress='10.209.25.107/24',
                             group_name='neo-mlag-vip-500',
                             mac_address='00:00:5E:00:01:4E',
                             delay=0))
        expected = ['mlag-vip neo-mlag-vip-500 ip 10.209.25.107 /24 force',
                    'mlag system-mac 00:00:5e:00:01:4e',
                    'no mlag shutdown']
        self.execute_module(changed=True, commands=expected)

    def test_mlag_absent_no_change(self):
        """Removing a VIP that does not exist is a no-op."""
        self._mlag_enabled = False
        set_module_args(dict(state='absent'))
        self.execute_module(changed=False)

    def test_mlag_absent_change(self):
        """Removing an existing VIP emits 'no mlag-vip'."""
        set_module_args(dict(state='absent', delay=0))
        self.execute_module(changed=True, commands=['no mlag-vip'])
| gpl-3.0 |
CamAndPineapple/starrynight | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py | 240 | 99242 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',

  # Gyp expects the following variables to be expandable by the build
  # system to the appropriate locations.  Ninja prefers paths to be
  # known at gyp time.  To resolve this, introduce special
  # variables starting with $! and $| (which begin with a $ so gyp knows it
  # should be treated specially, but is otherwise an invalid
  # ninja/shell variable) that are passed to gyp here but expanded
  # before writing out into the target .ninja files; see
  # ExpandSpecial.
  # $! is used for variables that represent a path and that can only appear at
  # the start of a string, while $| is used for variables that can appear
  # anywhere in a string.
  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
  'PRODUCT_DIR': '$!PRODUCT_DIR',
  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',

  # Special variables that may be used by gyp 'rule' targets.
  # We generate definitions for these variables on the fly when processing a
  # rule.
  'RULE_INPUT_ROOT': '${root}',
  'RULE_INPUT_DIRNAME': '${dirname}',
  'RULE_INPUT_PATH': '${source}',
  'RULE_INPUT_EXT': '${ext}',
  'RULE_INPUT_NAME': '${name}',
}

# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None

# Whether separate 'host'/'target' toolsets are generated; delegated to
# gyp.common.CrossCompileRequested().
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
def StripPrefix(arg, prefix):
  """Return |arg| with |prefix| removed from its front, if present."""
  return arg[len(prefix):] if arg.startswith(prefix) else arg
def QuoteShellArgument(arg, flavor):
  """Quote a string such that it will be interpreted as a single argument
  by the shell."""
  # Common benign characters need no quoting; anything else gets wrapped.
  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
    return arg
  if flavor == 'win':
    return gyp.msvs_emulation.QuoteForRspFile(arg)
  # POSIX shells: wrap in single quotes and turn each embedded single
  # quote into the '"'"' sequence.
  return "'" + arg.replace("'", "'\"'\"'") + "'"
def Define(d, flavor):
  """Takes a preprocessor define and returns a -D parameter that's ninja- and
  shell-escaped."""
  define = '-D' + d
  if flavor == 'win':
    # cl.exe replaces literal '#' characters with '=' in preprocessor
    # definitions for some reason, so octal-encode them to work around it.
    define = define.replace('#', '\\%03o' % ord('#'))
  return QuoteShellArgument(ninja_syntax.escape(define), flavor)
def AddArch(output, arch):
  """Adds an arch string to an output path, just before the extension."""
  root, extension = os.path.splitext(output)
  return '{0}.{1}{2}'.format(root, arch, extension)
class Target(object):
  """Target represents the paths used within a single gyp target.

  Building a target is conceptually a pipeline:

  1) actions/rules/copies generate source/resources/etc.
  2) compiles generate .o files
  3) link generates a binary (library/executable)
  4) bundle merges the above in a mac bundle

  (Each step is optional.)

  A dependent target B usually only needs the last output of this
  pipeline, but sometimes it must reach inside -- e.g. when linking B it
  needs the path of the static library produced by A.  This object
  records those concrete paths: member variables hold single file paths,
  while methods compute derived values such as "the last output of the
  target".
  """

  def __init__(self, type):
    # Gyp type ("static_library", etc.) of this target.
    self.type = type
    # Stamp marking that inputs needed by dependent actions are done.
    self.preaction_stamp = None
    # Stamp marking that inputs needed by dependent compiles are done.
    self.precompile_stamp = None
    # Stamp marking completion of actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link step, if any.
    self.binary = None
    # File marking completion of the mac bundle build, if any.
    self.bundle = None
    # Windows only: list of .objs composing a .lib, needed because
    # incremental linking links against the .objs rather than the .lib.
    self.component_objs = None
    # Windows only: path of the import .lib that dependents link against
    # (dependents link the lib, not the dll).
    self.import_lib = None

  def Linkable(self):
    """Return true if this is a target that can be linked against."""
    return self.type in ('static_library', 'shared_library')

  def UsesToc(self, flavor):
    """Return true if the target should produce a restat rule based on a TOC
    file."""
    # The naive approach would put the TOC file inside a bundle, so bundles
    # are excluded for now; Windows never uses a TOC.
    if flavor == 'win' or self.bundle:
      return False
    return self.type in ('shared_library', 'loadable_module')

  def PreActionInput(self, flavor):
    """Return the path, if any, that dependent action steps should depend
    on."""
    final = self.FinalOutput()
    if self.UsesToc(flavor):
      return final + '.TOC'
    return final or self.preaction_stamp

  def PreCompileInput(self):
    """Return the path, if any, that dependent compile steps should depend
    on."""
    return self.actions_stamp or self.precompile_stamp

  def FinalOutput(self):
    """Return the last output of the target, which depends on all prior
    steps."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter(object):
  def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
               output_file, toplevel_build, output_file_name, flavor,
               toplevel_dir=None):
    """
    base_dir: path from source root to directory containing this gyp file,
              by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    toplevel_dir: path to the toplevel directory
    """
    self.hash_for_rules = hash_for_rules
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    # Ninja syntax writer wrapping the output file object.
    self.ninja = ninja_syntax.Writer(output_file)
    self.toplevel_build = toplevel_build
    self.output_file_name = output_file_name
    self.flavor = flavor
    # Absolute build dir is only computable when toplevel_dir is given.
    self.abs_build_dir = None
    if toplevel_dir is not None:
      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                        build_dir))
    self.obj_ext = '.obj' if flavor == 'win' else '.o'
    if flavor == 'win':
      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
      self.win_env = {}
      for arch in ('x86', 'x64'):
        self.win_env[arch] = 'environment.' + arch
    # Relative path from build output dir to base dir.
    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
    self.build_to_base = os.path.join(build_to_top, base_dir)
    # Relative path from base dir to build dir.
    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
    self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
  def GypPathToNinja(self, path, env=None):
    """Translate a gyp path to a ninja path, optionally expanding environment
    variable references in |path| with |env|.

    See the above discourse on path conversions."""
    if env:
      # Environment expansion syntax is flavor-specific.
      if self.flavor == 'mac':
        path = gyp.xcode_emulation.ExpandEnvVars(path, env)
      elif self.flavor == 'win':
        path = gyp.msvs_emulation.ExpandMacros(path, env)
    if path.startswith('$!'):
      expanded = self.ExpandSpecial(path)
      if self.flavor == 'win':
        # Normalize separators for Windows tools.
        expanded = os.path.normpath(expanded)
      return expanded
    if '$|' in path:
      path = self.ExpandSpecial(path)
    # All specials must have been consumed by now.
    assert '$' not in path, path
    return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
assert not os.path.isabs(path_dir), (
"'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
  def WriteCollapsedDependencies(self, name, targets, order_only=None):
    """Given a list of targets, return a path for a single file
    representing the result of building all the targets or None.

    Uses a stamp file if necessary."""
    # NOTE: relies on Python 2 filter() returning a list; this asserts that
    # |targets| contains no falsy entries.
    assert targets == filter(None, targets), targets
    if len(targets) == 0:
      # Nothing to depend on; an order_only input would be meaningless here.
      assert not order_only
      return None
    if len(targets) > 1 or order_only:
      # Collapse multiple dependencies into one stamp build edge.
      stamp = self.GypPathToUniqueOutput(name + '.stamp')
      targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
      self.ninja.newline()
    return targets[0]
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
  def WriteSpec(self, spec, config_name, generator_flags):
    """The main entry point for NinjaWriter: write the build rules for a spec.

    Arguments:
      spec: the gyp target dict (provides 'target_name', 'toolset',
          'configurations', 'type', 'sources', 'dependencies', ...)
      config_name: name of the configuration to generate rules for
      generator_flags: generator-specific flags dict (consumed by the MSVS
          emulation layer and source verification)

    Returns a Target object, which represents the output paths for this spec.
    Returns None if there are no outputs (e.g. a settings-only 'none' type
    target)."""

    self.config_name = config_name
    self.name = spec['target_name']
    self.toolset = spec['toolset']
    config = spec['configurations'][config_name]
    self.target = Target(spec['type'])
    self.is_standalone_static_library = bool(
        spec.get('standalone_static_library', 0))
    # Track if this target contains any C++ files, to decide if gcc or g++
    # should be used for linking.
    self.uses_cpp = False

    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    self.xcode_settings = self.msvs_settings = None
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    if self.flavor == 'win':
      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                           generator_flags)
      arch = self.msvs_settings.GetArch(config_name)
      # Point the toolchain variables at the arch-specific environment and
      # compiler/assembler bindings.
      self.ninja.variable('arch', self.win_env[arch])
      self.ninja.variable('cc', '$cl_' + arch)
      self.ninja.variable('cxx', '$cl_' + arch)
      self.ninja.variable('cc_host', '$cl_' + arch)
      self.ninja.variable('cxx_host', '$cl_' + arch)
      self.ninja.variable('asm', '$ml_' + arch)

    if self.flavor == 'mac':
      self.archs = self.xcode_settings.GetActiveArchs(config_name)
      if len(self.archs) > 1:
        # One subninja writer per arch for fat (multi-arch) binaries.
        self.arch_subninjas = dict(
            (arch, ninja_syntax.Writer(
                OpenOutput(os.path.join(self.toplevel_build,
                                        self._SubninjaNameForArch(arch)),
                           'w')))
            for arch in self.archs)

    # Compute predepends for all rules.
    # actions_depends is the dependencies this target depends on before running
    # any of its action/rule/copy steps.
    # compile_depends is the dependencies this target depends on before running
    # any of its compile steps.
    actions_depends = []
    compile_depends = []
    # TODO(evan): it is rather confusing which things are lists and which
    # are strings.  Fix these.
    if 'dependencies' in spec:
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          target = self.target_outputs[dep]
          actions_depends.append(target.PreActionInput(self.flavor))
          compile_depends.append(target.PreCompileInput())
      actions_depends = filter(None, actions_depends)
      compile_depends = filter(None, compile_depends)
      actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                        actions_depends)
      compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                        compile_depends)
      self.target.preaction_stamp = actions_depends
      self.target.precompile_stamp = compile_depends

    # Write out actions, rules, and copies.  These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    mac_bundle_depends = []
    self.target.actions_stamp = self.WriteActionsRulesCopies(
        spec, extra_sources, actions_depends, mac_bundle_depends)

    # If we have actions/rules/copies, we depend directly on those, but
    # otherwise we depend on dependent target's actions/rules/copies etc.
    # We never need to explicitly depend on previous target's link steps,
    # because no compile ever depends on them.
    compile_depends_stamp = (self.target.actions_stamp or compile_depends)

    # Write out the compilation steps, if any.
    link_deps = []
    sources = extra_sources + spec.get('sources', [])
    if sources:
      if self.flavor == 'mac' and len(self.archs) > 1:
        # Write subninja file containing compile and link commands scoped to
        # a single arch if a fat binary is being built.
        for arch in self.archs:
          self.ninja.subninja(self._SubninjaNameForArch(arch))

      pch = None
      if self.flavor == 'win':
        gyp.msvs_emulation.VerifyMissingSources(
            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
        pch = gyp.msvs_emulation.PrecompiledHeader(
            self.msvs_settings, config_name, self.GypPathToNinja,
            self.GypPathToUniqueOutput, self.obj_ext)
      else:
        pch = gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
      link_deps = self.WriteSources(
          self.ninja, config_name, config, sources, compile_depends_stamp, pch,
          spec)
      # Some actions/rules output 'sources' that are already object files.
      obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
      if obj_outputs:
        if self.flavor != 'mac' or len(self.archs) == 1:
          link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
        else:
          print "Warning: Actions/rules writing object files don't work with " \
              "multiarch targets, dropping. (target %s)" % spec['target_name']
    elif self.flavor == 'mac' and len(self.archs) > 1:
      # Fat binaries keep per-arch link dependency lists.
      link_deps = collections.defaultdict(list)

    if self.flavor == 'win' and self.target.type == 'static_library':
      self.target.component_objs = link_deps

    # Write out a link step, if needed.
    output = None
    is_empty_bundle = not link_deps and not mac_bundle_depends
    if link_deps or self.target.actions_stamp or actions_depends:
      output = self.WriteTarget(spec, config_name, config, link_deps,
                                self.target.actions_stamp or actions_depends)
      if self.is_mac_bundle:
        mac_bundle_depends.append(output)

    # Bundle all of the above together, if needed.
    if self.is_mac_bundle:
      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)

    if not output:
      return None

    assert self.target.FinalOutput(), output
    return self.target
def _WinIdlRule(self, source, prebuild, outputs):
  """Handle the implicit VS .idl rule for one source file. Fills |outputs|
  with files that are generated."""
  # Ask the MSVS emulation layer where outputs go and which vars/flags the
  # midl invocation needs for this source.
  outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
      source, self.config_name)
  outdir = self.GypPathToNinja(outdir)
  def fix_path(path, rel=None):
    # Root |path| at |outdir|, expand the VS rule variables ($(InputName)
    # etc.) derived from |source|, and optionally re-relativize to |rel|.
    path = os.path.join(outdir, path)
    dirname, basename = os.path.split(source)
    root, ext = os.path.splitext(basename)
    path = self.ExpandRuleVariables(
        path, root, dirname, source, ext, basename)
    if rel:
      path = os.path.relpath(path, rel)
    return path
  vars = [(name, fix_path(value, outdir)) for name, value in vars]
  output = [fix_path(p) for p in output]
  vars.append(('outdir', outdir))
  vars.append(('idlflags', flags))
  input = self.GypPathToNinja(source)
  # Emit one build edge per .idl source using the global 'idl' rule;
  # |prebuild| orders it after any prerequisite stamps.
  self.ninja.build(output, 'idl', input,
                   variables=vars, order_only=prebuild)
  outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
  """Emit build edges mirroring MSVS's implicit handling of .idl sources.

  Returns the list of generated files.  Returns an empty list when the
  target supplies its own explicit idl rules or actions.
  """
  assert self.flavor == 'win'
  if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
    return []
  generated = []
  idl_sources = [s for s in spec['sources'] if s.endswith('.idl')]
  for idl_source in idl_sources:
    self._WinIdlRule(idl_source, prebuild, generated)
  return generated
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                            mac_bundle_depends):
  """Write out the Actions, Rules, and Copies steps.  Return a path
  representing the outputs of these steps."""
  outputs = []
  if self.is_mac_bundle:
    # Copy the list so rules may move entries out of it without mutating
    # the spec.
    mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
  else:
    mac_bundle_resources = []
  extra_mac_bundle_resources = []

  if 'actions' in spec:
    outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                 extra_mac_bundle_resources)
  if 'rules' in spec:
    outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                               mac_bundle_resources,
                               extra_mac_bundle_resources)
  if 'copies' in spec:
    outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)

  if 'sources' in spec and self.flavor == 'win':
    # MSVS builds .idl files implicitly; mirror that behavior here.
    outputs += self.WriteWinIdlFiles(spec, prebuild)

  # Collapse all outputs into a single stamp so downstream steps depend on
  # one file rather than the whole list.
  stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)

  if self.is_mac_bundle:
    xcassets = self.WriteMacBundleResources(
        extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
    partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
    self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)

  return stamp
def GenerateDescription(self, verb, message, fallback):
  """Generate and return a description of a build step.

  |verb| is the short summary, e.g. ACTION or RULE.
  |message| is a hand-written description, or None if not available.
  |fallback| is the gyp-level name of the step, usable as a fallback.
  """
  # Tag non-target toolsets (e.g. 'host') so their steps are identifiable.
  tagged_verb = verb
  if self.toolset != 'target':
    tagged_verb = '%s(%s)' % (verb, self.toolset)
  if not message:
    return '%s %s: %s' % (tagged_verb, self.name, fallback)
  return '%s %s' % (tagged_verb, self.ExpandSpecial(message))
def WriteActions(self, actions, extra_sources, prebuild,
                 extra_mac_bundle_resources):
  """Write one ninja rule and one build edge per gyp 'action'.

  Appends generated files to |extra_sources| / |extra_mac_bundle_resources|
  when the action requests it, and returns the list of all action outputs.
  """
  # Actions cd into the base directory.
  env = self.GetToolchainEnv()
  all_outputs = []
  for action in actions:
    # First write out a rule for the action.
    # hash_for_rules disambiguates identically-named actions across targets.
    name = '%s_%s' % (action['action_name'], self.hash_for_rules)
    description = self.GenerateDescription('ACTION',
                                           action.get('message', None),
                                           name)
    is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
                 if self.flavor == 'win' else False)
    args = action['action']
    depfile = action.get('depfile', None)
    if depfile:
      depfile = self.ExpandSpecial(depfile, self.base_to_build)
    pool = 'console' if int(action.get('ninja_use_console', 0)) else None
    rule_name, _ = self.WriteNewNinjaRule(name, args, description,
                                          is_cygwin, env, pool,
                                          depfile=depfile)

    inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
    if int(action.get('process_outputs_as_sources', False)):
      extra_sources += action['outputs']
    if int(action.get('process_outputs_as_mac_bundle_resources', False)):
      extra_mac_bundle_resources += action['outputs']
    outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]

    # Then write out an edge using the rule.
    self.ninja.build(outputs, rule_name, inputs,
                     order_only=prebuild)
    all_outputs += outputs

    self.ninja.newline()

  return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
               mac_bundle_resources, extra_mac_bundle_resources):
  """Write ninja rules+edges for gyp 'rules'.

  Each gyp rule becomes one ninja rule plus one build edge per matching
  source file.  Generated files are appended to |extra_sources| and
  |extra_mac_bundle_resources| when the rule requests it; sources that were
  bundle resources and are consumed by a rule are moved out of
  |mac_bundle_resources|.  Returns the list of all rule outputs.
  """
  env = self.GetToolchainEnv()
  all_outputs = []
  for rule in rules:
    # Skip a rule with no action and no inputs.
    if 'action' not in rule and not rule.get('rule_sources', []):
      continue

    # First write out a rule for the rule action.
    name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)

    args = rule['action']
    description = self.GenerateDescription(
        'RULE',
        rule.get('message', None),
        ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
    is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
                 if self.flavor == 'win' else False)
    pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
    rule_name, args = self.WriteNewNinjaRule(
        name, args, description, is_cygwin, env, pool)

    # TODO: if the command references the outputs directly, we should
    # simplify it to just use $out.

    # Rules can potentially make use of some special variables which
    # must vary per source file.
    # Compute the list of variables we'll need to provide.
    special_locals = ('source', 'root', 'dirname', 'ext', 'name')
    needed_variables = set(['source'])
    for argument in args:
      for var in special_locals:
        if '${%s}' % var in argument:
          needed_variables.add(var)

    def cygwin_munge(path):
      # pylint: disable=cell-var-from-loop
      # Cygwin tools expect forward slashes.
      if is_cygwin:
        return path.replace('\\', '/')
      return path

    inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]

    # If there are n source files matching the rule, and m additional rule
    # inputs, then adding 'inputs' to each build edge written below will
    # write m * n inputs. Collapsing reduces this to m + n.
    sources = rule.get('rule_sources', [])
    num_inputs = len(inputs)
    if prebuild:
      num_inputs += 1
    if num_inputs > 2 and len(sources) > 2:
      inputs = [self.WriteCollapsedDependencies(
          rule['rule_name'], inputs, order_only=prebuild)]
      prebuild = []

    # For each source file, write an edge that generates all the outputs.
    for source in sources:
      source = os.path.normpath(source)
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)

      # Gather the list of inputs and outputs, expanding $vars if possible.
      outputs = [self.ExpandRuleVariables(o, root, dirname,
                                          source, ext, basename)
                 for o in rule['outputs']]

      if int(rule.get('process_outputs_as_sources', False)):
        extra_sources += outputs

      was_mac_bundle_resource = source in mac_bundle_resources
      if was_mac_bundle_resource or \
          int(rule.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += outputs
        # Note: This is n_resources * n_outputs_in_rule.  Put to-be-removed
        # items in a set and remove them all in a single pass if this becomes
        # a performance issue.
        if was_mac_bundle_resource:
          mac_bundle_resources.remove(source)

      extra_bindings = []
      for var in needed_variables:
        if var == 'root':
          extra_bindings.append(('root', cygwin_munge(root)))
        elif var == 'dirname':
          # '$dirname' is a parameter to the rule action, which means
          # it shouldn't be converted to a Ninja path.  But we don't
          # want $!PRODUCT_DIR in there either.
          dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
          extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
        elif var == 'source':
          # '$source' is a parameter to the rule action, which means
          # it shouldn't be converted to a Ninja path.  But we don't
          # want $!PRODUCT_DIR in there either.
          source_expanded = self.ExpandSpecial(source, self.base_to_build)
          extra_bindings.append(('source', cygwin_munge(source_expanded)))
        elif var == 'ext':
          extra_bindings.append(('ext', ext))
        elif var == 'name':
          extra_bindings.append(('name', cygwin_munge(basename)))
        else:
          # needed_variables is drawn from special_locals, so every case is
          # handled above; this guard only fires if a new variable is added
          # without a branch.  (Use 'is' for None comparisons, per PEP 8.)
          assert var is None, repr(var)

      outputs = [self.GypPathToNinja(o, env) for o in outputs]
      if self.flavor == 'win':
        # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
        extra_bindings.append(('unique_name',
                               hashlib.md5(outputs[0]).hexdigest()))

      self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                       implicit=inputs,
                       order_only=prebuild,
                       variables=extra_bindings)

      all_outputs.extend(outputs)

  return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
  """Write one 'copy' build edge per file in each gyp 'copies' entry.

  Returns the list of copy destinations.  Copies landing inside a mac
  bundle are also appended to |mac_bundle_depends|.
  """
  outputs = []
  env = self.GetToolchainEnv()
  for copy in copies:
    for path in copy['files']:
      # Normalize the path so trailing slashes don't confuse us.
      path = os.path.normpath(path)
      basename = os.path.split(path)[1]
      src = self.GypPathToNinja(path, env)
      dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
                                env)
      outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
      if self.is_mac_bundle:
        # gyp has mac_bundle_resources to copy things into a bundle's
        # Resources folder, but there's no built-in way to copy files to other
        # places in the bundle. Hence, some targets use copies for this. Check
        # if this file is copied into the current bundle, and if so add it to
        # the bundle depends so that dependent targets get rebuilt if the copy
        # input changes.
        if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
          mac_bundle_depends.append(dst)

  return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
  """Writes ninja edges for 'mac_bundle_resources'.

  Returns the .xcassets catalogs found among |resources|; those are not
  copied directly but compiled later by WriteMacXCassets.
  """
  xcassets = []
  for output, res in gyp.xcode_emulation.GetMacBundleResources(
      generator_default_variables['PRODUCT_DIR'],
      self.xcode_settings, map(self.GypPathToNinja, resources)):
    output = self.ExpandSpecial(output)
    if os.path.splitext(output)[-1] != '.xcassets':
      # Plain resource: copy it into the bundle via the mac_tool helper.
      isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
      self.ninja.build(output, 'mac_tool', res,
                       variables=[('mactool_cmd', 'copy-bundle-resource'), \
                                  ('binary', isBinary)])
      bundle_depends.append(output)
    else:
      xcassets.append(res)
  return xcassets
def WriteMacXCassets(self, xcassets, bundle_depends):
  """Writes ninja edges for 'mac_bundle_resources' .xcassets files.

  This add an invocation of 'actool' via the 'mac_tool.py' helper script.

  It assumes that the assets catalogs define at least one imageset and
  thus an Assets.car file will be generated in the application resources
  directory. If this is not the case, then the build will probably be done
  at each invocation of ninja."""
  if not xcassets:
    return

  extra_arguments = {}
  settings_to_arg = {
      'XCASSETS_APP_ICON': 'app-icon',
      'XCASSETS_LAUNCH_IMAGE': 'launch-image',
  }
  settings = self.xcode_settings.xcode_settings[self.config_name]
  for settings_key, arg_name in settings_to_arg.iteritems():
    value = settings.get(settings_key)
    if value:
      extra_arguments[arg_name] = value

  partial_info_plist = None
  if extra_arguments:
    # App icon / launch image make actool emit a partial Info.plist that
    # must later be merged into the bundle's Info.plist.
    partial_info_plist = self.GypPathToUniqueOutput(
        'assetcatalog_generated_info.plist')
    extra_arguments['output-partial-info-plist'] = partial_info_plist

  outputs = []
  outputs.append(
      os.path.join(
          self.xcode_settings.GetBundleResourceFolder(),
          'Assets.car'))
  if partial_info_plist:
    outputs.append(partial_info_plist)

  # Pass the actool arguments through as a JSON blob in the 'keys' variable.
  keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
  extra_env = self.xcode_settings.GetPerTargetSettings()
  env = self.GetSortedXcodeEnv(additional_settings=extra_env)
  env = self.ComputeExportEnvString(env)

  bundle_depends.extend(self.ninja.build(
      outputs, 'compile_xcassets', xcassets,
      variables=[('env', env), ('keys', keys)]))
  return partial_info_plist
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
  """Write build rules for bundle Info.plist files.

  |partial_info_plist| is the optional actool-generated plist fragment from
  WriteMacXCassets; when present it is merged into the final Info.plist.
  """
  info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
      generator_default_variables['PRODUCT_DIR'],
      self.xcode_settings, self.GypPathToNinja)
  if not info_plist:
    return
  out = self.ExpandSpecial(out)
  if defines:
    # Create an intermediate file to store preprocessed results.
    intermediate_plist = self.GypPathToUniqueOutput(
        os.path.basename(info_plist))
    defines = ' '.join([Define(d, self.flavor) for d in defines])
    info_plist = self.ninja.build(
        intermediate_plist, 'preprocess_infoplist', info_plist,
        variables=[('defines',defines)])

  env = self.GetSortedXcodeEnv(additional_settings=extra_env)
  env = self.ComputeExportEnvString(env)

  if partial_info_plist:
    # Merge the asset-catalog plist fragment with the main Info.plist.
    intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
    info_plist = self.ninja.build(
        intermediate_plist, 'merge_infoplist',
        [partial_info_plist, info_plist])

  keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
  keys = QuoteShellArgument(json.dumps(keys), self.flavor)
  isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
  self.ninja.build(out, 'copy_infoplist', info_plist,
                   variables=[('env', env), ('keys', keys),
                              ('binary', isBinary)])
  bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
                 precompiled_header, spec):
  """Write build rules to compile all of |sources|.

  Returns the link deps: a list of object files, or — for multi-arch mac
  builds — a dict mapping each arch to its list of object files.
  """
  if self.toolset == 'host':
    # Redirect the tool variables at their host-toolchain counterparts so
    # host-targeted code uses the host compiler/linker.
    self.ninja.variable('ar', '$ar_host')
    self.ninja.variable('cc', '$cc_host')
    self.ninja.variable('cxx', '$cxx_host')
    self.ninja.variable('ld', '$ld_host')
    self.ninja.variable('ldxx', '$ldxx_host')
    self.ninja.variable('nm', '$nm_host')
    self.ninja.variable('readelf', '$readelf_host')

  if self.flavor != 'mac' or len(self.archs) == 1:
    return self.WriteSourcesForArch(
        self.ninja, config_name, config, sources, predepends,
        precompiled_header, spec)
  else:
    # Fat mac build: compile each arch into its own subninja.
    return dict((arch, self.WriteSourcesForArch(
        self.arch_subninjas[arch], config_name, config, sources, predepends,
        precompiled_header, spec, arch=arch))
        for arch in self.archs)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
                        predepends, precompiled_header, spec, arch=None):
  """Write build rules to compile all of |sources| for one architecture.

  Emits per-target variable bindings (cflags, defines, includes, ...) into
  |ninja_file|, then one compile edge per recognized source file.  Returns
  the list of object files produced.
  """
  extra_defines = []
  if self.flavor == 'mac':
    cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
    cflags_c = self.xcode_settings.GetCflagsC(config_name)
    cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
    # ObjC/ObjC++ flags extend the plain C/C++ flags via variable reference.
    cflags_objc = ['$cflags_c'] + \
                  self.xcode_settings.GetCflagsObjC(config_name)
    cflags_objcc = ['$cflags_cc'] + \
                   self.xcode_settings.GetCflagsObjCC(config_name)
  elif self.flavor == 'win':
    asmflags = self.msvs_settings.GetAsmflags(config_name)
    cflags = self.msvs_settings.GetCflags(config_name)
    cflags_c = self.msvs_settings.GetCflagsC(config_name)
    cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
    extra_defines = self.msvs_settings.GetComputedDefines(config_name)
    # See comment at cc_command for why there's two .pdb files.
    pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
        config_name, self.ExpandSpecial)
    if not pdbpath_c:
      obj = 'obj'
      if self.toolset != 'target':
        obj += '.' + self.toolset
      pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
      pdbpath_c = pdbpath + '.c.pdb'
      pdbpath_cc = pdbpath + '.cc.pdb'
    self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
    self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
    self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
  else:
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cc = config.get('cflags_cc', [])

  # Respect environment variables related to build, but target-specific
  # flags can still override them.
  if self.toolset == 'target':
    cflags_c = (os.environ.get('CPPFLAGS', '').split() +
                os.environ.get('CFLAGS', '').split() + cflags_c)
    cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
                 os.environ.get('CXXFLAGS', '').split() + cflags_cc)

  defines = config.get('defines', []) + extra_defines
  self.WriteVariableList(ninja_file, 'defines',
                         [Define(d, self.flavor) for d in defines])
  if self.flavor == 'win':
    self.WriteVariableList(ninja_file, 'asmflags',
                           map(self.ExpandSpecial, asmflags))
    self.WriteVariableList(ninja_file, 'rcflags',
        [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
         for f in self.msvs_settings.GetRcflags(config_name,
                                                self.GypPathToNinja)])

  include_dirs = config.get('include_dirs', [])

  env = self.GetToolchainEnv()
  if self.flavor == 'win':
    include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                        config_name)
  self.WriteVariableList(ninja_file, 'includes',
      [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
       for i in include_dirs])

  if self.flavor == 'win':
    midl_include_dirs = config.get('midl_include_dirs', [])
    midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
        midl_include_dirs, config_name)
    self.WriteVariableList(ninja_file, 'midl_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in midl_include_dirs])

  pch_commands = precompiled_header.GetPchBuildCommands(arch)
  if self.flavor == 'mac':
    # Most targets use no precompiled headers, so only write these if needed.
    for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
                     ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
      include = precompiled_header.GetInclude(ext, arch)
      if include: ninja_file.variable(var, include)

  arflags = config.get('arflags', [])

  self.WriteVariableList(ninja_file, 'cflags',
                         map(self.ExpandSpecial, cflags))
  self.WriteVariableList(ninja_file, 'cflags_c',
                         map(self.ExpandSpecial, cflags_c))
  self.WriteVariableList(ninja_file, 'cflags_cc',
                         map(self.ExpandSpecial, cflags_cc))
  if self.flavor == 'mac':
    self.WriteVariableList(ninja_file, 'cflags_objc',
                           map(self.ExpandSpecial, cflags_objc))
    self.WriteVariableList(ninja_file, 'cflags_objcc',
                           map(self.ExpandSpecial, cflags_objcc))
  self.WriteVariableList(ninja_file, 'arflags',
                         map(self.ExpandSpecial, arflags))
  ninja_file.newline()
  outputs = []
  has_rc_source = False
  for source in sources:
    # Pick the compile rule from the file extension; unknown extensions are
    # silently skipped (headers, data files, ...).
    filename, ext = os.path.splitext(source)
    ext = ext[1:]
    obj_ext = self.obj_ext
    if ext in ('cc', 'cpp', 'cxx'):
      command = 'cxx'
      self.uses_cpp = True
    elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
      command = 'cc'
    elif ext == 's' and self.flavor != 'win':  # Doesn't generate .o.d files.
      command = 'cc_s'
    elif (self.flavor == 'win' and ext == 'asm' and
          not self.msvs_settings.HasExplicitAsmRules(spec)):
      command = 'asm'
      # Add the _asm suffix as msvs is capable of handling .cc and
      # .asm files of the same name without collision.
      obj_ext = '_asm.obj'
    elif self.flavor == 'mac' and ext == 'm':
      command = 'objc'
    elif self.flavor == 'mac' and ext == 'mm':
      command = 'objcxx'
      self.uses_cpp = True
    elif self.flavor == 'win' and ext == 'rc':
      command = 'rc'
      obj_ext = '.res'
      has_rc_source = True
    else:
      # Ignore unhandled extensions.
      continue
    input = self.GypPathToNinja(source)
    output = self.GypPathToUniqueOutput(filename + obj_ext)
    if arch is not None:
      output = AddArch(output, arch)
    implicit = precompiled_header.GetObjDependencies([input], [output], arch)
    variables = []
    if self.flavor == 'win':
      variables, output, implicit = precompiled_header.GetFlagsModifications(
          input, output, implicit, command, cflags_c, cflags_cc,
          self.ExpandSpecial)
    ninja_file.build(output, command, input,
                     implicit=[gch for _, _, gch in implicit],
                     order_only=predepends, variables=variables)
    outputs.append(output)

  if has_rc_source:
    resource_include_dirs = config.get('resource_include_dirs', include_dirs)
    self.WriteVariableList(ninja_file, 'resource_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in resource_include_dirs])

  self.WritePchTargets(ninja_file, pch_commands)

  ninja_file.newline()
  return outputs
def WritePchTargets(self, ninja_file, pch_commands):
  """Writes ninja rules to compile prefix headers.

  |pch_commands| is a list of (gch, lang_flag, lang, input) tuples, as
  produced by the precompiled-header helper; one build edge is emitted
  per entry.
  """
  if not pch_commands:
    return
  # Per-language lookup tables.  (Previously the second table was a local
  # named 'map', shadowing the builtin.)
  var_names = {
      'c': 'cflags_pch_c',
      'cc': 'cflags_pch_cc',
      'm': 'cflags_pch_objc',
      'mm': 'cflags_pch_objcc',
  }
  commands = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx'}
  for gch, lang_flag, lang, input in pch_commands:
    var_name = var_names[lang]
    cmd = commands[lang]
    ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
  """Write out a link step. Fills out target.binary. """
  if self.flavor != 'mac' or len(self.archs) == 1:
    return self.WriteLinkForArch(
        self.ninja, spec, config_name, config, link_deps)
  else:
    # Fat mac build: link each arch in its subninja, then combine the
    # single-arch binaries with lipo (or solipo when a TOC is needed).
    output = self.ComputeOutput(spec)
    inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
                                    config_name, config, link_deps[arch],
                                    arch=arch)
              for arch in self.archs]
    extra_bindings = []
    build_output = output
    if not self.is_mac_bundle:
      self.AppendPostbuildVariable(extra_bindings, spec, output, output)

    # TODO(yyanagisawa): more work needed to fix:
    # https://code.google.com/p/gyp/issues/detail?id=411
    if (spec['type'] in ('shared_library', 'loadable_module') and
        not self.is_mac_bundle):
      extra_bindings.append(('lib', output))
      self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
          variables=extra_bindings)
    else:
      self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
    return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
                     link_deps, arch=None):
  """Write out a link step. Fills out target.binary.

  Returns the path of the linked binary for this arch.  Emits per-target
  ldflags/libs variables into |ninja_file| and a single link edge.
  """
  command = {
      'executable':      'link',
      'loadable_module': 'solink_module',
      'shared_library':  'solink',
  }[spec['type']]
  command_suffix = ''

  implicit_deps = set()
  solibs = set()

  if 'dependencies' in spec:
    # Two kinds of dependencies:
    # - Linkable dependencies (like a .a or a .so): add them to the link line.
    # - Non-linkable dependencies (like a rule that generates a file
    #   and writes a stamp file): add them to implicit_deps
    extra_link_deps = set()
    for dep in spec['dependencies']:
      target = self.target_outputs.get(dep)
      if not target:
        continue
      linkable = target.Linkable()
      if linkable:
        new_deps = []
        if (self.flavor == 'win' and
            target.component_objs and
            self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
          new_deps = target.component_objs
        elif self.flavor == 'win' and target.import_lib:
          new_deps = [target.import_lib]
        elif target.UsesToc(self.flavor):
          # Link against the TOC so the edge only reruns when the exported
          # interface changes, not on every relink of the dependency.
          solibs.add(target.binary)
          implicit_deps.add(target.binary + '.TOC')
        else:
          new_deps = [target.binary]
        for new_dep in new_deps:
          if new_dep not in extra_link_deps:
            extra_link_deps.add(new_dep)
            link_deps.append(new_dep)

      final_output = target.FinalOutput()
      if not linkable or final_output != target.binary:
        implicit_deps.add(final_output)

  extra_bindings = []
  if self.uses_cpp and self.flavor != 'win':
    extra_bindings.append(('ld', '$ldxx'))

  output = self.ComputeOutput(spec, arch)
  if arch is None and not self.is_mac_bundle:
    self.AppendPostbuildVariable(extra_bindings, spec, output, output)

  is_executable = spec['type'] == 'executable'
  # The ldflags config key is not used on mac or win. On those platforms
  # linker flags are set via xcode_settings and msvs_settings, respectively.
  env_ldflags = os.environ.get('LDFLAGS', '').split()
  if self.flavor == 'mac':
    ldflags = self.xcode_settings.GetLdflags(config_name,
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.GypPathToNinja, arch)
    ldflags = env_ldflags + ldflags
  elif self.flavor == 'win':
    manifest_base_name = self.GypPathToUniqueOutput(
        self.ComputeOutputFileName(spec))
    ldflags, intermediate_manifest, manifest_files = \
        self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
                                      self.ExpandSpecial, manifest_base_name,
                                      output, is_executable,
                                      self.toplevel_build)
    ldflags = env_ldflags + ldflags
    self.WriteVariableList(ninja_file, 'manifests', manifest_files)
    implicit_deps = implicit_deps.union(manifest_files)
    if intermediate_manifest:
      self.WriteVariableList(
          ninja_file, 'intermediatemanifest', [intermediate_manifest])
    command_suffix = _GetWinLinkRuleNameSuffix(
        self.msvs_settings.IsEmbedManifest(config_name))
    def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
    if def_file:
      implicit_deps.add(def_file)
  else:
    # Respect environment variables related to build, but target-specific
    # flags can still override them.
    ldflags = env_ldflags + config.get('ldflags', [])
    if is_executable and len(solibs):
      # Executables linking against shared libs get an $ORIGIN-relative
      # rpath so they find the libs next to the build output.
      rpath = 'lib/'
      if self.toolset != 'target':
        rpath += self.toolset
      ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
      ldflags.append('-Wl,-rpath-link=%s' % rpath)
  self.WriteVariableList(ninja_file, 'ldflags',
                         gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))

  library_dirs = config.get('library_dirs', [])
  if self.flavor == 'win':
    library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
                    for l in library_dirs]
    library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
                                                     self.flavor)
                    for l in library_dirs]
  else:
    library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
                                       self.flavor)
                    for l in library_dirs]

  libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                     spec.get('libraries', [])))
  if self.flavor == 'mac':
    libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
  elif self.flavor == 'win':
    libraries = self.msvs_settings.AdjustLibraries(libraries)

  self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)

  linked_binary = output

  if command in ('solink', 'solink_module'):
    extra_bindings.append(('soname', os.path.split(output)[1]))
    extra_bindings.append(('lib',
                           gyp.common.EncodePOSIXShellArgument(output)))
    if self.flavor != 'win':
      link_file_list = output
      if self.is_mac_bundle:
        # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
        # 'Dependency Framework.framework.rsp'
        link_file_list = self.xcode_settings.GetWrapperName()
      if arch:
        link_file_list += '.' + arch
      link_file_list += '.rsp'
      # If an rspfile contains spaces, ninja surrounds the filename with
      # quotes around it and then passes it to open(), creating a file with
      # quotes in its name (and when looking for the rsp file, the name
      # makes it through bash which strips the quotes) :-/
      link_file_list = link_file_list.replace(' ', '_')
      extra_bindings.append(
          ('link_file_list',
           gyp.common.EncodePOSIXShellArgument(link_file_list)))
    if self.flavor == 'win':
      extra_bindings.append(('binary', output))
      if ('/NOENTRY' not in ldflags and
          not self.msvs_settings.GetNoImportLibrary(config_name)):
        self.target.import_lib = output + '.lib'
        extra_bindings.append(('implibflag',
                               '/IMPLIB:%s' % self.target.import_lib))
        pdbname = self.msvs_settings.GetPDBName(
            config_name, self.ExpandSpecial, output + '.pdb')
        output = [output, self.target.import_lib]
        if pdbname:
          output.append(pdbname)
    elif not self.is_mac_bundle:
      # Also produce a TOC so dependents can relink only on interface change.
      output = [output, output + '.TOC']
    else:
      command = command + '_notoc'
  elif self.flavor == 'win':
    extra_bindings.append(('binary', output))
    pdbname = self.msvs_settings.GetPDBName(
        config_name, self.ExpandSpecial, output + '.pdb')
    if pdbname:
      output = [output, pdbname]

  if len(solibs):
    extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))

  ninja_file.build(output, command + command_suffix, link_deps,
                   implicit=list(implicit_deps),
                   variables=extra_bindings)
  return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
  """Write the final step that produces this target's output.

  Dispatches on spec['type']: stamp for 'none', archive (alink/alink_thin)
  for static libraries, and WriteLink for everything else.  Returns (and
  records in self.target) the path of the produced binary.
  """
  extra_link_deps = any(self.target_outputs.get(dep).Linkable()
                        for dep in spec.get('dependencies', [])
                        if dep in self.target_outputs)
  if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
    # TODO(evan): don't call this function for 'none' target types, as
    # it doesn't do anything, and we fake out a 'binary' with a stamp file.
    self.target.binary = compile_deps
    self.target.type = 'none'
  elif spec['type'] == 'static_library':
    self.target.binary = self.ComputeOutput(spec)
    if (self.flavor not in ('mac', 'openbsd', 'win') and not
        self.is_standalone_static_library):
      # Thin archives are cheaper to create and fine for intermediate libs.
      self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                       order_only=compile_deps)
    else:
      variables = []
      if self.xcode_settings:
        libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
        if libtool_flags:
          variables.append(('libtool_flags', libtool_flags))
      if self.msvs_settings:
        libflags = self.msvs_settings.GetLibFlags(config_name,
                                                  self.GypPathToNinja)
        variables.append(('libflags', libflags))

      if self.flavor != 'mac' or len(self.archs) == 1:
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', link_deps,
                         order_only=compile_deps, variables=variables)
      else:
        # Fat mac archive: archive each arch, then combine the .a files.
        inputs = []
        for arch in self.archs:
          output = self.ComputeOutput(spec, arch)
          self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
                                          order_only=compile_deps,
                                          variables=variables)
          inputs.append(output)
        # TODO: It's not clear if libtool_flags should be passed to the alink
        # call that combines single-arch .a files into a fat .a file.
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', inputs,
                         # FIXME: test proving order_only=compile_deps isn't
                         # needed.
                         variables=variables)
  else:
    self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
  return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
  """Write the step that assembles the mac bundle.

  Frameworks/loadable modules get a 'package_framework' edge; everything
  else (and empty bundles, via a '.stamp' output) gets a plain stamp.
  Records the result in self.target.bundle and returns it.
  """
  assert self.is_mac_bundle
  package_framework = spec['type'] in ('shared_library', 'loadable_module')
  output = self.ComputeMacBundleOutput()
  if is_empty:
    output += '.stamp'
  variables = []
  self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                               is_command_start=not package_framework)
  if package_framework and not is_empty:
    variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
    self.ninja.build(output, 'package_framework', mac_bundle_depends,
                     variables=variables)
  else:
    self.ninja.build(output, 'stamp', mac_bundle_depends,
                     variables=variables)
  self.target.bundle = output
  return output
def GetToolchainEnv(self, additional_settings=None):
  """Return the variables the toolchain would set for build steps."""
  env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
  if self.flavor != 'win':
    return env
  # On Windows the MSVS macro environment replaces the Xcode one.
  return self.GetMsvsToolchainEnv(additional_settings=additional_settings)
def GetMsvsToolchainEnv(self, additional_settings=None):
  """Return the variables Visual Studio would set for build steps."""
  # |additional_settings| is accepted for interface symmetry with the other
  # Get*Env helpers; MSVS macro expansion does not use it.
  macro_env = self.msvs_settings.GetVSMacroEnv(
      '$!PRODUCT_DIR', config=self.config_name)
  return macro_env
def GetSortedXcodeEnv(self, additional_settings=None):
  """Return the variables Xcode would set for build steps, sorted."""
  assert self.abs_build_dir
  # The second directory is where the target's gyp file lives, derived from
  # the build dir plus the build-to-base relative path.
  gyp_file_dir = os.path.join(self.abs_build_dir, self.build_to_base)
  return gyp.xcode_emulation.GetSortedXcodeEnv(
      self.xcode_settings, self.abs_build_dir, gyp_file_dir,
      self.config_name, additional_settings)
def GetSortedXcodePostbuildEnv(self):
  """Return the variables Xcode would set for postbuild steps."""
  # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
  # TODO(thakis): It would be nice to have some general mechanism instead.
  extra = {}
  save_file = self.xcode_settings.GetPerTargetSetting(
      'CHROMIUM_STRIP_SAVE_FILE')
  if save_file:
    extra['CHROMIUM_STRIP_SAVE_FILE'] = save_file
  return self.GetSortedXcodeEnv(additional_settings=extra)
def AppendPostbuildVariable(self, variables, spec, output, binary,
                            is_command_start=False):
  """Add a 'postbuilds' variable to |variables| if |output| has postbuilds."""
  command = self.GetPostbuildCommand(spec, output, binary, is_command_start)
  if not command:
    return
  variables.append(('postbuilds', command))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
  """Returns a shell command that runs all the postbuilds, and removes
  |output| if any of them fails. If |is_command_start| is False, then the
  returned string will start with ' && '."""
  if not self.xcode_settings or spec['type'] == 'none' or not output:
    return ''
  output = QuoteShellArgument(output, self.flavor)
  postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
  if output_binary is not None:
    # Add the implicit postbuilds (e.g. stripping) that operate on the
    # binary itself.
    postbuilds = self.xcode_settings.AddImplicitPostbuilds(
        self.config_name,
        os.path.normpath(os.path.join(self.base_to_build, output)),
        QuoteShellArgument(
            os.path.normpath(os.path.join(self.base_to_build, output_binary)),
            self.flavor),
        postbuilds, quiet=True)

  if not postbuilds:
    return ''
  # Postbuilds expect to be run in the gyp file's directory, so insert an
  # implicit postbuild to cd to there.
  postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
      ['cd', self.build_to_base]))
  env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
  # G will be non-null if any postbuild fails. Run all postbuilds in a
  # subshell.
  commands = env + ' (' + \
      ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
  command_string = (commands + '); G=$$?; '
                    # Remove the final output if any postbuild failed.
                    '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
  if is_command_start:
    return '(' + command_string + ' && '
  else:
    return '$ && (' + command_string
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
'export FOO=foo; export BAR="${FOO} bar;'
that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append('export %s=%s;' %
(k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
return ' '.join(export_str)
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return self.ExpandSpecial(
os.path.join(path, self.xcode_settings.GetWrapperName()))
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
if not type:
type = spec['type']
default_variables = copy.copy(generator_default_variables)
CalculateVariables(default_variables, {'flavor': self.flavor})
# Compute filename prefix: the product prefix, or a default for
# the product type.
DEFAULT_PREFIX = {
'loadable_module': default_variables['SHARED_LIB_PREFIX'],
'shared_library': default_variables['SHARED_LIB_PREFIX'],
'static_library': default_variables['STATIC_LIB_PREFIX'],
'executable': default_variables['EXECUTABLE_PREFIX'],
}
prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
# Compute filename extension: the product extension, or a default
# for the product type.
DEFAULT_EXTENSION = {
'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
'shared_library': default_variables['SHARED_LIB_SUFFIX'],
'static_library': default_variables['STATIC_LIB_SUFFIX'],
'executable': default_variables['EXECUTABLE_SUFFIX'],
}
extension = spec.get('product_extension')
if extension:
extension = '.' + extension
else:
extension = DEFAULT_EXTENSION.get(type, '')
if 'product_name' in spec:
# If we were given an explicit name, use that.
target = spec['product_name']
else:
# Otherwise, derive a name from the target name.
target = spec['target_name']
if prefix == 'lib':
# Snip out an extra 'lib' from libs if appropriate.
target = StripPrefix(target, 'lib')
if type in ('static_library', 'loadable_module', 'shared_library',
'executable'):
return '%s%s%s' % (prefix, target, extension)
elif type == 'none':
return '%s.stamp' % target
else:
raise Exception('Unhandled output type %s' % type)
  def ComputeOutput(self, spec, arch=None):
    """Compute the path for the final output of the spec.

    When |arch| is given, the per-arch partial output location is returned
    instead (see the arch branch below).
    """
    type = spec['type']

    if self.flavor == 'win':
      # MSVS settings may override the output name entirely.
      override = self.msvs_settings.GetOutputName(self.config_name,
                                                  self.ExpandSpecial)
      if override:
        return override

    if arch is None and self.flavor == 'mac' and type in (
        'static_library', 'executable', 'shared_library', 'loadable_module'):
      filename = self.xcode_settings.GetExecutablePath()
    else:
      filename = self.ComputeOutputFileName(spec, type)

    if arch is None and 'product_dir' in spec:
      # An explicit product_dir wins over all the placement rules below.
      path = os.path.join(spec['product_dir'], filename)
      return self.ExpandSpecial(path)

    # Some products go into the output root, libraries go into shared library
    # dir, and everything else goes into the normal place.
    type_in_output_root = ['executable', 'loadable_module']
    if self.flavor == 'mac' and self.toolset == 'target':
      type_in_output_root += ['shared_library', 'static_library']
    elif self.flavor == 'win' and self.toolset == 'target':
      type_in_output_root += ['shared_library']

    if arch is not None:
      # Make sure partial executables don't end up in a bundle or the regular
      # output directory.
      archdir = 'arch'
      if self.toolset != 'target':
        archdir = os.path.join('arch', '%s' % self.toolset)
      return os.path.join(archdir, AddArch(filename, arch))
    elif type in type_in_output_root or self.is_standalone_static_library:
      return filename
    elif type == 'shared_library':
      # Other shared libraries go under lib/ (per-toolset subdir for
      # non-target toolsets).
      libdir = 'lib'
      if self.toolset != 'target':
        libdir = os.path.join('lib', '%s' % self.toolset)
      return os.path.join(libdir, filename)
    else:
      return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
assert not isinstance(values, str)
if values is None:
values = []
ninja_file.variable(var, ' '.join(values))
  def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
                        depfile=None):
    """Write out a new ninja "rule" statement for a given command.

    Returns the name of the new rule, and a copy of |args| with variables
    expanded.

    |name| is the rule's base name; |args| the command argv; |is_cygwin|
    selects the cygwin bash wrapping on Windows; |env| is the (mac-only)
    environment pair list; |pool| and |depfile| are passed through to ninja.
    """
    if self.flavor == 'win':
      # Expand $(VSMacro)-style references in both command and description.
      args = [self.msvs_settings.ConvertVSMacros(
                  arg, self.base_to_build, config=self.config_name)
              for arg in args]
      description = self.msvs_settings.ConvertVSMacros(
          description, config=self.config_name)
    elif self.flavor == 'mac':
      # |env| is an empty list on non-mac.
      args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
      description = gyp.xcode_emulation.ExpandEnvVars(description, env)

    # TODO: we shouldn't need to qualify names; we do it because
    # currently the ninja rule namespace is global, but it really
    # should be scoped to the subninja.
    rule_name = self.name
    if self.toolset == 'target':
      rule_name += '.' + self.toolset
    rule_name += '.' + name
    # Ninja rule names only allow a limited character set.
    rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)

    # Remove variable references, but not if they refer to the magic rule
    # variables. This is not quite right, as it also protects these for
    # actions, not just for rules where they are valid. Good enough.
    protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
    protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
    description = re.sub(protect + r'\$', '_', description)

    # gyp dictates that commands are run from the base directory.
    # cd into the directory before running, and adjust paths in
    # the arguments to point to the proper locations.
    rspfile = None
    rspfile_content = None
    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
    if self.flavor == 'win':
      # Long command lines go through a response file on Windows.
      rspfile = rule_name + '.$unique_name.rsp'
      # The cygwin case handles this inside the bash sub-shell.
      run_in = '' if is_cygwin else ' ' + self.build_to_base
      if is_cygwin:
        rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
            args, self.build_to_base)
      else:
        rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
      command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
                 rspfile + run_in)
    else:
      env = self.ComputeExportEnvString(env)
      command = gyp.common.EncodePOSIXShellList(args)
      command = 'cd %s; ' % self.build_to_base + env + command

    # GYP rules/actions express being no-ops by not touching their outputs.
    # Avoid executing downstream dependencies in this case by specifying
    # restat=1 to ninja.
    self.ninja.rule(rule_name, command, description, depfile=depfile,
                    restat=True, pool=pool,
                    rspfile=rspfile, rspfile_content=rspfile_content)
    self.ninja.newline()

    return rule_name, args
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Fills in flavor-specific defaults in |default_variables| and, for mac/win,
  copies extra generator configuration from the Xcode/MSVS generators into
  module globals.
  """
  global generator_additional_non_configuration_keys
  global generator_additional_path_sections
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])

    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    exts = gyp.MSVSUtil.TARGET_TYPE_EXT
    default_variables.setdefault('OS', 'win')
    default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']

    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    # All remaining flavors (linux, android, bsd, ...) share ELF-style
    # defaults.
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
  """Return the path from the toplevel_dir to the build output directory."""
  # generator_output is where make-style generators put their build files;
  # ninja writes nothing there itself, but honoring it eases make -> ninja
  # migrations.
  generator_output = params['options'].generator_output or '.'
  # The 'output_dir' generator flag names the build directory under it.
  out_name = params.get('generator_flags', {}).get('output_dir', 'out')
  # e.g. "out", relative to the source root.
  return os.path.normpath(
      os.path.join(os.path.relpath(generator_output), out_name))
def CalculateGeneratorInputInfo(params):
  """Called by __init__ to initialize generator values based on params."""
  global generator_filelist_paths
  top = params['options'].toplevel_dir
  generator_filelist_paths = {
      'toplevel': top,
      # Filelists land next to the generated build files, e.g. "out/gypfiles".
      'qualified_out_dir': os.path.normpath(
          os.path.join(top, ComputeOutputDir(params), 'gypfiles')),
  }
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating directories if necessary.

  Returns the open file object; the caller is responsible for closing it.
  """
  gyp.common.EnsureDirExists(path)
  return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
  """Return |prog| prefixed by the wrapper configured for |cmd| (e.g. a
  ccache/distcc launcher), or |prog| unchanged when none is configured."""
  wrapper = wrappers.get(cmd)
  return '%s %s' % (wrapper, prog) if wrapper else prog
def GetDefaultConcurrentLinks():
  """Returns a best-guess for a number of concurrent links.

  An explicit GYP_LINK_CONCURRENCY wins; otherwise the guess is derived from
  physical memory per platform, falling back to 1.
  """
  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
  if pool_size:
    return pool_size

  if sys.platform in ('win32', 'cygwin'):
    import ctypes

    class MEMORYSTATUSEX(ctypes.Structure):
      # Mirrors the Win32 MEMORYSTATUSEX struct for GlobalMemoryStatusEx.
      _fields_ = [
        ("dwLength", ctypes.c_ulong),
        ("dwMemoryLoad", ctypes.c_ulong),
        ("ullTotalPhys", ctypes.c_ulonglong),
        ("ullAvailPhys", ctypes.c_ulonglong),
        ("ullTotalPageFile", ctypes.c_ulonglong),
        ("ullAvailPageFile", ctypes.c_ulonglong),
        ("ullTotalVirtual", ctypes.c_ulonglong),
        ("ullAvailVirtual", ctypes.c_ulonglong),
        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
      ]

    stat = MEMORYSTATUSEX()
    stat.dwLength = ctypes.sizeof(stat)
    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))

    # Budget one link per 4GB of physical memory, with an optional hard cap.
    mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
    hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
    return min(mem_limit, hard_cap)
  elif sys.platform.startswith('linux'):
    if os.path.exists("/proc/meminfo"):
      with open("/proc/meminfo") as meminfo:
        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
        for line in meminfo:
          match = memtotal_re.match(line)
          if not match:
            continue
          # Allow 8Gb per link on Linux because Gold is quite memory hungry
          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
    return 1
  elif sys.platform == 'darwin':
    try:
      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
      # 4GB per ld process allows for some more bloat.
      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
    except:
      # NOTE(review): bare except is a deliberate best-effort fallback when
      # sysctl is unavailable/fails, but it also swallows KeyboardInterrupt.
      return 1
  else:
    # TODO(scottmg): Implement this for other platforms.
    return 1
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding is enabled."""
return '_embed' if embed_manifest else ''
def _AddWinLinkRules(master_ninja, embed_manifest):
  """Adds link rules for Windows platform to |master_ninja|.

  Emits 'solink', 'solink_module' and 'link' rules (suffixed per
  |embed_manifest|) that route through gyp-win-tool for manifest handling.
  """
  def FullLinkCommand(ldcmd, out, binary_type):
    # Wrap |ldcmd| in the gyp-win-tool manifest driver; the resource id
    # distinguishes exe (1) from dll (2).
    resource_name = {
      'exe': '1',
      'dll': '2',
    }[binary_type]
    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
           '$manifests' % {
               'python': sys.executable,
               'out': out,
               'ldcmd': ldcmd,
               'resname': resource_name,
               'embed': embed_manifest }
  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
  use_separate_mspdbsrv = (
      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
            '$ld /nologo $implibflag /DLL /OUT:$binary '
            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
  master_ninja.rule('solink' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  master_ninja.rule('solink_module' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  # Note that ldflags goes at the end so that it has the option of
  # overriding default settings earlier in the command line.
  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
             '$ld /nologo /OUT:$binary @$binary.rsp' %
             (sys.executable, use_separate_mspdbsrv))
  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
  master_ninja.rule('link' + rule_name_suffix,
                    description='LINK%s $binary' % rule_name_suffix.upper(),
                    command=exe_cmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$in_newline $libs $ldflags',
                    pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write build.ninja (plus one sub-ninja per target) for |config_name|.

  Emits toolchain variables (honoring make_global_settings and *_wrapper
  overrides), the compile/link rules for the current flavor (win/mac/posix),
  one .ninja file per target, and finally the phony short-name and 'all'
  targets.
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(
      os.path.join(ComputeOutputDir(params), config_name))

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)

  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)

  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, toplevel_build)

  # Grab make settings for CC/CXX.
  # The rules are
  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
  #   gyp, the environment variable.
  # - If there is no 'make_global_settings' for CC.host/CXX.host or
  #   'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
  #   to cc/cxx.
  if flavor == 'win':
    ar = 'lib.exe'
    # cc and cxx must be set to the correct architecture by overriding with one
    # of cl_x86 or cl_x64 below.
    cc = 'UNSET'
    cxx = 'UNSET'
    ld = 'link.exe'
    ld_host = '$ld'
  else:
    ar = 'ar'
    cc = 'cc'
    cxx = 'c++'
    ld = '$cc'
    ldxx = '$cxx'
    ld_host = '$cc_host'
    ldxx_host = '$cxx_host'

  ar_host = 'ar'
  cc_host = None
  cxx_host = None
  cc_host_global_setting = None
  cxx_host_global_setting = None
  clang_cl = None
  nm = 'nm'
  nm_host = 'nm'
  readelf = 'readelf'
  readelf_host = 'readelf'

  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = gyp.common.InvertRelativePath(build_dir,
                                                options.toplevel_dir)
  wrappers = {}
  # Apply tool overrides from make_global_settings; paths are interpreted
  # relative to the source root, hence the build_to_root join.
  for key, value in make_global_settings:
    if key == 'AR':
      ar = os.path.join(build_to_root, value)
    if key == 'AR.host':
      ar_host = os.path.join(build_to_root, value)
    if key == 'CC':
      cc = os.path.join(build_to_root, value)
      if cc.endswith('clang-cl'):
        clang_cl = cc
    if key == 'CXX':
      cxx = os.path.join(build_to_root, value)
    if key == 'CC.host':
      cc_host = os.path.join(build_to_root, value)
      cc_host_global_setting = value
    if key == 'CXX.host':
      cxx_host = os.path.join(build_to_root, value)
      cxx_host_global_setting = value
    if key == 'LD':
      ld = os.path.join(build_to_root, value)
    if key == 'LD.host':
      ld_host = os.path.join(build_to_root, value)
    if key == 'NM':
      nm = os.path.join(build_to_root, value)
    if key == 'NM.host':
      nm_host = os.path.join(build_to_root, value)
    if key == 'READELF':
      readelf = os.path.join(build_to_root, value)
    if key == 'READELF.host':
      readelf_host = os.path.join(build_to_root, value)
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)

  # Support wrappers from environment variables too.
  for key, value in os.environ.iteritems():
    if key.lower().endswith('_wrapper'):
      key_prefix = key[:-len('_wrapper')]
      key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
      wrappers[key_prefix] = os.path.join(build_to_root, value)

  if flavor == 'win':
    # Generate the per-arch environment files MSVC builds need, and define
    # cl_x86/cl_x64 variables pointing at the right compiler.
    configs = [target_dicts[qualified_target]['configurations'][config_name]
               for qualified_target in target_list]
    shared_system_includes = None
    if not generator_flags.get('ninja_use_custom_environment_files', 0):
      shared_system_includes = \
          gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
              configs, generator_flags)
    cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
        toplevel_build, generator_flags, shared_system_includes, OpenOutput)
    for arch, path in cl_paths.iteritems():
      if clang_cl:
        # If we have selected clang-cl, use that instead.
        path = clang_cl
      command = CommandWithWrapper('CC', wrappers,
                                   QuoteShellArgument(path, 'win'))
      if clang_cl:
        # Use clang-cl to cross-compile for x86 or x86_64.
        command += (' -m32' if arch == 'x86' else ' -m64')
      master_ninja.variable('cl_' + arch, command)

  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))

  if flavor == 'win':
    master_ninja.variable('ld', ld)
    master_ninja.variable('idl', 'midl.exe')
    master_ninja.variable('ar', ar)
    master_ninja.variable('rc', 'rc.exe')
    master_ninja.variable('ml_x86', 'ml.exe')
    master_ninja.variable('ml_x64', 'ml64.exe')
    master_ninja.variable('mt', 'mt.exe')
  else:
    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
    if flavor != 'mac':
      # Mac does not use readelf/nm for .TOC generation, so avoiding polluting
      # the master ninja with extra unused variables.
      master_ninja.variable(
          'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
      master_ninja.variable(
          'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))

  if generator_supports_multiple_toolsets:
    if not cc_host:
      cc_host = cc
    if not cxx_host:
      cxx_host = cxx

    master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
    master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
    master_ninja.variable('readelf_host',
                          GetEnvironFallback(['READELF_host'], readelf_host))
    cc_host = GetEnvironFallback(['CC_host'], cc_host)
    cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)

    # The environment variable could be used in 'make_global_settings', like
    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
    if '$(CC)' in cc_host and cc_host_global_setting:
      cc_host = cc_host_global_setting.replace('$(CC)', cc)
    if '$(CXX)' in cxx_host and cxx_host_global_setting:
      cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
    master_ninja.variable('cc_host',
                          CommandWithWrapper('CC.host', wrappers, cc_host))
    master_ninja.variable('cxx_host',
                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
    if flavor == 'win':
      master_ninja.variable('ld_host', ld_host)
    else:
      master_ninja.variable('ld_host', CommandWithWrapper(
          'LINK', wrappers, ld_host))
      master_ninja.variable('ldxx_host', CommandWithWrapper(
          'LINK', wrappers, ldxx_host))

  master_ninja.newline()

  master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
  master_ninja.newline()

  deps = 'msvc' if flavor == 'win' else 'gcc'

  # --- Compile rules, per flavor. ---
  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'cc_s',
      description='CC $out',
      command=('$cc $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'))
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
               '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
  else:
    # TODO(scottmg) Separate pdb names is a test to see if it works around
    # http://crbug.com/142362. It seems there's a race between the creation of
    # the .pdb by the precompiled header step for .cc and the compilation of
    # .c files. This should be handled by mspdbsrv, but rarely errors out with
    #   c1xx : fatal error C1033: cannot open program database
    # By making the rules target separate pdb files this might be avoided.
    cc_command = ('ninja -t msvc -e $arch ' +
                  '-- '
                  '$cc /nologo /showIncludes /FC '
                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
    cxx_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cxx /nologo /showIncludes /FC '
                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=cc_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_c',
      deps=deps)
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=cxx_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_cc',
      deps=deps)
    master_ninja.rule(
      'idl',
      description='IDL $in',
      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
               '$tlb $h $dlldata $iid $proxy $in '
               '$midl_includes $idlflags' % sys.executable))
    master_ninja.rule(
      'rc',
      description='RC $in',
      # Note: $in must be last otherwise rc.exe complains.
      command=('%s gyp-win-tool rc-wrapper '
               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
               sys.executable))
    master_ninja.rule(
      'asm',
      description='ASM $out',
      command=('%s gyp-win-tool asm-wrapper '
               '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
               sys.executable))

  # --- Link rules: posix first, then win, then mac. ---
  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && $ar rcs $arflags $out $in')
    master_ninja.rule(
      'alink_thin',
      description='AR $out',
      command='rm -f $out && $ar rcsT $arflags $out $in')

    # This allows targets that only need to depend on $lib's API to declare an
    # order-only dependency on $lib.TOC and avoid relinking such downstream
    # dependencies when $lib changes only in non-public ways.
    # The resulting string leaves an uninterpolated %{suffix} which
    # is used in the final substitution below.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
        'fi; fi'
        % { 'solink':
              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
            'extract_toc':
              ('{ $readelf -d $lib | grep SONAME ; '
               '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})

    master_ninja.rule(
      'solink',
      description='SOLINK $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content=
          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
      pool='link_pool')
  elif flavor == 'win':
    master_ninja.rule(
        'alink',
        description='LIB $out',
        command=('%s gyp-win-tool link-wrapper $arch False '
                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                 sys.executable),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libflags')
    _AddWinLinkRules(master_ninja, embed_manifest=True)
    _AddWinLinkRules(master_ninja, embed_manifest=False)
  else:
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool $libtool_flags '
              '-static -o $out $in'
              '$postbuilds')
    master_ninja.rule(
      'lipo',
      description='LIPO $out, POSTBUILDS',
      command='rm -f $out && lipo -create $in -output $out$postbuilds')
    master_ninja.rule(
      'solipo',
      description='SOLIPO $out, POSTBUILDS',
      command=(
          'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
          '%(extract_toc)s > $lib.TOC'
          % { 'extract_toc':
                '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
                'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))

    # Record the public interface of $lib in $lib.TOC. See the corresponding
    # comment in the posix section above for details.
    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
             # Always force dependent targets to relink if this library
             # reexports something. Handling this correctly would require
             # recursive TOC dumping but this is rare in practice, so punt.
             'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
          '%(solink)s && %(extract_toc)s > $lib.TOC; '
        'else '
          '%(solink)s && %(extract_toc)s > $lib.tmp && '
          'if ! cmp -s $lib.tmp $lib.TOC; then '
            'mv $lib.tmp $lib.TOC ; '
          'fi; '
        'fi'
        % { 'solink': solink_base,
            'extract_toc':
              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})

    solink_suffix = '@$link_file_list$postbuilds'
    master_ninja.rule(
      'solink',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_notoc',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')

    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module_notoc',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')

    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $solibs $libs$postbuilds'),
      pool='link_pool')
    master_ninja.rule(
      'preprocess_infoplist',
      description='PREPROCESS INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'copy_infoplist',
      description='COPY INFOPLIST $in',
      command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
    master_ninja.rule(
      'merge_infoplist',
      description='MERGE INFOPLISTS $in',
      command='$env ./gyp-mac-tool merge-info-plist $out $in')
    master_ninja.rule(
      'compile_xcassets',
      description='COMPILE XCASSETS $in',
      command='$env ./gyp-mac-tool compile-xcassets $keys $in')
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='./gyp-mac-tool package-framework $out $version$postbuilds '
              '&& touch $out')

  # --- Flavor-independent utility rules. ---
  if flavor == 'win':
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='%s gyp-win-tool stamp $out' % sys.executable)
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
  else:
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='${postbuilds}touch $out')
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='rm -rf $out && cp -af $in $out')
  master_ninja.newline()

  # --- Per-target sub-ninja generation. ---
  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list,
                                        target_dicts,
                                        os.path.normpath(build_file)):
      all_targets.add(target)
  all_outputs = set()

  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # target_short_names is a map from target short name to a list of Target
  # objects.
  target_short_names = {}

  # short name of targets that were skipped because they didn't contain
  # anything interesting.
  # NOTE: there may be overlap between this and non_empty_target_names.
  empty_target_names = set()

  # Set of non-empty short target names.
  # NOTE: there may be overlap between this and empty_target_names.
  non_empty_target_names = set()

  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets. %s vs. %s" %
        (this_make_global_settings, make_global_settings))

    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)

    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)

    # Rule names are qualified with a hash of the target to keep them unique
    # in the global ninja rule namespace.
    qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
                                                           toolset)
    hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()

    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')

    ninja_output = StringIO()
    writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
                         ninja_output,
                         toplevel_build, output_file,
                         flavor, toplevel_dir=options.toplevel_dir)

    target = writer.WriteSpec(spec, config_name, generator_flags)

    if ninja_output.tell() > 0:
      # Only create files for ninja files that actually have contents.
      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
        ninja_file.write(ninja_output.getvalue())
      ninja_output.close()
      master_ninja.subninja(output_file)

    if target:
      if name != target.FinalOutput() and spec['toolset'] == 'target':
        target_short_names.setdefault(name, []).append(target)
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())
        non_empty_target_names.add(name)
    else:
      empty_target_names.add(name)

  if target_short_names:
    # Write a short name to build this target. This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    master_ninja.newline()
    master_ninja.comment('Short names for targets.')
    for short_name in target_short_names:
      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
                                               target_short_names[short_name]])

  # Write phony targets for any empty targets that weren't written yet. As
  # short names are not necessarily unique only do this for short names that
  # haven't already been output for another target.
  empty_target_names = empty_target_names - non_empty_target_names
  if empty_target_names:
    master_ninja.newline()
    master_ninja.comment('Empty targets (output for completeness).')
    for name in sorted(empty_target_names):
      master_ninja.build(name, 'phony')

  if all_outputs:
    master_ninja.newline()
    master_ninja.build('all', 'phony', list(all_outputs))
    master_ninja.default(generator_flags.get('default_target', 'all'))

  master_ninja_file.close()
def PerformBuild(data, configurations, params):
  """Run ninja once per requested configuration.

  Invoked by gyp's --build flag after generation; assumes the ninja build
  files already exist under <toplevel_dir>/out/<config>.

  Raises:
    subprocess.CalledProcessError: if any ninja invocation fails.
  """
  options = params['options']
  for config in configurations:
    # Each configuration gets its own build directory: out/<config>.
    builddir = os.path.join(options.toplevel_dir, 'out', config)
    arguments = ['ninja', '-C', builddir]
    print 'Building [%s]: %s' % (config, arguments)
    # check_call raises on a non-zero ninja exit status, aborting the build.
    subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Worker-pool entry point: generate ninja files for one configuration.

  `arglist` is the (target_list, target_dicts, data, params, config_name)
  tuple packed by GenerateOutput for multiprocessing.Pool.map.
  """
  # Ignore SIGINT in the worker so that Ctrl-C is caught by the parent
  # process, which then terminates all multiprocessing children.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  GenerateOutputForConfig(*arglist)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate ninja build files for every configuration (gyp entry point).

  If the generator flag `config` names a single configuration, only that one
  is generated; otherwise all configurations are generated, optionally in
  parallel worker processes.
  """
  # Update target_dicts for iOS device builds.
  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
      target_dicts)
  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    # Windows-only transforms: shard oversized targets and inject shim
    # targets that work around the linker's PDB size limit.
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
        target_list, target_dicts, generator_default_variables)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    # Every target shares the same configuration names; read them off the
    # first target.
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      try:
        pool = multiprocessing.Pool(len(config_names))
        arglists = []
        for config_name in config_names:
          arglists.append(
              (target_list, target_dicts, data, params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt, e:
        # Workers ignore SIGINT (see CallGenerateOutputForConfig); the parent
        # tears the pool down and re-raises so the interrupt is not swallowed.
        pool.terminate()
        raise e
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                config_name)
| mit |
tchernomax/ansible | lib/ansible/modules/windows/win_domain_controller.py | 28 | 4039 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Red Hat, Inc.
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Metadata consumed by Ansible's tooling: core-team supported module whose
# interface is still in 'preview' (not yet guaranteed stable).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'core'}
DOCUMENTATION = r'''
module: win_domain_controller
short_description: Manage domain controller/member server state for a Windows host
version_added: 2.3
description:
- Ensure that a Windows Server 2012+ host is configured as a domain controller or demoted to member server. This module may require
subsequent use of the M(win_reboot) action if changes are made.
options:
dns_domain_name:
description:
- When C(state) is C(domain_controller), the DNS name of the domain for which the targeted Windows host should be a DC.
domain_admin_user:
description:
- Username of a domain admin for the target domain (necessary to promote or demote a domain controller).
required: true
domain_admin_password:
description:
- Password for the specified C(domain_admin_user).
required: true
safe_mode_password:
description:
- Safe mode password for the domain controller (required when C(state) is C(domain_controller)).
local_admin_password:
description:
- Password to be assigned to the local C(Administrator) user (required when C(state) is C(member_server)).
read_only:
description:
- Whether to install the domain controller as a read only replica for an
existing domain.
type: bool
default: 'no'
version_added: '2.5'
site_name:
description:
- Specifies the name of an existing site where you can place the new
domain controller.
- This option is required when I(read_only) is C(yes).
version_added: '2.5'
state:
description:
- Whether the target host should be a domain controller or a member server.
choices:
- domain_controller
- member_server
database_path:
description:
- The path to a directory on a fixed disk of the Windows host where the
domain database will be created..
- If not set then the default path is C(%SYSTEMROOT%\NTDS).
type: path
version_added: '2.5'
sysvol_path:
description:
- The path to a directory on a fixed disk of the Windows host where the
Sysvol folder will be created.
- If not set then the default path is C(%SYSTEMROOT%\SYSVOL).
type: path
version_added: '2.5'
author:
- Matt Davis (@nitzmahone)
'''
RETURN = '''
reboot_required:
description: True if changes were made that require a reboot.
returned: always
type: boolean
sample: true
'''
EXAMPLES = r'''
- name: ensure a server is a domain controller
win_domain_controller:
dns_domain_name: ansible.vagrant
domain_admin_user: testguy@ansible.vagrant
domain_admin_password: password123!
safe_mode_password: password123!
state: domain_controller
log_path: C:\ansible_win_domain_controller.txt
# ensure a server is not a domain controller
# note that without an action wrapper, in the case where a DC is demoted,
# the task will fail with a 401 Unauthorized, because the domain credential
# becomes invalid to fetch the final output over WinRM. This requires win_async
# with credential switching (or other clever credential-switching
# mechanism to get the output and trigger the required reboot)
- win_domain_controller:
domain_admin_user: testguy@ansible.vagrant
domain_admin_password: password123!
local_admin_password: password123!
state: member_server
log_path: C:\ansible_win_domain_controller.txt
- name: promote server as a read only domain controller
win_domain_controller:
dns_domain_name: ansible.vagrant
domain_admin_user: testguy@ansible.vagrant
domain_admin_password: password123!
safe_mode_password: password123!
state: domain_controller
read_only: yes
site_name: London
'''
| gpl-3.0 |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/sorl/thumbnail/management/commands/thumbnail.py | 7 | 2241 | # encoding=utf-8
from __future__ import unicode_literals, print_function
import sys
from django.core.management.base import BaseCommand, CommandError
from sorl.thumbnail import default
from sorl.thumbnail.images import delete_all_thumbnails
class Command(BaseCommand):
    """Maintenance command for sorl-thumbnail's thumbnails and key/value store.

    Actions:
      cleanup                 -- prune key/value store entries whose thumbnail
                                 files no longer exist
      clear                   -- empty the key/value store (files stay on disk)
      clear_delete_referenced -- delete every thumbnail file referenced by the
                                 store, then clear the store
      clear_delete_all        -- clear the store and delete all files under
                                 THUMBNAIL_PREFIX
    """
    help = (
        'Handles thumbnails and key value store'
    )
    args = '[cleanup, clear clear_delete_referenced clear_delete_all]'

    def handle(self, *labels, **options):
        verbosity = int(options.get('verbosity'))

        # Django 1.4 compatibility fix
        stdout = options.get('stdout', None)
        stdout = stdout if stdout else sys.stdout

        stderr = options.get('stderr', None)
        stderr = stderr if stderr else sys.stderr

        if not labels:
            # BUG FIX: print_help() writes the usage text itself and returns
            # None, so the old `print(self.print_help(...), file=stderr)`
            # emitted a spurious "None" line after the help text.
            self.print_help('thumbnail', '')
            sys.exit(1)

        if len(labels) != 1:
            raise CommandError('`%s` is not a valid argument' % labels)
        label = labels[0]

        if label not in ('cleanup', 'clear', 'clear_delete_referenced', 'clear_delete_all'):
            raise CommandError('`%s` unknown action' % label)

        if label == 'cleanup':
            if verbosity >= 1:
                print("Cleanup thumbnails", end=' ... ', file=stdout)
            default.kvstore.cleanup()
            if verbosity >= 1:
                print("[Done]", file=stdout)
            return

        if label == 'clear_delete_referenced':
            if verbosity >= 1:
                print("Delete all thumbnail files referenced in " +
                      "Key Value Store", end=' ... ', file=stdout)
            default.kvstore.delete_all_thumbnail_files()
            if verbosity >= 1:
                print('[Done]', file=stdout)

        # Deliberate fall-through: 'clear', 'clear_delete_referenced' and
        # 'clear_delete_all' all end up clearing the key/value store here.
        if verbosity >= 1:
            print("Clear the Key Value Store", end=' ... ', file=stdout)
        default.kvstore.clear()
        if verbosity >= 1:
            print('[Done]', file=stdout)

        if label == 'clear_delete_all':
            if verbosity >= 1:
                print("Delete all thumbnail files in THUMBNAIL_PREFIX", end=' ... ', file=stdout)
            delete_all_thumbnails()
            if verbosity >= 1:
                print('[Done]', file=stdout)
| bsd-3-clause |
TileHalo/servo | tests/wpt/web-platform-tests/cors/resources/preflight.py | 253 | 1238 | def main(request, response):
headers = [("Content-Type", "text/plain")]
if "check" in request.GET:
token = request.GET.first("token")
value = request.server.stash.take(token)
if value == None:
body = "0"
else:
if request.GET.first("check", None) == "keep":
request.server.stash.put(token, value)
body = "1"
return headers, body
if request.method == "OPTIONS":
if not "Access-Control-Request-Method" in request.headers:
response.set_error(400, "No Access-Control-Request-Method header")
return "ERROR: No access-control-request-method in preflight!"
headers.append(("Access-Control-Allow-Methods",
request.headers['Access-Control-Request-Method']))
if "max_age" in request.GET:
headers.append(("Access-Control-Max-Age", request.GET['max_age']))
if "token" in request.GET:
request.server.stash.put(request.GET.first("token"), 1)
headers.append(("Access-Control-Allow-Origin", "*"))
headers.append(("Access-Control-Allow-Headers", "x-print"))
body = request.headers.get("x-print", "NO")
return headers, body
| mpl-2.0 |
gbriones1/cloud-init | tests/unittests/test_handler/test_handler_seed_random.py | 7 | 7689 | # Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
#
# Based on test_handler_set_hostname.py
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cloudinit.config import cc_seed_random
import gzip
import tempfile
from six import BytesIO
from cloudinit import cloud
from cloudinit import distros
from cloudinit import helpers
from cloudinit import util
from cloudinit.sources import DataSourceNone
from .. import helpers as t_help
import logging
LOG = logging.getLogger(__name__)
class TestRandomSeed(t_help.TestCase):
    """Unit tests for the cc_seed_random cloud-config module.

    util.which/util.subp are monkey-patched so no external command runs;
    calls are recorded in self.subp_called and command availability is
    driven by self.whichdata.
    """

    def setUp(self):
        super(TestRandomSeed, self).setUp()
        # NOTE(review): tempfile.mktemp() only reserves a name and is
        # race-prone in general; acceptable here since the handler creates
        # the file and tearDown deletes it.
        self._seed_file = tempfile.mktemp()
        self.unapply = []

        # by default 'which' has nothing in its path
        self.apply_patches([(util, 'which', self._which)])
        self.apply_patches([(util, 'subp', self._subp)])
        self.subp_called = []
        self.whichdata = {}

    def tearDown(self):
        # Undo the monkey-patches in reverse order, then remove the seed file.
        apply_patches([i for i in reversed(self.unapply)])
        util.del_file(self._seed_file)

    def apply_patches(self, patches):
        # Record undo information so tearDown can restore the originals.
        ret = apply_patches(patches)
        self.unapply += ret

    def _which(self, program):
        # Stand-in for util.which, driven by self.whichdata.
        return self.whichdata.get(program)

    def _subp(self, *args, **kwargs):
        # supports subp calling with cmd as args or kwargs
        if 'args' not in kwargs:
            kwargs['args'] = args[0]
        self.subp_called.append(kwargs)
        return

    def _compress(self, text):
        # gzip-compress `text` (bytes) entirely in memory.
        contents = BytesIO()
        gz_fh = gzip.GzipFile(mode='wb', fileobj=contents)
        gz_fh.write(text)
        gz_fh.close()
        return contents.getvalue()

    def _get_cloud(self, distro, metadata=None):
        # Build a minimal Cloud object backed by DataSourceNone.
        paths = helpers.Paths({})
        cls = distros.fetch(distro)
        ubuntu_distro = cls(distro, {}, paths)
        ds = DataSourceNone.DataSourceNone({}, ubuntu_distro, paths)
        if metadata:
            ds.metadata = metadata
        return cloud.Cloud(ds, paths, {}, ubuntu_distro, None)

    def test_append_random(self):
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': 'tiny-tim-was-here',
            }
        }
        cc_seed_random.handle('test', cfg, self._get_cloud('ubuntu'), LOG, [])
        contents = util.load_file(self._seed_file)
        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual("tiny-tim-was-here", contents)

    def test_append_random_unknown_encoding(self):
        data = self._compress(b"tiny-toe")
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': data,
                'encoding': 'special_encoding',
            }
        }
        self.assertRaises(IOError, cc_seed_random.handle, 'test', cfg,
                          self._get_cloud('ubuntu'), LOG, [])

    def test_append_random_gzip(self):
        data = self._compress(b"tiny-toe")
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': data,
                'encoding': 'gzip',
            }
        }
        cc_seed_random.handle('test', cfg, self._get_cloud('ubuntu'), LOG, [])
        contents = util.load_file(self._seed_file)
        self.assertEqual("tiny-toe", contents)

    def test_append_random_gz(self):
        data = self._compress(b"big-toe")
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': data,
                'encoding': 'gz',
            }
        }
        cc_seed_random.handle('test', cfg, self._get_cloud('ubuntu'), LOG, [])
        contents = util.load_file(self._seed_file)
        self.assertEqual("big-toe", contents)

    def test_append_random_base64(self):
        data = util.b64e('bubbles')
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': data,
                'encoding': 'base64',
            }
        }
        cc_seed_random.handle('test', cfg, self._get_cloud('ubuntu'), LOG, [])
        contents = util.load_file(self._seed_file)
        self.assertEqual("bubbles", contents)

    def test_append_random_b64(self):
        data = util.b64e('kit-kat')
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': data,
                'encoding': 'b64',
            }
        }
        cc_seed_random.handle('test', cfg, self._get_cloud('ubuntu'), LOG, [])
        contents = util.load_file(self._seed_file)
        self.assertEqual("kit-kat", contents)

    def test_append_random_metadata(self):
        cfg = {
            'random_seed': {
                'file': self._seed_file,
                'data': 'tiny-tim-was-here',
            }
        }
        # Datasource-provided seed is appended after the configured data.
        c = self._get_cloud('ubuntu', {'random_seed': '-so-was-josh'})
        cc_seed_random.handle('test', cfg, c, LOG, [])
        contents = util.load_file(self._seed_file)
        self.assertEqual('tiny-tim-was-here-so-was-josh', contents)

    def test_seed_command_not_provided_pollinate_available(self):
        c = self._get_cloud('ubuntu', {})
        self.whichdata = {'pollinate': '/usr/bin/pollinate'}
        cc_seed_random.handle('test', {}, c, LOG, [])

        subp_args = [f['args'] for f in self.subp_called]
        self.assertIn(['pollinate', '-q'], subp_args)

    def test_seed_command_not_provided_pollinate_not_available(self):
        c = self._get_cloud('ubuntu', {})
        self.whichdata = {}
        cc_seed_random.handle('test', {}, c, LOG, [])

        # subp should not have been called as which would say not available
        self.assertEqual(self.subp_called, list())

    def test_unavailable_seed_command_and_required_raises_error(self):
        c = self._get_cloud('ubuntu', {})
        self.whichdata = {}
        self.assertRaises(ValueError, cc_seed_random.handle,
                          'test', {'random_seed': {'command_required': True}}, c, LOG, [])

    def test_seed_command_and_required(self):
        c = self._get_cloud('ubuntu', {})
        self.whichdata = {'foo': 'foo'}
        cfg = {'random_seed': {'command_required': True, 'command': ['foo']}}
        cc_seed_random.handle('test', cfg, c, LOG, [])

        self.assertIn(['foo'], [f['args'] for f in self.subp_called])

    def test_file_in_environment_for_command(self):
        c = self._get_cloud('ubuntu', {})
        self.whichdata = {'foo': 'foo'}
        cfg = {'random_seed': {'command_required': True, 'command': ['foo'],
                               'file': self._seed_file}}
        cc_seed_random.handle('test', cfg, c, LOG, [])

        # this just instists that the first time subp was called,
        # RANDOM_SEED_FILE was in the environment set up correctly
        subp_env = [f['env'] for f in self.subp_called]
        self.assertEqual(subp_env[0].get('RANDOM_SEED_FILE'), self._seed_file)
def apply_patches(patches):
    """Apply (ref, attribute_name, replacement) monkey-patches.

    Entries whose replacement is None are skipped. Returns a list of
    (ref, attribute_name, original_value) triples suitable for feeding back
    into this function to undo the patching.
    """
    undo = []
    for ref, attr, replacement in patches:
        if replacement is None:
            continue
        previous = getattr(ref, attr)
        setattr(ref, attr, replacement)
        undo.append((ref, attr, previous))
    return undo
| gpl-3.0 |
jelugbo/hebs_repo | common/djangoapps/student/tests/test_authz.py | 61 | 9217 | """
Tests authz.py
"""
import mock
from django.test import TestCase
from django.contrib.auth.models import User, AnonymousUser
from django.core.exceptions import PermissionDenied
from student.roles import CourseInstructorRole, CourseStaffRole, CourseCreatorRole
from student.tests.factories import AdminFactory
from student.auth import has_access, add_users, remove_users
from opaque_keys.edx.locations import SlashSeparatedCourseKey
class CreatorGroupTest(TestCase):
    """
    Tests for the course creator group.
    """

    def setUp(self):
        """ Test case setup """
        self.user = User.objects.create_user('testuser', 'test+courses@edx.org', 'foo')
        self.admin = User.objects.create_user('Mark', 'admin+courses@edx.org', 'foo')
        # Django staff flag; grants implicit course-creator access below.
        self.admin.is_staff = True

    def test_creator_group_not_enabled(self):
        """
        Tests that CourseCreatorRole().has_user always returns True if ENABLE_CREATOR_GROUP
        and DISABLE_COURSE_CREATION are both not turned on.
        """
        self.assertTrue(has_access(self.user, CourseCreatorRole()))

    def test_creator_group_enabled_but_empty(self):
        """ Tests creator group feature on, but group empty. """
        with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
            self.assertFalse(has_access(self.user, CourseCreatorRole()))

            # Make user staff. This will cause CourseCreatorRole().has_user to return True.
            self.user.is_staff = True
            self.assertTrue(has_access(self.user, CourseCreatorRole()))

    def test_creator_group_enabled_nonempty(self):
        """ Tests creator group feature on, user added. """
        with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
            add_users(self.admin, CourseCreatorRole(), self.user)
            self.assertTrue(has_access(self.user, CourseCreatorRole()))

            # check that a user who has not been added to the group still returns false
            user_not_added = User.objects.create_user('testuser2', 'test+courses2@edx.org', 'foo2')
            self.assertFalse(has_access(user_not_added, CourseCreatorRole()))

            # remove first user from the group and verify that CourseCreatorRole().has_user now returns false
            remove_users(self.admin, CourseCreatorRole(), self.user)
            self.assertFalse(has_access(self.user, CourseCreatorRole()))

    def test_course_creation_disabled(self):
        """ Tests that the COURSE_CREATION_DISABLED flag overrides course creator group settings. """
        with mock.patch.dict('django.conf.settings.FEATURES',
                             {'DISABLE_COURSE_CREATION': True, "ENABLE_CREATOR_GROUP": True}):
            # Add user to creator group.
            add_users(self.admin, CourseCreatorRole(), self.user)

            # DISABLE_COURSE_CREATION overrides (user is not marked as staff).
            self.assertFalse(has_access(self.user, CourseCreatorRole()))

            # Mark as staff. Now CourseCreatorRole().has_user returns true.
            self.user.is_staff = True
            self.assertTrue(has_access(self.user, CourseCreatorRole()))

            # Remove user from creator group. CourseCreatorRole().has_user still returns true because is_staff=True
            remove_users(self.admin, CourseCreatorRole(), self.user)
            self.assertTrue(has_access(self.user, CourseCreatorRole()))

    def test_add_user_not_authenticated(self):
        """
        Tests that adding to creator group fails if user is not authenticated
        """
        with mock.patch.dict(
            'django.conf.settings.FEATURES',
            {'DISABLE_COURSE_CREATION': False, "ENABLE_CREATOR_GROUP": True}
        ):
            anonymous_user = AnonymousUser()
            role = CourseCreatorRole()
            # add_users silently ignores anonymous users; access stays denied.
            add_users(self.admin, role, anonymous_user)
            self.assertFalse(has_access(anonymous_user, role))

    def test_add_user_not_active(self):
        """
        Tests that adding to creator group fails if user is not active
        """
        with mock.patch.dict(
            'django.conf.settings.FEATURES',
            {'DISABLE_COURSE_CREATION': False, "ENABLE_CREATOR_GROUP": True}
        ):
            self.user.is_active = False
            # Inactive users are silently skipped by add_users.
            add_users(self.admin, CourseCreatorRole(), self.user)
            self.assertFalse(has_access(self.user, CourseCreatorRole()))

    def test_add_user_to_group_requires_staff_access(self):
        # A non-staff caller may not add users to the creator role.
        with self.assertRaises(PermissionDenied):
            self.admin.is_staff = False
            add_users(self.admin, CourseCreatorRole(), self.user)

        with self.assertRaises(PermissionDenied):
            add_users(self.user, CourseCreatorRole(), self.user)

    def test_add_user_to_group_requires_active(self):
        # An inactive caller may not add users to the creator role.
        with self.assertRaises(PermissionDenied):
            self.admin.is_active = False
            add_users(self.admin, CourseCreatorRole(), self.user)

    def test_add_user_to_group_requires_authenticated(self):
        # An unauthenticated caller may not add users to the creator role.
        with self.assertRaises(PermissionDenied):
            self.admin.is_authenticated = mock.Mock(return_value=False)
            add_users(self.admin, CourseCreatorRole(), self.user)

    def test_remove_user_from_group_requires_staff_access(self):
        with self.assertRaises(PermissionDenied):
            self.admin.is_staff = False
            remove_users(self.admin, CourseCreatorRole(), self.user)

    def test_remove_user_from_group_requires_active(self):
        with self.assertRaises(PermissionDenied):
            self.admin.is_active = False
            remove_users(self.admin, CourseCreatorRole(), self.user)

    def test_remove_user_from_group_requires_authenticated(self):
        with self.assertRaises(PermissionDenied):
            self.admin.is_authenticated = mock.Mock(return_value=False)
            remove_users(self.admin, CourseCreatorRole(), self.user)
class CourseGroupTest(TestCase):
    """
    Tests for instructor and staff groups for a particular course.
    """

    def setUp(self):
        """ Test case setup """
        self.global_admin = AdminFactory()
        self.creator = User.objects.create_user('testcreator', 'testcreator+courses@edx.org', 'foo')
        self.staff = User.objects.create_user('teststaff', 'teststaff+courses@edx.org', 'foo')
        self.course_key = SlashSeparatedCourseKey('mitX', '101', 'test')

    def test_add_user_to_course_group(self):
        """
        Tests adding user to course group (happy path).
        """
        # Create groups for a new course (and assign instructor role to the creator).
        self.assertFalse(has_access(self.creator, CourseInstructorRole(self.course_key)))
        add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)
        add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)
        self.assertTrue(has_access(self.creator, CourseInstructorRole(self.course_key)))

        # Add another user to the staff role.
        self.assertFalse(has_access(self.staff, CourseStaffRole(self.course_key)))
        add_users(self.creator, CourseStaffRole(self.course_key), self.staff)
        self.assertTrue(has_access(self.staff, CourseStaffRole(self.course_key)))

    def test_add_user_to_course_group_permission_denied(self):
        """
        Verifies PermissionDenied if caller of add_user_to_course_group is not instructor role.
        """
        add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)
        add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)
        # self.staff only holds the staff role, which may not grant roles.
        with self.assertRaises(PermissionDenied):
            add_users(self.staff, CourseStaffRole(self.course_key), self.staff)

    def test_remove_user_from_course_group(self):
        """
        Tests removing user from course group (happy path).
        """
        add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)
        add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)

        add_users(self.creator, CourseStaffRole(self.course_key), self.staff)
        self.assertTrue(has_access(self.staff, CourseStaffRole(self.course_key)))

        remove_users(self.creator, CourseStaffRole(self.course_key), self.staff)
        self.assertFalse(has_access(self.staff, CourseStaffRole(self.course_key)))

        remove_users(self.creator, CourseInstructorRole(self.course_key), self.creator)
        self.assertFalse(has_access(self.creator, CourseInstructorRole(self.course_key)))

    def test_remove_user_from_course_group_permission_denied(self):
        """
        Verifies PermissionDenied if caller of remove_user_from_course_group is not instructor role.
        """
        add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)
        another_staff = User.objects.create_user('another', 'teststaff+anothercourses@edx.org', 'foo')
        add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator, self.staff, another_staff)
        # A staff-role caller may not revoke another staff member's role.
        with self.assertRaises(PermissionDenied):
            remove_users(self.staff, CourseStaffRole(self.course_key), another_staff)
| agpl-3.0 |
cloudbase/nova-virtualbox | nova/tests/unit/test_baserpc.py | 49 | 1619 | #
# Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Test the base rpc API.
"""
from oslo_config import cfg
from nova import baserpc
from nova import context
from nova import test
CONF = cfg.CONF
class BaseAPITestCase(test.TestCase):
    """Exercise the base RPC API (ping / get_backdoor_port) against a
    locally started conductor and compute service."""

    def setUp(self):
        super(BaseAPITestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id)
        # Start real in-process services so the RPC round-trips somewhere.
        self.conductor = self.start_service(
            'conductor', manager=CONF.conductor.manager)
        self.compute = self.start_service('compute')
        self.base_rpcapi = baserpc.BaseAPI(CONF.compute_topic)

    def test_ping(self):
        # ping echoes the argument back along with the serving service name.
        reply = self.base_rpcapi.ping(self.context, 'foo')
        self.assertEqual({'service': 'compute', 'arg': 'foo'}, reply)

    def test_get_backdoor_port(self):
        port = self.base_rpcapi.get_backdoor_port(self.context,
                                                  self.compute.host)
        self.assertEqual(self.compute.backdoor_port, port)
| apache-2.0 |
wolverine2k/Secure-Deluge | deluge/ui/gtkui/createtorrentdialog.py | 2 | 16482 | #
# createtorrentdialog.py
#
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import gtk
import sys
import pkg_resources
import os.path
import gobject
import base64
from twisted.internet.threads import deferToThread
from deluge.ui.client import client
import listview
import deluge.component as component
import deluge.common
from deluge.configmanager import ConfigManager
from deluge.log import LOG as log
class CreateTorrentDialog:
def show(self):
    """Build and display the create-torrent dialog.

    Loads the glade UI, wires the button signal handlers, and sets up the
    two tree models: one for the selected file/folder and one for the
    tracker list.
    """
    self.glade = gtk.glade.XML(
        pkg_resources.resource_filename(
            "deluge.ui.gtkui",
            "glade/create_torrent_dialog.glade"))

    self.config = ConfigManager("gtkui.conf")

    self.dialog = self.glade.get_widget("create_torrent_dialog")
    self.dialog.set_transient_for(component.get("MainWindow").window)

    self.glade.signal_autoconnect({
        "on_button_file_clicked": self._on_button_file_clicked,
        "on_button_folder_clicked": self._on_button_folder_clicked,
        "on_button_remote_path_clicked": self._on_button_remote_path_clicked,
        "on_button_cancel_clicked": self._on_button_cancel_clicked,
        "on_button_save_clicked": self._on_button_save_clicked,
        "on_button_up_clicked": self._on_button_up_clicked,
        "on_button_add_clicked": self._on_button_add_clicked,
        "on_button_remove_clicked": self._on_button_remove_clicked,
        "on_button_down_clicked": self._on_button_down_clicked
    })

    # Model columns: path, icon, size
    self.files_treestore = gtk.TreeStore(str, str, gobject.TYPE_UINT64)

    # Filename column shows the icon (stock-id, column 1) plus path (column 0).
    column = gtk.TreeViewColumn(_("Filename"))
    render = gtk.CellRendererPixbuf()
    column.pack_start(render, False)
    column.add_attribute(render, "stock-id", 1)
    render = gtk.CellRendererText()
    column.pack_start(render, True)
    column.add_attribute(render, "text", 0)
    column.set_expand(True)
    self.glade.get_widget("treeview_files").append_column(column)

    # Size column renders the byte count (column 2) human-readably.
    column = gtk.TreeViewColumn(_("Size"))
    render = gtk.CellRendererText()
    column.pack_start(render)
    column.set_cell_data_func(render, listview.cell_data_size, 2)
    self.glade.get_widget("treeview_files").append_column(column)

    self.glade.get_widget("treeview_files").set_model(self.files_treestore)
    self.glade.get_widget("treeview_files").set_show_expanders(False)

    # Tracker model columns: tier, url
    self.trackers_liststore = gtk.ListStore(int, str)

    self.glade.get_widget("tracker_treeview").append_column(
        gtk.TreeViewColumn(_("Tier"), gtk.CellRendererText(), text=0))
    self.glade.get_widget("tracker_treeview").append_column(
        gtk.TreeViewColumn(_("Tracker"), gtk.CellRendererText(), text=1))

    self.glade.get_widget("tracker_treeview").set_model(self.trackers_liststore)
    self.trackers_liststore.set_sort_column_id(0, gtk.SORT_ASCENDING)

    # The remote-path button only makes sense when connected to a
    # non-local daemon.
    if not client.is_localhost() and client.connected():
        self.glade.get_widget("button_remote_path").show()
    else:
        self.glade.get_widget("button_remote_path").hide()

    self.dialog.show()
def parse_piece_size_text(self, value):
    """Convert a piece-size combo label such as "16 MiB" or "512 KiB"
    into a byte count.

    The unit word itself is ignored; by convention of the combo-box
    labels, numbers below 32 are MiB entries and the rest are KiB.
    """
    number, _unit = value.split()
    number = int(number)
    # Labels under 32 are mebibyte values, everything else kibibytes.
    factor = 1024 * 1024 if number < 32 else 1024
    return number * factor
def adjust_piece_size(self):
    """Adjusts the recommended piece based on the file/folder/path selected."""
    # Total size of the selected path (column 2 of the first model row).
    size = self.files_treestore[0][2]
    model = self.glade.get_widget("combo_piece_size").get_model()
    for index, value in enumerate(model):
        psize = self.parse_piece_size_text(value[0])
        # Python 2 integer division: whole number of pieces at this size.
        pieces = size / psize
        # Pick the first piece size that keeps the torrent under ~2048
        # pieces; fall back to the largest available entry.
        if pieces < 2048 or (index + 1) == len(model):
            self.glade.get_widget("combo_piece_size").set_active(index)
            break
def _on_button_file_clicked(self, widget):
    """Let the user pick a single file to base the torrent on."""
    log.debug("_on_button_file_clicked")
    # Setup the filechooserdialog
    chooser = gtk.FileChooserDialog(_("Choose a file"),
        self.dialog,
        gtk.FILE_CHOOSER_ACTION_OPEN,
        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,
            gtk.RESPONSE_OK))

    chooser.set_transient_for(self.dialog)
    chooser.set_select_multiple(False)
    chooser.set_property("skip-taskbar-hint", True)

    # Run the dialog
    response = chooser.run()

    if response == gtk.RESPONSE_OK:
        result = chooser.get_filename()
    else:
        chooser.destroy()
        return

    # GTK returns UTF-8; convert to the filesystem encoding before
    # touching the disk (Python 2 byte-string semantics).
    path = result.decode('utf-8').encode(sys.getfilesystemencoding())

    # Replace any previous selection with the new file and recompute the
    # recommended piece size from its on-disk size.
    self.files_treestore.clear()
    self.files_treestore.append(None, [result, gtk.STOCK_FILE, deluge.common.get_path_size(path)])
    self.adjust_piece_size()
    chooser.destroy()
def _on_button_folder_clicked(self, widget):
    """Let the user pick a folder to base the torrent on."""
    log.debug("_on_button_folder_clicked")
    # Setup the filechooserdialog
    chooser = gtk.FileChooserDialog(_("Choose a folder"),
        self.dialog,
        gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,
            gtk.RESPONSE_OK))

    chooser.set_transient_for(self.dialog)
    chooser.set_select_multiple(False)
    chooser.set_property("skip-taskbar-hint", True)

    # Run the dialog
    response = chooser.run()

    if response == gtk.RESPONSE_OK:
        result = chooser.get_filename()
    else:
        chooser.destroy()
        return

    # GTK returns UTF-8; convert to the filesystem encoding before
    # touching the disk (Python 2 byte-string semantics).
    path = result.decode('utf-8').encode(sys.getfilesystemencoding())

    # Replace any previous selection with the folder and recompute the
    # recommended piece size from its recursive size.
    self.files_treestore.clear()
    self.files_treestore.append(None, [result, gtk.STOCK_OPEN, deluge.common.get_path_size(path)])
    self.adjust_piece_size()
    chooser.destroy()
def _on_button_remote_path_clicked(self, widget):
    """Prompt for a path on the remote daemon's filesystem and, if it
    exists, use it as the torrent source."""
    log.debug("_on_button_remote_path_clicked")
    dialog = self.glade.get_widget("remote_path_dialog")
    entry = self.glade.get_widget("entry_path")
    dialog.set_transient_for(self.dialog)
    entry.set_text("/")
    entry.grab_focus()
    response = dialog.run()

    if response == gtk.RESPONSE_OK:
        result = entry.get_text()
        def _on_get_path_size(size):
            # Deferred callback from the daemon; size <= 0 means the path
            # was not found, in which case the selection is left untouched.
            log.debug("size: %s", size)
            if size > 0:
                self.files_treestore.clear()
                self.files_treestore.append(None, [result, gtk.STOCK_NETWORK, size])
                self.adjust_piece_size()

        client.core.get_path_size(result).addCallback(_on_get_path_size)
        # Block until the deferred call completes so the UI is up to date.
        client.force_call(True)

    dialog.hide()
def _on_button_cancel_clicked(self, widget):
    """Close the create-torrent dialog without building anything."""
    log.debug("_on_button_cancel_clicked")
    self.dialog.destroy()
def _on_button_save_clicked(self, widget):
    """Collect all dialog options and kick off torrent creation.

    For a remote source path the creation is delegated to the core over
    RPC; for a local path the (potentially slow) hashing runs in a
    worker thread while a progress dialog is shown.
    """
    log.debug("_on_button_save_clicked")
    # Nothing selected to build a torrent from.
    if len(self.files_treestore) == 0:
        return
    # The icon column doubles as a local/remote marker (see the
    # file/folder/remote button handlers).
    is_remote = self.files_treestore[0][1] == gtk.STOCK_NETWORK
    if is_remote:
        # This is a remote path: ask where the daemon should save the
        # .torrent file.
        dialog = self.glade.get_widget("remote_save_dialog")
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            result = self.glade.get_widget("entry_save_path").get_text()
        else:
            dialog.hide()
            return
        dialog.hide()
    else:
        # Setup the filechooserdialog for the local save location.
        chooser = gtk.FileChooserDialog(_("Save .torrent file"),
            self.dialog,
            gtk.FILE_CHOOSER_ACTION_SAVE,
            buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE,
                     gtk.RESPONSE_OK))
        chooser.set_transient_for(self.dialog)
        chooser.set_select_multiple(False)
        chooser.set_property("skip-taskbar-hint", True)
        # Add .torrent and * file filters
        file_filter = gtk.FileFilter()
        file_filter.set_name(_("Torrent files"))
        file_filter.add_pattern("*." + "torrent")
        chooser.add_filter(file_filter)
        file_filter = gtk.FileFilter()
        file_filter.set_name(_("All files"))
        file_filter.add_pattern("*")
        chooser.add_filter(file_filter)
        # Suggest "<source name>.torrent" as the default file name.
        chooser.set_current_name(os.path.split(self.files_treestore[0][0])[-1] + ".torrent")
        # Run the dialog
        response = chooser.run()
        if response == gtk.RESPONSE_OK:
            result = chooser.get_filename()
        else:
            chooser.destroy()
            return
        chooser.destroy()
        # Fix up torrent filename: ensure a ".torrent" suffix. Names
        # shorter than 9 chars cannot already end in ".torrent".
        if len(result) < 9:
            result += ".torrent"
        elif result[-8:] != ".torrent":
            result += ".torrent"
    # Get the path of the source data.
    path = self.files_treestore[0][0]
    # Get a list of trackers
    trackers = []
    if not len(self.trackers_liststore):
        tracker = None
    else:
        # Create a list of lists [[tier0, ...], [tier1, ...], ...]
        tier_dict = {}
        for tier, tracker in self.trackers_liststore:
            tier_dict.setdefault(tier, []).append(tracker)
        trackers = [tier_dict[tier] for tier in sorted(tier_dict)]
        # Get the first tracker in the first tier
        tracker = trackers[0][0]
    # Get a list of webseeds: one URL per line, non-URLs are dropped.
    webseeds = []
    b = self.glade.get_widget("textview_webseeds").get_buffer()
    lines = b.get_text(b.get_start_iter(), b.get_end_iter()).strip().split("\n")
    import deluge.common
    for l in lines:
        if deluge.common.is_url(l):
            webseeds.append(l)
    # Get the piece length in bytes from the combo's display text.
    combo = self.glade.get_widget("combo_piece_size")
    piece_length = \
        self.parse_piece_size_text(combo.get_model()[combo.get_active()][0])
    # NOTE(review): num_pieces is computed but never used below —
    # candidate for removal.
    num_pieces = self.files_treestore[0][2] / piece_length
    author = self.glade.get_widget("entry_author").get_text()
    comment = self.glade.get_widget("entry_comments").get_text()
    private = self.glade.get_widget("chk_private_flag").get_active()
    add_to_session = self.glade.get_widget("chk_add_to_session").get_active()
    if is_remote:
        # Let the daemon create the torrent on its side.
        client.core.create_torrent(
            path,
            tracker,
            piece_length,
            comment,
            result,
            webseeds,
            private,
            author,
            trackers,
            add_to_session)
    else:
        # Setup progress dialog, then hash in a worker thread so the UI
        # stays responsive; hide the dialog when the Deferred fires.
        self.glade.get_widget("progress_dialog").set_transient_for(component.get("MainWindow").window)
        self.glade.get_widget("progress_dialog").show_all()
        def hide_progress(result):
            self.glade.get_widget("progress_dialog").hide_all()
        deferToThread(self.create_torrent,
                      path.decode('utf-8'),
                      tracker,
                      piece_length,
                      self._on_create_torrent_progress,
                      comment,
                      result.decode('utf-8'),
                      webseeds,
                      private,
                      author,
                      trackers,
                      add_to_session).addCallback(hide_progress)
    self.dialog.destroy()
def create_torrent(self, path, tracker, piece_length, progress, comment, target,
                   webseeds, private, created_by, trackers, add_to_session):
    """Write a .torrent metafile for ``path`` to ``target``.

    Runs in a worker thread (see _on_button_save_clicked); ``progress``
    is invoked by the metafile writer as pieces are hashed.  When
    ``add_to_session`` is true, the finished file is handed to the core
    with its download location set to the data's parent directory.
    """
    import deluge.metafile
    deluge.metafile.make_meta_file(
        path,
        tracker,
        piece_length,
        progress=progress,
        comment=comment,
        target=target,
        webseeds=webseeds,
        private=private,
        created_by=created_by,
        trackers=trackers)
    if add_to_session:
        # The core expects the file contents base64-encoded.
        client.core.add_torrent_file(
            os.path.split(target)[-1],
            base64.encodestring(open(target, "rb").read()),
            {"download_location": os.path.split(path)[0]})
def _on_create_torrent_progress(self, value, num_pieces):
    """Update the progress bar while pieces are being hashed.

    ``value`` is the number of pieces done so far out of ``num_pieces``.
    """
    fraction = float(value) / float(num_pieces)
    progressbar = self.glade.get_widget("progressbar")
    progressbar.set_text("%.2f%%" % (fraction * 100))
    # Only feed the bar values it accepts (0.0 .. 1.0).
    if 0 <= fraction <= 1.0:
        progressbar.set_fraction(fraction)
def _on_button_up_clicked(self, widget):
    """Move the selected tracker up one tier (toward tier 0)."""
    log.debug("_on_button_up_clicked")
    selection = self.glade.get_widget("tracker_treeview").get_selection()
    row = selection.get_selected()[1]
    if row is None:
        return
    # Tier 0 is already the top; nothing to do in that case.
    if self.trackers_liststore[row][0] != 0:
        self.trackers_liststore[row][0] -= 1
def _on_button_down_clicked(self, widget):
    """Move the selected tracker down one tier (higher tier number)."""
    log.debug("_on_button_down_clicked")
    selection = self.glade.get_widget("tracker_treeview").get_selection()
    row = selection.get_selected()[1]
    if row is not None:
        # No upper bound on tiers; just bump the number.
        self.trackers_liststore[row][0] += 1
def _on_button_add_clicked(self, widget):
    """Show the add-tracker dialog and append the entered tracker URLs.

    The entered lines are also persisted to the
    ``createtorrent.trackers`` config key so they pre-fill next time.
    """
    log.debug("_on_button_add_clicked")
    glade = gtk.glade.XML(
        pkg_resources.resource_filename(
            "deluge.ui.gtkui",
            "glade/edit_trackers.glade"))
    dialog = glade.get_widget("add_tracker_dialog")
    dialog.set_transient_for(self.dialog)
    textview = glade.get_widget("textview_trackers")
    # Pre-fill with the previously used trackers, if any.
    if self.config["createtorrent.trackers"]:
        textview.get_buffer().set_text("\n".join(self.config["createtorrent.trackers"]))
    else:
        textview.get_buffer().set_text("")
    textview.grab_focus()
    response = dialog.run()
    if response == gtk.RESPONSE_OK:
        # Create a list of trackers from the textview buffer; lines that
        # are not valid URLs are silently dropped.
        trackers = []
        b = textview.get_buffer()
        lines = b.get_text(b.get_start_iter(), b.get_end_iter()).strip().split("\n")
        self.config["createtorrent.trackers"] = lines
        log.debug("lines: %s", lines)
        for l in lines:
            if deluge.common.is_url(l):
                trackers.append(l)
        # NOTE(review): despite the original "highest tier + 1" comment,
        # the new trackers are appended to the highest EXISTING tier.
        tier = 0
        for row in self.trackers_liststore:
            if row[0] > tier:
                tier = row[0]
        for tracker in trackers:
            self.trackers_liststore.append([tier, tracker])
    dialog.destroy()
def _on_button_remove_clicked(self, widget):
    """Delete the currently selected tracker row, if any."""
    log.debug("_on_button_remove_clicked")
    selection = self.glade.get_widget("tracker_treeview").get_selection()
    row = selection.get_selected()[1]
    if row is not None:
        self.trackers_liststore.remove(row)
| gpl-3.0 |
dcosentino/edx-platform | lms/djangoapps/shoppingcart/migrations/0005_auto__add_paidcourseregistrationannotation__add_field_orderitem_report.py | 114 | 9808 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'PaidCourseRegistrationAnnotation'
db.create_table('shoppingcart_paidcourseregistrationannotation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('course_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)),
('annotation', self.gf('django.db.models.fields.TextField')(null=True)),
))
db.send_create_signal('shoppingcart', ['PaidCourseRegistrationAnnotation'])
# Adding field 'OrderItem.report_comments'
db.add_column('shoppingcart_orderitem', 'report_comments',
self.gf('django.db.models.fields.TextField')(default=''),
keep_default=False)
def backwards(self, orm):
    """Reverse the migration: drop the table and column added above."""
    # Deleting model 'PaidCourseRegistrationAnnotation'
    db.delete_table('shoppingcart_paidcourseregistrationannotation')
    # Deleting field 'OrderItem.report_comments'
    db.delete_column('shoppingcart_orderitem', 'report_comments')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
| agpl-3.0 |
valtech-mooc/edx-platform | cms/djangoapps/contentstore/views/assets.py | 25 | 14681 | import logging
from functools import partial
import math
import json
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_POST
from django.conf import settings
from edxmako.shortcuts import render_to_response
from cache_toolbox.core import del_cached_content
from contentstore.utils import reverse_course_url
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from django.core.exceptions import PermissionDenied
from opaque_keys.edx.keys import CourseKey, AssetKey
from util.date_utils import get_default_time_display
from util.json_request import JsonResponse
from django.http import HttpResponseNotFound
from django.utils.translation import ugettext as _
from pymongo import ASCENDING, DESCENDING
from student.auth import has_course_author_access
from xmodule.modulestore.exceptions import ItemNotFoundError
__all__ = ['assets_handler']
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
def assets_handler(request, course_key_string=None, asset_key_string=None):
    """
    The restful handler for assets.
    It allows retrieval of all the assets (as an HTML page), as well as uploading new assets,
    deleting assets, and changing the "locked" state of an asset.
    GET
        html: return an html page which will show all course assets. Note that only the asset container
            is returned and that the actual assets are filled in with a client-side request.
        json: returns a page of assets. The following parameters are supported:
            page: the desired page of results (defaults to 0)
            page_size: the number of items per page (defaults to 50)
            sort: the asset field to sort by (defaults to "date_added")
            direction: the sort direction (defaults to "descending")
    POST
        json: create (or update?) an asset. The only updating that can be done is changing the lock state.
    PUT
        json: update the locked state of an asset
    DELETE
        json: delete an asset
    """
    course_key = CourseKey.from_string(course_key_string)
    # Only course authors may view or modify assets.
    if not has_course_author_access(request.user, course_key):
        raise PermissionDenied()
    # Dispatch on the requested response format: explicit ?format=json,
    # or an Accept header asking for JSON.
    response_format = request.REQUEST.get('format', 'html')
    if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
        if request.method == 'GET':
            return _assets_json(request, course_key)
        else:
            # POST/PUT/DELETE may target a specific asset.
            asset_key = AssetKey.from_string(asset_key_string) if asset_key_string else None
            return _update_asset(request, course_key, asset_key)
    elif request.method == 'GET':  # assume html
        return _asset_index(request, course_key)
    else:
        # Non-GET requests without a JSON accept type are not supported.
        return HttpResponseNotFound()
def _asset_index(request, course_key):
    """Render the asset-library page shell.

    The page itself is just a container; the asset list is fetched by
    the client via the JSON endpoint (see the asset_callback_url).
    """
    course_module = modulestore().get_course(course_key)
    context = {
        'context_course': course_module,
        'max_file_size_in_mbs': settings.MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB,
        'chunk_size_in_mbs': settings.UPLOAD_CHUNK_SIZE_IN_MB,
        'max_file_size_redirect_url': settings.MAX_ASSET_UPLOAD_FILE_SIZE_URL,
        'asset_callback_url': reverse_course_url('assets_handler', course_key),
    }
    return render_to_response('asset_index.html', context)
def _assets_json(request, course_key):
    """Return one page of course assets as JSON.

    Supported query parameters: ``page`` (0-based), ``page_size``,
    ``sort`` (date_added / display_name), ``direction`` (asc/desc) and
    ``asset_type`` (a key of FILES_AND_UPLOAD_TYPE_FILTERS, or OTHER).
    """
    requested_page = int(request.REQUEST.get('page', 0))
    requested_page_size = int(request.REQUEST.get('page_size', 50))
    requested_sort = request.REQUEST.get('sort', 'date_added')
    requested_filter = request.REQUEST.get('asset_type', '')
    requested_file_types = settings.FILES_AND_UPLOAD_TYPE_FILTERS.get(
        requested_filter, None)
    filter_params = None
    if requested_filter:
        if requested_filter == 'OTHER':
            # "OTHER" means: content type matches NONE of the configured
            # filter groups, so AND together the negated comparisons.
            all_filters = settings.FILES_AND_UPLOAD_TYPE_FILTERS
            where = []
            for all_filter in all_filters:
                extension_filters = all_filters[all_filter]
                where.extend(
                    ["JSON.stringify(this.contentType).toUpperCase() != JSON.stringify('{}').toUpperCase()".format(
                        extension_filter) for extension_filter in extension_filters])
            # Mongo server-side JS predicate ($where) built from the
            # settings-defined content types (not from user input).
            filter_params = {
                "$where": ' && '.join(where),
            }
        else:
            # Match ANY of the content types in the requested group.
            where = ["JSON.stringify(this.contentType).toUpperCase() == JSON.stringify('{}').toUpperCase()".format(
                req_filter) for req_filter in requested_file_types]
            filter_params = {
                "$where": ' || '.join(where),
            }
    sort_direction = DESCENDING
    if request.REQUEST.get('direction', '').lower() == 'asc':
        sort_direction = ASCENDING
    # Convert the field name to the Mongo name
    if requested_sort == 'date_added':
        requested_sort = 'uploadDate'
    elif requested_sort == 'display_name':
        requested_sort = 'displayname'
    sort = [(requested_sort, sort_direction)]
    current_page = max(requested_page, 0)
    start = current_page * requested_page_size
    options = {
        'current_page': current_page,
        'page_size': requested_page_size,
        'sort': sort,
        'filter_params': filter_params
    }
    assets, total_count = _get_assets_for_page(request, course_key, options)
    end = start + len(assets)
    # If the query is beyond the final page, then re-query the final page so
    # that at least one asset is returned
    if requested_page > 0 and start >= total_count:
        options['current_page'] = current_page = int(math.floor((total_count - 1) / requested_page_size))
        start = current_page * requested_page_size
        assets, total_count = _get_assets_for_page(request, course_key, options)
        end = start + len(assets)
    asset_json = []
    for asset in assets:
        asset_location = asset['asset_key']
        # note, due to the schema change we may not have a 'thumbnail_location'
        # in the result set
        thumbnail_location = asset.get('thumbnail_location', None)
        if thumbnail_location:
            # Element [4] of the stored location tuple is the asset name.
            thumbnail_location = course_key.make_asset_key(
                'thumbnail', thumbnail_location[4])
        asset_locked = asset.get('locked', False)
        asset_json.append(_get_asset_json(
            asset['displayname'],
            asset['contentType'],
            asset['uploadDate'],
            asset_location,
            thumbnail_location,
            asset_locked
        ))
    return JsonResponse({
        'start': start,
        'end': end,
        'page': current_page,
        'pageSize': requested_page_size,
        'totalCount': total_count,
        'assets': asset_json,
        'sort': requested_sort,
    })
def _get_assets_for_page(request, course_key, options):
    """Fetch one page of course assets from the contentstore.

    ``options`` carries ``current_page``, ``page_size``, ``sort`` and
    ``filter_params``; returns (assets, total_count).
    """
    page_size = options['page_size']
    first_index = options['current_page'] * page_size
    return contentstore().get_all_content_for_course(
        course_key,
        start=first_index,
        maxresults=page_size,
        sort=options['sort'],
        filter_params=options['filter_params'] or None,
    )
def get_file_size(upload_file):
    """Return the size in bytes of an uploaded file.

    Kept as a standalone function so tests can mock file sizes without
    constructing real upload objects.
    """
    size_in_bytes = upload_file.size
    return size_in_bytes
@require_POST
@ensure_csrf_cookie
@login_required
def _upload_asset(request, course_key):
    '''
    This method allows for POST uploading of files into the course asset
    library, which will be supported by GridFS in MongoDB.

    Returns a JsonResponse describing the stored asset, 400 if the
    course does not exist, or 413 if the file exceeds the size limit.
    '''
    # Does the course actually exist?!? Get anything from it to prove its
    # existence
    try:
        modulestore().get_course(course_key)
    except ItemNotFoundError:
        # no return it as a Bad Request response
        logging.error("Could not find course: %s", course_key)
        return HttpResponseBadRequest()
    # compute a 'filename' which is similar to the location formatting, we're
    # using the 'filename' nomenclature since we're using a FileSystem paradigm
    # here. We're just imposing the Location string formatting expectations to
    # keep things a bit more consistent
    upload_file = request.FILES['file']
    filename = upload_file.name
    mime_type = upload_file.content_type
    size = get_file_size(upload_file)
    # If file is greater than a specified size, reject the upload
    # request and send a message to the user. Note that since
    # the front-end may batch large file uploads in smaller chunks,
    # we validate the file-size on the front-end in addition to
    # validating on the backend. (see cms/static/js/views/assets.js)
    max_file_size_in_bytes = settings.MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB * 1000 ** 2
    if size > max_file_size_in_bytes:
        # BUG FIX: the message previously read 'File (unknown) exceeds...'
        # and silently dropped the `filename` format argument; include the
        # actual file name via the {filename} placeholder.
        return JsonResponse({
            'error': _(
                'File {filename} exceeds maximum size of '
                '{size_mb} MB. Please follow the instructions here '
                'to upload a file elsewhere and link to it instead: '
                '{faq_url}'
            ).format(
                filename=filename,
                size_mb=settings.MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB,
                faq_url=settings.MAX_ASSET_UPLOAD_FILE_SIZE_URL,
            )
        }, status=413)
    content_loc = StaticContent.compute_location(course_key, filename)
    chunked = upload_file.multiple_chunks()
    sc_partial = partial(StaticContent, content_loc, filename, mime_type)
    if chunked:
        # Large uploads are spooled to a temp file by Django; stream the
        # chunks and remember the temp path for thumbnail generation.
        content = sc_partial(upload_file.chunks())
        tempfile_path = upload_file.temporary_file_path()
    else:
        content = sc_partial(upload_file.read())
        tempfile_path = None
    # first let's see if a thumbnail can be created
    (thumbnail_content, thumbnail_location) = contentstore().generate_thumbnail(
        content,
        tempfile_path=tempfile_path,
    )
    # delete cached thumbnail even if one couldn't be created this time (else
    # the old thumbnail will continue to show)
    del_cached_content(thumbnail_location)
    # now store thumbnail location only if we could create it
    if thumbnail_content is not None:
        content.thumbnail_location = thumbnail_location
    # then commit the content
    contentstore().save(content)
    del_cached_content(content.location)
    # readback the saved content - we need the database timestamp
    readback = contentstore().find(content.location)
    locked = getattr(content, 'locked', False)
    response_payload = {
        'asset': _get_asset_json(
            content.name,
            content.content_type,
            readback.last_modified_at,
            content.location,
            content.thumbnail_location,
            locked
        ),
        'msg': _('Upload completed')
    }
    return JsonResponse(response_payload)
@require_http_methods(("DELETE", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def _update_asset(request, course_key, asset_key):
    """
    restful CRUD operations for a course asset.
    Currently only DELETE, POST, and PUT methods are implemented.
    asset_path_encoding: the odd /c4x/org/course/category/name repr of the asset (used by Backbone as the id)
    """
    if request.method == 'DELETE':
        # Make sure the item to delete actually exists.
        try:
            content = contentstore().find(asset_key)
        except NotFoundError:
            return JsonResponse(status=404)
        # ok, save the content into the trashcan
        contentstore('trashcan').save(content)
        # see if there is a thumbnail as well, if so move that as well
        if content.thumbnail_location is not None:
            # We are ignoring the value of the thumbnail_location-- we only care whether
            # or not a thumbnail has been stored, and we can now easily create the correct path.
            thumbnail_location = course_key.make_asset_key('thumbnail', asset_key.name)
            try:
                thumbnail_content = contentstore().find(thumbnail_location)
                contentstore('trashcan').save(thumbnail_content)
                # hard delete thumbnail from origin
                contentstore().delete(thumbnail_content.get_id())
                # remove from any caching
                del_cached_content(thumbnail_location)
            # BUG FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; thumbnail deletion is
            # deliberately best-effort, but only for ordinary errors.
            except Exception:
                logging.warning('Could not delete thumbnail: %s', thumbnail_location)
        # delete the original
        contentstore().delete(content.get_id())
        # remove from cache
        del_cached_content(content.location)
        return JsonResponse()
    elif request.method in ('PUT', 'POST'):
        if 'file' in request.FILES:
            return _upload_asset(request, course_key)
        else:
            # Update existing asset
            try:
                modified_asset = json.loads(request.body)
            except ValueError:
                return HttpResponseBadRequest()
            contentstore().set_attr(asset_key, 'locked', modified_asset['locked'])
            # Delete the asset from the cache so we check the lock status the next time it is requested.
            del_cached_content(asset_key)
            return JsonResponse(modified_asset, status=201)
def _get_asset_json(display_name, content_type, date, location, thumbnail_location, locked):
    """
    Helper method for formatting the asset information to send to client.
    """
    asset_url = StaticContent.serialize_asset_key_with_slash(location)
    thumbnail_url = None
    if thumbnail_location:
        thumbnail_url = StaticContent.serialize_asset_key_with_slash(thumbnail_location)
    return {
        'display_name': display_name,
        'content_type': content_type,
        'date_added': get_default_time_display(date),
        'url': asset_url,
        'external_url': settings.LMS_BASE + asset_url,
        'portable_url': StaticContent.get_static_path_from_location(location),
        'thumbnail': thumbnail_url,
        'locked': locked,
        # Needed for Backbone delete/update.
        'id': unicode(location),
    }
| agpl-3.0 |
pombredanne/pythran | pythran/optimizations/gen_exp_to_imap.py | 3 | 2812 | """ GenExpToImap transforms generator expressions into iterators. """
from pythran.analyses import OptimizableComprehension
from pythran.passmanager import Transformation
from pythran.transformations import NormalizeTuples
import ast
class GenExpToImap(Transformation):
    '''
    Transforms generator expressions into iterators.
    >>> import ast
    >>> from pythran import passmanager, backend
    >>> node = ast.parse("(x*x for x in range(10))")
    >>> pm = passmanager.PassManager("test")
    >>> _, node = pm.apply(GenExpToImap, node)
    >>> print pm.dump(backend.Python, node)
    import itertools
    itertools.imap((lambda x: (x * x)), range(10))
    '''

    def __init__(self):
        # NormalizeTuples must run first; OptimizableComprehension marks
        # which comprehensions are safe to rewrite.
        Transformation.__init__(self, NormalizeTuples,
                                OptimizableComprehension)

    def visit_Module(self, node):
        """Visit the module and prepend the `import itertools` the
        rewritten expressions rely on."""
        self.generic_visit(node)
        importIt = ast.Import(names=[ast.alias(name='itertools', asname=None)])
        node.body.insert(0, importIt)
        return node

    def make_Iterator(self, gen):
        """Build the iterable for one `for ... in ... [if ...]` clause.

        With `if` conditions, wraps the source in itertools.ifilter with
        a lambda that ANDs all the conditions; otherwise returns the
        source iterable unchanged.
        """
        if gen.ifs:
            ldFilter = ast.Lambda(
                ast.arguments([ast.Name(gen.target.id, ast.Param())],
                              None, None, []), ast.BoolOp(ast.And(), gen.ifs))
            ifilterName = ast.Attribute(
                value=ast.Name(id='itertools', ctx=ast.Load()),
                attr='ifilter', ctx=ast.Load())
            return ast.Call(ifilterName, [ldFilter, gen.iter], [], None, None)
        else:
            return gen.iter

    def visit_GeneratorExp(self, node):
        """Rewrite `(elt for x in a for y in b ...)` into
        itertools.imap(lambda ...: elt, <product of iterables>)."""
        if node in self.optimizable_comprehension:
            self.update = True
            self.generic_visit(node)
            iters = [self.make_Iterator(gen) for gen in node.generators]
            variables = [ast.Name(gen.target.id, ast.Param())
                         for gen in node.generators]
            # If dim = 1, product is useless
            if len(iters) == 1:
                iterAST = iters[0]
                varAST = ast.arguments([variables[0]], None, None, [])
            else:
                # Multiple clauses: iterate itertools.product(...) and
                # unpack the tuple in the lambda's argument.
                prodName = ast.Attribute(
                    value=ast.Name(id='itertools', ctx=ast.Load()),
                    attr='product', ctx=ast.Load())
                iterAST = ast.Call(prodName, iters, [], None, None)
                varAST = ast.arguments([ast.Tuple(variables, ast.Store())],
                                       None, None, [])
            imapName = ast.Attribute(
                value=ast.Name(id='itertools', ctx=ast.Load()),
                attr='imap', ctx=ast.Load())
            ldBodyimap = node.elt
            ldimap = ast.Lambda(varAST, ldBodyimap)
            return ast.Call(imapName, [ldimap, iterAST], [], None, None)
        else:
            return self.generic_visit(node)
| bsd-3-clause |
Bysmyyr/chromium-crosswalk | tools/telemetry/third_party/gsutilz/third_party/rsa/tests/test_pkcs1.py | 34 | 2949 | '''Tests string operations.'''
import struct
import unittest2
import rsa
from rsa import pkcs1
from rsa._compat import byte, is_integer, b, is_bytes
class BinaryTest(unittest2.TestCase):
    """Round-trip and tamper tests for PKCS#1 v1.5 encryption."""

    def setUp(self):
        # Small 256-bit keypair keeps the tests fast.
        (self.pub, self.priv) = rsa.newkeys(256)

    def test_enc_dec(self):
        """Encrypt-then-decrypt must reproduce the plaintext exactly."""
        message = struct.pack('>IIII', 0, 0, 0, 1)
        print("\tMessage: %r" % message)
        encrypted = pkcs1.encrypt(message, self.pub)
        print("\tEncrypted: %r" % encrypted)
        decrypted = pkcs1.decrypt(encrypted, self.priv)
        print("\tDecrypted: %r" % decrypted)
        self.assertEqual(message, decrypted)

    def test_decoding_failure(self):
        """Flipping a byte of the ciphertext must raise DecryptionError."""
        message = struct.pack('>IIII', 0, 0, 0, 1)
        encrypted = pkcs1.encrypt(message, self.pub)
        # Alter the encrypted stream (byte 5 incremented by one).
        a = encrypted[5]
        if is_bytes(a):
            # Python 2: indexing bytes yields a 1-char str; get its ordinal.
            a = ord(a)
        encrypted = encrypted[:5] + byte(a + 1) + encrypted[6:]
        self.assertRaises(pkcs1.DecryptionError, pkcs1.decrypt, encrypted,
                          self.priv)

    def test_randomness(self):
        '''Encrypting the same message twice should result in different
        cryptos.
        '''
        # PKCS#1 v1.5 uses random padding, so ciphertexts must differ.
        message = struct.pack('>IIII', 0, 0, 0, 1)
        encrypted1 = pkcs1.encrypt(message, self.pub)
        encrypted2 = pkcs1.encrypt(message, self.pub)
        self.assertNotEqual(encrypted1, encrypted2)
class SignatureTest(unittest2.TestCase):
    """Tests for PKCS#1 v1.5 signing and verification."""

    def setUp(self):
        # 512-bit key: the minimum that can carry a SHA-256 signature.
        (self.pub, self.priv) = rsa.newkeys(512)

    def test_sign_verify(self):
        '''Test happy flow of sign and verify'''
        message = b('je moeder')
        print("\tMessage: %r" % message)
        signature = pkcs1.sign(message, self.priv, 'SHA-256')
        print("\tSignature: %r" % signature)
        self.assertTrue(pkcs1.verify(message, signature, self.pub))

    def test_alter_message(self):
        '''Altering the message should let the verification fail.'''
        signature = pkcs1.sign(b('je moeder'), self.priv, 'SHA-256')
        self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
                          b('mijn moeder'), signature, self.pub)

    def test_sign_different_key(self):
        '''Signing with another key should let the verification fail.'''
        (otherpub, _) = rsa.newkeys(512)
        message = b('je moeder')
        signature = pkcs1.sign(message, self.priv, 'SHA-256')
        self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
                          message, signature, otherpub)

    def test_multiple_signings(self):
        '''Signing the same message twice should return the same signatures.'''
        # Unlike encryption, PKCS#1 v1.5 signing is deterministic.
        message = struct.pack('>IIII', 0, 0, 0, 1)
        signature1 = pkcs1.sign(message, self.priv, 'SHA-1')
        signature2 = pkcs1.sign(message, self.priv, 'SHA-1')
        self.assertEqual(signature1, signature2)
| bsd-3-clause |
thnee/ansible | test/units/modules/network/fortios/test_fortios_application_rule_settings.py | 21 | 7349 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_application_rule_settings
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Auto-applied fixture: replace the module's Connection class with a mock
    so no real FortiOS device is contacted during the tests."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_application_rule_settings.Connection')
    return connection_class_mock
# NOTE(review): this passes the fixture *function* object, not a live
# connection. It only works because every FortiOSHandler method used by the
# tests is mocked -- confirm before reusing this pattern elsewhere.
fos_instance = FortiOSHandler(connection_mock)
def test_application_rule_settings_creation(mocker):
    """A successful POST creates the entry and reports changed=True."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    params = {
        'username': 'admin',
        'state': 'present',
        'application_rule_settings': {'id': '3'},
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(params, fos_instance)

    # Only the valid attribute must be forwarded to the API.
    set_method_mock.assert_called_with('application', 'rule-settings', data={'id': '3'}, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_application_rule_settings_creation_fails(mocker):
    """A failed POST (HTTP 500) must report an error and no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'application_rule_settings': {
            'id': '3'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(input_data, fos_instance)

    expected_data = {
        'id': '3'
    }

    set_method_mock.assert_called_with('application', 'rule-settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_application_rule_settings_removal(mocker):
    """state=absent issues a DELETE and reports changed=True on success."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    params = {
        'username': 'admin',
        'state': 'absent',
        'application_rule_settings': {'id': '3'},
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(params, fos_instance)

    delete_method_mock.assert_called_with('application', 'rule-settings', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_application_rule_settings_deletion_fails(mocker):
    """A failed DELETE (HTTP 500) must report an error and no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'application_rule_settings': {
            'id': '3'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(input_data, fos_instance)

    delete_method_mock.assert_called_with('application', 'rule-settings', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_application_rule_settings_idempotent(mocker):
    """HTTP 404 on set means the object is already in the desired state:
    no error is raised and changed stays False."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'application_rule_settings': {
            'id': '3'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(input_data, fos_instance)

    expected_data = {
        'id': '3'
    }

    set_method_mock.assert_called_with('application', 'rule-settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_application_rule_settings_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be stripped before the
    payload is sent to the API."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'application_rule_settings': {
            'random_attribute_not_valid': 'tag',
            'id': '3'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_application_rule_settings.fortios_application(input_data, fos_instance)

    # The unknown attribute must not appear in the forwarded payload.
    expected_data = {
        'id': '3'
    }

    set_method_mock.assert_called_with('application', 'rule-settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 |
Mendeley/mrec | mrec/mf/wrmf.py | 3 | 3746 | """
Weighted Regularize Matrix Factorization by alternating least squares.
See:
Y. Hu, Y. Koren and C. Volinsky, Collaborative filtering for implicit feedback datasets, ICDM 2008.
http://research.yahoo.net/files/HuKorenVolinsky-ICDM08.pdf
R. Pan et al., One-class collaborative filtering, ICDM 2008.
http://www.hpl.hp.com/techreports/2008/HPL-2008-48R1.pdf
"""
import numpy as np
from scipy.sparse import csr_matrix
from mrec.sparse import fast_sparse_matrix
from mrec.mf.recommender import MatrixFactorizationRecommender
class WRMFRecommender(MatrixFactorizationRecommender):
    """
    Weighted Regularized Matrix Factorization trained by alternating
    least squares (Hu, Koren & Volinsky, ICDM 2008).

    Parameters
    ==========
    d : int
        Number of latent factors.
    alpha : float
        Confidence weight, confidence c = 1 + alpha*r where r is the observed "rating".
    lbda : float
        Regularization constant.
    num_iters : int
        Number of iterations of alternating least squares.
    """

    def __init__(self,d,alpha=1,lbda=0.015,num_iters=15):
        self.d = d
        self.alpha = alpha
        self.lbda = lbda
        self.num_iters = num_iters

    def __str__(self):
        return 'WRMFRecommender (d={0},alpha={1},lambda={2},num_iters={3})'.format(self.d,self.alpha,self.lbda,self.num_iters)

    def init_factors(self,num_factors,assign_values=True):
        # d**-0.5 scaling keeps initial factor norms roughly independent of d.
        # With assign_values=False only an uninitialized buffer is returned
        # (used for U, whose rows are fully overwritten in the first sweep).
        if assign_values:
            return self.d**-0.5*np.random.random_sample((num_factors,self.d))
        return np.empty((num_factors,self.d))

    def fit(self,train,item_features=None):
        """
        Learn factors from training set. User and item factors are
        fitted alternately.

        Parameters
        ==========
        train : scipy.sparse.csr_matrix or mrec.sparse.fast_sparse_matrix
            User-item matrix.
        item_features : array_like, shape = [num_items, num_features]
            Features for each item in the dataset, ignored here.
        """
        if type(train) == csr_matrix:
            train = fast_sparse_matrix(train)
        num_users,num_items = train.shape

        self.U = self.init_factors(num_users,False) # don't need values, will compute them
        self.V = self.init_factors(num_items)
        for it in xrange(self.num_iters):
            print 'iteration',it
            # fit user factors
            # V^T.V is shared by every user update in this sweep, so compute it once.
            VV = self.V.T.dot(self.V)
            for u in xrange(num_users):
                # get (positive i.e. non-zero scored) items for user
                indices = train.X[u].nonzero()[1]
                if indices.size:
                    self.U[u,:] = self.update(indices,self.V,VV)
                else:
                    # users with no observations get a zero factor vector
                    self.U[u,:] = np.zeros(self.d)
            # fit item factors
            UU = self.U.T.dot(self.U)
            for i in xrange(num_items):
                indices = train.fast_get_col(i).nonzero()[0]
                if indices.size:
                    self.V[i,:] = self.update(indices,self.U,UU)
                else:
                    self.V[i,:] = np.zeros(self.d)

    def update(self,indices,H,HH):
        """
        Update latent factors for a single user or item.

        Solves the regularized least-squares system of Hu et al. in closed
        form, where `indices` are the observed entries, `H` is the fixed
        factor matrix of the other side and `HH` its precomputed Gram matrix.
        """
        Hix = H[indices,:]
        # M = H^T.C.H + lambda*I restricted to the observed rows, using the
        # c = 1 + alpha weighting for observed entries.
        M = HH + self.alpha*Hix.T.dot(Hix) + np.diag(self.lbda*np.ones(self.d))
        return np.dot(np.linalg.inv(M),(1+self.alpha)*Hix.sum(axis=0))
def main():
    """CLI driver: train a WRMF model on a sparse matrix file and save it.

    Usage: <file_format> <input_path> <output_path>
    """
    import sys
    from mrec import load_sparse_matrix, save_recommender
    from mrec.sparse import fast_sparse_matrix
    from mrec.mf.wrmf import WRMFRecommender

    file_format = sys.argv[1]
    filepath = sys.argv[2]
    outfile = sys.argv[3]

    # load training set as scipy sparse matrix
    train = load_sparse_matrix(file_format,filepath)

    model = WRMFRecommender(d=5)

    model.fit(train)

    save_recommender(model,outfile)
| bsd-3-clause |
Akrog/cinder | cinder/tests/api/contrib/test_hosts.py | 4 | 8123 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
from oslo_utils import timeutils
import webob.exc
from cinder.api.contrib import hosts as os_hosts
from cinder import context
from cinder import db
from cinder.openstack.common import log as logging
from cinder import test
LOG = logging.getLogger(__name__)
# Fixed timestamps so the expected "last-update" values are deterministic.
created_time = datetime.datetime(2012, 11, 14, 1, 20, 41, 95099)
curr_time = datetime.datetime(2013, 7, 3, 0, 0, 1)

# Raw rows as returned by the stubbed db.service_get_all().
SERVICE_LIST = [
    {'created_at': created_time, 'updated_at': curr_time,
     'host': 'test.host.1', 'topic': 'cinder-volume', 'disabled': 0,
     'availability_zone': 'cinder'},
    {'created_at': created_time, 'updated_at': curr_time,
     'host': 'test.host.1', 'topic': 'cinder-volume', 'disabled': 0,
     'availability_zone': 'cinder'},
    {'created_at': created_time, 'updated_at': curr_time,
     'host': 'test.host.1', 'topic': 'cinder-volume', 'disabled': 0,
     'availability_zone': 'cinder'},
    {'created_at': created_time, 'updated_at': curr_time,
     'host': 'test.host.1', 'topic': 'cinder-volume', 'disabled': 0,
     'availability_zone': 'cinder'}]

# Expected API-level representation of SERVICE_LIST after _list_hosts().
LIST_RESPONSE = [{'service-status': 'available', 'service': 'cinder-volume',
                  'zone': 'cinder', 'service-state': 'enabled',
                  'host_name': 'test.host.1', 'last-update': curr_time},
                 {'service-status': 'available', 'service': 'cinder-volume',
                  'zone': 'cinder', 'service-state': 'enabled',
                  'host_name': 'test.host.1', 'last-update': curr_time},
                 {'service-status': 'available', 'service': 'cinder-volume',
                  'zone': 'cinder', 'service-state': 'enabled',
                  'host_name': 'test.host.1', 'last-update': curr_time},
                 {'service-status': 'available', 'service': 'cinder-volume',
                  'zone': 'cinder', 'service-state': 'enabled',
                  'host_name': 'test.host.1', 'last-update': curr_time}]
def stub_utcnow():
    """Deterministic replacement for timeutils.utcnow: one second after the
    fixtures' curr_time, so stubbed services always look recently updated."""
    return datetime.datetime(year=2013, month=7, day=3,
                             hour=0, minute=0, second=2)
def stub_service_get_all(self, req):
    """Stub for db.service_get_all: always return the canned SERVICE_LIST."""
    return SERVICE_LIST
class FakeRequest(object):
    # Minimal stand-in for a WSGI request: admin context, no query params.
    environ = {'cinder.context': context.get_admin_context()}
    GET = {}
class FakeRequestWithcinderZone(object):
    # Like FakeRequest, but with a ?zone=cinder query parameter.
    environ = {'cinder.context': context.get_admin_context()}
    GET = {'zone': 'cinder'}
class HostTestCase(test.TestCase):
    """Test Case for hosts."""
    def setUp(self):
        super(HostTestCase, self).setUp()
        self.controller = os_hosts.HostController()
        self.req = FakeRequest()
        # Route DB access and "now" through deterministic stubs so the
        # derived service status/last-update values are stable.
        self.stubs.Set(db, 'service_get_all',
                       stub_service_get_all)
        self.stubs.Set(timeutils, 'utcnow', stub_utcnow)

    def _test_host_update(self, host, key, val, expected_value):
        # Helper: PUT {key: val} for `host` and check the echoed value.
        body = {key: val}
        result = self.controller.update(self.req, host, body=body)
        self.assertEqual(result[key], expected_value)

    def test_list_hosts(self):
        """Verify that the volume hosts are returned."""
        hosts = os_hosts._list_hosts(self.req)
        self.assertEqual(hosts, LIST_RESPONSE)

        # Filtering by service keeps only cinder-volume entries.
        cinder_hosts = os_hosts._list_hosts(self.req, 'cinder-volume')
        expected = [host for host in LIST_RESPONSE
                    if host['service'] == 'cinder-volume']
        self.assertEqual(cinder_hosts, expected)

    def test_list_hosts_with_zone(self):
        """Listing with ?zone=cinder returns the same stubbed hosts."""
        req = FakeRequestWithcinderZone()
        hosts = os_hosts._list_hosts(req)
        self.assertEqual(hosts, LIST_RESPONSE)

    def test_bad_status_value(self):
        """Unknown status values must be rejected with HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, 'test.host.1', body={'status': 'bad'})
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.update,
                          self.req,
                          'test.host.1',
                          body={'status': 'disablabc'})

    def test_bad_update_key(self):
        """An unrecognized update key must be rejected with HTTP 400."""
        bad_body = {'crazy': 'bad'}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, 'test.host.1', body=bad_body)

    def test_bad_update_key_and_correct_udpate_key(self):
        """A bad key rejects the request even when mixed with a valid one."""
        bad_body = {'status': 'disable', 'crazy': 'bad'}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, 'test.host.1', body=bad_body)

    def test_good_udpate_keys(self):
        """A valid status update reaches the (unimplemented) backend."""
        body = {'status': 'disable'}
        self.assertRaises(NotImplementedError, self.controller.update,
                          self.req, 'test.host.1', body=body)

    def test_bad_host(self):
        """Updating an unknown host must yield HTTP 404."""
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.update,
                          self.req,
                          'bogus_host_name',
                          body={'disabled': 0})

    def test_show_forbidden(self):
        """Non-admin contexts may not show host details (HTTP 403)."""
        self.req.environ['cinder.context'].is_admin = False
        dest = 'dummydest'
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.show,
                          self.req, dest)
        # Restore admin flag for subsequent tests sharing the request.
        self.req.environ['cinder.context'].is_admin = True

    def test_show_host_not_exist(self):
        """A host given as an argument does not exists."""
        self.req.environ['cinder.context'].is_admin = True
        dest = 'dummydest'
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          self.req, dest)
class HostSerializerTest(test.TestCase):
    """XML serialization/deserialization tests for the hosts extension."""

    def setUp(self):
        super(HostSerializerTest, self).setUp()
        self.deserializer = os_hosts.HostDeserializer()

    def test_index_serializer(self):
        """The index template emits one <host> element per list entry with
        all attributes mirrored from the dict."""
        serializer = os_hosts.HostIndexTemplate()
        text = serializer.serialize({"hosts": LIST_RESPONSE})

        tree = etree.fromstring(text)

        self.assertEqual('hosts', tree.tag)
        self.assertEqual(len(LIST_RESPONSE), len(tree))
        for i in range(len(LIST_RESPONSE)):
            self.assertEqual('host', tree[i].tag)
            self.assertEqual(LIST_RESPONSE[i]['service-status'],
                             tree[i].get('service-status'))
            self.assertEqual(LIST_RESPONSE[i]['service'],
                             tree[i].get('service'))
            self.assertEqual(LIST_RESPONSE[i]['zone'],
                             tree[i].get('zone'))
            self.assertEqual(LIST_RESPONSE[i]['service-state'],
                             tree[i].get('service-state'))
            self.assertEqual(LIST_RESPONSE[i]['host_name'],
                             tree[i].get('host_name'))
            # last-update is serialized as its string form.
            self.assertEqual(str(LIST_RESPONSE[i]['last-update']),
                             tree[i].get('last-update'))

    def test_update_serializer_with_status(self):
        """Update responses render host/status as XML attributes."""
        exemplar = dict(host='test.host.1', status='enabled')
        serializer = os_hosts.HostUpdateTemplate()
        text = serializer.serialize(exemplar)

        tree = etree.fromstring(text)

        self.assertEqual('host', tree.tag)
        for key, value in exemplar.items():
            self.assertEqual(value, tree.get(key))

    def test_update_deserializer(self):
        """Unknown child elements (foo) are kept by the deserializer."""
        exemplar = dict(status='enabled', foo='bar')
        intext = ("<?xml version='1.0' encoding='UTF-8'?>\n"
                  '<updates><status>enabled</status><foo>bar</foo></updates>')
        result = self.deserializer.deserialize(intext)

        self.assertEqual(dict(body=exemplar), result)
| apache-2.0 |
bpshetty/erpnext | erpnext/accounts/report/asset_depreciations_and_balances/asset_depreciations_and_balances.py | 32 | 6129 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import formatdate, getdate, flt, add_days
def execute(filters=None):
	"""Report entry point: return (columns, data) for the given filters.

	NOTE(review): despite the None default, `filters` must be a frappe dict
	with from_date/to_date/company set -- None would crash on the first
	attribute assignment. Confirm the framework always supplies it.
	"""
	# Opening-balance columns are labelled with the day before from_date.
	filters.day_before_from_date = add_days(filters.from_date, -1)
	columns, data = get_columns(filters), get_data(filters)
	return columns, data
def get_data(filters):
	"""Build one report row per asset category.

	Each row combines the period's cost movements with the accumulated
	depreciation movements, then derives the closing cost, closing
	accumulated depreciation and opening/closing net asset values.
	"""
	data = []

	asset_categories = get_asset_categories(filters)
	assets = get_assets(filters)
	asset_costs = get_asset_costs(assets, filters)
	asset_depreciations = get_accumulated_depreciations(assets, filters)

	for asset_category in asset_categories:
		row = frappe._dict()
		row.asset_category = asset_category
		row.update(asset_costs.get(asset_category))

		row.cost_as_on_to_date = (flt(row.cost_as_on_from_date) + flt(row.cost_of_new_purchase)
			- flt(row.cost_of_sold_asset) - flt(row.cost_of_scrapped_asset))

		row.update(asset_depreciations.get(asset_category))
		# Bug fix: the eliminated figure is stored under
		# "depreciation_eliminated_during_the_period" (see
		# get_accumulated_depreciations). The old code read the non-existent
		# "depreciation_eliminated" attribute, which frappe._dict resolves to
		# None (flt -> 0), so disposals never reduced the closing
		# accumulated depreciation.
		row.accumulated_depreciation_as_on_to_date = (flt(row.accumulated_depreciation_as_on_from_date) +
			flt(row.depreciation_amount_during_the_period) -
			flt(row.depreciation_eliminated_during_the_period))

		row.net_asset_value_as_on_from_date = (flt(row.cost_as_on_from_date) -
			flt(row.accumulated_depreciation_as_on_from_date))

		row.net_asset_value_as_on_to_date = (flt(row.cost_as_on_to_date) -
			flt(row.accumulated_depreciation_as_on_to_date))

		data.append(row)

	return data
def get_asset_categories(filters):
	"""Return the distinct categories of submitted assets purchased on or
	before the report's to_date, for the given company."""
	return frappe.db.sql_list("""
		select distinct asset_category from `tabAsset`
		where docstatus=1 and company=%s and purchase_date <= %s
	""", (filters.company, filters.to_date))
def get_assets(filters):
	"""Fetch all submitted assets (purchased on or before to_date) with the
	fields needed for cost and depreciation aggregation."""
	return frappe.db.sql("""
		select name, asset_category, purchase_date, gross_purchase_amount, disposal_date, status
		from `tabAsset`
		where docstatus=1 and company=%s and purchase_date <= %s""",
		(filters.company, filters.to_date), as_dict=1)
def get_asset_costs(assets, filters):
	"""Aggregate per-category purchase-cost movements for the period.

	Buckets each asset's gross purchase amount into opening cost, new
	purchases during the period, and cost removed by sale or scrapping
	during the period.
	"""
	asset_costs = frappe._dict()
	for d in assets:
		asset_costs.setdefault(d.asset_category, frappe._dict({
			"cost_as_on_from_date": 0,
			"cost_of_new_purchase": 0,
			"cost_of_sold_asset": 0,
			"cost_of_scrapped_asset": 0
		}))

		costs = asset_costs[d.asset_category]

		if getdate(d.purchase_date) < getdate(filters.from_date):
			# Purchased before the period: counts toward opening cost unless
			# it was already disposed of before the period started.
			if not d.disposal_date or getdate(d.disposal_date) >= getdate(filters.from_date):
				costs.cost_as_on_from_date += flt(d.gross_purchase_amount)
		else:
			costs.cost_of_new_purchase += flt(d.gross_purchase_amount)

		if d.disposal_date and getdate(d.disposal_date) >= getdate(filters.from_date) \
			and getdate(d.disposal_date) <= getdate(filters.to_date):
			# Disposed within the period: remove its cost under the bucket
			# matching the disposal type.
			if d.status == "Sold":
				costs.cost_of_sold_asset += flt(d.gross_purchase_amount)
			elif d.status == "Scrapped":
				costs.cost_of_scrapped_asset += flt(d.gross_purchase_amount)

	return asset_costs
def get_accumulated_depreciations(assets, filters):
	"""Aggregate per-category accumulated-depreciation movements.

	Returns a dict keyed by asset category holding the opening accumulated
	depreciation, the depreciation booked during the period, and the
	depreciation eliminated by asset disposals.
	"""
	asset_depreciations = frappe._dict()
	for d in assets:
		asset = frappe.get_doc("Asset", d.name)

		asset_depreciations.setdefault(d.asset_category, frappe._dict({
			"accumulated_depreciation_as_on_from_date": asset.opening_accumulated_depreciation,
			"depreciation_amount_during_the_period": 0,
			"depreciation_eliminated_during_the_period": 0
		}))

		depr = asset_depreciations[d.asset_category]

		for schedule in asset.get("schedules"):
			if getdate(schedule.schedule_date) < getdate(filters.from_date):
				# Bug fix: this condition previously used "and", which both
				# excluded every disposed asset from the opening balance and
				# evaluated getdate(None) for assets without a disposal date.
				# Mirror the cost logic in get_asset_costs(): pre-period
				# depreciation counts toward the opening balance unless the
				# asset was already disposed of before the period started.
				if not asset.disposal_date or getdate(asset.disposal_date) >= getdate(filters.from_date):
					depr.accumulated_depreciation_as_on_from_date += flt(schedule.depreciation_amount)
			elif getdate(schedule.schedule_date) <= getdate(filters.to_date):
				depr.depreciation_amount_during_the_period += flt(schedule.depreciation_amount)

			# Depreciation scheduled after the disposal date is eliminated
			# along with the asset.
			if asset.disposal_date and getdate(schedule.schedule_date) > getdate(asset.disposal_date):
				depr.depreciation_eliminated_during_the_period += flt(schedule.depreciation_amount)

	return asset_depreciations
def get_columns(filters):
	"""Column definitions for the report; date-dependent labels are built
	from the filter dates (day_before_from_date is set by execute())."""
	def currency_column(label, fieldname, width):
		# All value columns share fieldtype "Currency"; only label,
		# fieldname and width vary.
		return {
			"label": label,
			"fieldname": fieldname,
			"fieldtype": "Currency",
			"width": width
		}

	return [
		{
			"label": _("Asset Category"),
			"fieldname": "asset_category",
			"fieldtype": "Link",
			"options": "Asset Category",
			"width": 120
		},
		currency_column(_("Cost as on") + " " + formatdate(filters.day_before_from_date),
			"cost_as_on_from_date", 140),
		currency_column(_("Cost of New Purchase"),
			"cost_of_new_purchase", 140),
		currency_column(_("Cost of Sold Asset"),
			"cost_of_sold_asset", 140),
		currency_column(_("Cost of Scrapped Asset"),
			"cost_of_scrapped_asset", 140),
		currency_column(_("Cost as on") + " " + formatdate(filters.to_date),
			"cost_as_on_to_date", 140),
		currency_column(_("Accumulated Depreciation as on") + " " + formatdate(filters.day_before_from_date),
			"accumulated_depreciation_as_on_from_date", 270),
		currency_column(_("Depreciation Amount during the period"),
			"depreciation_amount_during_the_period", 240),
		currency_column(_("Depreciation Eliminated due to disposal of assets"),
			"depreciation_eliminated_during_the_period", 300),
		currency_column(_("Accumulated Depreciation as on") + " " + formatdate(filters.to_date),
			"accumulated_depreciation_as_on_to_date", 270),
		currency_column(_("Net Asset value as on") + " " + formatdate(filters.day_before_from_date),
			"net_asset_value_as_on_from_date", 200),
		currency_column(_("Net Asset value as on") + " " + formatdate(filters.to_date),
			"net_asset_value_as_on_to_date", 200)
	]
| gpl-3.0 |
sominn/android_kernel_samsung_golden | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";

# Optional single argument restricts counting to one command name.
for_comm = None

if len(sys.argv) > 2:
	sys.exit(usage)

if len(sys.argv) > 1:
	for_comm = sys.argv[1]

# syscall id -> hit count (autodict creates entries lazily).
syscalls = autodict()
def trace_begin():
	# Called once by perf before event processing starts.
	print "Press control+C to stop and show the summary"
def trace_end():
	# Called once by perf after the last event; dump the totals.
	print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Per-event handler invoked by perf for every raw sys_enter tracepoint.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	# autodict entries start as empty dicts, so the first increment raises
	# TypeError; initialize the counter to 1 in that case.
	try:
		syscalls[id] += 1
	except TypeError:
		syscalls[id] = 1
def print_syscall_totals():
	# Print a table of syscall names and counts, most frequent first.
	if for_comm is not None:
		print "\nsyscall events for %s:\n\n" % (for_comm),
	else:
		print "\nsyscall events:\n\n",

	print "%-40s  %10s\n" % ("event", "count"),
	print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "-----------"),

	for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
				  reverse = True):
		print "%-40s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
fursund/EmguCV-Unity | opencv/tests/swig_python/highgui/size_test.py | 3 | 1361 | """
This script will test HighGUI's cvGetCaptureProperty functionality
for correct return values of width and height information for different video formats
"""
# import the necessary things for OpenCV and the comparison routine
import os
from cv import *
from highgui import *
#import python
#from python.highgui import *
# path to images and videos we need
# Test data root, relative to the automake source directory ($srcdir).
PREFIX =os.path.join(os.environ["srcdir"],"../../opencv_extra/testdata/python/")

# Subfolders holding the reference images and videos.
IMAGES = PREFIX+"images/"
VIDEOS = PREFIX+"videos/"
# testing routine: opens the video and checks that the reported capture
# dimensions match the first decoded frame
def size_ok(FILENAME):
  """Return 0 (PASS) if cvGetCaptureProperty width/height match the first
  frame of VIDEOS+FILENAME, 1 (FAIL) on any mismatch or open error."""
  # create a video reader using the tiny videofile VIDEOS+FILENAME
  video=cvCreateFileCapture(VIDEOS+FILENAME)

  if video is None:
    # couldn't open video (FAIL)
    return 1

  # get width and height information via HighGUI's cvGetCaptureProperty function
  w=cvGetCaptureProperty(video,CV_CAP_PROP_FRAME_WIDTH)
  h=cvGetCaptureProperty(video,CV_CAP_PROP_FRAME_HEIGHT)

  # get an image to compare
  image=cvQueryFrame(video)

  if image is None:
    return 1

  # clone: cvQueryFrame's buffer is owned by the capture object
  image = cvCloneImage (image)

  if (w!=image.width) or (h!=image.height):
    # dimensions don't match parameters (FAIL)
    return 1

  del video
  del image

  # everything is fine (PASS)
  return 0
| gpl-3.0 |
fangxingli/hue | desktop/core/ext-py/Django-1.6.10/tests/lookup/models.py | 107 | 1557 | """
7. The lookup API
This demonstrates features of the database API.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class Author(models.Model):
    # Target model for Article's ForeignKey lookups.
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ('name', )
@python_2_unicode_compatible
class Article(models.Model):
    # Primary model exercised by the lookup tests; author is optional.
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()
    author = models.ForeignKey(Author, blank=True, null=True)

    class Meta:
        # Newest first, headline as tie-breaker.
        ordering = ('-pub_date', 'headline')

    def __str__(self):
        return self.headline
class Tag(models.Model):
    # Many-to-many relation used for cross-relation lookups on Article.
    articles = models.ManyToManyField(Article)
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ('name', )
@python_2_unicode_compatible
class Season(models.Model):
    year = models.PositiveSmallIntegerField()
    # Field deliberately named after the "gt" lookup to test name clashes.
    gt = models.IntegerField(null=True, blank=True)

    def __str__(self):
        return six.text_type(self.year)
@python_2_unicode_compatible
class Game(models.Model):
    # related_name enables season.games reverse lookups in the tests.
    season = models.ForeignKey(Season, related_name='games')
    home = models.CharField(max_length=100)
    away = models.CharField(max_length=100)

    def __str__(self):
        return "%s at %s" % (self.away, self.home)
@python_2_unicode_compatible
class Player(models.Model):
    name = models.CharField(max_length=100)
    # related_name enables game.players reverse lookups in the tests.
    games = models.ManyToManyField(Game, related_name='players')

    def __str__(self):
        return self.name
| apache-2.0 |
SUSE/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/stateless_service_description.py | 2 | 5839 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .service_description import ServiceDescription
# Auto-generated by AutoRest; edits here will be lost on regeneration.
class StatelessServiceDescription(ServiceDescription):
    """Describes a stateless service.

    :param application_name: The application name.
    :type application_name: str
    :param service_name: The service name.
    :type service_name: str
    :param service_type_name: The service type name.
    :type service_type_name: str
    :param initialization_data: The initialization data as an array of bytes.
     Initialization data is passed to service instances or replicas when they
     are created.
    :type initialization_data: list of int
    :param partition_description: The partition description as an object.
    :type partition_description: :class:`PartitionSchemeDescription
     <azure.servicefabric.models.PartitionSchemeDescription>`
    :param placement_constraints: The placement constraints as a string.
     Placement constraints are boolean expressions on node properties and
     allow for restricting a service to particular nodes based on the service
     requirements. For example, to place a service on nodes where NodeType is
     blue specify the following: "NodeColor == blue)".
    :type placement_constraints: str
    :param correlation_scheme: The correlation scheme.
    :type correlation_scheme: list of :class:`ServiceCorrelationDescription
     <azure.servicefabric.models.ServiceCorrelationDescription>`
    :param service_load_metrics: The service load metrics.
    :type service_load_metrics: list of :class:`ServiceLoadMetricDescription
     <azure.servicefabric.models.ServiceLoadMetricDescription>`
    :param service_placement_policies: The service placement policies.
    :type service_placement_policies: list of
     :class:`ServicePlacementPolicyDescription
     <azure.servicefabric.models.ServicePlacementPolicyDescription>`
    :param default_move_cost: The move cost for the service. Possible values
     include: 'Zero', 'Low', 'Medium', 'High'
    :type default_move_cost: str
    :param is_default_move_cost_specified: Indicates if the DefaultMoveCost
     property is specified.
    :type is_default_move_cost_specified: bool
    :param service_package_activation_mode: The activation mode of service
     package to be used for a service. Possible values include:
     'SharedProcess', 'ExclusiveProcess'
    :type service_package_activation_mode: str
    :param service_dns_name: The DNS name of the service. It requires the DNS
     system service to be enabled in Service Fabric cluster.
    :type service_dns_name: str
    :param ServiceKind: Polymorphic Discriminator
    :type ServiceKind: str
    :param instance_count: The instance count.
    :type instance_count: int
    """

    _validation = {
        'service_name': {'required': True},
        'service_type_name': {'required': True},
        'partition_description': {'required': True},
        'ServiceKind': {'required': True},
        # minimum -1: negative one is accepted by the REST schema --
        # presumably Service Fabric's "run on every node"; confirm in docs.
        'instance_count': {'required': True, 'minimum': -1},
    }

    _attribute_map = {
        'application_name': {'key': 'ApplicationName', 'type': 'str'},
        'service_name': {'key': 'ServiceName', 'type': 'str'},
        'service_type_name': {'key': 'ServiceTypeName', 'type': 'str'},
        'initialization_data': {'key': 'InitializationData', 'type': '[int]'},
        'partition_description': {'key': 'PartitionDescription', 'type': 'PartitionSchemeDescription'},
        'placement_constraints': {'key': 'PlacementConstraints', 'type': 'str'},
        'correlation_scheme': {'key': 'CorrelationScheme', 'type': '[ServiceCorrelationDescription]'},
        'service_load_metrics': {'key': 'ServiceLoadMetrics', 'type': '[ServiceLoadMetricDescription]'},
        'service_placement_policies': {'key': 'ServicePlacementPolicies', 'type': '[ServicePlacementPolicyDescription]'},
        'default_move_cost': {'key': 'DefaultMoveCost', 'type': 'str'},
        'is_default_move_cost_specified': {'key': 'IsDefaultMoveCostSpecified', 'type': 'bool'},
        'service_package_activation_mode': {'key': 'ServicePackageActivationMode', 'type': 'str'},
        'service_dns_name': {'key': 'ServiceDnsName', 'type': 'str'},
        'ServiceKind': {'key': 'ServiceKind', 'type': 'str'},
        'instance_count': {'key': 'InstanceCount', 'type': 'int'},
    }

    def __init__(self, service_name, service_type_name, partition_description, instance_count, application_name=None, initialization_data=None, placement_constraints=None, correlation_scheme=None, service_load_metrics=None, service_placement_policies=None, default_move_cost=None, is_default_move_cost_specified=None, service_package_activation_mode=None, service_dns_name=None):
        super(StatelessServiceDescription, self).__init__(application_name=application_name, service_name=service_name, service_type_name=service_type_name, initialization_data=initialization_data, partition_description=partition_description, placement_constraints=placement_constraints, correlation_scheme=correlation_scheme, service_load_metrics=service_load_metrics, service_placement_policies=service_placement_policies, default_move_cost=default_move_cost, is_default_move_cost_specified=is_default_move_cost_specified, service_package_activation_mode=service_package_activation_mode, service_dns_name=service_dns_name)
        self.instance_count = instance_count
        # Polymorphic discriminator fixed by this subclass.
        self.ServiceKind = 'Stateless'
| mit |
jymannob/CouchPotatoServer | libs/pyutil/nummedobj.py | 106 | 2141 | # Copyright (c) 2002-2009 Zooko Wilcox-O'Hearn
# mailto:zooko@zooko.com
# This file is part of pyutil; see README.rst for licensing terms.
import dictutil
class NummedObj(object):
    """
    Mixin that labels every instance with a small per-class serial number.

    Instead of being distinguished by memory address, instances print as
    "<ClassName #1>", "<ClassName #2>", ... in creation order.  Because the
    numbering restarts on every run, debug output from separate runs of the
    same program is diffable: identical runs produce identical output rather
    than differing on every line because of object addresses.
    """

    # key: class name, value: highest object number handed out so far
    objnums = dictutil.NumDict()

    def __init__(self, klass=None):
        """
        @param klass: in which class are you counted?  If the default value
            of `None' is used, then self.__class__ will be used.
        """
        counted_class = klass if klass is not None else self.__class__

        self._classname = counted_class.__name__
        NummedObj.objnums.inc(self._classname)
        self._objid = NummedObj.objnums[self._classname]

    def _order_key(self):
        # Instances order by serial number first, then by class name.
        return (self._objid, self._classname)

    def __repr__(self):
        return "<%s #%d>" % (self._classname, self._objid,)

    def __lt__(self, other):
        return self._order_key() < other._order_key()

    def __le__(self, other):
        return self._order_key() <= other._order_key()

    def __eq__(self, other):
        return self._order_key() == other._order_key()

    def __ne__(self, other):
        return self._order_key() != other._order_key()

    def __gt__(self, other):
        return self._order_key() > other._order_key()

    def __ge__(self, other):
        return self._order_key() >= other._order_key()

    def __hash__(self):
        # NOTE: hashing stays identity-based even though equality is
        # value-based on (objid, classname).
        return id(self)
| gpl-3.0 |
JackDanger/sentry | tests/sentry/utils/http/tests.py | 3 | 10681 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from exam import fixture
from django.http import HttpRequest
from sentry import options
from sentry.models import Project
from sentry.testutils import TestCase
from sentry.utils.http import (
is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip,
origin_from_request,
)
class AbsoluteUriTest(TestCase):
    # absolute_uri() builds fully-qualified URLs from the configured
    # 'system.url-prefix' option.

    def test_without_path(self):
        assert absolute_uri() == options.get('system.url-prefix')

    def test_with_path(self):
        assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),)
class SameDomainTestCase(TestCase):
    # is_same_domain() compares only the host portion: scheme differences
    # are ignored, port differences are not.

    def test_is_same_domain(self):
        url1 = 'http://example.com/foo/bar'
        url2 = 'http://example.com/biz/baz'

        self.assertTrue(is_same_domain(url1, url2))

    def test_is_same_domain_diff_scheme(self):
        url1 = 'https://example.com/foo/bar'
        url2 = 'http://example.com/biz/baz'

        self.assertTrue(is_same_domain(url1, url2))

    def test_is_same_domain_diff_port(self):
        url1 = 'http://example.com:80/foo/bar'
        url2 = 'http://example.com:13/biz/baz'

        self.assertFalse(is_same_domain(url1, url2))
class GetOriginsTestCase(TestCase):
    # get_origins() merges the per-project 'sentry:origins' option with the
    # global SENTRY_ALLOW_ORIGIN setting into a frozenset.

    def test_project_default(self):
        project = Project.objects.get()

        with self.settings(SENTRY_ALLOW_ORIGIN=None):
            result = get_origins(project)
            self.assertEquals(result, frozenset(['*']))

    def test_project(self):
        project = Project.objects.get()
        project.update_option('sentry:origins', [u'http://foo.example'])

        with self.settings(SENTRY_ALLOW_ORIGIN=None):
            result = get_origins(project)
            self.assertEquals(result, frozenset(['http://foo.example']))

    def test_project_and_setting(self):
        project = Project.objects.get()
        project.update_option('sentry:origins', [u'http://foo.example'])

        with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):
            result = get_origins(project)
            self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com']))

    def test_setting_empty(self):
        with self.settings(SENTRY_ALLOW_ORIGIN=None):
            result = get_origins(None)
            self.assertEquals(result, frozenset([]))

    def test_setting_all(self):
        with self.settings(SENTRY_ALLOW_ORIGIN='*'):
            result = get_origins(None)
            self.assertEquals(result, frozenset(['*']))

    def test_setting_uri(self):
        with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):
            result = get_origins(None)
            self.assertEquals(result, frozenset(['http://example.com']))
class IsValidOriginTestCase(TestCase):
    # Exhaustive matrix for is_valid_origin(): wildcards, bare domains,
    # ports, full URIs, 'null' origins, custom protocols and unicode /
    # punycode hostnames.

    @fixture
    def project(self):
        return mock.Mock()

    def isValidOrigin(self, origin, inputs):
        """Run is_valid_origin() with get_origins() patched to return *inputs*."""
        with mock.patch('sentry.utils.http.get_origins') as get_origins:
            get_origins.return_value = inputs
            result = is_valid_origin(origin, self.project)
            get_origins.assert_called_once_with(self.project)
        return result

    def test_global_wildcard_matches_domain(self):
        result = self.isValidOrigin('http://example.com', ['*'])
        self.assertEquals(result, True)

    def test_domain_wildcard_matches_domain(self):
        result = self.isValidOrigin('http://example.com', ['*.example.com'])
        self.assertEquals(result, True)

    def test_domain_wildcard_matches_domain_with_port(self):
        result = self.isValidOrigin('http://example.com:80', ['*.example.com'])
        self.assertEquals(result, True)

    def test_domain_wildcard_matches_subdomain(self):
        result = self.isValidOrigin('http://foo.example.com', ['*.example.com'])
        self.assertEquals(result, True)

    def test_domain_wildcard_matches_subdomain_with_port(self):
        result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com'])
        self.assertEquals(result, True)

    def test_domain_wildcard_does_not_match_others(self):
        result = self.isValidOrigin('http://foo.com', ['*.example.com'])
        self.assertEquals(result, False)

    def test_domain_wildcard_matches_domain_with_path(self):
        result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com'])
        self.assertEquals(result, True)

    def test_base_domain_matches_domain(self):
        result = self.isValidOrigin('http://example.com', ['example.com'])
        self.assertEquals(result, True)

    def test_base_domain_matches_domain_with_path(self):
        result = self.isValidOrigin('http://example.com/foo/bar', ['example.com'])
        self.assertEquals(result, True)

    def test_base_domain_matches_domain_with_port(self):
        result = self.isValidOrigin('http://example.com:80', ['example.com'])
        self.assertEquals(result, True)

    def test_base_domain_matches_domain_with_explicit_port(self):
        result = self.isValidOrigin('http://example.com:80', ['example.com:80'])
        assert result is True

    def test_base_domain_does_not_match_domain_with_invalid_port(self):
        result = self.isValidOrigin('http://example.com:80', ['example.com:443'])
        assert result is False

    def test_base_domain_does_not_match_subdomain(self):
        result = self.isValidOrigin('http://example.com', ['foo.example.com'])
        self.assertEquals(result, False)

    def test_full_uri_match(self):
        result = self.isValidOrigin('http://example.com', ['http://example.com'])
        self.assertEquals(result, True)

    def test_full_uri_match_requires_scheme(self):
        result = self.isValidOrigin('https://example.com', ['http://example.com'])
        self.assertEquals(result, False)

    def test_full_uri_match_does_not_require_port(self):
        result = self.isValidOrigin('http://example.com:80', ['http://example.com'])
        self.assertEquals(result, True)

    def test_partial_uri_match(self):
        result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com'])
        self.assertEquals(result, True)

    def test_null_valid_with_global(self):
        result = self.isValidOrigin('null', ['*'])
        self.assertEquals(result, True)

    def test_null_invalid_graceful_with_domains(self):
        result = self.isValidOrigin('null', ['http://example.com'])
        self.assertEquals(result, False)

    def test_custom_protocol_with_location(self):
        result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing'])
        assert result is True

        result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing'])
        assert result is False

    def test_custom_protocol_without_location(self):
        result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*'])
        assert result is True

        result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://'])
        assert result is False

    def test_custom_protocol_with_domainish_match(self):
        result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar'])
        assert result is True

        result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar'])
        assert result is False

    def test_unicode(self):
        result = self.isValidOrigin(u'http://l\xf8calhost', [u'*.l\xf8calhost'])
        assert result is True

    def test_punycode(self):
        # Unicode and punycode spellings of the same host must be
        # interchangeable on both sides of the comparison.
        result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\xf8calhost'])
        assert result is True
        result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a'])
        assert result is True
        result = self.isValidOrigin(u'http://l\xf8calhost', [u'*.xn--lcalhost-54a'])
        assert result is True
        result = self.isValidOrigin('http://l\xc3\xb8calhost', [u'*.xn--lcalhost-54a'])
        assert result is True
        result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\xf8calhost'])
        assert result is True
        result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\xf8calhost:80'])
        assert result is True

    def test_unparseable_uri(self):
        result = self.isValidOrigin('http://example.com', ['.'])
        assert result is False

    def test_wildcard_hostname_with_port(self):
        result = self.isValidOrigin('http://example.com:1234', ['*:1234'])
        assert result is True

    def test_without_hostname(self):
        result = self.isValidOrigin('foo://', ['foo://*'])
        assert result is True

        result = self.isValidOrigin('foo://', ['foo://'])
        assert result is True

        result = self.isValidOrigin('foo://', ['example.com'])
        assert result is False

        result = self.isValidOrigin('foo://a', ['foo://'])
        assert result is False

        result = self.isValidOrigin('foo://a', ['foo://*'])
        assert result is True
class IsValidIPTestCase(TestCase):
    # is_valid_ip() rejects IPs in the project's 'sentry:blacklisted_ips'
    # option (exact entries or CIDR ranges); unparseable entries are
    # ignored.  NOTE(review): self.project is assumed to come from sentry's
    # TestCase fixtures -- confirm against the base class.

    def is_valid_ip(self, ip, inputs):
        """Set the project blacklist to *inputs* and check *ip* against it."""
        self.project.update_option('sentry:blacklisted_ips', inputs)
        return is_valid_ip(ip, self.project)

    def test_not_in_blacklist(self):
        assert self.is_valid_ip('127.0.0.1', [])
        assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8'])

    def test_match_blacklist(self):
        assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1'])
        assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1'])

    def test_match_blacklist_range(self):
        assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8'])
        assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.0/8', '192.168.1.0/8'])

    def test_garbage_input(self):
        assert self.is_valid_ip('127.0.0.1', ['lol/bar'])
class OriginFromRequestTestCase(TestCase):
    # origin_from_request() prefers the Origin header, falls back to the
    # Referer's scheme+host, and treats the literal 'null' origin as absent.

    def test_nothing(self):
        request = HttpRequest()
        assert origin_from_request(request) is None

    def test_origin(self):
        request = HttpRequest()
        request.META['HTTP_ORIGIN'] = 'http://example.com'
        request.META['HTTP_REFERER'] = 'nope'
        assert origin_from_request(request) == 'http://example.com'

    def test_referer(self):
        request = HttpRequest()
        request.META['HTTP_REFERER'] = 'http://example.com/foo/bar'
        assert origin_from_request(request) == 'http://example.com'

    def test_null_origin(self):
        request = HttpRequest()
        request.META['HTTP_ORIGIN'] = 'null'
        assert origin_from_request(request) is None

        request.META['HTTP_REFERER'] = 'http://example.com'
        assert origin_from_request(request) == 'http://example.com'
| bsd-3-clause |
gizmag/django-mobile | django_mobile/conf.py | 7 | 1527 | # -*- coding: utf-8 -*-
from django.conf import settings as django_settings
CACHE_LOADER_NAME = 'django_mobile.loader.CachedLoader'
DJANGO_MOBILE_LOADER = 'django_mobile.loader.Loader'
class SettingsProxy(object):
    """Read-only attribute proxy with a fallback object.

    Attribute access is answered by ``settings`` when it has the attribute
    and by ``defaults`` otherwise; when neither object has it, an
    AttributeError is raised.
    """

    def __init__(self, settings, defaults):
        self.settings = settings
        self.defaults = defaults

    def __getattr__(self, attr):
        # Probe the primary settings first, then the defaults.
        for source in (self.settings, self.defaults):
            try:
                return getattr(source, attr)
            except AttributeError:
                continue
        raise AttributeError(u'settings object has no attribute "%s"' % attr)
class defaults(object):
    # Fallback values for every django_mobile setting; consulted by
    # SettingsProxy only when the Django settings object lacks the name.
    FLAVOURS = (u'full', u'mobile',)
    DEFAULT_MOBILE_FLAVOUR = u'mobile'
    FLAVOURS_TEMPLATE_PREFIX = u''
    FLAVOURS_GET_PARAMETER = u'flavour'
    FLAVOURS_STORAGE_BACKEND = u'cookie'
    FLAVOURS_COOKIE_KEY = u'flavour'
    FLAVOURS_COOKIE_HTTPONLY = False
    FLAVOURS_SESSION_KEY = u'flavour'
    # Computed at import time: the project's TEMPLATE_LOADERS with
    # django_mobile's own loader removed, flattening the cached-loader
    # wrapper if it is present (avoids recursive loading).
    FLAVOURS_TEMPLATE_LOADERS = []
    for loader in django_settings.TEMPLATE_LOADERS:
        if isinstance(loader, (tuple, list)) and loader[0] == CACHE_LOADER_NAME:
            for cached_loader in loader[1]:
                if cached_loader != DJANGO_MOBILE_LOADER:
                    FLAVOURS_TEMPLATE_LOADERS.append(cached_loader)
        elif loader != DJANGO_MOBILE_LOADER:
            FLAVOURS_TEMPLATE_LOADERS.append(loader)
    FLAVOURS_TEMPLATE_LOADERS = tuple(FLAVOURS_TEMPLATE_LOADERS)

# Module-wide accessor: real Django settings with the defaults above.
settings = SettingsProxy(django_settings, defaults)
| bsd-3-clause |
massot/account-invoice-reporting | __unported__/account_invoice_delivery_address/__openerp__.py | 7 | 1709 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Account invoice delivery address",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"complexity": "normal",
"description": """
Adds delivery address to the invoice. Also makes sure delivery address is
filled from default delivery address, or taken from sales order. Delivery
address might also be changed untill invoice is confirmed.
""",
"category": "",
"depends": [
'account',
'sale_stock',
],
"data": [
'report/account_print_invoice.xml',
'view/account_invoice.xml',
],
"js": [
],
"css": [
],
"qweb": [
],
"auto_install": False,
'installable': False,
}
| agpl-3.0 |
Frenzie/youtube-dl | youtube_dl/extractor/quickvid.py | 113 | 1719 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
)
from ..utils import (
determine_ext,
int_or_none,
)
class QuickVidIE(InfoExtractor):
    # Extractor for quickvid.org watch pages.  The video id is the 'v'
    # query parameter; metadata and the <video>/<source> markup are scraped
    # straight from the watch page HTML.
    _VALID_URL = r'https?://(www\.)?quickvid\.org/watch\.php\?v=(?P<id>[a-zA-Z_0-9-]+)'
    _TEST = {
        'url': 'http://quickvid.org/watch.php?v=sUQT3RCG8dx',
        'md5': 'c0c72dd473f260c06c808a05d19acdc5',
        'info_dict': {
            'id': 'sUQT3RCG8dx',
            'ext': 'mp4',
            'title': 'Nick Offerman\'s Summer Reading Recap',
            'thumbnail': 're:^https?://.*\.(?:png|jpg|gif)$',
            'view_count': int,
        },
        'skip': 'Not accessible from Travis CI server',
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        title = self._html_search_regex(r'<h2>(.*?)</h2>', webpage, 'title')
        # View count is optional (fatal=False): missing markup yields None.
        view_count = int_or_none(self._html_search_regex(
            r'(?s)<div id="views">(.*?)</div>',
            webpage, 'view count', fatal=False))
        video_code = self._search_regex(
            r'(?s)<video id="video"[^>]*>(.*?)</video>', webpage, 'video code')
        # One format per <source> tag; URLs may be relative to the page.
        formats = [
            {
                'url': compat_urlparse.urljoin(url, src),
                'format_id': determine_ext(src, None),
            } for src in re.findall('<source\s+src="([^"]+)"', video_code)
        ]
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': self._og_search_thumbnail(webpage),
            'view_count': view_count,
        }
| unlicense |
Plain-Andy-legacy/android_external_chromium_org | tools/perf/page_sets/tough_animation_cases.py | 33 | 3532 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class ToughAnimationCasesPage(page_module.Page):
  """A single animation benchmark page.

  When need_measurement_ready is set, navigation blocks until the page's
  JS declares 'measurementReady', so measurement starts only once the
  animation harness is set up.
  """

  def __init__(self, url, page_set, need_measurement_ready):
    super(ToughAnimationCasesPage, self).__init__(url=url, page_set=page_set)
    self.archive_data_file = 'data/tough_animation_cases.json'
    self._need_measurement_ready = need_measurement_ready

  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    if self._need_measurement_ready:
      action_runner.WaitForJavaScriptCondition('measurementReady')

  def RunSmoothness(self, action_runner):
    # Sample smoothness over a fixed 10 second window of the animation.
    action_runner.Wait(10)
class ToughAnimationCasesPageSet(page_set_module.PageSet):

  """
  Description: A collection of animation performance tests
  """

  def __init__(self):
    super(ToughAnimationCasesPageSet, self).__init__(
      archive_data_file='data/tough_animation_cases.json',
      bucket=page_set_module.PARTNER_BUCKET)

    # Pages whose harness signals 'measurementReady' before measuring.
    urls_list_one = [
      # Why: Tests the balls animation implemented with SVG animations.
      'file://tough_animation_cases/balls_svg_animations.html',

      # Why: Tests the balls animation implemented with Javascript and canvas.
      'file://tough_animation_cases/balls_javascript_canvas.html',

      # Why: Tests the balls animation implemented with Javascript and CSS.
      'file://tough_animation_cases/balls_javascript_css.html',

      # Why: Tests the balls animation implemented with CSS keyframe animations.
      'file://tough_animation_cases/balls_css_keyframe_animations.html',

      # Why: Tests the balls animation implemented with transforms and CSS
      # keyframe animations to be run on the compositor thread.
      # pylint: disable=C0301
      'file://tough_animation_cases/balls_css_keyframe_animations_composited_transform.html',

      # Why: Tests the balls animation implemented with CSS transitions on 2
      # properties.
      'file://tough_animation_cases/balls_css_transition_2_properties.html',

      # Why: Tests the balls animation implemented with CSS transitions on 40
      # properties.
      'file://tough_animation_cases/balls_css_transition_40_properties.html',

      # Why: Tests the balls animation implemented with CSS transitions on all
      # animatable properties.
      'file://tough_animation_cases/balls_css_transition_all_properties.html',

      # pylint: disable=C0301
      'file://tough_animation_cases/overlay_background_color_css_transitions.html'
    ]

    for url in urls_list_one:
      self.AddPage(ToughAnimationCasesPage(url, self,
                                           need_measurement_ready=True))

    # Pages measured immediately after navigation (no readiness signal).
    urls_list_two = [
      # Why: Tests various keyframed animations.
      'file://tough_animation_cases/keyframed_animations.html',

      # Why: Tests various transitions.
      'file://tough_animation_cases/transform_transitions.html',

      # Why: Login page is slow because of ineffecient transform operations.
      'http://ie.microsoft.com/testdrive/performance/robohornetpro/',

      # Why: JS execution blocks CSS transition unless initial transform is set.
      'file://tough_animation_cases/transform_transition_js_block.html'
    ]

    for url in urls_list_two:
      self.AddPage(ToughAnimationCasesPage(url, self,
                                           need_measurement_ready=False))
| bsd-3-clause |
GNOME/gedit-plugins | plugins/commander/modules/align.py | 1 | 8900 | # -*- coding: utf-8 -*-
#
# align.py - align commander module
#
# Copyright (C) 2010 - Jesse van den Kieboom
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
import commander.commands as commands
import commander.commands.completion
import commander.commands.result
import commander.commands.exceptions
from functools import reduce
import re
__commander_module__ = True
def _get_groups(m, group, add_ws_group):
if len(m.groups()) <= group - 1:
gidx = 0
else:
gidx = group
if len(m.groups()) <= add_ws_group - 1:
wsidx = 0
else:
wsidx = add_ws_group
# Whitespace group must be contained in align group
if m.start(wsidx) < m.start(gidx) or m.end(wsidx) > m.end(gidx):
wsidx = gidx
return (gidx, wsidx)
class Line:
    """One line of text being re-flowed into aligned columns.

    ``matches`` holds every separator match of the alignment regex found in
    the original line; ``newline`` is rebuilt column by column through
    successive append() calls.
    """

    def __init__(self, line, reg, tabwidth):
        """
        @param line: original text of the line.
        @param reg: compiled regular expression matching column separators.
        @param tabwidth: tab size used when measuring display width.
        """
        self.tabwidth = tabwidth
        self.line = line

        # All the separators
        self.matches = list(reg.finditer(line))

        # @newline initially contains the first column
        if not self.matches:
            # No separator found
            self.newline = str(line)
        else:
            # Up to first separator
            self.newline = line[0:self.matches[0].start(0)]

    def matches_len(self):
        """Number of separator matches in this line."""
        return len(self.matches)

    def new_len(self, extra=''):
        """Display width of the rebuilt line plus ``extra``, tabs expanded."""
        return len((self.newline + extra).expandtabs(self.tabwidth))

    def match(self, idx):
        """Separator match ``idx``, or None when out of range."""
        if idx >= self.matches_len():
            return None

        return self.matches[idx]

    def append(self, idx, num, group, add_ws_group):
        """Append column ``idx``, padding its whitespace group with spaces
        so the aligned portion ends at display column ``num``."""
        m = self.match(idx)

        # PEP 8: compare against None with 'is', not '=='
        if m is None:
            return

        gidx, wsidx = _get_groups(m, group, add_ws_group)

        # Append leading match
        self.newline += self.line[m.start(0):m.start(gidx)]

        # Now align by replacing wsidx with spaces
        prefix = self.line[m.start(gidx):m.start(wsidx)]
        suffix = self.line[m.end(wsidx):m.end(gidx)]
        sp = ''

        while True:
            bridge = prefix + sp + suffix

            if self.new_len(bridge) < num:
                sp += ' '
            else:
                break

        self.newline += bridge

        # Then append the rest of the match
        mnext = self.match(idx + 1)

        if mnext is None:
            endidx = None
        else:
            endidx = mnext.start(0)

        self.newline += self.line[m.end(gidx):endidx]

    def __str__(self):
        return self.newline
def _find_max_align(lines, idx, group, add_ws_group):
    """Return the display column at which separator ``idx`` must be aligned.

    For each line that still has a match at this separator index, measure
    how wide the rebuilt line would be with the whitespace group removed;
    the maximum over all lines is the column every line is padded to.
    """
    num = 0

    # We will align on 'group', by adding spaces to 'add_ws_group'
    for line in lines:
        m = line.match(idx)

        # PEP 8: identity comparison with None
        if m is not None:
            gidx, wsidx = _get_groups(m, group, add_ws_group)

            # Separator text outside the whitespace group still counts
            # towards the width
            extra = line.line[m.start(0):m.start(wsidx)] + line.line[m.end(wsidx):m.end(gidx)]

            # Measure where to align it
            length = line.new_len(extra)
        else:
            length = line.new_len()

        if length > num:
            num = length

    return num
def _regex(view, reg, group, additional_ws, add_ws_group, flags=0):
    """Align the selected lines into columns separated by a regex.

    Missing arguments (regex, group, padding, whitespace group) are asked
    from the user through commander's generator-based Prompt protocol,
    which is why this function is a generator yielding Prompt results and
    finally DONE.

    @param view: gedit view whose buffer is modified in place.
    @param reg: regular expression string, or None to prompt.
    @param group: regex group to align on, or None to prompt (default 1).
    @param additional_ws: extra padding columns, or None to prompt.
    @param add_ws_group: group replaced by whitespace, or None to prompt;
                         negative values fall back to ``group``.
    @param flags: flags passed to re.compile (e.g. re.IGNORECASE).
    """
    buf = view.get_buffer()

    # Get the selection of lines to align columns on
    bounds = buf.get_selection_bounds()

    if not bounds:
        # No selection: operate on the line containing the cursor
        start = buf.get_iter_at_mark(buf.get_insert())
        start.set_line_offset(0)

        end = start.copy()

        if not end.ends_line():
            end.forward_to_line_end()

        bounds = (start, end)

    if not bounds[0].equal(bounds[1]) and bounds[1].starts_line():
        # Selection ends at the very start of a line: pull the end back so
        # that line is not included
        bounds[1].backward_line()

        if not bounds[1].ends_line():
            bounds[1].forward_to_line_end()

    # Get the regular expression from the user
    if reg is None:
        reg, words, modifier = (yield commander.commands.result.Prompt('Regex:'))

    # Compile the regular expression
    try:
        reg = re.compile(reg, flags)
    except Exception as e:
        raise commander.commands.exceptions.Execute('Failed to compile regular expression: %s' % (e,))

    # Query the user to provide a regex group number to align on
    if group is None:
        group, words, modifier = (yield commander.commands.result.Prompt('Group (1):'))

    try:
        group = int(group)
    except (TypeError, ValueError):
        group = 1

    # Query the user for additional whitespace to insert for separating items
    if additional_ws is None:
        additional_ws, words, modifier = (yield commander.commands.result.Prompt('Additional whitespace (0):'))

    try:
        additional_ws = int(additional_ws)
    except (TypeError, ValueError):
        additional_ws = 0

    # Query the user for the regex group number on which to add the
    # whitespace
    if add_ws_group is None:
        add_ws_group, words, modifier = (yield commander.commands.result.Prompt('Whitespace group (1):'))

    try:
        add_ws_group = int(add_ws_group)
    except (TypeError, ValueError):
        add_ws_group = -1

    # By default, add the whitespace on the group on which the columns are
    # aligned
    if add_ws_group < 0:
        add_ws_group = group

    start, end = bounds

    # Snap the bounds to whole lines
    if not start.starts_line():
        start.set_line_offset(0)

    if not end.ends_line():
        end.forward_to_line_end()

    lines = start.get_text(end).splitlines()
    tabwidth = view.get_tab_width()

    # Construct Line objects for all the lines
    newlines = [Line(line, reg, tabwidth) for line in lines]

    # Calculate maximum number of matches (i.e. columns)
    num = reduce(lambda x, y: max(x, y.matches_len()), newlines, 0)

    for i in range(num):
        al = _find_max_align(newlines, i, group, add_ws_group)

        for line in newlines:
            line.append(i, al + additional_ws, group, add_ws_group)

    # Replace the selected text with the aligned lines in one undoable action
    aligned = '\n'.join([x.newline for x in newlines])

    buf.begin_user_action()
    buf.delete(bounds[0], bounds[1])
    m = buf.create_mark(None, bounds[0], True)
    buf.insert(bounds[1], aligned)
    buf.select_range(buf.get_iter_at_mark(m), bounds[1])
    buf.delete_mark(m)
    buf.end_user_action()

    yield commander.commands.result.DONE
def __default__(view, reg='\s+', align_group=1, padding=1, padding_group=-1):
    """Align selected in columns using a regular expression: align.regex [<regex>=<i>\s+</i>] [<align-group>] [<padding>] [<padding-group>=<i><align-group></i>]

Align the selected text in columns separated by the specified regular expression.
The optional <align-group> argument specifies on which group in the regular expression
the text should be aligned and defaults to 1 (or 0 in the case that there is
no explicit group specified). The <align-group> will be <b>replaced</b>
with whitespace to align the columns. The optional <padding> argument can
be used to add additional whitespace to the column separation. The last
optional argument (<padding-group>) can be used to specify a separate
group (which must be contained in <align-group>) which to replace with
whitespace.

The regular expression will be matched in case-sensitive mode"""
    # NOTE: the docstring above is shown verbatim as commander help text;
    # keep it in sync with the case-insensitive variant i() below.
    # Case-sensitive variant: delegate to the shared implementation.
    yield _regex(view, reg, align_group, padding, padding_group)
def i(view, reg='\s+', align_group=1, padding=1, padding_group=-1):
    """Align selected in columns using a regular expression: align.regex [<regex>=<i>\s+</i>] [<align-group>] [<padding>] [<padding-group>=<i><align-group></i>]

Align the selected text in columns separated by the specified regular expression.
The optional <align-group> argument specifies on which group in the regular expression
the text should be aligned and defaults to 1 (or 0 in the case that there is
no explicit group specified). The <align-group> will be <b>replaced</b>
with whitespace to align the columns. The optional <padding> argument can
be used to add additional whitespace to the column separation. The last
optional argument (<padding-group>) can be used to specify a separate
group (which must be contained in <align-group>) which to replace with
whitespace.

The regular expression will be matched in case-insensitive mode"""
    # Case-insensitive variant: same implementation with re.IGNORECASE.
    yield _regex(view, reg, align_group, padding, padding_group, re.IGNORECASE)
# ex:ts=4:et
| gpl-2.0 |
mtlchun/edx | common/lib/symmath/symmath/test_symmath_check.py | 166 | 2648 | from unittest import TestCase
from .symmath_check import symmath_check
class SymmathCheckTest(TestCase):
    # symmath_check(expected, answer) returns a dict with an 'ok' flag (and
    # a 'msg' on failure); these tests cover numeric and symbolic inputs.

    def test_symmath_check_integers(self):
        number_list = [i for i in range(-100, 100)]
        self._symmath_check_numbers(number_list)

    def test_symmath_check_floats(self):
        number_list = [i + 0.01 for i in range(-100, 100)]
        self._symmath_check_numbers(number_list)

    def test_symmath_check_same_symbols(self):
        expected_str = "x+2*y"

        # Presentation MathML mirroring the student's input, as produced by
        # the front-end equation editor.
        dynamath = '''
        <math xmlns="http://www.w3.org/1998/Math/MathML">
          <mstyle displaystyle="true">
            <mrow>
              <mi>x</mi>
              <mo>+</mo>
              <mn>2</mn>
              <mo>*</mo>
              <mi>y</mi>
            </mrow>
          </mstyle>
        </math>'''.strip()

        # Expect that the exact same symbolic string is marked correct
        result = symmath_check(expected_str, expected_str, dynamath=[dynamath])
        self.assertTrue('ok' in result and result['ok'])

    def test_symmath_check_equivalent_symbols(self):
        expected_str = "x+2*y"
        input_str = "x+y+y"
        dynamath = '''
        <math xmlns="http://www.w3.org/1998/Math/MathML">
          <mstyle displaystyle="true">
            <mrow>
              <mi>x</mi>
              <mo>+</mo>
              <mi>y</mi>
              <mo>+</mo>
              <mi>y</mi>
            </mrow>
          </mstyle>
        </math>'''.strip()

        # Expect that equivalent symbolic strings are marked correct
        result = symmath_check(expected_str, input_str, dynamath=[dynamath])
        self.assertTrue('ok' in result and result['ok'])

    def test_symmath_check_different_symbols(self):
        expected_str = "0"
        input_str = "x+y"
        dynamath = '''
        <math xmlns="http://www.w3.org/1998/Math/MathML">
          <mstyle displaystyle="true">
            <mrow>
              <mi>x</mi>
              <mo>+</mo>
              <mi>y</mi>
            </mrow>
          </mstyle>
        </math>'''.strip()

        # Expect that an incorrect response is marked incorrect
        result = symmath_check(expected_str, input_str, dynamath=[dynamath])
        self.assertTrue('ok' in result and not result['ok'])
        self.assertFalse('fail' in result['msg'])

    def _symmath_check_numbers(self, number_list):
        """Check every number against itself (correct) and itself+0.1 (incorrect)."""
        for n in number_list:

            # expect = ans, so should say the answer is correct
            expect = n
            ans = n
            result = symmath_check(str(expect), str(ans))
            self.assertTrue('ok' in result and result['ok'],
                            "%f should == %f" % (expect, ans))

            # Change expect so that it != ans
            expect += 0.1
            result = symmath_check(str(expect), str(ans))
            self.assertTrue('ok' in result and not result['ok'],
                            "%f should != %f" % (expect, ans))
| agpl-3.0 |
vmturbo/nova | nova/tests/unit/api/openstack/compute/test_attach_interfaces.py | 3 | 21862 | # Copyright 2012 SINA Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from webob import exc
from nova.api.openstack.compute import attach_interfaces \
as attach_interfaces_v21
from nova.compute import api as compute_api
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_network_cache_model
# Canned identifiers shared by the fake network/compute stubs below.
FAKE_UUID1 = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_UUID2 = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'

FAKE_PORT_ID1 = '11111111-1111-1111-1111-111111111111'
FAKE_PORT_ID2 = '22222222-2222-2222-2222-222222222222'
FAKE_PORT_ID3 = '33333333-3333-3333-3333-333333333333'
# Port id that the stubs treat as non-existent (raises PortNotFound).
FAKE_NOT_FOUND_PORT_ID = '00000000-0000-0000-0000-000000000000'

FAKE_NET_ID1 = '44444444-4444-4444-4444-444444444444'
FAKE_NET_ID2 = '55555555-5555-5555-5555-555555555555'
FAKE_NET_ID3 = '66666666-6666-6666-6666-666666666666'
# Network id that the stubs treat as non-existent (raises NetworkNotFound).
FAKE_BAD_NET_ID = '00000000-0000-0000-0000-000000000000'

# Neutron-style port dicts returned by fake_show_port(); ports 1 and 2 are
# attached to FAKE_UUID1, port 3 is unattached (empty device_id).
port_data1 = {
    "id": FAKE_PORT_ID1,
    "network_id": FAKE_NET_ID1,
    "admin_state_up": True,
    "status": "ACTIVE",
    "mac_address": "aa:aa:aa:aa:aa:aa",
    "fixed_ips": ["10.0.1.2"],
    "device_id": FAKE_UUID1,
}

port_data2 = {
    "id": FAKE_PORT_ID2,
    "network_id": FAKE_NET_ID2,
    "admin_state_up": True,
    "status": "ACTIVE",
    "mac_address": "bb:bb:bb:bb:bb:bb",
    "fixed_ips": ["10.0.2.2"],
    "device_id": FAKE_UUID1,
}

port_data3 = {
    "id": FAKE_PORT_ID3,
    "network_id": FAKE_NET_ID3,
    "admin_state_up": True,
    "status": "ACTIVE",
    "mac_address": "bb:bb:bb:bb:bb:bb",
    "fixed_ips": ["10.0.2.2"],
    "device_id": '',
}

# Networks fake_attach_interface() treats as existing; a network's index in
# this list selects its default port from ``ports``.
fake_networks = [FAKE_NET_ID1, FAKE_NET_ID2]
ports = [port_data1, port_data2, port_data3]
def fake_show_port(context, port_id, **kwargs):
    """Stub for network_api.show_port returning canned port data.

    Returns ``{'port': <port dict>}`` for a known port id, or raises
    PortNotFound just as the real network API would.
    """
    for port in ports:
        if port['id'] == port_id:
            return {'port': port}
    # The original used a for/else here; since the loop body never executes
    # a ``break``, the else clause always ran after an unsuccessful scan.
    # Raising directly after the loop is equivalent and far clearer.
    raise exception.PortNotFound(port_id=port_id)
def fake_attach_interface(self, context, instance, network_id, port_id,
                          requested_ip='192.168.1.3'):
    """Stub for compute API attach_interface built on the canned data.

    Mirrors the real API's error behavior (NetworkNotFound / PortNotFound)
    and returns a fake VIF describing the resulting attachment.
    """
    # Fall back to the first default network when none was requested.
    net = network_id or fake_networks[0]
    if net == FAKE_BAD_NET_ID:
        raise exception.NetworkNotFound(network_id=net)
    # Likewise derive a port id from the chosen network if not given.
    port = port_id or ports[fake_networks.index(net)]['id']
    if port == FAKE_NOT_FOUND_PORT_ID:
        raise exception.PortNotFound(port_id=port)
    vif = fake_network_cache_model.new_vif()
    vif['id'] = port
    vif['network']['id'] = net
    vif['network']['subnets'][0]['ips'][0]['address'] = requested_ip
    return vif
def fake_detach_interface(self, context, instance, port_id):
    """Stub for compute API detach_interface.

    Succeeds silently for a known port id, raises PortNotFound otherwise.
    """
    if not any(port['id'] == port_id for port in ports):
        raise exception.PortNotFound(port_id=port_id)
def fake_get_instance(self, *args, **kwargs):
    """Stub for compute API get() that always yields the same instance."""
    instance = objects.Instance(uuid=FAKE_UUID1)
    return instance
class InterfaceAttachTestsV21(test.NoDBTestCase):
    """Tests for the os-attach-interfaces v2.1 API controller."""
    # Hooks meant to be overridden by legacy (v2.0) subclasses elsewhere:
    # the controller under test and the exception types each API version
    # raises for validation / conflict / not-found / not-usable cases.
    controller_cls = attach_interfaces_v21.InterfaceAttachmentController
    validate_exc = exception.ValidationError
    in_use_exc = exc.HTTPConflict
    not_found_exc = exc.HTTPNotFound
    not_usable_exc = exc.HTTPBadRequest
    def setUp(self):
        """Stub out compute/network APIs and build a request fixture."""
        super(InterfaceAttachTestsV21, self).setUp()
        self.flags(timeout=30, group='neutron')
        self.stub_out('nova.compute.api.API.get', fake_get_instance)
        # Expected response body for show(), derived from port_data1.
        self.expected_show = {'interfaceAttachment':
            {'net_id': FAKE_NET_ID1,
             'port_id': FAKE_PORT_ID1,
             'mac_addr': port_data1['mac_address'],
             'port_state': port_data1['status'],
             'fixed_ips': port_data1['fixed_ips'],
             }}
        self.attachments = self.controller_cls()
        show_port_patch = mock.patch.object(self.attachments.network_api,
                                            'show_port', fake_show_port)
        show_port_patch.start()
        self.addCleanup(show_port_patch.stop)
        self.req = fakes.HTTPRequest.blank('')
    @mock.patch.object(compute_api.API, 'get',
                       side_effect=exception.InstanceNotFound(instance_id=''))
    def _test_instance_not_found(self, func, args, mock_get, kwargs=None):
        # Helper: every controller action must 404 when the server is gone.
        if not kwargs:
            kwargs = {}
        self.assertRaises(exc.HTTPNotFound, func, self.req, *args, **kwargs)
    def test_show_instance_not_found(self):
        self._test_instance_not_found(self.attachments.show, ('fake', 'fake'))
    def test_index_instance_not_found(self):
        self._test_instance_not_found(self.attachments.index, ('fake', ))
    def test_detach_interface_instance_not_found(self):
        self._test_instance_not_found(self.attachments.delete,
                                      ('fake', 'fake'))
    def test_attach_interface_instance_not_found(self):
        self._test_instance_not_found(self.attachments.create, ('fake', ),
            kwargs={'body': {'interfaceAttachment': {}}})
    def test_show(self):
        result = self.attachments.show(self.req, FAKE_UUID1, FAKE_PORT_ID1)
        self.assertEqual(self.expected_show, result)
    def test_show_with_port_not_found(self):
        # FAKE_UUID2 owns no ports, so showing port 1 against it must 404.
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.show, self.req, FAKE_UUID2,
                          FAKE_PORT_ID1)
    def test_show_forbidden(self):
        with mock.patch.object(self.attachments.network_api, 'show_port',
                               side_effect=exception.Forbidden):
            self.assertRaises(exc.HTTPForbidden,
                              self.attachments.show, self.req, FAKE_UUID1,
                              FAKE_PORT_ID1)
    def test_delete(self):
        self.stub_out('nova.compute.api.API.detach_interface',
                      fake_detach_interface)
        result = self.attachments.delete(self.req, FAKE_UUID1, FAKE_PORT_ID1)
        # NOTE: on v2.1, http status code is set as wsgi_code of API
        # method instead of status_int in a response object.
        if isinstance(self.attachments,
                      attach_interfaces_v21.InterfaceAttachmentController):
            status_int = self.attachments.delete.wsgi_code
        else:
            status_int = result.status_int
        self.assertEqual(202, status_int)
    def test_detach_interface_instance_locked(self):
        def fake_detach_interface_from_locked_server(self, context,
                                                     instance, port_id):
            raise exception.InstanceIsLocked(instance_uuid=FAKE_UUID1)
        self.stub_out('nova.compute.api.API.detach_interface',
                      fake_detach_interface_from_locked_server)
        self.assertRaises(exc.HTTPConflict,
                          self.attachments.delete,
                          self.req,
                          FAKE_UUID1,
                          FAKE_PORT_ID1)
    def test_delete_interface_not_found(self):
        self.stub_out('nova.compute.api.API.detach_interface',
                      fake_detach_interface)
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.delete,
                          self.req,
                          FAKE_UUID1,
                          'invalid-port-id')
    def test_attach_interface_instance_locked(self):
        def fake_attach_interface_to_locked_server(self, context,
            instance, network_id, port_id, requested_ip):
            raise exception.InstanceIsLocked(instance_uuid=FAKE_UUID1)
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface_to_locked_server)
        body = {}
        self.assertRaises(exc.HTTPConflict,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    def test_attach_interface_without_network_id(self):
        # With no net_id in the body, the fake attach picks FAKE_NET_ID1.
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface)
        body = {}
        result = self.attachments.create(self.req, FAKE_UUID1, body=body)
        self.assertEqual(result['interfaceAttachment']['net_id'],
            FAKE_NET_ID1)
    def test_attach_interface_with_network_id(self):
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface)
        body = {'interfaceAttachment': {'net_id': FAKE_NET_ID2}}
        result = self.attachments.create(self.req, FAKE_UUID1, body=body)
        self.assertEqual(result['interfaceAttachment']['net_id'],
            FAKE_NET_ID2)
    def _attach_interface_bad_request_case(self, body):
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface)
        self.assertRaises(exc.HTTPBadRequest,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    def _attach_interface_not_found_case(self, body):
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface)
        self.assertRaises(self.not_found_exc,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    def test_attach_interface_with_port_and_network_id(self):
        # Supplying both port_id and net_id is rejected as a bad request.
        body = {
            'interfaceAttachment': {
                'port_id': FAKE_PORT_ID1,
                'net_id': FAKE_NET_ID2
            }
        }
        self._attach_interface_bad_request_case(body)
    def test_attach_interface_with_not_found_network_id(self):
        body = {
            'interfaceAttachment': {
                'net_id': FAKE_BAD_NET_ID
            }
        }
        self._attach_interface_not_found_case(body)
    def test_attach_interface_with_not_found_port_id(self):
        body = {
            'interfaceAttachment': {
                'port_id': FAKE_NOT_FOUND_PORT_ID
            }
        }
        self._attach_interface_not_found_case(body)
    def test_attach_interface_with_invalid_state(self):
        def fake_attach_interface_invalid_state(*args, **kwargs):
            raise exception.InstanceInvalidState(
                instance_uuid='', attr='', state='',
                method='attach_interface')
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface_invalid_state)
        body = {'interfaceAttachment': {'net_id': FAKE_NET_ID1}}
        self.assertRaises(exc.HTTPConflict,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    def test_detach_interface_with_invalid_state(self):
        def fake_detach_interface_invalid_state(*args, **kwargs):
            raise exception.InstanceInvalidState(
                instance_uuid='', attr='', state='',
                method='detach_interface')
        self.stub_out('nova.compute.api.API.detach_interface',
                      fake_detach_interface_invalid_state)
        self.assertRaises(exc.HTTPConflict,
                          self.attachments.delete,
                          self.req,
                          FAKE_UUID1,
                          FAKE_NET_ID1)
    @mock.patch.object(compute_api.API, 'detach_interface',
                       side_effect=NotImplementedError())
    def test_detach_interface_with_not_implemented(self, _mock):
        self.assertRaises(exc.HTTPNotImplemented,
                          self.attachments.delete,
                          self.req, FAKE_UUID1, FAKE_NET_ID1)
    def test_attach_interface_invalid_fixed_ip(self):
        # Schema validation rejects malformed ip addresses before the
        # compute API is ever reached.
        body = {
            'interfaceAttachment': {
                'net_id': FAKE_NET_ID1,
                'fixed_ips': [{'ip_address': 'invalid_ip'}]
            }
        }
        self.assertRaises(self.validate_exc,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_fixed_ip_already_in_use(self,
                                                      attach_mock,
                                                      get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = exception.FixedIpAlreadyInUse(
            address='10.0.2.2', instance_uuid=FAKE_UUID1)
        body = {}
        self.assertRaises(self.in_use_exc,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_port_in_use(self,
                                          attach_mock,
                                          get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = exception.PortInUse(
            port_id=FAKE_PORT_ID1)
        body = {}
        self.assertRaises(self.in_use_exc,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_port_not_usable(self,
                                              attach_mock,
                                              get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = exception.PortNotUsable(
            port_id=FAKE_PORT_ID1,
            instance=fake_instance.uuid)
        body = {}
        self.assertRaises(self.not_usable_exc,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_failed_no_network(self, attach_mock, get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1,
                                         project_id=FAKE_UUID2)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = (
            exception.InterfaceAttachFailedNoNetwork(project_id=FAKE_UUID2))
        self.assertRaises(exc.HTTPBadRequest, self.attachments.create,
                          self.req, FAKE_UUID1, body={})
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_no_more_fixed_ips(self,
                                                attach_mock,
                                                get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = exception.NoMoreFixedIps(
            net=FAKE_NET_ID1)
        body = {}
        self.assertRaises(exc.HTTPBadRequest,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    @mock.patch.object(compute_api.API, 'get')
    @mock.patch.object(compute_api.API, 'attach_interface')
    def test_attach_interface_failed_securitygroup_cannot_be_applied(
            self, attach_mock, get_mock):
        fake_instance = objects.Instance(uuid=FAKE_UUID1,
                                         project_id=FAKE_UUID2)
        get_mock.return_value = fake_instance
        attach_mock.side_effect = (
            exception.SecurityGroupCannotBeApplied())
        self.assertRaises(exc.HTTPBadRequest, self.attachments.create,
                          self.req, FAKE_UUID1, body={})
        ctxt = self.req.environ['nova.context']
        attach_mock.assert_called_once_with(ctxt, fake_instance, None,
                                            None, None)
        get_mock.assert_called_once_with(ctxt, FAKE_UUID1,
                                         expected_attrs=None)
    def _test_attach_interface_with_invalid_parameter(self, param):
        self.stub_out('nova.compute.api.API.attach_interface',
                      fake_attach_interface)
        body = {'interface_attachment': param}
        self.assertRaises(exception.ValidationError,
                          self.attachments.create, self.req, FAKE_UUID1,
                          body=body)
    def test_attach_interface_instance_with_non_uuid_net_id(self):
        param = {'net_id': 'non_uuid'}
        self._test_attach_interface_with_invalid_parameter(param)
    def test_attach_interface_instance_with_non_uuid_port_id(self):
        param = {'port_id': 'non_uuid'}
        self._test_attach_interface_with_invalid_parameter(param)
    def test_attach_interface_instance_with_non_array_fixed_ips(self):
        param = {'fixed_ips': 'non_array'}
        self._test_attach_interface_with_invalid_parameter(param)
class AttachInterfacesPolicyEnforcementv21(test.NoDBTestCase):
    """Each controller action must be rejected when policy denies it."""
    def setUp(self):
        super(AttachInterfacesPolicyEnforcementv21, self).setUp()
        self.controller = \
            attach_interfaces_v21.InterfaceAttachmentController()
        self.req = fakes.HTTPRequest.blank('')
        self.rule_name = "os_compute_api:os-attach-interfaces"
        # Require a project the request context does not belong to, so
        # every call below fails the policy check.
        self.policy.set_rules({self.rule_name: "project:non_fake"})
    def test_index_attach_interfaces_policy_failed(self):
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.index, self.req, fakes.FAKE_UUID)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % self.rule_name,
            exc.format_message())
    def test_show_attach_interfaces_policy_failed(self):
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.show, self.req, fakes.FAKE_UUID, FAKE_PORT_ID1)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % self.rule_name,
            exc.format_message())
    def test_create_attach_interfaces_policy_failed(self):
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.create, self.req, fakes.FAKE_UUID, body={})
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % self.rule_name,
            exc.format_message())
    def test_delete_attach_interfaces_policy_failed(self):
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.delete, self.req, fakes.FAKE_UUID, FAKE_PORT_ID1)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % self.rule_name,
            exc.format_message())
    def test_attach_interfaces_create_policy_failed(self):
        # Base rule passes ("@") but the create-specific rule denies ("!").
        self.policy.set_rules({self.rule_name: "@",
                               'os_compute_api:os-attach-interfaces:create':
                               "!"})
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.create, self.req, fakes.FAKE_UUID, body={})
        self.assertEqual(
            "Policy doesn't allow os_compute_api:os-attach-interfaces:create "
            "to be performed.", exc.format_message())
    def test_attach_interfaces_delete_policy_failed(self):
        # Base rule passes ("@") but the delete-specific rule denies ("!").
        self.policy.set_rules({self.rule_name: "@",
                               'os_compute_api:os-attach-interfaces:delete':
                               "!"})
        exc = self.assertRaises(
            exception.PolicyNotAuthorized,
            self.controller.delete, self.req, fakes.FAKE_UUID, FAKE_PORT_ID1)
        self.assertEqual(
            "Policy doesn't allow os_compute_api:os-attach-interfaces:delete "
            "to be performed.", exc.format_message())
| apache-2.0 |
Teagan42/home-assistant | homeassistant/components/google_assistant/const.py | 1 | 4660 | """Constants for Google Assistant."""
from homeassistant.components import (
alarm_control_panel,
binary_sensor,
camera,
climate,
cover,
fan,
group,
input_boolean,
light,
lock,
media_player,
scene,
script,
sensor,
switch,
vacuum,
)
DOMAIN = "google_assistant"
GOOGLE_ASSISTANT_API_ENDPOINT = "/api/google_assistant"
# Configuration keys accepted by this integration.
CONF_EXPOSE = "expose"
CONF_ENTITY_CONFIG = "entity_config"
CONF_EXPOSE_BY_DEFAULT = "expose_by_default"
CONF_EXPOSED_DOMAINS = "exposed_domains"
CONF_PROJECT_ID = "project_id"
CONF_ALIASES = "aliases"
CONF_API_KEY = "api_key"
CONF_ROOM_HINT = "room"
CONF_ALLOW_UNLOCK = "allow_unlock"
CONF_SECURE_DEVICES_PIN = "secure_devices_pin"
CONF_REPORT_STATE = "report_state"
CONF_SERVICE_ACCOUNT = "service_account"
CONF_CLIENT_EMAIL = "client_email"
CONF_PRIVATE_KEY = "private_key"
# Defaults applied when the user does not configure exposure explicitly.
DEFAULT_EXPOSE_BY_DEFAULT = True
DEFAULT_EXPOSED_DOMAINS = [
    "climate",
    "cover",
    "fan",
    "group",
    "input_boolean",
    "light",
    "media_player",
    "scene",
    "script",
    "switch",
    "vacuum",
    "lock",
    "binary_sensor",
    "sensor",
    "alarm_control_panel",
]
# Google Smart Home device type identifiers (action.devices.types.*).
PREFIX_TYPES = "action.devices.types."
TYPE_CAMERA = PREFIX_TYPES + "CAMERA"
TYPE_LIGHT = PREFIX_TYPES + "LIGHT"
TYPE_SWITCH = PREFIX_TYPES + "SWITCH"
TYPE_VACUUM = PREFIX_TYPES + "VACUUM"
TYPE_SCENE = PREFIX_TYPES + "SCENE"
TYPE_FAN = PREFIX_TYPES + "FAN"
TYPE_THERMOSTAT = PREFIX_TYPES + "THERMOSTAT"
TYPE_LOCK = PREFIX_TYPES + "LOCK"
TYPE_BLINDS = PREFIX_TYPES + "BLINDS"
TYPE_GARAGE = PREFIX_TYPES + "GARAGE"
TYPE_OUTLET = PREFIX_TYPES + "OUTLET"
TYPE_SENSOR = PREFIX_TYPES + "SENSOR"
TYPE_DOOR = PREFIX_TYPES + "DOOR"
TYPE_TV = PREFIX_TYPES + "TV"
TYPE_SPEAKER = PREFIX_TYPES + "SPEAKER"
TYPE_ALARM = PREFIX_TYPES + "SECURITYSYSTEM"
SERVICE_REQUEST_SYNC = "request_sync"
# Google HomeGraph API endpoints used for request_sync and report_state.
HOMEGRAPH_URL = "https://homegraph.googleapis.com/"
HOMEGRAPH_SCOPE = "https://www.googleapis.com/auth/homegraph"
HOMEGRAPH_TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
REQUEST_SYNC_BASE_URL = HOMEGRAPH_URL + "v1/devices:requestSync"
REPORT_STATE_BASE_URL = HOMEGRAPH_URL + "v1/devices:reportStateAndNotification"
# Error codes used for SmartHomeError class
# https://developers.google.com/actions/reference/smarthome/errors-exceptions
ERR_DEVICE_OFFLINE = "deviceOffline"
ERR_DEVICE_NOT_FOUND = "deviceNotFound"
ERR_VALUE_OUT_OF_RANGE = "valueOutOfRange"
ERR_NOT_SUPPORTED = "notSupported"
ERR_PROTOCOL_ERROR = "protocolError"
ERR_UNKNOWN_ERROR = "unknownError"
ERR_FUNCTION_NOT_SUPPORTED = "functionNotSupported"
ERR_ALREADY_DISARMED = "alreadyDisarmed"
ERR_ALREADY_ARMED = "alreadyArmed"
ERR_CHALLENGE_NEEDED = "challengeNeeded"
ERR_CHALLENGE_NOT_SETUP = "challengeFailedNotSetup"
ERR_TOO_MANY_FAILED_ATTEMPTS = "tooManyFailedAttempts"
ERR_PIN_INCORRECT = "pinIncorrect"
ERR_USER_CANCELLED = "userCancelled"
# Event types
EVENT_COMMAND_RECEIVED = "google_assistant_command"
EVENT_QUERY_RECEIVED = "google_assistant_query"
EVENT_SYNC_RECEIVED = "google_assistant_sync"
# Default Google device type for each Home Assistant domain.
DOMAIN_TO_GOOGLE_TYPES = {
    camera.DOMAIN: TYPE_CAMERA,
    climate.DOMAIN: TYPE_THERMOSTAT,
    cover.DOMAIN: TYPE_BLINDS,
    fan.DOMAIN: TYPE_FAN,
    group.DOMAIN: TYPE_SWITCH,
    input_boolean.DOMAIN: TYPE_SWITCH,
    light.DOMAIN: TYPE_LIGHT,
    lock.DOMAIN: TYPE_LOCK,
    media_player.DOMAIN: TYPE_SWITCH,
    scene.DOMAIN: TYPE_SCENE,
    script.DOMAIN: TYPE_SCENE,
    switch.DOMAIN: TYPE_SWITCH,
    vacuum.DOMAIN: TYPE_VACUUM,
    alarm_control_panel.DOMAIN: TYPE_ALARM,
}
# More specific Google type overrides keyed by (domain, device_class).
DEVICE_CLASS_TO_GOOGLE_TYPES = {
    (cover.DOMAIN, cover.DEVICE_CLASS_GARAGE): TYPE_GARAGE,
    (cover.DOMAIN, cover.DEVICE_CLASS_DOOR): TYPE_DOOR,
    (switch.DOMAIN, switch.DEVICE_CLASS_SWITCH): TYPE_SWITCH,
    (switch.DOMAIN, switch.DEVICE_CLASS_OUTLET): TYPE_OUTLET,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_DOOR): TYPE_DOOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_GARAGE_DOOR): TYPE_GARAGE,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_LOCK): TYPE_SENSOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_OPENING): TYPE_SENSOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_WINDOW): TYPE_SENSOR,
    (media_player.DOMAIN, media_player.DEVICE_CLASS_TV): TYPE_TV,
    (media_player.DOMAIN, media_player.DEVICE_CLASS_SPEAKER): TYPE_SPEAKER,
    (sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE): TYPE_SENSOR,
    (sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY): TYPE_SENSOR,
}
# Secondary-verification challenge identifiers.
CHALLENGE_ACK_NEEDED = "ackNeeded"
CHALLENGE_PIN_NEEDED = "pinNeeded"
CHALLENGE_FAILED_PIN_NEEDED = "challengeFailedPinNeeded"
STORE_AGENT_USER_IDS = "agent_user_ids"
SOURCE_CLOUD = "cloud"
SOURCE_LOCAL = "local"
| apache-2.0 |
LoyolaCSDepartment/LDA-ICPC-2014 | topic-models/topic-count/xmlsplit.py | 1 | 1409 | #! /usr/bin/env /usr/bin/python3
import os
import sys
def ensure_dir(f):
    """Create the parent directory of *f* if it does not already exist.

    Args:
        f: File path whose containing directory should exist.
    """
    d = os.path.dirname(f)
    # Guard against a bare filename: dirname('') would make makedirs raise
    # FileNotFoundError in the original.  exist_ok=True also removes the
    # check-then-create race of the old `if not exists: makedirs` pattern.
    if d:
        os.makedirs(d, exist_ok=True)
def xmlSplit(infile_name, dest_dir):
    """Split an XML dump into numbered per-source files.

    Reads *infile_name* (latin-1) and writes consecutive files named
    1.txt, 2.txt, ... into *dest_dir*, starting a new output file after
    every line that begins with '</source>'.  Non-ASCII characters are
    stripped and every written line is guaranteed a trailing newline.

    Args:
        infile_name: Path of the concatenated XML input file.
        dest_dir: Directory to create the split files in (created if
            missing).
    """
    try:
        in_file = open(infile_name, 'r', encoding='latin_1')
    except IOError:
        print("File not found.")
        return
    dest_dir += '/'  # redundant ok; without sucks!
    # exist_ok avoids the check-then-create race; dest_dir is never empty.
    os.makedirs(dest_dir, exist_ok=True)
    file_num = 1
    out_file = None
    # Context manager / finally ensure both handles are closed even if an
    # I/O error occurs mid-split (the original leaked on exceptions and
    # closed the last output file twice).
    with in_file:
        try:
            out_file = open('%s%d.txt' % (dest_dir, file_num), 'w')
            for x in in_file:
                if out_file is None:
                    # Previous section just ended - start the next file.
                    out_file = open('%s%d.txt' % (dest_dir, file_num), 'w')
                if not x.endswith('\n'):
                    x = '%s\n' % (x)
                # hack to remove non-ascii characters
                x = ''.join([c for c in x if ord(c) < 128])
                out_file.write(x)
                if x.startswith('</source>'):
                    out_file.close()
                    out_file = None
                    file_num += 1
            print('{0} files'.format(file_num - 1) + " left in " + dest_dir)
        finally:
            if out_file is not None:
                out_file.close()
if __name__ == "__main__":
    # Guard the CLI so importing this module (e.g. to reuse xmlSplit) no
    # longer inspects sys.argv and calls sys.exit at import time.
    if len(sys.argv) != 3:
        print("usage: " + sys.argv[0] + " <input xml file> <output directory>")
        sys.exit(-1)
    xmlSplit(sys.argv[1], sys.argv[2])
# example call: xmlsplit.py cook.xml /scratch/topics/out')
# xmlSplit('<FIX ME>/topic-models/topic-count/sources/', 'cook.xml')
| mit |
glennmatthews/cot | COT/vm_description/ovf/hardware.py | 1 | 24156 | #!/usr/bin/env python
#
# hardware.py - OVFHardware class
#
# June 2016, Glenn F. Matthews
# Copyright (c) 2013-2016, 2019 the COT project developers.
# See the COPYRIGHT.txt file at the top-level directory of this distribution
# and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt.
#
# This file is part of the Common OVF Tool (COT) project.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at
# https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part
# of COT, including this file, may be copied, modified, propagated, or
# distributed except according to the terms contained in the LICENSE.txt file.
"""Representation of OVF hardware definitions.
**Classes and Exceptions**
.. autosummary::
:nosignatures:
OVFHardware
OVFHardwareDataError
"""
import copy
import logging
from COT.data_validation import natural_sort
from COT.xml_file import XML
from .item import OVFItem, OVFItemDataError
logger = logging.getLogger(__name__)
class OVFHardwareDataError(Exception):
    """The input data used to construct an :class:`OVFHardware` is not sane.

    Raised when an Item references a profile not defined in the OVF's
    DeploymentOptionSection, or when two Items sharing an InstanceID
    carry conflicting data.
    """
class OVFHardware(object):
"""Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`.
Represents all hardware items defined by this OVF;
i.e., the contents of all Items in the VirtualHardwareSection.
Fundamentally it's just a dict of
:class:`~COT.vm_description.ovf.item.OVFItem` objects
with a bunch of helper methods.
"""
    def __init__(self, ovf):
        """Construct an OVFHardware object describing all Items in the OVF.

        Args:
            ovf (OVF): OVF instance to extract hardware information from.

        Raises:
            OVFHardwareDataError: if any data errors are seen
        """
        self.ovf = ovf
        # Maps InstanceID string -> OVFItem aggregating all of that
        # instance's per-profile Item elements.
        self.item_dict = {}
        valid_profiles = set(ovf.config_profiles)
        item_count = 0
        for item in ovf.virtual_hw_section:
            namespace = ovf.namespace_for_item_tag(item.tag)
            if not namespace:
                # Not a hardware Item element (e.g. Info/System) - skip it.
                continue
            item_count += 1
            # We index the dict by InstanceID as it's the one property of
            # an Item that uniquely identifies this set of hardware items.
            instance = item.find(namespace + self.ovf.INSTANCE_ID).text
            # Pre-sanity check - are all of the profiles associated with this
            # item properly defined in the OVF DeploymentOptionSection?
            item_profiles = set(item.get(self.ovf.ITEM_CONFIG, "").split())
            unknown_profiles = item_profiles - valid_profiles
            if unknown_profiles:
                raise OVFHardwareDataError("Unknown profile(s) {0} for "
                                           "Item instance {1}"
                                           .format(unknown_profiles, instance))
            if instance not in self.item_dict:
                self.item_dict[instance] = OVFItem(self.ovf, item)
            else:
                try:
                    self.item_dict[instance].add_item(item)
                except OVFItemDataError as exc:
                    logger.debug(exc)
                    # Mask away the nitty-gritty details from our caller
                    raise OVFHardwareDataError("Data conflict for instance {0}"
                                               .format(instance))
        logger.debug(
            "OVF contains %s hardware Item elements describing %s "
            "unique devices", item_count, len(self.item_dict))
        # Treat the current state as golden:
        for ovfitem in self.item_dict.values():
            ovfitem.modified = False
    def update_xml(self):
        """Regenerate all Items under the VirtualHardwareSection, if needed.

        Will do nothing if no Items have been changed.
        """
        modified = False
        # A change in the number of Item elements (item added or deleted)
        # also counts as a modification, even if no OVFItem is flagged.
        if len(self.item_dict) != len(XML.find_all_children(
                self.ovf.virtual_hw_section,
                set([self.ovf.ITEM, self.ovf.STORAGE_ITEM,
                     self.ovf.ETHERNET_PORT_ITEM]))):
            modified = True
        else:
            for ovfitem in self.item_dict.values():
                if ovfitem.modified:
                    modified = True
                    break
        if not modified:
            logger.verbose("No changes to hardware definition, "
                           "so no XML update is required")
            return
        # Delete the existing Items:
        delete_count = 0
        for item in list(self.ovf.virtual_hw_section):
            if (item.tag == self.ovf.ITEM or
                    item.tag == self.ovf.STORAGE_ITEM or
                    item.tag == self.ovf.ETHERNET_PORT_ITEM):
                self.ovf.virtual_hw_section.remove(item)
                delete_count += 1
        logger.debug("Cleared %d existing items from VirtualHWSection",
                     delete_count)
        # Generate the new XML Items, in appropriately sorted order by Instance
        ordering = [self.ovf.INFO, self.ovf.SYSTEM, self.ovf.ITEM]
        for instance in natural_sort(self.item_dict):
            logger.debug("Writing Item(s) with InstanceID %s", instance)
            ovfitem = self.item_dict[instance]
            new_items = ovfitem.generate_items()
            logger.spam("Generated %d items", len(new_items))
            for item in new_items:
                XML.add_child(self.ovf.virtual_hw_section, item, ordering)
        logger.verbose("Updated XML VirtualHardwareSection, now contains %d "
                       "Items representing %d devices",
                       len(self.ovf.virtual_hw_section.findall(self.ovf.ITEM)),
                       len(self.item_dict))
def find_unused_instance_id(self, start=1):
"""Find the first available ``InstanceID`` number.
Args:
start (int): First InstanceID value to consider (disregarding all
lower InstanceIDs, even if available).
Returns:
str: An instance ID that is not yet in use.
"""
instance = int(start)
while str(instance) in self.item_dict.keys():
instance += 1
logger.debug("Found unused InstanceID %d", instance)
return str(instance)
def new_item(self, resource_type, profile_list=None):
"""Create a new OVFItem of the given type.
Args:
resource_type (str): String such as 'cpu' or 'harddisk' - used as
a key to
:data:`~COT.vm_description.ovf.name_helper.OVFNameHelper1.RES_MAP`
profile_list (list): Profiles the new item should belong to
Returns:
tuple: ``(instance_id, ovfitem)``
"""
instance = self.find_unused_instance_id()
ovfitem = OVFItem(self.ovf)
ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list)
ovfitem.set_property(self.ovf.RESOURCE_TYPE,
self.ovf.RES_MAP[resource_type],
profile_list)
# ovftool freaks out if we leave out the ElementName on an Item,
# so provide a simple default value.
ovfitem.set_property(self.ovf.ELEMENT_NAME, resource_type,
profile_list)
self.item_dict[instance] = ovfitem
ovfitem.modified = True
logger.info("Created new %s under profile(s) %s, InstanceID is %s",
resource_type, profile_list, instance)
return (instance, ovfitem)
def delete_item(self, item):
"""Delete the given Item from the hardware.
Args:
item (OVFItem): Item to delete
"""
instance = item.get_value(self.ovf.INSTANCE_ID)
if self.item_dict[instance] == item:
del self.item_dict[instance]
# TODO: error handling - currently a no-op if item not in item_dict
    def clone_item(self, parent_item, profile_list):
        """Clone an OVFItem to create a new instance.

        Args:
            parent_item (OVFItem): Instance to clone from
            profile_list (list): List of profiles to clone into

        Returns:
            tuple: ``(instance_id, ovfitem)``
        """
        # Start the ID search at the parent's ID so clones sort near it.
        instance = self.find_unused_instance_id(start=parent_item.instance_id)
        logger.spam("Cloning existing Item %s with new instance ID %s",
                    parent_item, instance)
        ovfitem = copy.deepcopy(parent_item)
        # Delete any profiles from the parent that we don't need now,
        # otherwise we'll get an error when trying to set the instance ID
        # on our clone due to self-inconsistency (#64).
        for profile in self.ovf.config_profiles:
            if ovfitem.has_profile(profile) and profile not in profile_list:
                ovfitem.remove_profile(profile)
        ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list)
        ovfitem.modified = True
        self.item_dict[instance] = ovfitem
        logger.spam("Added clone of %s under %s, instance is %s",
                    parent_item, profile_list, instance)
        return (instance, ovfitem)
def item_match(self, item, resource_type, properties, profile_list):
"""Check whether the given item matches the given filters.
Args:
item (OVFItem): Item to validate
resource_type (str): Resource type string like 'scsi' or 'serial'
properties (dict): Properties and their values to match
profile_list (list): List of profiles to filter on
Returns:
bool: True if the item matches all filters, False if not.
"""
if resource_type and (self.ovf.RES_MAP[resource_type] !=
item.get_value(self.ovf.RESOURCE_TYPE)):
return False
if profile_list:
for profile in profile_list:
if not item.has_profile(profile):
return False
for (prop, value) in properties.items():
if item.get_value(prop) != value:
return False
return True
def find_all_items(self, resource_type=None, properties=None,
profile_list=None):
"""Find all items matching the given type, properties, and profiles.
Args:
resource_type (str): Resource type string like 'scsi' or 'serial'
properties (dict): Properties and their values to match
profile_list (list): List of profiles to filter on
Returns:
list: Matching OVFItem instances
"""
items = [self.item_dict[instance] for instance in
natural_sort(self.item_dict)]
filtered_items = []
if properties is None:
properties = {}
for item in items:
if self.item_match(item, resource_type, properties, profile_list):
filtered_items.append(item)
logger.spam("Found %s Items of type %s with properties %s and"
" profiles %s", len(filtered_items), resource_type,
properties, profile_list)
return filtered_items
def find_item(self, resource_type=None, properties=None, profile=None):
"""Find the only OVFItem of the given :attr:`resource_type`.
Args:
resource_type (str): Resource type string like 'scsi' or 'serial'
properties (dict): Properties and their values to match
profile (str): Single profile ID to search within
Returns:
OVFItem: Matching instance, or None
Raises:
LookupError: if more than one such Item exists.
"""
matches = self.find_all_items(resource_type, properties, [profile])
if len(matches) > 1:
raise LookupError(
"Found multiple matching '{0}' Items (instances {1})"
.format(resource_type, [m.instance_id for m in matches]))
elif len(matches) == 0:
return None
else:
return matches[0]
def get_item_count(self, resource_type, profile):
"""Get the number of Items of the given type for the given profile.
Wrapper for :meth:`get_item_count_per_profile`.
Args:
resource_type (str): Resource type string like 'scsi' or 'serial'
profile (str): Single profile identifier string to look up.
Returns:
int: Number of items of this type in this profile.
"""
return (self.get_item_count_per_profile(resource_type, [profile])
[profile])
def get_item_count_per_profile(self, resource_type, profile_list):
"""Get the number of Items of the given type per profile.
Items present under "no profile" will be counted against
the total for each profile.
Args:
resource_type (str): Resource type string like 'scsi' or 'serial'
profile_list (list): List of profiles to filter on
(default: apply across all profiles)
Returns:
dict: mapping profile strings to the number of items under each
profile.
"""
count_dict = {}
if not profile_list:
# Get the count under all profiles
profile_list = self.ovf.config_profiles + [None]
for profile in profile_list:
count_dict[profile] = 0
for ovfitem in self.find_all_items(resource_type):
for profile in profile_list:
if ovfitem.has_profile(profile):
count_dict[profile] += 1
for (profile, count) in count_dict.items():
logger.spam("Profile '%s' has %s %s Item(s)",
profile, count, resource_type)
return count_dict
def _update_existing_item_profiles(self, resource_type,
                                   count, profile_list):
    """Change profile membership of existing items as needed.

    Helper method for :meth:`set_item_count_per_profile`.  Trims profiles
    from surplus Items and adds profiles to Items borrowed from other
    profiles, so that each profile in ``profile_list`` ends up with at
    most ``count`` Items; new Items are only counted, not created here.

    Args:
      resource_type (str): 'cpu', 'harddisk', etc.
      count (int): Desired number of items
      profile_list (list): List of profiles to filter on
          (default: apply across all profiles)

    Returns:
      tuple: (count_dict, items_to_add, last_item) where
        - count_dict maps each profile to its updated item count,
        - items_to_add is how many brand-new Items must still be created
          to reach ``count`` in every profile,
        - last_item is the final Item seen (template for cloning), or
          None if no Items of this type exist at all.
    """
    count_dict = self.get_item_count_per_profile(resource_type,
                                                 profile_list)
    # items_seen tracks, per profile, how many Items we have kept so far
    # while walking the existing Items in order.
    items_seen = dict.fromkeys(profile_list, 0)
    last_item = None
    # First, iterate over existing Items.
    # Once we've seen "count" items under a profile, remove all subsequent
    # items from this profile.
    # If we don't have enough items under a profile, add any items found
    # under other profiles to this profile as well.
    for ovfitem in self.find_all_items(resource_type):
        last_item = ovfitem
        for profile in profile_list:
            if ovfitem.has_profile(profile):
                if items_seen[profile] >= count:
                    # Too many items - remove this one!
                    ovfitem.remove_profile(profile)
                else:
                    items_seen[profile] += 1
            else:
                if count_dict[profile] < count:
                    # Add this profile to this Item
                    ovfitem.add_profile(profile)
                    count_dict[profile] += 1
                    items_seen[profile] += 1
    # How many new Items do we need to create in total?
    # The answer is the largest remaining shortfall across all profiles.
    items_to_add = 0
    for profile in profile_list:
        delta = count - items_seen[profile]
        if delta > items_to_add:
            items_to_add = delta
    return count_dict, items_to_add, last_item
def _update_cloned_item(self, new_item, new_item_profiles, item_count):
    """Update a cloned item to make it distinct from its parent.

    Helper method for :meth:`set_item_count_per_profile`.  Bumps the
    clone's ``AddressOnParent`` by one (integer addresses only) and, for
    Ethernet items, renames it per the platform's NIC-naming convention.

    Args:
      new_item (OVFItem): Newly cloned Item
      new_item_profiles (list): Profiles new_item should belong to
      item_count (int): How many Items of this type (including this
          item) now exist. Used with
          :meth:`COT.platform.Platform.guess_nic_name`

    Returns:
      OVFItem: Updated :param:`new_item`

    Raises:
      NotImplementedError: No support yet for updating ``Address``
      NotImplementedError: If updating ``AddressOnParent`` but the
          prior value varies across config profiles.
      NotImplementedError: if ``AddressOnParent`` is not an integer.
    """
    resource_type = new_item.hardware_type
    # A literal Address (e.g. a controller bus address) cannot simply be
    # incremented safely, so refuse rather than risk a collision.
    address = new_item.get(self.ovf.ADDRESS)
    if address:
        raise NotImplementedError("Don't know how to ensure a unique "
                                  "Address value when cloning an Item "
                                  "of type {0}".format(resource_type))
    address_on_parent = new_item.get(self.ovf.ADDRESS_ON_PARENT)
    if address_on_parent:
        address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT)
        if len(address_list) > 1:
            raise NotImplementedError("AddressOnParent is not common "
                                      "across all profiles but has "
                                      "multiple values {0}. COT can't "
                                      "handle this yet."
                                      .format(address_list))
        address_on_parent = address_list[0]
        # Currently we only handle integer addresses
        try:
            address_on_parent = int(address_on_parent)
            address_on_parent += 1
            new_item.set_property(self.ovf.ADDRESS_ON_PARENT,
                                  str(address_on_parent),
                                  new_item_profiles)
        except ValueError:
            # int() failed - the address is non-numeric and we have no
            # general rule for producing the "next" such address.
            raise NotImplementedError("Don't know how to ensure a "
                                      "unique AddressOnParent value "
                                      "given base value '{0}'"
                                      .format(address_on_parent))
    if resource_type == 'ethernet':
        # Update ElementName to reflect the NIC number
        element_name = self.ovf.platform.guess_nic_name(item_count)
        new_item.set_property(self.ovf.ELEMENT_NAME, element_name,
                              new_item_profiles)
    return new_item
def set_item_count_per_profile(self, resource_type, count, profile_list):
    """Set the number of items of a given type under the given profile(s).

    If the new count is greater than the current count under this
    profile, then additional instances that already exist under
    another profile will be added to this profile, starting with
    the lowest-sequence instance not already present, and only as
    a last resort will new instances be created.

    If the new count is less than the current count under this profile,
    then the highest-numbered instances will be removed preferentially.

    Args:
      resource_type (str): 'cpu', 'harddisk', etc.
      count (int): Desired number of items
      profile_list (list): List of profiles to filter on
          (default: apply across all profiles)
    """
    if not profile_list:
        # Set the profile list for all profiles, including the default
        profile_list = self.ovf.config_profiles + [None]
    # Reuse/trim existing Items first; items_to_add is what's still
    # missing, and last_item is the template for cloning (may be None).
    count_dict, items_to_add, last_item = \
        self._update_existing_item_profiles(
            resource_type, count, profile_list)
    logger.debug("Creating %d new items", items_to_add)
    while items_to_add > 0:
        # Which profiles does this Item need to belong to?
        new_item_profiles = []
        for profile in profile_list:
            if count_dict[profile] < count:
                new_item_profiles.append(profile)
                count_dict[profile] += 1
        if last_item is None:
            # Nothing to clone from - build the first Item from scratch.
            logger.notice("No existing items of type %s found. "
                          "Will create new %s from scratch.",
                          resource_type, resource_type)
            (_, new_item) = self.new_item(resource_type, new_item_profiles)
        else:
            (_, new_item) = self.clone_item(last_item, new_item_profiles)
        # Check/update other properties of the clone that should be unique:
        # TODO - we assume that the count is the same across profiles
        new_item = self._update_cloned_item(
            new_item, new_item_profiles, count_dict[new_item_profiles[0]])
        # Each new Item becomes the clone template for the next one.
        last_item = new_item
        items_to_add -= 1
def set_value_for_all_items(self, resource_type, prop_name, new_value,
                            profile_list, create_new=False):
    """Set a property to the given value for all items of the given type.

    If no items of the given type exist, will create a new ``Item`` if
    :attr:`create_new` is set to ``True``; otherwise will log a warning
    and do nothing.

    Args:
      resource_type (str): Resource type such as 'cpu' or 'harddisk'
      prop_name (str): Property name to update
      new_value (str): New value to set the property to
      profile_list (list): List of profiles to filter on
          (default: apply across all profiles)
      create_new (bool): Whether to create a new entry if no items
          of this :attr:`resource_type` presently exist.
    """
    items = self.find_all_items(resource_type)
    if not items:
        if create_new:
            logger.notice("No existing items of type %s found. "
                          "Will create new %s from scratch.",
                          resource_type, resource_type)
            _, created = self.new_item(resource_type, profile_list)
            items = [created]
        else:
            logger.warning("No items of type %s found. Nothing to do.",
                           resource_type)
            return
    for item in items:
        item.set_property(prop_name, new_value, profile_list)
    logger.debug("Updated %s %s to %s under profiles %s",
                 resource_type, prop_name, new_value, profile_list)
def set_item_values_per_profile(self, resource_type, prop_name, value_list,
                                profile_list, default=None):
    """Set value(s) for a property of multiple items of a type.

    Values are assigned to Items in iteration order; once ``value_list``
    is exhausted, remaining Items receive ``default``.

    Args:
      resource_type (str): Device type such as 'harddisk' or 'cpu'
      prop_name (str): Property name to update
      value_list (list): List of values to set (one value per item of the
          given :attr:`resource_type`). The caller's list is not modified.
      profile_list (list): List of profiles to filter on
          (default: apply across all profiles)
      default (str): If there are more matching items than entries in
          :attr:`value_list`, set extra items to this value
    """
    if profile_list is None:
        profile_list = self.ovf.config_profiles + [None]
    # BUG FIX: the original popped entries directly off value_list,
    # destructively consuming the caller's list. Work on a copy instead,
    # preserving the leftover-values warning semantics below.
    remaining = list(value_list)
    for ovfitem in self.find_all_items(resource_type):
        new_value = remaining.pop(0) if remaining else default
        for profile in profile_list:
            if ovfitem.has_profile(profile):
                ovfitem.set_property(prop_name, new_value, [profile])
        logger.info("Updated %s property %s to %s under %s",
                    resource_type, prop_name, new_value, profile_list)
    if remaining:
        logger.warning("After scanning all known %s Items, not all "
                       "%s values were used - leftover %s",
                       resource_type, prop_name, remaining)
| mit |
kevalds51/sympy | sympy/matrices/expressions/hadamard.py | 91 | 2443 | from __future__ import print_function, division
from sympy.core import Mul, sympify
from sympy.strategies import unpack, flatten, condition, exhaust, do_one
from sympy.matrices.expressions.matexpr import MatrixExpr, ShapeError
def hadamard_product(*matrices):
    """Return the elementwise (aka Hadamard) product of matrices.

    Examples
    ========

    >>> from sympy.matrices import hadamard_product, MatrixSymbol
    >>> A = MatrixSymbol('A', 2, 3)
    >>> B = MatrixSymbol('B', 2, 3)
    >>> hadamard_product(A)
    A
    >>> hadamard_product(A, B)
    A.*B
    >>> hadamard_product(A, B)[0, 1]
    A[0, 1]*B[0, 1]
    """
    n_args = len(matrices)
    if n_args == 0:
        raise TypeError("Empty Hadamard product is undefined")
    validate(*matrices)
    # A single-factor product is just that factor itself.
    if n_args == 1:
        return matrices[0]
    return HadamardProduct(*matrices).doit()
class HadamardProduct(MatrixExpr):
    """Elementwise product of matrix expressions

    This is a symbolic object that simply stores its argument without
    evaluating it. To actually compute the product, use the function
    ``hadamard_product()``.

    >>> from sympy.matrices import hadamard_product, HadamardProduct, MatrixSymbol
    >>> A = MatrixSymbol('A', 5, 5)
    >>> B = MatrixSymbol('B', 5, 5)
    >>> isinstance(hadamard_product(A, B), HadamardProduct)
    True
    """
    is_HadamardProduct = True

    def __new__(cls, *args, **kwargs):
        # Sympify every argument up front so downstream code sees
        # sympy objects rather than raw Python values.
        args = [sympify(arg) for arg in args]
        if kwargs.get('check', True):
            validate(*args)
        return super(HadamardProduct, cls).__new__(cls, *args)

    @property
    def shape(self):
        # validate() guarantees all factors share one shape.
        return self.args[0].shape

    def _entry(self, i, j):
        # (A.*B)[i, j] == A[i, j]*B[i, j]
        factors = [arg._entry(i, j) for arg in self.args]
        return Mul(*factors)

    def _eval_transpose(self):
        from sympy.matrices.expressions.transpose import transpose
        transposed = [transpose(arg) for arg in self.args]
        return HadamardProduct(*transposed)

    def doit(self, **ignored):
        # Apply the module-level canonicalization rules (unpack, flatten).
        return canonicalize(self)
def validate(*args):
    """Check that the args are matrices all sharing the same shape.

    Raises:
        TypeError: if any argument is not a matrix expression.
        ShapeError: if the matrices' shapes do not all match the first.
    """
    for arg in args:
        if not arg.is_Matrix:
            raise TypeError("Mix of Matrix and Scalar symbols")
    first = args[0]
    for other in args[1:]:
        if first.shape != other.shape:
            raise ShapeError("Matrices %s and %s are not aligned" % (first, other))
# Canonicalization rules applied by HadamardProduct.doit():
#   unpack  - HadamardProduct(A) -> A
#   flatten - merge nested HadamardProducts into a single flat product
rules = (unpack,
         flatten)

# Repeatedly apply one of the rules above, but only to HadamardProduct
# nodes, until the expression stops changing.
canonicalize = exhaust(condition(lambda x: isinstance(x, HadamardProduct),
                                 do_one(*rules)))
| bsd-3-clause |
vlegoff/tsunami | src/primaires/scripting/config.py | 1 | 2340 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient la configuration par défaut du module 'scripting'."""
cfg_exportation = r"""
# Ce fichier contient la configuration de l'exportation du scripting.
# L'exportation du scripting permet de générer automtiquement
# la documentation des fonctions et actions du scripting et de
# l'enregistrer dans un fichier txt au format Dokuwiki.
# Cette exportation est désactivée par défaut. Si vous voulez l'activer,
# lisez bien la configuration qui suit.
# Pour activer l'exportation, mettez l'option qui suit à True.
active = False
# Chemin du fichier dans lequel écrire la documentation des actions
chemin_doc_actions = "actions.txt"
# Chemin du fichier dans lequel écrire la documentation des fonctions
chemin_doc_fonctions = "fonctions.txt"
"""
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.