repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
gijs/solpy | tests/unit.py | 2 | 19954 | """solpy unit tests."""
import unittest
import doctest
import json
import logging
from solpy import pv
from solpy import modules
from solpy import design
from solpy import inverters
from solpy import enphase
from solpy import ee
from solpy import vd
logging.basicConfig(level=logging.ERROR)
# modules.py
class TestModules(unittest.TestCase):
    """Tests for the photovoltaic panel model in solpy.modules."""
    def test_module(self):
        """Check the electrical characteristics of a known panel model."""
        model = modules.model_search('Powertec 250 PL')[0]
        p = modules.Module(model)
        # assertAlmostEqual replaces the deprecated assertAlmostEquals
        # alias (deprecated since Python 3.2, removed in 3.12).
        self.assertAlmostEqual(p.v_max(-10), 42.3129)
        self.assertAlmostEqual(p.v_dc(), 31.28)
        self.assertAlmostEqual(p.i_dc(), 8.01)
        self.assertAlmostEqual(p.v_min(40), 24.931724)
        self.assertAlmostEqual(p.output(900), 225.49752)
# inverters.py
class TestInverters(unittest.TestCase):
    """Placeholder test case for solpy.inverters."""
    def test_inverter(self):
        # TODO: no inverter assertions implemented yet; this test passes
        # vacuously.
        pass
class TestEE(unittest.TestCase):
    """Run the doctests in solpy.ee as a regular unittest test."""
    def test_inverter(self):
        # The original body built a DocTestSuite and immediately discarded
        # it, so no doctest ever ran and this test always passed.  Run the
        # suite explicitly and fail if any doctest fails.  (load_tests()
        # below also registers these doctests for runners that honour the
        # load_tests protocol; this keeps the check for runners that don't.)
        result = unittest.TestResult()
        doctest.DocTestSuite(ee).run(result)
        self.assertTrue(result.wasSuccessful(),
                        msg=str(result.failures + result.errors))
# pv.py
# irradiation.py
class TestModeling(unittest.TestCase):
    """End-to-end modeling tests: build a plant from a JSON description
    and check the simulated output against known-good values.

    All uses of the deprecated assertAlmostEquals alias have been replaced
    with assertAlmostEqual (the alias was removed in Python 3.12).
    """

    def test_annual_output1(self):
        """Model a 20-panel micro-inverter system (per-array tilt/azimuth)."""
        p1 = """{"system_name":"HAPPY CUSTOMER",
        "address":"15013 Denver W Pkwy, Golden, CO",
        "zipcode":"80401",
        "phase":1,
        "voltage":240,
        "array":[
            {"inverter":"Enphase Energy: M215-60-2LL-S2x-IG-NA (240 V) 240V",
            "panel":"Mage Solar : Powertec Plus 250-6 PL",
            "quantity":20,
            "azimuth":180,
            "tilt":25
            }
        ]}"""
        plant = pv.json_system(json.loads(p1))
        rs = plant.model()
        self.assertAlmostEqual(rs.annual_output, 7689.05)

    def test_annual_output2(self):
        """A two-inverter string system must survive a dump/load round trip
        and model to the same annual output afterwards."""
        p1 = """{"system_name":"HAPPY CUSTOMER",
        "address":"15013 Denver W Pkwy, Golden, CO",
        "zipcode":"80401",
        "tilt":23,
        "azimuth":180,
        "phase":1,
        "voltage":240,
        "array":[
            {"inverter":"SMA America: SB11000TL-US-12 (240V) 240V",
            "panel":"Axitec : AC-250P-156-60S *",
            "series":14,
            "parallel":4,
            "quantity":7
            },
            {"inverter":"SMA America: SB8000TL-US-12 (240V) 240V",
            "panel":"Axitec : AC-250P-156-60S *",
            "series":14,
            "parallel":3,
            "quantity":1
            }
        ]}
        """
        js1 = json.loads(p1)
        plant1 = pv.json_system(js1)
        rs1 = plant1.model()
        # Round-trip: dump the constructed plant, rebuild it, dump again.
        # The second and third dumps must be identical (dump is stable).
        js2 = json.loads(json.dumps(plant1.dump()))
        plant2 = pv.json_system(js2)
        js3 = json.loads(json.dumps(plant2.dump()))
        rs2 = plant2.model()
        self.assertEqual(js2, js3)
        self.assertAlmostEqual(rs1.annual_output, rs2.annual_output)

    def test_hourlyNoShading(self):
        """Hourly simulation of an unshaded scaled system."""
        p1 = """{"system_name":"Another Happy Customer",
        "zipcode":"44654",
        "tilt":25,
        "azimuth":180,
        "phase":1,
        "voltage":240,
        "array":[
            {"inverter":"Enphase Energy: M215-60-2LL-S2x-IG-NA (240 V) 240V",
            "derate":0.98,
            "panel":"Mage Solar : Powertec Plus 250-6 PL",
            "series":1,
            "parallel":1,
            "scale":36
            }
        ]}"""
        js1 = json.loads(p1)
        plant1 = pv.json_system(js1)
        rs1 = plant1.model()
        self.assertAlmostEqual(rs1.annual_output, 12939.9)

    def test_hourlyShading(self):
        """Same system as test_hourlyNoShading but with a month-by-hour
        shade matrix (12 keys x 48 half-hour slots); output must drop."""
        p1 = """{"system_name":"Another Happy Customer",
        "zipcode":"44654",
        "tilt":25,
        "azimuth":180,
        "phase":1,
        "voltage":240,
        "shade":{"0": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 1.0, 1.0, 1.0, 1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "1": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6, 1.0, 0.2, 0.0, 0.1, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.4, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "2": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6, 1.0, 1.0, 0.0, 0.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "3": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "4": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.9, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "5": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "6": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "7": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "8": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8, 1.0, 1.0, 0.1, 0.0, 0.6, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.6, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "9": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9, 1.0, 0.1, 0.0, 0.1, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "10": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4, 1.0, 1.0, 1.0, 1.0, 0.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "11": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 1.0, 1.0, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]},
        "array":[
            {"inverter":"Enphase Energy: M215-60-2LL-S2x-IG-NA (240 V) 240V",
            "derate":0.98,
            "panel":"Mage Solar : Powertec Plus 250-6 PL",
            "series":1,
            "parallel":1,
            "scale":36
            }
        ]}"""
        js1 = json.loads(p1)
        plant1 = pv.json_system(js1)
        rs1 = plant1.model()
        self.assertAlmostEqual(rs1.annual_output, 9098.56)
#design.py
#tools.py
class TestDesign(unittest.TestCase):
    """Tests for solpy.design string-sizing helpers."""
    def test_tools_fill(self):
        """tools_fill should enumerate valid series/parallel options."""
        m = "Mage Solar : USA Powertec Plus 250-6 MNCS"
        ms = modules.Module(m)
        zc = '27713'
        system = inverters.Inverter("SMA America: SB7000US-11 277V",
                                    modules.Array(ms, [{'series':14}]))
        sols = design.tools_fill(system, zc, mount="Roof")
        # Expected option strings in the current str_format() layout,
        # sorted by array wattage.  (A stale expected list in an older
        # output format, plus the commented-out debug prints, were removed.)
        ans1 = ['6513.0W : channel 0: 13S x 2P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 0.93 : 323.0 - 552.0 V',
                '7014.0W : channel 0: 14S x 2P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.0 : 348.0 - 595.0 V',
                '8266.5W : channel 0: 11S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.18 : 274.0 - 467.0 V',
                '9018.0W : channel 0: 12S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.29 : 298.0 - 510.0 V',
                '9769.5W : channel 0: 13S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.4 : 323.0 - 552.0 V',
                '10521.0W : channel 0: 14S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.5 : 348.0 - 595.0 V']
        self.assertListEqual(ans1, sols)
class TestDesign1(unittest.TestCase):
    """Check design.generate_options output for a known inverter/panel pair."""
    def test_generate(self):
        panel_name = "Mage Solar : USA Powertec Plus 250-6 MNCS"
        zipcode = '27713'
        inverter_name = "SMA America: SB7000US-11 277V"
        options = design.generate_options(inverter_name, panel_name, zipcode,
                                          mount="Roof")
        formatted = [design.str_format(option) for option in options]
        expected = ['6513.0W : channel 0: 13S x 2P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 0.93 : 323.0 - 552.0 V',
                    '7014.0W : channel 0: 14S x 2P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.0 : 348.0 - 595.0 V',
                    '8266.5W : channel 0: 11S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.18 : 274.0 - 467.0 V',
                    '9018.0W : channel 0: 12S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.29 : 298.0 - 510.0 V',
                    '9769.5W : channel 0: 13S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.4 : 323.0 - 552.0 V',
                    '10521.0W : channel 0: 14S x 3P Mage Solar : USA Powertec Plus 250-6 MNCS : ratio 1.5 : 348.0 - 595.0 V']
        self.assertListEqual(expected, formatted)
class TestVirr(unittest.TestCase):
    """Test virtual irradiance (virr) back-calculation from plant output."""
    def test_virr1(self):
        import datetime
        p1 = """{"system_name":"HAPPY CUSTOMER",
        "address":"15013 Denver W Pkwy, Golden, CO",
        "zipcode":"80401",
        "phase":1,
        "voltage":240,
        "array":[
            {"inverter":"Enphase Energy: M215-60-2LL-S2x-IG-NA (240 V) 240V",
            "panel":"Mage Solar : Powertec Plus 250-6 PL",
            "quantity":20,
            "azimuth":180,
            "tilt":25
            }
        ]}"""
        plant = pv.json_system(json.loads(p1))
        ts = datetime.datetime(2000, 9, 22, 19)
        # Weather inputs required by virr besides the timestamp.
        weather_data = {'temperature': 25, 'windSpeed': 0}
        virr_rec = plant.virr(2000, ts, weather_data)
        # assertAlmostEqual replaces the deprecated assertAlmostEquals alias.
        self.assertAlmostEqual(virr_rec['girr'], 437.0)
@unittest.skipIf(not enphase.APIKEY, 'Enphase APIKEY not set')
class TestEnphase(unittest.TestCase):
    """Live tests against the Enphase Enlighten API.

    Skipped entirely when no API key is configured.  Every test compares a
    live API response against a recorded JSON fixture.
    """
    maxDiff = None

    def setUp(self):
        """The key should be changed for an application that is testing"""
        self.user_id = '4d7a45774e6a41320a'
        self.systems = enphase.index(self.user_id)

    def _get_system(self, system_id):
        # Helper replacing the list-comprehension lookup that was repeated
        # in every test: return the indexed system with the given id.
        return [system for system in self.systems
                if int(system.system_id) == system_id][0]

    def test_index(self):
        """The system index should match the recorded fixture."""
        e1 = """{"systems":
        [{"system_id":67,
        "system_name":"Eich Residence",
        "system_public_name":"Eich Residence",
        "status":"normal",
        "timezone":"America/Los_Angeles",
        "country":"US",
        "state":"CA",
        "city":"Sebastopol",
        "postal_code":"95472",
        "other_references":["Solarfox"],
        "connection_type":"ethernet"},
        {"system_id":239922,
        "system_name":"Fornage D st",
        "system_public_name":
        "Residential System",
        "status":"normal",
        "timezone":"America/Los_Angeles",
        "country":"US",
        "state":"CA",
        "city":"Petaluma",
        "postal_code":"94952",
        "connection_type":"ethernet"},
        {"system_id":286840,
        "system_name":"Rogers, Jim",
        "system_public_name":"Residential System",
        "status":"normal",
        "timezone":"America/Los_Angeles",
        "country":"US",
        "state":"CA",
        "city":"Petaluma",
        "postal_code":"94952",
        "connection_type":"ethernet"},
        {"system_id":339784,
        "system_name":"Weed Patch Solar",
        "system_public_name":"Residential System",
        "status":"normal",
        "timezone":"America/Denver",
        "country":"US",
        "state":"ID",
        "city":"Mountain Home",
        "postal_code":"83647",
        "connection_type":"wifi"},
        {"system_id":490782,
        "system_name":"Derek's House - 5 Regalia",
        "system_public_name":"Derek's House - 5 Regalia",
        "status":"comm",
        "timezone":"America/Los_Angeles",
        "country":"US",
        "state":"CA",
        "city":"Novato",
        "postal_code":"94947-2125",
        "connection_type":"ethernet"},
        {"system_id":537438,
        "system_name":"Wagoner",
        "system_public_name":"Residential System",
        "status":"normal",
        "timezone":"America/Los_Angeles",
        "country":"US",
        "state":"CA",
        "city":"Windsor",
        "postal_code":"95492",
        "connection_type":"ethernet"}]}"""
        indexes = [enphase.System(user_id=self.user_id, **i)
                   for i in json.loads(e1)["systems"]]
        self.assertEqual(indexes, self.systems)

    def test_stats(self):
        """Five-minute interval stats for a fixed time window."""
        system67 = self._get_system(67)
        stats = system67.stats(1430742599, 1430742901)
        e1 = """{"intervals":
        [{ "powr": 15,
        "enwh": 1,
        "devices_reporting": 2,
        "end_at": 1430742600},
        { "powr": 15,
        "enwh": 1,
        "devices_reporting": 2,
        "end_at": 1430742900
        }],
        "total_devices": 35,
        "system_id": 67}"""
        self.assertEqual(stats, json.loads(e1))

    def test_rgm_stats(self):
        # TODO: revenue-grade meter stats are not exercised yet.
        pass

    def test_monthly_production(self):
        # TODO: this test fetches the system but asserts nothing; add an
        # assertion against a recorded monthly_production fixture.
        system67 = self._get_system(67)

    def test_energy_lifetime(self):
        """Daily production over a fixed historical date range."""
        system67 = self._get_system(67)
        el = system67.energy_lifetime('2008-01-28', '2008-02-02')
        e1 = """{ "production":
        [8388, 7537, 8843, 2039, 8235, 0],
        "system_id": 67,
        "start_date": "2008-01-28"}"""
        self.assertEqual(el, json.loads(e1))

    def test_envoys(self):
        """Envoy hardware listing for the system."""
        system67 = self._get_system(67)
        envoy = system67.envoys()
        for e in envoy['envoys']:
            e.pop('last_report_at')  # last_report_at is always changing; don't test that field
        e1 = """{ "system_id": 67,
        "envoys":
        [{ "status": "normal",
        "envoy_id": 434803,
        "name": "Envoy 121112607295",
        "part_number": "800-00069-r02",
        "serial_number": "121112607295"}]}"""
        self.assertEqual(envoy, json.loads(e1))

    def test_summary(self):
        """System summary for a fixed historical date."""
        system67 = self._get_system(67)
        summary = system67.summary('2008-01-28')
        summary.pop('last_report_at')
        e1 = """{ "status": "normal",
        "energy_lifetime": 14040,
        "current_power": 0,
        "modules": 0,
        "operational_at": 1201362300,
        "summary_date": "2008-01-28",
        "source": "microinverters",
        "energy_today": 8388,
        "system_id": 67,
        "size_w": 0}"""
        self.assertEqual(summary, json.loads(e1))

    def test_inventory(self):
        """Full inverter/envoy inventory for the system."""
        system67 = self._get_system(67)
        inventory = system67.inventory()
        e1 = """{ "inverters":
        [ {"model": "M190", "sn": "030909022461"},
        {"model": "M190", "sn": "030910024009"},
        {"model": "M190", "sn": "030909022244"},
        {"model": "M190", "sn": "030909022445"},
        {"model": "M190", "sn": "030909022442"},
        {"model": "M190", "sn": "030910023946"},
        {"model": "M190", "sn": "030909022271"},
        {"model": "M190", "sn": "030910023959"},
        {"model": "M190", "sn": "030909022183"},
        {"model": "M190", "sn": "030909022272"},
        {"model": "M190", "sn": "030910024007"},
        {"model": "M190", "sn": "030910024016"},
        {"model": "M190", "sn": "030910023983"},
        {"model": "M190", "sn": "030910023982"},
        {"model": "M190", "sn": "030909022453"},
        {"model": "M190", "sn": "030910023947"},
        {"model": "M190", "sn": "030910024001"},
        {"model": "M190", "sn": "030909022443"},
        {"model": "M190", "sn": "030910024029"},
        {"model": "M190", "sn": "030909022283"},
        {"model": "M190", "sn": "110923032336"},
        {"model": "M190", "sn": "110923032378"},
        {"model": "M190", "sn": "110923032337"},
        {"model": "M190", "sn": "110923032365"},
        {"model": "M190", "sn": "110923032368"},
        {"model": "M190", "sn": "110923032334"},
        {"model": "M190", "sn": "110923032346"},
        {"model": "M190", "sn": "110918030057"},
        {"model": "M190", "sn": "110918030192"},
        {"model": "M190", "sn": "110918030079"},
        {"model": "M190", "sn": "110918030185"},
        {"model": "D380", "sn": "110924032940-A"},
        {"model": "D380", "sn": "110924032940-B"},
        {"model": "D380", "sn": "110924032942-A"},
        {"model": "D380", "sn": "110924032942-B"}],
        "system_id": 67,
        "envoys": [{"sn": "121112607295"}]}"""
        self.assertEqual(inventory, json.loads(e1))
# todo: modules that still need unit tests
# geo.py
# expedite.py
# forecast.py
# noaa.py
# setup.py
# thermal.py
# collectors.py
# pathfinder.py
# tmy3.py
# epw.py
# epw_thermal.py
# fisheye.py
# nec.py
# site_analysis.py
# solar_fun.py
def load_tests(loader, tests, ignore):
    """unittest load_tests hook: also run the doctests in ee, vd, design.

    ``loader`` and ``ignore`` are required by the protocol but unused.
    """
    for module in (ee, vd, design):
        tests.addTests(doctest.DocTestSuite(module))
    return tests
if __name__ == '__main__':
    # Run the whole suite when this module is executed directly.
    unittest.main()
| lgpl-2.1 |
CJ8664/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/browsers/servodriver.py | 14 | 5013 | import os
import subprocess
import tempfile
from mozprocess import ProcessHandler
from .base import Browser, require_arg, get_free_port, browser_command, ExecutorBrowser
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorservodriver import (ServoWebDriverTestharnessExecutor,
ServoWebDriverRefTestExecutor)
# Directory containing this module (os.path.join of a single segment is a
# no-op; kept for parity with the other browser product modules).
here = os.path.join(os.path.split(__file__)[0])

# wptrunner product registration: maps wptrunner hook names to the symbols
# in this module that implement the "servodriver" product.
__wptrunner__ = {
    "product": "servodriver",
    "check_args": "check_args",
    "browser": "ServoWebDriverBrowser",
    "executor": {
        "testharness": "ServoWebDriverTestharnessExecutor",
        "reftest": "ServoWebDriverRefTestExecutor",
    },
    "browser_kwargs": "browser_kwargs",
    "executor_kwargs": "executor_kwargs",
    "env_extras": "env_extras",
    "env_options": "env_options",
    "update_properties": "update_properties",
}

# Entries written to a temporary hosts file (see make_hosts_file) so Servo
# resolves the web-platform.test domains to the local test server.
hosts_text = """127.0.0.1 web-platform.test
127.0.0.1 www.web-platform.test
127.0.0.1 www1.web-platform.test
127.0.0.1 www2.web-platform.test
127.0.0.1 xn--n8j6ds53lwwkrqhv28a.web-platform.test
127.0.0.1 xn--lve-6lad.web-platform.test
"""
def check_args(**kwargs):
    """Fail early unless the required --binary argument was supplied."""
    require_arg(kwargs, "binary")
def browser_kwargs(test_type, run_info_data, **kwargs):
    """Select the command-line options passed to ServoWebDriverBrowser()."""
    return {
        "binary": kwargs["binary"],
        "debug_info": kwargs["debug_info"],
        "user_stylesheets": kwargs.get("user_stylesheets"),
    }
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    """Build the executor kwargs; servodriver adds nothing to the base set.

    ``run_info_data`` is part of the hook signature but unused here.
    """
    return base_executor_kwargs(test_type, server_config, cache_manager,
                                **kwargs)
def env_extras(**kwargs):
    """No extra environment setup is needed for the servodriver product."""
    return []
def env_options():
    """Test-environment settings wptrunner uses for this product."""
    return {"host": "127.0.0.1",
            "external_host": "web-platform.test",
            "bind_hostname": "true",
            "testharnessreport": "testharnessreport-servodriver.js",
            "supports_debugger": True}
def update_properties():
    """Run-info properties used when updating test expectation metadata."""
    return ["debug", "os", "version", "processor", "bits"], None
def make_hosts_file():
    """Write hosts_text to a fresh temporary file and return its path.

    The caller owns the file; nothing here deletes it.
    """
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, "w") as hosts_file:
        hosts_file.write(hosts_text)
    return path
class ServoWebDriverBrowser(Browser):
    """Browser implementation that launches Servo with its WebDriver server."""

    # WebDriver ports already handed out; class-level so concurrent browser
    # instances in one process don't pick the same port.
    used_ports = set()

    def __init__(self, logger, binary, debug_info=None, webdriver_host="127.0.0.1",
                 user_stylesheets=None):
        Browser.__init__(self, logger)
        self.binary = binary
        self.webdriver_host = webdriver_host
        self.webdriver_port = None
        self.proc = None
        self.debug_info = debug_info
        # Temp hosts file mapping web-platform.test names to 127.0.0.1.
        self.hosts_path = make_hosts_file()
        self.command = None
        self.user_stylesheets = user_stylesheets if user_stylesheets else []

    def start(self, **kwargs):
        """Launch Servo on a free WebDriver port with the custom hosts file."""
        self.webdriver_port = get_free_port(4444, exclude=self.used_ports)
        self.used_ports.add(self.webdriver_port)
        env = os.environ.copy()
        env["HOST_FILE"] = self.hosts_path
        env["RUST_BACKTRACE"] = "1"
        debug_args, command = browser_command(
            self.binary,
            [
                "--hard-fail",
                "--webdriver", str(self.webdriver_port),
                "about:blank",
            ],
            self.debug_info
        )
        for stylesheet in self.user_stylesheets:
            command += ["--user-stylesheet", stylesheet]
        self.command = command
        # Prepend any debugger wrapper (e.g. gdb) in front of the command.
        self.command = debug_args + self.command
        if not self.debug_info or not self.debug_info.interactive:
            # Non-interactive: wrap in mozprocess so every output line is
            # forwarded to the structured log via on_output.
            self.proc = ProcessHandler(self.command,
                                       processOutputLine=[self.on_output],
                                       env=env,
                                       storeOutput=False)
            self.proc.run()
        else:
            # Interactive debugging: plain Popen so the debugger owns stdio.
            self.proc = subprocess.Popen(self.command, env=env)
        self.logger.debug("Servo Started")

    def stop(self, force=False):
        """Kill the Servo process (``force`` is accepted but unused here)."""
        self.logger.debug("Stopping browser")
        if self.proc is not None:
            try:
                self.proc.kill()
            except OSError:
                # This can happen on Windows if the process is already dead
                pass

    def pid(self):
        """Return the browser pid, or None if not started/available."""
        if self.proc is None:
            return None
        try:
            return self.proc.pid
        except AttributeError:
            # The process handle may not expose .pid before launch.
            return None

    def on_output(self, line):
        """Write a line of output from the process to the log"""
        self.logger.process_output(self.pid(),
                                   line.decode("utf8", "replace"),
                                   command=" ".join(self.command))

    def is_alive(self):
        # NOTE(review): this class never assigns self.runner (the process
        # handle lives in self.proc), so unless the Browser base class
        # defines `runner` this raises AttributeError — confirm upstream.
        if self.runner:
            return self.runner.is_running()
        return False

    def cleanup(self):
        self.stop()

    def executor_browser(self):
        """Return the ExecutorBrowser class and its connection kwargs."""
        assert self.webdriver_port is not None
        return ExecutorBrowser, {"webdriver_host": self.webdriver_host,
                                 "webdriver_port": self.webdriver_port}
| mpl-2.0 |
dfalt974/SickRage | lib/sqlalchemy/dialects/sybase/pysybase.py | 79 | 3228 | # sybase/pysybase.py
# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: sybase+pysybase
:name: Python-Sybase
:dbapi: Sybase
:connectstring: sybase+pysybase://<username>:<password>@<dsn>/[database name]
:url: http://python-sybase.sourceforge.net/
Unicode Support
---------------
The python-sybase driver does not appear to support non-ASCII strings of any
kind at this time.
"""
from sqlalchemy import types as sqltypes, processors
from sqlalchemy.dialects.sybase.base import SybaseDialect, \
SybaseExecutionContext, SybaseSQLCompiler
class _SybNumeric(sqltypes.Numeric):
    """Numeric variant returning plain floats when ``asdecimal`` is False."""

    def result_processor(self, dialect, type_):
        if self.asdecimal:
            return sqltypes.Numeric.result_processor(self, dialect, type_)
        return processors.to_float
class SybaseExecutionContext_pysybase(SybaseExecutionContext):
    """Execution context tweaks required by the python-sybase DBAPI."""

    def set_ddl_autocommit(self, dbapi_connection, value):
        if value:
            # call commit() on the Sybase connection directly,
            # to avoid any side effects of calling a Connection
            # transactional method inside of pre_exec()
            dbapi_connection.commit()

    def pre_exec(self):
        SybaseExecutionContext.pre_exec(self)
        # python-sybase expects parameter names prefixed with '@'; rewrite
        # each key in place.  list(param) snapshots the keys so the dict
        # can be mutated safely during iteration.
        for param in self.parameters:
            for key in list(param):
                param["@" + key] = param[key]
                del param[key]
class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
    """SQL compiler emitting '@name' bind parameters for python-sybase."""

    def bindparam_string(self, name, **kw):
        return "@" + name
class SybaseDialect_pysybase(SybaseDialect):
    """SQLAlchemy dialect for Sybase via the python-sybase DBAPI."""

    driver = 'pysybase'
    execution_ctx_cls = SybaseExecutionContext_pysybase
    statement_compiler = SybaseSQLCompiler_pysybase

    colspecs = {
        sqltypes.Numeric: _SybNumeric,
        sqltypes.Float: sqltypes.Float
    }

    @classmethod
    def dbapi(cls):
        # Imported lazily so the dialect module loads without the driver.
        import Sybase
        return Sybase

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user', password='passwd')
        # python-sybase takes the DSN positionally; the rest as kwargs.
        return ([opts.pop('host')], opts)

    def do_executemany(self, cursor, statement, parameters, context=None):
        # calling python-sybase executemany yields:
        # TypeError: string too long for buffer
        for param in parameters:
            cursor.execute(statement, param)

    def _get_server_version_info(self, connection):
        vers = connection.scalar("select @@version_number")
        # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
        # (12, 5, 0, 0)
        # NOTE(review): these '/' divisions assume Python 2 integer
        # division; on Python 3 they would yield floats — confirm the
        # supported interpreter versions for this file.
        return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)

    def is_disconnect(self, e, connection, cursor):
        # Classify DBAPI errors whose messages indicate a dropped link.
        if isinstance(e, (self.dbapi.OperationalError,
                          self.dbapi.ProgrammingError)):
            msg = str(e)
            return ('Unable to complete network request to host' in msg or
                    'Invalid connection state' in msg or
                    'Invalid cursor state' in msg)
        else:
            return False

dialect = SybaseDialect_pysybase
| gpl-3.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/lib2to3/fixes/fix_print.py | 326 | 2865 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for print.
Change:
'print' into 'print()'
'print ...' into 'print(...)'
'print ... ,' into 'print(..., end=" ")'
'print >>x, ...' into 'print(..., file=x)'
No changes are applied if print_function is imported from __future__
"""
# Local imports
from .. import patcomp
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Comma, String, is_tuple
# Matches a single already-parenthesised atom/string/name, so transform()
# can avoid wrapping "print(x)"-style statements in a second set of parens.
parend_expr = patcomp.compile_pattern(
    """atom< '(' [atom|STRING|NAME] ')' >"""
    )
class FixPrint(fixer_base.BaseFix):
    """lib2to3 fixer converting Python 2 print statements to print() calls."""

    BM_compatible = True

    # Match either a bare 'print' inside a simple statement, or a full
    # print statement with arguments.
    PATTERN = """
              simple_stmt< any* bare='print' any* > | print_stmt
              """

    def transform(self, node, results):
        assert results
        bare_print = results.get("bare")
        if bare_print:
            # Special-case print all by itself
            bare_print.replace(Call(Name(u"print"), [],
                               prefix=bare_print.prefix))
            return
        assert node.children[0] == Name(u"print")
        args = node.children[1:]
        if len(args) == 1 and parend_expr.match(args[0]):
            # We don't want to keep sticking parens around an
            # already-parenthesised expression.
            return

        # sep is never reassigned below, so the sep branch later in this
        # method is currently unreachable; it is kept for symmetry with
        # end/file.
        sep = end = file = None
        if args and args[-1] == Comma():
            # Trailing comma suppressed the newline in Python 2 -> end=" ".
            args = args[:-1]
            end = " "
        if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"):
            # 'print >>stream, ...' redirects output -> file=stream.
            assert len(args) >= 2
            file = args[1].clone()
            args = args[3:]  # Strip a possible comma after the file expression
        # Now synthesize a print(args, sep=..., end=..., file=...) node.
        l_args = [arg.clone() for arg in args]
        if l_args:
            l_args[0].prefix = u""
        if sep is not None or end is not None or file is not None:
            if sep is not None:
                self.add_kwarg(l_args, u"sep", String(repr(sep)))
            if end is not None:
                self.add_kwarg(l_args, u"end", String(repr(end)))
            if file is not None:
                self.add_kwarg(l_args, u"file", file)
        n_stmt = Call(Name(u"print"), l_args)
        n_stmt.prefix = node.prefix
        return n_stmt

    def add_kwarg(self, l_nodes, s_kwd, n_expr):
        """Append a `s_kwd=n_expr` argument (with separating comma) to l_nodes."""
        # XXX All this prefix-setting may lose comments (though rarely)
        n_expr.prefix = u""
        n_argument = pytree.Node(self.syms.argument,
                                 (Name(s_kwd),
                                  pytree.Leaf(token.EQUAL, u"="),
                                  n_expr))
        if l_nodes:
            l_nodes.append(Comma())
            n_argument.prefix = u" "
        l_nodes.append(n_argument)
| mit |
guyemerson/sem-func | src/core/intrinsic.py | 1 | 5876 | import os, pickle, numpy as np
from collections import Counter
from math import log
from warnings import warn
from __config__.filepath import AUX_DIR, FREQ_FILE, VOCAB_FILE
from utils import product
# Load vocabulary
with open(os.path.join(AUX_DIR, VOCAB_FILE), 'rb') as f:
    pred_name = pickle.load(f)  # sequence of predicate name strings
with open(os.path.join(AUX_DIR, FREQ_FILE), 'rb') as f:
    pred_freq = pickle.load(f)  # per-predicate corpus frequencies
    # (presumably a numpy array — it is indexed with lists below; confirm)
# Predicate names look like "lemma_pos_sense": the POS tag is the middle
# field of rsplit('_', 2).  Build verb/noun index lists plus boolean masks
# over the whole vocabulary.
verbs = [i for i, x in enumerate(pred_name) if x.rsplit('_', 2)[1] == 'v']
nouns = [i for i, x in enumerate(pred_name) if x.rsplit('_', 2)[1] == 'n']
noun_mask = np.array([x.rsplit('_', 2)[1] == 'n' for x in pred_name])
verb_mask = np.array([x.rsplit('_', 2)[1] == 'v' for x in pred_name])
def generate_random_data(n_trans, n_subj, n_obj):
    """
    Generate a random set of tuples, sampling preds by corpus frequency
    :param n_trans: number of transitive tuples
    :param n_subj: number of subject-verb tuples
    :param n_obj: number of verb-object tuples
    :return: list of (verb, subj, obj) tuples, with None for missing arguments
    """
    # Expand the pred indices into token arrays weighted by frequency.
    # np.repeat replaces the original manual preallocate-and-fill loops;
    # each pred index appears pred_freq[p] times, exactly as before.
    verb_tokens = np.repeat(verbs, pred_freq[verbs])
    noun_tokens = np.repeat(nouns, pred_freq[nouns])
    # Sample the tuples (uniform over tokens == frequency-weighted preds)
    n_total = n_trans + n_subj + n_obj
    subj = np.random.choice(noun_tokens, n_total)
    verb = np.random.choice(verb_tokens, n_total)
    obj = np.random.choice(noun_tokens, n_total)
    # First n_trans are transitive, then SV-only, then VO-only.
    data = [(int(verb[i]), int(subj[i]), int(obj[i])) for i in range(n_trans)]
    data += [(int(verb[i]), int(subj[i]), None)
             for i in range(n_trans, n_trans + n_subj)]
    data += [(int(verb[i]), None, int(obj[i]))
             for i in range(n_trans + n_subj, n_total)]
    return data
def separate_prob(data):
    """
    Convert a list of SVO triples with missing arguments,
    to three empirical distributions over tuples
    :param data: list of (verb, subj, obj) triples (None marks a gap)
    :return: [{SVO: prob}, {SV: prob}, {VO: prob}]
    """
    svo_counts = Counter()
    sv_counts = Counter()
    vo_counts = Counter()
    for verb, subj, obj in data:
        if subj is None:
            vo_counts[verb, obj] += 1
        elif obj is None:
            sv_counts[subj, verb] += 1
        else:
            svo_counts[subj, verb, obj] += 1
    # Normalise each configuration separately (an empty one stays empty).
    distributions = []
    for counts in (svo_counts, sv_counts, vo_counts):
        total = sum(counts.values())
        distributions.append({tup: num / total for tup, num in counts.items()})
    return distributions
def KL(P, Q):
    """
    Calculate the Kullback-Leibler divergence from Q to P:
    sum_x P(x) (log P(x) - log Q(x))
    Both P and Q are dicts from elements to probabilities; every element
    with mass in P must also be present in Q.
    :param P: true distribution
    :param Q: approximating distribution
    :return: divergence
    """
    # Same log(p) - log(q) form as the original accumulation loop, so the
    # floating-point result is bit-identical.
    return sum(prob * (log(prob) - log(Q[item]))
               for item, prob in P.items())
def compare_KL(model, real_data, fake_data, samples=(100,100,100), **kwargs):
    """
    Approximately calculate the Kullback-Leibler divergence from the model to two sets of data
    :param model: the sem-func model
    :param real_data: first set of tuples
    :param fake_data: second set of tuples
    :param samples: number of samples to draw, for: SVO, SV, VO graphs
    :return: (real KLs, fake KLs), each for (SVO, SV, VO) subsets
    """
    # Get sample probabilities from the data
    real_prob = separate_prob(real_data)
    fake_prob = separate_prob(fake_data)
    # Initialise counts for generated samples
    real_match = [{tup: 0 for tup in c} for c in real_prob]
    fake_match = [{tup: 0 for tup in c} for c in fake_prob]
    # Sample from the model, one sampler per graph configuration
    sampler = [model.sample_background_svo, model.sample_background_sv, model.sample_background_vo]
    for i in range(3):
        # Sample entities for each graph configuration
        for ents in sampler[i](samples=samples[i], **kwargs):
            # For the sampled entities, find the distribution over predicates
            pred_dist = [model.pred_dist(e) for e in ents]
            # Add the probability that this sample would generate the observed predicates
            for preds in real_match[i]:
                real_match[i][preds] += product(pred_dist[j][p] for j,p in enumerate(preds))
            for preds in fake_match[i]:
                fake_match[i][preds] += product(pred_dist[j][p] for j,p in enumerate(preds))
        # Average the probabilities: Monte Carlo estimate of the model's
        # marginal probability of each observed tuple
        for preds in real_match[i]:
            real_match[i][preds] /= samples[i]
        for preds in fake_match[i]:
            fake_match[i][preds] /= samples[i]
    # KL from the model's estimated distribution to each empirical one
    real_KL = [KL(real_prob[i], real_match[i]) for i in range(3)]
    fake_KL = [KL(fake_prob[i], fake_match[i]) for i in range(3)]
    return real_KL, fake_KL
def baseline_KL(real_data, fake_data):
    """
    Calculate the Kullback-Leibler divergence from the null hypothesis (sample nouns and verbs according to frequency) to two sets of data
    :param real_data: first set of tuples
    :param fake_data: second set of tuples
    :return: (real KLs, fake KLs), each for (SVO, SV, VO) subsets
    """
    real_prob = separate_prob(real_data)
    fake_prob = separate_prob(fake_data)
    # Unigram probabilities, normalised within each part of speech
    noun_prob = pred_freq * noun_mask / pred_freq[nouns].sum()
    verb_prob = pred_freq * verb_mask / pred_freq[verbs].sum()
    # The masks are disjoint, so the sum gives each pred its within-POS prob
    both_prob = noun_prob + verb_prob
    # Probability of each observed tuple under independent unigram sampling
    real_match = [{tup: product(both_prob[p] for p in tup)
                   for tup in c}
                  for c in real_prob]
    fake_match = [{tup: product(both_prob[p] for p in tup)
                   for tup in c}
                  for c in fake_prob]
    real_KL = [KL(real_prob[i], real_match[i]) for i in range(3)]
    fake_KL = [KL(fake_prob[i], fake_match[i]) for i in range(3)]
    return real_KL, fake_KL
| mit |
epam/DLab | integration-tests/examples/copy_files.py | 1 | 4782 | #!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
import os, sys, json
import argparse
from fabric.api import *
# Command-line arguments: target storage name, notebook/cloud identifiers,
# plus the Azure account names needed for blob or datalake uploads.
parser = argparse.ArgumentParser()
parser.add_argument('--storage', type=str, default='S3/GCP buckets, Azure Blob container / Datalake folder')
parser.add_argument('--notebook', type=str, default='aws, azure, gcp')
parser.add_argument('--cloud', type=str, default='aws, azure, gcp')
parser.add_argument('--azure_storage_account', type=str, default='')
parser.add_argument('--azure_datalake_account', type=str, default='')
args = parser.parse_args()

# Airline on-time performance sample files fetched from stat-computing.org.
dataset_file = ['airports.csv', 'carriers.csv', '2008.csv.bz2']
def download_dataset():
    """Download the test dataset files into /tmp; exit(1) on any failure."""
    try:
        for f in dataset_file:
            local('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f))
    except Exception as err:
        print('Failed to download test dataset', str(err))
        sys.exit(1)
def upload_aws():
    """Copy the dataset files to the S3 bucket with AES256 server-side
    encryption; exit(1) on any failure."""
    try:
        for f in dataset_file:
            local('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook))
    except Exception as err:
        print('Failed to upload test dataset to bucket', str(err))
        sys.exit(1)
def upload_azure_datalake():
    """Upload the dataset files to an Azure Data Lake Store folder.

    Authenticates with the service-principal credentials from the JSON file
    pointed to by AZURE_AUTH_LOCATION; exit(1) on any failure.
    """
    try:
        from azure.datalake.store import core, lib, multithread
        sp_creds = json.loads(open(os.environ['AZURE_AUTH_LOCATION']).read())
        # json.dumps(...).replace('"', '') strips the quoting around each
        # credential string.
        dl_filesystem_creds = lib.auth(tenant_id=json.dumps(sp_creds['tenantId']).replace('"', ''),
                                       client_secret=json.dumps(sp_creds['clientSecret']).replace('"', ''),
                                       client_id=json.dumps(sp_creds['clientId']).replace('"', ''),
                                       resource='https://datalake.azure.net/')
        datalake_client = core.AzureDLFileSystem(dl_filesystem_creds, store_name=args.azure_datalake_account)
        for f in dataset_file:
            multithread.ADLUploader(datalake_client,
                                    lpath='/tmp/{0}'.format(f),
                                    rpath='{0}/{1}_dataset/{2}'.format(args.storage, args.notebook, f))
    except Exception as err:
        print('Failed to upload test dataset to datalake store', str(err))
        sys.exit(1)
def upload_azure_blob():
    # Upload every dataset file into '<container>/<notebook>_dataset/' in the
    # Azure Blob storage account named by --azure_storage_account.
    try:
        from azure.mgmt.storage import StorageManagementClient
        from azure.storage.blob import BlockBlobService
        from azure.common.client_factory import get_client_from_auth_file
        storage_client = get_client_from_auth_file(StorageManagementClient)
        resource_group_name = ''
        # Locate the account's resource group by scanning account tags.
        # NOTE(review): assumes --storage is a container name containing the
        # word 'container' and that tags 'Name'/'SBN' follow the dlab naming
        # convention — confirm against the provisioning code.
        for i in storage_client.storage_accounts.list():
            if args.storage.replace('container', 'storage') == str(i.tags.get('Name')):
                resource_group_name = str(i.tags.get('SBN'))
        secret_key = storage_client.storage_accounts.list_keys(resource_group_name, args.azure_storage_account).keys[0].value
        block_blob_service = BlockBlobService(account_name=args.azure_storage_account, account_key=secret_key)
        for f in dataset_file:
            block_blob_service.create_blob_from_path(args.storage, '{0}_dataset/{1}'.format(args.notebook, f), '/tmp/{0}'.format(f))
    except Exception as err:
        print('Failed to upload test dataset to blob storage', str(err))
        sys.exit(1)
def upload_gcp():
    """Copy the downloaded dataset files into the GCS bucket via gsutil."""
    try:
        for dataset in dataset_file:
            cmd = 'gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(
                dataset, args.storage, args.notebook)
            local(cmd)
    except Exception as err:
        print('Failed to upload test dataset to bucket', str(err))
        sys.exit(1)
if __name__ == "__main__":
    # Always fetch the raw files first, then dispatch on the target cloud.
    download_dataset()
    if args.cloud == 'aws':
        upload_aws()
    elif args.cloud == 'azure':
        # The Azure SDK helpers read service-principal credentials from the
        # file named by AZURE_AUTH_LOCATION.
        os.environ['AZURE_AUTH_LOCATION'] = '/home/dlab-user/keys/azure_auth.json'
        if args.azure_datalake_account:
            upload_azure_datalake()
        else:
            upload_azure_blob()
    elif args.cloud == 'gcp':
        upload_gcp()
    else:
        print('Error! Unknown cloud provider.')
        sys.exit(1)
    sys.exit(0)
| apache-2.0 |
britcey/ansible | lib/ansible/utils/module_docs_fragments/dimensiondata.py | 192 | 2098 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Dimension Data
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# - Adam Friedman <tintoy@tintoy.io>
class ModuleDocFragment(object):
    """Shared Ansible documentation fragment for Dimension Data modules.

    Ansible merges the DOCUMENTATION YAML below into any module that
    declares ``extends_documentation_fragment: dimensiondata``.
    """

    # Dimension Data doc fragment
    DOCUMENTATION = '''
options:
  region:
    description:
      - The target region.
    choices:
      - Regions are defined in Apache libcloud project [libcloud/common/dimensiondata.py]
      - They are also listed in U(https://libcloud.readthedocs.io/en/latest/compute/drivers/dimensiondata.html)
      - Note that the default value "na" stands for "North America".
      - The module prepends 'dd-' to the region choice.
    default: na
  mcp_user:
    description:
      - The username used to authenticate to the CloudControl API.
      - If not specified, will fall back to C(MCP_USER) from environment variable or C(~/.dimensiondata).
    required: false
  mcp_password:
    description:
      - The password used to authenticate to the CloudControl API.
      - If not specified, will fall back to C(MCP_PASSWORD) from environment variable or C(~/.dimensiondata).
      - Required if I(mcp_user) is specified.
    required: false
  location:
    description:
      - The target datacenter.
    required: true
  validate_certs:
    description:
      - If C(false), SSL certificates will not be validated.
      - This should only be used on private instances of the CloudControl API that use self-signed certificates.
    required: false
    default: true
'''
| gpl-3.0 |
slevenhagen/odoo | addons/stock/report/report_stock.py | 376 | 2486 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.sql import drop_view_if_exists
class report_stock_lines_date(osv.osv):
    """Read-only report model backed by a SQL view that yields, per product,
    the date of the latest completed inventory and the date of the latest
    completed stock move."""
    _name = "report.stock.lines.date"
    _description = "Dates of Inventories and latest Moves"
    _auto = False  # backed by the SQL view created in init(), not a real table
    _order = "date"
    _columns = {
        'id': fields.integer('Product Id', readonly=True),
        'product_id': fields.many2one('product.product', 'Product', readonly=True, select=True),
        'date': fields.datetime('Date of latest Inventory', readonly=True),
        'move_date': fields.datetime('Date of latest Stock Move', readonly=True),
        "active": fields.boolean("Active", readonly=True),
    }
    def init(self, cr):
        """(Re)create the backing SQL view; called on module install/update."""
        drop_view_if_exists(cr, 'report_stock_lines_date')
        cr.execute("""
        create or replace view report_stock_lines_date as (
            select
            p.id as id,
            p.id as product_id,
            max(s.date) as date,
            max(m.date) as move_date,
            p.active as active
        from
            product_product p
                left join (
                stock_inventory_line l
                inner join stock_inventory s on (l.inventory_id=s.id and s.state = 'done')
                ) on (p.id=l.product_id)
                left join stock_move m on (m.product_id=p.id and m.state = 'done')
        group by p.id
        )""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
danigonza/phantomjs | src/breakpad/src/tools/gyp/test/lib/TestGyp.py | 137 | 23211 | #!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
TestGyp.py: a testing framework for GYP integration tests.
"""
import os
import re
import shutil
import stat
import sys
import TestCommon
from TestCommon import __all__
__all__.extend([
'TestGyp',
])
class TestGypBase(TestCommon.TestCommon):
  """
  Class for controlling end-to-end tests of gyp generators.
  Instantiating this class will create a temporary directory and
  arrange for its destruction (via the TestCmd superclass) and
  copy all of the non-gyptest files in the directory hierarchy of the
  executing script.
  The default behavior is to test the 'gyp' or 'gyp.bat' file in the
  current directory.  An alternative may be specified explicitly on
  instantiation, or by setting the TESTGYP_GYP environment variable.
  This class should be subclassed for each supported gyp generator
  (format).  Various abstract methods below define calling signatures
  used by the test scripts to invoke builds on the generated build
  configuration and to run executables generated by those builds.
  """

  # The concrete build-tool executable; resolved by initialize_build_tool().
  build_tool = None
  build_tool_list = []

  # Platform-dependent filename prefixes/suffixes for executables, objects
  # and libraries, taken from TestCommon so tests stay platform-independent.
  _exe = TestCommon.exe_suffix
  _obj = TestCommon.obj_suffix
  shobj_ = TestCommon.shobj_prefix
  _shobj = TestCommon.shobj_suffix
  lib_ = TestCommon.lib_prefix
  _lib = TestCommon.lib_suffix
  dll_ = TestCommon.dll_prefix
  _dll = TestCommon.dll_suffix

  # Constants to represent different targets.
  ALL = '__all__'
  DEFAULT = '__default__'

  # Constants for different target types.
  EXECUTABLE = '__executable__'
  STATIC_LIB = '__static_lib__'
  SHARED_LIB = '__shared_lib__'

  def __init__(self, gyp=None, *args, **kw):
    self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
    # Resolve the gyp driver: explicit argument, then $TESTGYP_GYP, then a
    # platform-appropriate default expected on the caller's $PATH.
    if not gyp:
      gyp = os.environ.get('TESTGYP_GYP')
      if not gyp:
        if sys.platform == 'win32':
          gyp = 'gyp.bat'
        else:
          gyp = 'gyp'
    self.gyp = os.path.abspath(gyp)
    self.initialize_build_tool()
    if not kw.has_key('match'):
      kw['match'] = TestCommon.match_exact
    if not kw.has_key('workdir'):
      # Default behavior: the null string causes TestCmd to create
      # a temporary directory for us.
      kw['workdir'] = ''
    formats = kw.get('formats', [])
    if kw.has_key('formats'):
      del kw['formats']
    super(TestGypBase, self).__init__(*args, **kw)
    # A test may restrict which generator formats it applies to; a leading
    # '!' marks a format as excluded.  Skip this test if the current format
    # is excluded, or if an inclusion list exists and doesn't mention it.
    excluded_formats = set([f for f in formats if f[0] == '!'])
    included_formats = set(formats) - excluded_formats
    if ('!'+self.format in excluded_formats or
        included_formats and self.format not in included_formats):
      msg = 'Invalid test for %r format; skipping test.\n'
      self.skip_test(msg % self.format)
    self.copy_test_configuration(self.origin_cwd, self.workdir)
    self.set_configuration(None)

  def built_file_must_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name does not exist.
    """
    return self.must_exist(self.built_file_path(name, type, **kw))

  def built_file_must_not_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name exists.
    """
    return self.must_not_exist(self.built_file_path(name, type, **kw))

  def built_file_must_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    do not match the specified contents.
    """
    return self.must_match(self.built_file_path(name, **kw), contents)

  def built_file_must_not_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    match the specified contents.
    """
    return self.must_not_match(self.built_file_path(name, **kw), contents)

  def copy_test_configuration(self, source_dir, dest_dir):
    """
    Copies the test configuration from the specified source_dir
    (the directory in which the test script lives) to the
    specified dest_dir (a temporary working directory).
    This ignores all files and directories that begin with
    the string 'gyptest', and all '.svn' subdirectories.
    """
    for root, dirs, files in os.walk(source_dir):
      if '.svn' in dirs:
        dirs.remove('.svn')
      # Pruning 'dirs' in place stops os.walk from descending into them.
      dirs = [ d for d in dirs if not d.startswith('gyptest') ]
      files = [ f for f in files if not f.startswith('gyptest') ]
      for dirname in dirs:
        source = os.path.join(root, dirname)
        destination = source.replace(source_dir, dest_dir)
        os.mkdir(destination)
        if sys.platform != 'win32':
          shutil.copystat(source, destination)
      for filename in files:
        source = os.path.join(root, filename)
        destination = source.replace(source_dir, dest_dir)
        shutil.copy2(source, destination)

  def initialize_build_tool(self):
    """
    Initializes the .build_tool attribute.
    Searches the .build_tool_list for an executable name on the user's
    $PATH.  The first tool on the list is used as-is if nothing is found
    on the current $PATH.
    """
    for build_tool in self.build_tool_list:
      if not build_tool:
        continue
      if os.path.isabs(build_tool):
        self.build_tool = build_tool
        return
      build_tool = self.where_is(build_tool)
      if build_tool:
        self.build_tool = build_tool
        return
    if self.build_tool_list:
      self.build_tool = self.build_tool_list[0]

  def relocate(self, source, destination):
    """
    Renames (relocates) the specified source (usually a directory)
    to the specified destination, creating the destination directory
    first if necessary.
    Note:  Don't use this as a generic "rename" operation.  In the
    future, "relocating" parts of a GYP tree may affect the state of
    the test to modify the behavior of later method calls.
    """
    destination_dir = os.path.dirname(destination)
    if not os.path.exists(destination_dir):
      self.subdir(destination_dir)
    os.rename(source, destination)

  def report_not_up_to_date(self):
    """
    Reports that a build is not up-to-date.
    This provides common reporting for formats that have complicated
    conditions for checking whether a build is up-to-date.  Formats
    that expect exact output from the command (make, scons) can
    just set stdout= when they call the run_build() method.
    """
    print "Build is not up-to-date:"
    print self.banner('STDOUT ')
    print self.stdout()
    stderr = self.stderr()
    if stderr:
      print self.banner('STDERR ')
      print stderr

  def run_gyp(self, gyp_file, *args, **kw):
    """
    Runs gyp against the specified gyp_file with the specified args.
    """
    # TODO:  --depth=. works around Chromium-specific tree climbing.
    args = ('--depth=.', '--format='+self.format, gyp_file) + args
    return self.run(program=self.gyp, arguments=args, **kw)

  def run(self, *args, **kw):
    """
    Executes a program by calling the superclass .run() method.
    This exists to provide a common place to filter out keyword
    arguments implemented in this layer, without having to update
    the tool-specific subclasses or clutter the tests themselves
    with platform-specific code.
    """
    if kw.has_key('SYMROOT'):
      del kw['SYMROOT']
    super(TestGypBase, self).run(*args, **kw)

  def set_configuration(self, configuration):
    """
    Sets the configuration, to be used for invoking the build
    tool and testing potential built output.
    """
    self.configuration = configuration

  def configuration_dirname(self):
    """
    Returns the directory portion of the current configuration: the part
    before any '|' platform separator, or 'Default' if unset.
    """
    if self.configuration:
      return self.configuration.split('|')[0]
    else:
      return 'Default'

  def configuration_buildname(self):
    """
    Returns the full configuration name to hand to the build tool,
    or 'Default' if unset.
    """
    if self.configuration:
      return self.configuration
    else:
      return 'Default'

  #
  # Abstract methods to be defined by format-specific subclasses.
  #

  def build(self, gyp_file, target=None, **kw):
    """
    Runs a build of the specified target against the configuration
    generated from the specified gyp_file.
    A 'target' argument of None or the special value TestGyp.DEFAULT
    specifies the default argument for the underlying build tool.
    A 'target' argument of TestGyp.ALL specifies the 'all' target
    (if any) of the underlying build tool.
    """
    raise NotImplementedError

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type.
    """
    raise NotImplementedError

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable program built from a gyp-generated configuration.
    The specified name should be independent of any particular generator.
    Subclasses should find the output executable in the appropriate
    output build directory, tack on any necessary executable suffix, etc.
    """
    raise NotImplementedError

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified target is up to date.
    The subclass should implement this by calling build()
    (or a reasonable equivalent), checking whatever conditions
    will tell it the build was an "up to date" null build, and
    failing if it isn't.
    """
    raise NotImplementedError
class TestGypGypd(TestGypBase):
  """
  Subclass for testing the GYP 'gypd' generator (spit out the
  internal data structure as pretty-printed Python).
  """
  # gypd produces no buildable output, so the abstract build/run methods
  # are deliberately left unimplemented.
  format = 'gypd'
class TestGypMake(TestGypBase):
  """
  Subclass for testing the GYP Make generator.
  """
  format = 'make'
  build_tool_list = ['make']
  ALL = 'all'

  def build(self, gyp_file, target=None, **kw):
    """
    Runs a Make build using the Makefiles generated from the specified
    gyp_file.
    """
    arguments = kw.get('arguments', [])
    if self.configuration:
      arguments.append('BUILDTYPE=' + self.configuration)
    if target not in (None, self.DEFAULT):
      arguments.append(target)
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Make target is up to date.
    """
    if target in (None, self.DEFAULT):
      message_target = 'all'
    else:
      message_target = target
    # Make prints exactly this message for a null build; match it verbatim.
    kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
    return self.build(gyp_file, target, **kw)

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by Make.
    """
    configuration = self.configuration_dirname()
    libdir = os.path.join('out', configuration, 'lib')
    # TODO(piman): when everything is cross-compile safe, remove lib.target
    os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Make.
    Built files are in the subdirectory 'out/{configuration}'.
    The default is 'out/Default'.
    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.
    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    A libdir= keyword argument specifies a library subdirectory other
    than the default 'obj.target'.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    configuration = self.configuration_dirname()
    result.extend(['out', configuration])
    if type == self.EXECUTABLE:
      result.append(name + self._exe)
    elif type == self.STATIC_LIB:
      name = self.lib_ + name + self._lib
      libdir = kw.get('libdir', 'lib')
      result.extend([libdir, name])
    elif type == self.SHARED_LIB:
      name = self.dll_ + name + self._dll
      libdir = kw.get('libdir', 'lib.target')
      result.extend([libdir, name])
    else:
      result.append(name)
    return self.workpath(*result)
class TestGypMSVS(TestGypBase):
  """
  Subclass for testing the GYP Visual Studio generator.
  """
  format = 'msvs'

  # devenv's build summary line; group(1) is the up-to-date project count.
  u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
  up_to_date_re = re.compile(u, re.M)

  # Initial None element will indicate to our .initialize_build_tool()
  # method below that 'devenv' was not found on %PATH%.
  #
  # Note:  we must use devenv.com to be able to capture build output.
  # Directly executing devenv.exe only sends output to BuildLog.htm.
  build_tool_list = [None, 'devenv.com']

  def initialize_build_tool(self):
    """
    Initializes the Visual Studio .build_tool parameter, searching %PATH%
    and %PATHEXT% for a devenv.{exe,bat,...} executable, and falling
    back to a hard-coded default (on the current drive) if necessary.
    """
    super(TestGypMSVS, self).initialize_build_tool()
    if not self.build_tool:
      # We didn't find 'devenv' on the path.  Just hard-code a default,
      # and revisit this if it becomes important.
      possible = [
          ('C:\\Program Files',
           'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'),
          # Note: if you're using this, set GYP_MSVS_VERSION=2008
          # to get the tests to pass.
          ('C:\\Program Files (x86)',
           'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'),
      ]
      for build_tool in possible:
        bt = os.path.join(*build_tool)
        if os.path.exists(bt):
          self.build_tool = bt
          break

  def build(self, gyp_file, target=None, rebuild=False, **kw):
    """
    Runs a Visual Studio build using the configuration generated
    from the specified gyp_file.
    """
    configuration = self.configuration_buildname()
    if rebuild:
      build = '/Rebuild'
    else:
      build = '/Build'
    arguments = kw.get('arguments', [])
    arguments.extend([gyp_file.replace('.gyp', '.sln'),
                      build, configuration])
    # Note:  the Visual Studio generator doesn't add an explicit 'all'
    # target, so we just treat it the same as the default.
    if target not in (None, self.ALL, self.DEFAULT):
      arguments.extend(['/Project', target])
    if self.configuration:
      arguments.extend(['/ProjectConfig', self.configuration])
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Visual Studio target is up to date.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      stdout = self.stdout()
      # An up-to-date build must report at least one up-to-date project.
      m = self.up_to_date_re.search(stdout)
      if not m or m.group(1) == '0':
        self.report_not_up_to_date()
        self.fail_test()
    return result

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by Visual Studio.
    """
    configuration = self.configuration_dirname()
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Visual Studio.
    Built files are in a subdirectory that matches the configuration
    name.  The default is 'Default'.
    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.
    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    result.append(self.configuration_dirname())
    if type == self.EXECUTABLE:
      result.append(name + self._exe)
    elif type == self.STATIC_LIB:
      name = self.lib_ + name + self._lib
      result.extend(['lib', name])
    elif type == self.SHARED_LIB:
      name = self.dll_ + name + self._dll
      result.append(name)
    else:
      result.append(name)
    return self.workpath(*result)
class TestGypSCons(TestGypBase):
  """
  Subclass for testing the GYP SCons generator.
  """
  format = 'scons'
  build_tool_list = ['scons', 'scons.py']
  ALL = 'all'

  def build(self, gyp_file, target=None, **kw):
    """
    Runs a scons build using the SCons configuration generated from the
    specified gyp_file.
    """
    arguments = kw.get('arguments', [])
    dirname = os.path.dirname(gyp_file)
    if dirname:
      arguments.extend(['-C', dirname])
    if self.configuration:
      arguments.append('--mode=' + self.configuration)
    if target not in (None, self.DEFAULT):
      arguments.append(target)
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified SCons target is up to date.
    """
    if target in (None, self.DEFAULT):
      up_to_date_targets = 'all'
    else:
      up_to_date_targets = target
    # Expect one "is up to date" line per target, in order, and pass -Q to
    # suppress SCons' progress chatter so stdout can be matched exactly.
    up_to_date_lines = []
    for arg in up_to_date_targets.split():
      up_to_date_lines.append("scons: `%s' is up to date.\n" % arg)
    kw['stdout'] = ''.join(up_to_date_lines)
    arguments = kw.get('arguments', [])
    arguments.append('-Q')
    kw['arguments'] = arguments
    return self.build(gyp_file, target, **kw)

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by scons.
    """
    configuration = self.configuration_dirname()
    os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib')
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Scons.
    Built files are in a subdirectory that matches the configuration
    name.  The default is 'Default'.
    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.
    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    result.append(self.configuration_dirname())
    if type == self.EXECUTABLE:
      result.append(name + self._exe)
    elif type == self.STATIC_LIB:
      name = self.lib_ + name + self._lib
      result.extend(['lib', name])
    elif type == self.SHARED_LIB:
      name = self.dll_ + name + self._dll
      result.extend(['lib', name])
    else:
      result.append(name)
    return self.workpath(*result)
class TestGypXcode(TestGypBase):
  """
  Subclass for testing the GYP Xcode generator.
  """
  format = 'xcode'
  build_tool_list = ['xcodebuild']

  # Pattern matching the output of a script phase that xcodebuild runs even
  # on a null build; stripped before the up-to-date check below.
  phase_script_execution = ("\n"
                            "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
                            "    cd /\\S+\n"
                            "    /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
                            "(make: Nothing to be done for `all'\\.\n)?")

  strip_up_to_date_expressions = [
    # Various actions or rules can run even when the overall build target
    # is up to date.  Strip those phases' GYP-generated output.
    re.compile(phase_script_execution, re.S),

    # The message from distcc_pump can trail the "BUILD SUCCEEDED"
    # message, so strip that, too.
    re.compile('__________Shutting down distcc-pump include server\n', re.S),
  ]

  up_to_date_ending = 'Checking Dependencies...\n** BUILD SUCCEEDED **\n'

  def build(self, gyp_file, target=None, **kw):
    """
    Runs an xcodebuild using the .xcodeproj generated from the specified
    gyp_file.
    """
    arguments = kw.get('arguments', [])
    arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
    if target == self.ALL:
      arguments.append('-alltargets',)
    elif target not in (None, self.DEFAULT):
      arguments.extend(['-target', target])
    if self.configuration:
      arguments.extend(['-configuration', self.configuration])
    symroot = kw.get('SYMROOT', '$SRCROOT/build')
    if symroot:
      arguments.append('SYMROOT='+symroot)
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Xcode target is up to date.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      output = self.stdout()
      # Remove noise that can appear even in a null build, then require the
      # canonical "nothing to do" ending.
      for expression in self.strip_up_to_date_expressions:
        output = expression.sub('', output)
      if not output.endswith(self.up_to_date_ending):
        self.report_not_up_to_date()
        self.fail_test()
    return result

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by xcodebuild.
    """
    configuration = self.configuration_dirname()
    os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration)
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Xcode.
    Built files are in the subdirectory 'build/{configuration}'.
    The default is 'build/Default'.
    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.
    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    configuration = self.configuration_dirname()
    result.extend(['build', configuration])
    if type == self.EXECUTABLE:
      result.append(name + self._exe)
    elif type == self.STATIC_LIB:
      name = self.lib_ + name + self._lib
      result.append(name)
    elif type == self.SHARED_LIB:
      name = name + self._dll
      result.append(name)
    else:
      result.append(name)
    return self.workpath(*result)
# Registry of generator-specific TestGyp subclasses; TestGyp() below selects
# one by matching its .format attribute.
format_class_list = [
  TestGypGypd,
  TestGypMake,
  TestGypMSVS,
  TestGypSCons,
  TestGypXcode,
]
def TestGyp(*args, **kw):
  """
  Returns an appropriate TestGyp* instance for a specified GYP format.

  The format is taken from the 'format' keyword argument if present
  (and removed before forwarding the remaining arguments to the
  subclass constructor), falling back to the $TESTGYP_FORMAT
  environment variable.  Raises Exception if no registered TestGyp*
  subclass handles the requested format.
  """
  # pop() both reads and removes 'format' so it is not forwarded on;
  # 'fmt' avoids shadowing the builtin format().
  fmt = kw.pop('format', None)
  if not fmt:
    fmt = os.environ.get('TESTGYP_FORMAT')
  for format_class in format_class_list:
    if fmt == format_class.format:
      return format_class(*args, **kw)
  # Call form (not the Python-2-only 'raise Exception, msg' statement).
  raise Exception('unknown format %r' % fmt)
| bsd-3-clause |
dosiecki/NewsBlur | apps/rss_feeds/management/commands/refresh_feeds.py | 16 | 3358 | from django.core.management.base import BaseCommand
from django.conf import settings
from django.contrib.auth.models import User
from apps.statistics.models import MStatistics
from apps.rss_feeds.models import Feed
from optparse import make_option
from utils import feed_fetcher
from utils.management_functions import daemonize
import django
import socket
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-f", "--feed", default=None),
make_option("-d", "--daemon", dest="daemonize", action="store_true"),
make_option("-F", "--force", dest="force", action="store_true"),
make_option("-s", "--single_threaded", dest="single_threaded", action="store_true"),
make_option('-t', '--timeout', type='int', default=10,
help='Wait timeout in seconds when connecting to feeds.'),
make_option('-u', '--username', type='str', dest='username'),
make_option('-V', '--verbose', action='store_true',
dest='verbose', default=False, help='Verbose output.'),
make_option('-S', '--skip', type='int',
dest='skip', default=0, help='Skip stories per month < #.'),
make_option('-w', '--workerthreads', type='int', default=4,
help='Worker threads that will fetch feeds in parallel.'),
)
def handle(self, *args, **options):
if options['daemonize']:
daemonize()
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
if options['skip']:
feeds = Feed.objects.filter(next_scheduled_update__lte=now,
average_stories_per_month__lt=options['skip'],
active=True)
print " ---> Skipping %s feeds" % feeds.count()
for feed in feeds:
feed.set_next_scheduled_update()
print '.',
return
socket.setdefaulttimeout(options['timeout'])
if options['force']:
feeds = Feed.objects.all()
elif options['username']:
feeds = Feed.objects.filter(subscribers__user=User.objects.get(username=options['username']))
elif options['feed']:
feeds = Feed.objects.filter(pk=options['feed'])
else:
feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True)
feeds = feeds.order_by('?')
for f in feeds:
f.set_next_scheduled_update()
num_workers = min(len(feeds), options['workerthreads'])
if options['single_threaded']:
num_workers = 1
options['compute_scores'] = True
options['quick'] = float(MStatistics.get('quick_fetch', 0))
options['updates_off'] = MStatistics.get('updates_off', False)
disp = feed_fetcher.Dispatcher(options, num_workers)
feeds_queue = []
for _ in range(num_workers):
feeds_queue.append([])
i = 0
for feed in feeds:
feeds_queue[i%num_workers].append(feed.pk)
i += 1
disp.add_jobs(feeds_queue, i)
django.db.connection.close()
print " ---> Fetching %s feeds..." % feeds.count()
disp.run_jobs()
| mit |
luciano666/EVAServer | EVAMobile/node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py | 1284 | 100329 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
# Variables gyp expects every generator to define.  Ninja resolves most paths
# at gyp time, so several of these expand to special '$!'/'$|' placeholders
# that are substituted later by ExpandSpecial.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',

  # Gyp expects the following variables to be expandable by the build
  # system to the appropriate locations.  Ninja prefers paths to be
  # known at gyp time.  To resolve this, introduce special
  # variables starting with $! and $| (which begin with a $ so gyp knows it
  # should be treated specially, but is otherwise an invalid
  # ninja/shell variable) that are passed to gyp here but expanded
  # before writing out into the target .ninja files; see
  # ExpandSpecial.
  # $! is used for variables that represent a path and that can only appear at
  # the start of a string, while $| is used for variables that can appear
  # anywhere in a string.
  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
  'PRODUCT_DIR': '$!PRODUCT_DIR',
  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',

  # Special variables that may be used by gyp 'rule' targets.
  # We generate definitions for these variables on the fly when processing a
  # rule.
  'RULE_INPUT_ROOT': '${root}',
  'RULE_INPUT_DIRNAME': '${dirname}',
  'RULE_INPUT_PATH': '${source}',
  'RULE_INPUT_EXT': '${ext}',
  'RULE_INPUT_NAME': '${name}',
}

# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None

# host/target toolsets are only supported when a cross-compile is requested.
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
def StripPrefix(arg, prefix):
  """Return arg with a leading prefix removed, or arg unchanged."""
  return arg[len(prefix):] if arg.startswith(prefix) else arg
def QuoteShellArgument(arg, flavor):
  """Quote a string such that the shell will read it as a single argument."""
  # Whitelist of characters that never need quoting; anything else gets
  # quoted rather than trying to enumerate all unsafe shell characters.
  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg) is not None:
    return arg  # Safe to pass through verbatim.
  if flavor == 'win':
    return gyp.msvs_emulation.QuoteForRspFile(arg)
  # POSIX shells: wrap in single quotes; an embedded single quote becomes
  # '"'"' (close quote, double-quoted quote, reopen quote).
  escaped = arg.replace("'", "'\"'\"'")
  return "'%s'" % escaped
def Define(d, flavor):
  """Render preprocessor define |d| as a ninja- and shell-escaped -D flag."""
  define = d
  if flavor == 'win':
    # cl.exe replaces literal '#' with '=' in preprocessor definitions for
    # some reason, so octal-encode the character to work around that.
    define = define.replace('#', '\\%03o' % ord('#'))
  return QuoteShellArgument(ninja_syntax.escape('-D' + define), flavor)
def AddArch(output, arch):
  """Insert |arch| before the extension of |output| (foo.o -> foo.<arch>.o)."""
  root, extension = os.path.splitext(output)
  return root + '.' + arch + extension
class Target(object):
  """Paths produced while building a single gyp target.

  Building one target is conceptually a pipeline:
    1) actions/rules/copies generate sources and resources,
    2) compilation produces object files,
    3) linking produces a binary (library/executable),
    4) (mac only) bundling merges the above into a bundle.
  Any stage may be absent.  A dependent target usually only needs the last
  output of the pipeline, but some build steps must reach inside the box
  (e.g. linking against the static library a dependency produced), so each
  concrete artifact path gets its own attribute here, and derived notions
  like "the final output" are computed by methods.
  """

  def __init__(self, type):
    # Gyp target type, e.g. 'static_library' or 'executable'.
    self.type = type
    # Stamp written once every input needed by dependent actions is ready.
    self.preaction_stamp = None
    # Stamp written once every input needed by dependent compiles is ready.
    self.precompile_stamp = None
    # Stamp marking completion of this target's actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link stage, if there is one.
    self.binary = None
    # Marker for completion of the mac bundle, if there is one.
    self.bundle = None
    # On Windows, incremental linking links against the .objs that compose a
    # .lib rather than the .lib itself; those objs — and the compile deps the
    # directly-depending target must also wait on — are remembered here.
    self.component_objs = None
    self.compile_deps = None
    # Windows only: the import .lib produced next to a .dll; dependents link
    # against the lib alone, so it is tracked separately.
    self.import_lib = None

  def Linkable(self):
    """Whether dependents can link against this target."""
    return self.type in ('static_library', 'shared_library')

  def UsesToc(self, flavor):
    """Whether a .TOC-based restat rule should be produced for this target."""
    # A TOC for a bundle would naively end up inside the bundle, and Windows
    # does not use TOC files at all, so both cases are excluded.
    if flavor == 'win' or self.bundle:
      return False
    return self.type in ('shared_library', 'loadable_module')

  def PreActionInput(self, flavor):
    """Path a dependent action step should wait on, if any."""
    if self.UsesToc(flavor):
      return self.FinalOutput() + '.TOC'
    return self.FinalOutput() or self.preaction_stamp

  def PreCompileInput(self):
    """Path a dependent compile step should wait on, if any."""
    return self.actions_stamp or self.precompile_stamp

  def FinalOutput(self):
    """Last output of the pipeline, taking every prior stage into account."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter(object):
  def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
               output_file, toplevel_build, output_file_name, flavor,
               toplevel_dir=None):
    """Set up a writer that emits ninja build rules for one .gyp file.

    hash_for_rules: hash mixed into generated rule names to keep them unique
    target_outputs: map of qualified target name -> Target object built so far
    base_dir: path from source root to directory containing this gyp file,
              by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    output_file: file object the .ninja text is written to
    toplevel_build: path to the toplevel build output directory
    output_file_name: name of the .ninja file being written
    flavor: platform flavor, e.g. 'linux', 'mac' or 'win'
    toplevel_dir: path to the toplevel directory
    """
    self.hash_for_rules = hash_for_rules
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    self.ninja = ninja_syntax.Writer(output_file)
    self.toplevel_build = toplevel_build
    self.output_file_name = output_file_name
    self.flavor = flavor
    # Absolute build dir is only computable when toplevel_dir is known; it is
    # used on Windows for missing-source verification.
    self.abs_build_dir = None
    if toplevel_dir is not None:
      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                        build_dir))
    self.obj_ext = '.obj' if flavor == 'win' else '.o'
    if flavor == 'win':
      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
      self.win_env = {}
      for arch in ('x86', 'x64'):
        self.win_env[arch] = 'environment.' + arch
    # Relative path from build output dir to base dir.
    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
    self.build_to_base = os.path.join(build_to_top, base_dir)
    # Relative path from base dir to build dir.
    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
    self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
See the above discourse on path conversions."""
if env:
if self.flavor == 'mac':
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
elif self.flavor == 'win':
path = gyp.msvs_emulation.ExpandMacros(path, env)
if path.startswith('$!'):
expanded = self.ExpandSpecial(path)
if self.flavor == 'win':
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
assert not os.path.isabs(path_dir), (
"'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
def WriteCollapsedDependencies(self, name, targets, order_only=None):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
Uses a stamp file if necessary."""
assert targets == filter(None, targets), targets
if len(targets) == 0:
assert not order_only
return None
if len(targets) > 1 or order_only:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
self.ninja.newline()
return targets[0]
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
  def WriteSpec(self, spec, config_name, generator_flags):
    """The main entry point for NinjaWriter: write the build rules for a spec.

    Orchestrates, in order: per-target state setup, actions/rules/copies,
    compilation, linking, and (mac) bundling.
    Returns a Target object, which represents the output paths for this spec.
    Returns None if there are no outputs (e.g. a settings-only 'none' type
    target)."""
    # Cache per-spec state on self; the helper methods below rely on these.
    self.config_name = config_name
    self.name = spec['target_name']
    self.toolset = spec['toolset']
    config = spec['configurations'][config_name]
    self.target = Target(spec['type'])
    self.is_standalone_static_library = bool(
        spec.get('standalone_static_library', 0))
    # Track if this target contains any C++ files, to decide if gcc or g++
    # should be used for linking.
    self.uses_cpp = False
    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    self.xcode_settings = self.msvs_settings = None
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    if self.flavor == 'win':
      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                           generator_flags)
      # Bind the per-arch environment and compiler variables for this target.
      arch = self.msvs_settings.GetArch(config_name)
      self.ninja.variable('arch', self.win_env[arch])
      self.ninja.variable('cc', '$cl_' + arch)
      self.ninja.variable('cxx', '$cl_' + arch)
      self.ninja.variable('cc_host', '$cl_' + arch)
      self.ninja.variable('cxx_host', '$cl_' + arch)
      self.ninja.variable('asm', '$ml_' + arch)
    if self.flavor == 'mac':
      self.archs = self.xcode_settings.GetActiveArchs(config_name)
      # Fat binaries get one subninja writer per architecture.
      if len(self.archs) > 1:
        self.arch_subninjas = dict(
            (arch, ninja_syntax.Writer(
                OpenOutput(os.path.join(self.toplevel_build,
                                        self._SubninjaNameForArch(arch)),
                           'w')))
            for arch in self.archs)
    # Compute predepends for all rules.
    # actions_depends is the dependencies this target depends on before running
    # any of its action/rule/copy steps.
    # compile_depends is the dependencies this target depends on before running
    # any of its compile steps.
    actions_depends = []
    compile_depends = []
    # TODO(evan): it is rather confusing which things are lists and which
    # are strings. Fix these.
    if 'dependencies' in spec:
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          target = self.target_outputs[dep]
          actions_depends.append(target.PreActionInput(self.flavor))
          compile_depends.append(target.PreCompileInput())
      actions_depends = filter(None, actions_depends)
      compile_depends = filter(None, compile_depends)
      actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                        actions_depends)
      compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                        compile_depends)
      self.target.preaction_stamp = actions_depends
      self.target.precompile_stamp = compile_depends
    # Write out actions, rules, and copies. These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    mac_bundle_depends = []
    self.target.actions_stamp = self.WriteActionsRulesCopies(
        spec, extra_sources, actions_depends, mac_bundle_depends)
    # If we have actions/rules/copies, we depend directly on those, but
    # otherwise we depend on dependent target's actions/rules/copies etc.
    # We never need to explicitly depend on previous target's link steps,
    # because no compile ever depends on them.
    compile_depends_stamp = (self.target.actions_stamp or compile_depends)
    # Write out the compilation steps, if any.
    link_deps = []
    sources = extra_sources + spec.get('sources', [])
    if sources:
      if self.flavor == 'mac' and len(self.archs) > 1:
        # Write subninja file containing compile and link commands scoped to
        # a single arch if a fat binary is being built.
        for arch in self.archs:
          self.ninja.subninja(self._SubninjaNameForArch(arch))
      pch = None
      if self.flavor == 'win':
        gyp.msvs_emulation.VerifyMissingSources(
            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
        pch = gyp.msvs_emulation.PrecompiledHeader(
            self.msvs_settings, config_name, self.GypPathToNinja,
            self.GypPathToUniqueOutput, self.obj_ext)
      else:
        pch = gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
      link_deps = self.WriteSources(
          self.ninja, config_name, config, sources, compile_depends_stamp, pch,
          spec)
      # Some actions/rules output 'sources' that are already object files.
      obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
      if obj_outputs:
        if self.flavor != 'mac' or len(self.archs) == 1:
          link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
        else:
          print "Warning: Actions/rules writing object files don't work with " \
              "multiarch targets, dropping. (target %s)" % spec['target_name']
    elif self.flavor == 'mac' and len(self.archs) > 1:
      link_deps = collections.defaultdict(list)
    compile_deps = self.target.actions_stamp or actions_depends
    if self.flavor == 'win' and self.target.type == 'static_library':
      self.target.component_objs = link_deps
      self.target.compile_deps = compile_deps
    # Write out a link step, if needed.
    output = None
    is_empty_bundle = not link_deps and not mac_bundle_depends
    if link_deps or self.target.actions_stamp or actions_depends:
      output = self.WriteTarget(spec, config_name, config, link_deps,
                                compile_deps)
      if self.is_mac_bundle:
        mac_bundle_depends.append(output)
    # Bundle all of the above together, if needed.
    if self.is_mac_bundle:
      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
    if not output:
      return None
    assert self.target.FinalOutput(), output
    return self.target
  def _WinIdlRule(self, source, prebuild, outputs):
    """Handle the implicit VS .idl rule for one source file. Fills |outputs|
    with files that are generated."""
    outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
        source, self.config_name)
    outdir = self.GypPathToNinja(outdir)
    def fix_path(path, rel=None):
      # Root |path| in the idl output dir and expand rule variables.  Note
      # that root/dirname/ext come from the outer |source|, not from |path|.
      path = os.path.join(outdir, path)
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)
      path = self.ExpandRuleVariables(
          path, root, dirname, source, ext, basename)
      if rel:
        path = os.path.relpath(path, rel)
      return path
    # Variable values are made relative to outdir; outputs stay outdir-rooted.
    vars = [(name, fix_path(value, outdir)) for name, value in vars]
    output = [fix_path(p) for p in output]
    vars.append(('outdir', outdir))
    vars.append(('idlflags', flags))
    input = self.GypPathToNinja(source)
    self.ninja.build(output, 'idl', input,
                     variables=vars, order_only=prebuild)
    outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
"""Writes rules to match MSVS's implicit idl handling."""
assert self.flavor == 'win'
if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
return []
outputs = []
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
self._WinIdlRule(source, prebuild, outputs)
return outputs
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
mac_bundle_depends):
"""Write out the Actions, Rules, and Copies steps. Return a path
representing the outputs of these steps."""
outputs = []
if self.is_mac_bundle:
mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
else:
mac_bundle_resources = []
extra_mac_bundle_resources = []
if 'actions' in spec:
outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
extra_mac_bundle_resources)
if 'rules' in spec:
outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
mac_bundle_resources,
extra_mac_bundle_resources)
if 'copies' in spec:
outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
xcassets = self.WriteMacBundleResources(
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
return stamp
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
if self.toolset != 'target':
verb += '(%s)' % self.toolset
if message:
return '%s %s' % (verb, self.ExpandSpecial(message))
else:
return '%s %s: %s' % (verb, self.name, fallback)
def WriteActions(self, actions, extra_sources, prebuild,
extra_mac_bundle_resources):
# Actions cd into the base directory.
env = self.GetToolchainEnv()
all_outputs = []
for action in actions:
# First write out a rule for the action.
name = '%s_%s' % (action['action_name'], self.hash_for_rules)
description = self.GenerateDescription('ACTION',
action.get('message', None),
name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
if self.flavor == 'win' else False)
args = action['action']
depfile = action.get('depfile', None)
if depfile:
depfile = self.ExpandSpecial(depfile, self.base_to_build)
pool = 'console' if int(action.get('ninja_use_console', 0)) else None
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
is_cygwin, env, pool,
depfile=depfile)
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
if int(action.get('process_outputs_as_sources', False)):
extra_sources += action['outputs']
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += action['outputs']
outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
# Then write out an edge using the rule.
self.ninja.build(outputs, rule_name, inputs,
order_only=prebuild)
all_outputs += outputs
self.ninja.newline()
return all_outputs
  def WriteRules(self, rules, extra_sources, prebuild,
                 mac_bundle_resources, extra_mac_bundle_resources):
    """Write one ninja rule and per-source build edges for each gyp 'rule'.

    Outputs flagged as sources / mac bundle resources are appended to
    |extra_sources| / |extra_mac_bundle_resources| in place; sources consumed
    as bundle resources are removed from |mac_bundle_resources|.  Returns the
    ninja paths of every output produced.
    """
    env = self.GetToolchainEnv()
    all_outputs = []
    for rule in rules:
      # Skip a rule with no action and no inputs.
      if 'action' not in rule and not rule.get('rule_sources', []):
        continue
      # First write out a rule for the rule action.
      name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
      args = rule['action']
      description = self.GenerateDescription(
          'RULE',
          rule.get('message', None),
          ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
                   if self.flavor == 'win' else False)
      pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
      rule_name, args = self.WriteNewNinjaRule(
          name, args, description, is_cygwin, env, pool)
      # TODO: if the command references the outputs directly, we should
      # simplify it to just use $out.
      # Rules can potentially make use of some special variables which
      # must vary per source file.
      # Compute the list of variables we'll need to provide.
      special_locals = ('source', 'root', 'dirname', 'ext', 'name')
      needed_variables = set(['source'])
      for argument in args:
        for var in special_locals:
          if '${%s}' % var in argument:
            needed_variables.add(var)
      def cygwin_munge(path):
        # pylint: disable=cell-var-from-loop
        # Closes over the current rule's is_cygwin; only called before the
        # next loop iteration, so the late binding is safe here.
        if is_cygwin:
          return path.replace('\\', '/')
        return path
      inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
      # If there are n source files matching the rule, and m additional rule
      # inputs, then adding 'inputs' to each build edge written below will
      # write m * n inputs. Collapsing reduces this to m + n.
      sources = rule.get('rule_sources', [])
      num_inputs = len(inputs)
      if prebuild:
        num_inputs += 1
      if num_inputs > 2 and len(sources) > 2:
        inputs = [self.WriteCollapsedDependencies(
            rule['rule_name'], inputs, order_only=prebuild)]
        # The collapsed stamp already carries the prebuild dependency.
        prebuild = []
      # For each source file, write an edge that generates all the outputs.
      for source in sources:
        source = os.path.normpath(source)
        dirname, basename = os.path.split(source)
        root, ext = os.path.splitext(basename)
        # Gather the list of inputs and outputs, expanding $vars if possible.
        outputs = [self.ExpandRuleVariables(o, root, dirname,
                                            source, ext, basename)
                   for o in rule['outputs']]
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources += outputs
        was_mac_bundle_resource = source in mac_bundle_resources
        if was_mac_bundle_resource or \
            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          extra_mac_bundle_resources += outputs
          # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
          # items in a set and remove them all in a single pass if this becomes
          # a performance issue.
          if was_mac_bundle_resource:
            mac_bundle_resources.remove(source)
        extra_bindings = []
        for var in needed_variables:
          if var == 'root':
            extra_bindings.append(('root', cygwin_munge(root)))
          elif var == 'dirname':
            # '$dirname' is a parameter to the rule action, which means
            # it shouldn't be converted to a Ninja path. But we don't
            # want $!PRODUCT_DIR in there either.
            dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
            extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
          elif var == 'source':
            # '$source' is a parameter to the rule action, which means
            # it shouldn't be converted to a Ninja path. But we don't
            # want $!PRODUCT_DIR in there either.
            source_expanded = self.ExpandSpecial(source, self.base_to_build)
            extra_bindings.append(('source', cygwin_munge(source_expanded)))
          elif var == 'ext':
            extra_bindings.append(('ext', ext))
          elif var == 'name':
            extra_bindings.append(('name', cygwin_munge(basename)))
          else:
            assert var == None, repr(var)
        outputs = [self.GypPathToNinja(o, env) for o in outputs]
        if self.flavor == 'win':
          # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
          extra_bindings.append(('unique_name',
                                 hashlib.md5(outputs[0]).hexdigest()))
        self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                         implicit=inputs,
                         order_only=prebuild,
                         variables=extra_bindings)
        all_outputs.extend(outputs)
    return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
path = os.path.normpath(path)
basename = os.path.split(path)[1]
src = self.GypPathToNinja(path, env)
dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
env)
outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
if self.is_mac_bundle:
# gyp has mac_bundle_resources to copy things into a bundle's
# Resources folder, but there's no built-in way to copy files to other
# places in the bundle. Hence, some targets use copies for this. Check
# if this file is copied into the current bundle, and if so add it to
# the bundle depends so that dependent targets get rebuilt if the copy
# input changes.
if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
mac_bundle_depends.append(dst)
return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
xcassets = []
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
if os.path.splitext(output)[-1] != '.xcassets':
isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource'), \
('binary', isBinary)])
bundle_depends.append(output)
else:
xcassets.append(res)
return xcassets
def WriteMacXCassets(self, xcassets, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources' .xcassets files.
This add an invocation of 'actool' via the 'mac_tool.py' helper script.
It assumes that the assets catalogs define at least one imageset and
thus an Assets.car file will be generated in the application resources
directory. If this is not the case, then the build will probably be done
at each invocation of ninja."""
if not xcassets:
return
extra_arguments = {}
settings_to_arg = {
'XCASSETS_APP_ICON': 'app-icon',
'XCASSETS_LAUNCH_IMAGE': 'launch-image',
}
settings = self.xcode_settings.xcode_settings[self.config_name]
for settings_key, arg_name in settings_to_arg.iteritems():
value = settings.get(settings_key)
if value:
extra_arguments[arg_name] = value
partial_info_plist = None
if extra_arguments:
partial_info_plist = self.GypPathToUniqueOutput(
'assetcatalog_generated_info.plist')
extra_arguments['output-partial-info-plist'] = partial_info_plist
outputs = []
outputs.append(
os.path.join(
self.xcode_settings.GetBundleResourceFolder(),
'Assets.car'))
if partial_info_plist:
outputs.append(partial_info_plist)
keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
extra_env = self.xcode_settings.GetPerTargetSettings()
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
bundle_depends.extend(self.ninja.build(
outputs, 'compile_xcassets', xcassets,
variables=[('env', env), ('keys', keys)]))
return partial_info_plist
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
"""Write build rules for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, self.GypPathToNinja)
if not info_plist:
return
out = self.ExpandSpecial(out)
if defines:
# Create an intermediate file to store preprocessed results.
intermediate_plist = self.GypPathToUniqueOutput(
os.path.basename(info_plist))
defines = ' '.join([Define(d, self.flavor) for d in defines])
info_plist = self.ninja.build(
intermediate_plist, 'preprocess_infoplist', info_plist,
variables=[('defines',defines)])
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
if partial_info_plist:
intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
info_plist = self.ninja.build(
intermediate_plist, 'merge_infoplist',
[partial_info_plist, info_plist])
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(out, 'copy_infoplist', info_plist,
variables=[('env', env), ('keys', keys),
('binary', isBinary)])
bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
precompiled_header, spec):
"""Write build rules to compile all of |sources|."""
if self.toolset == 'host':
self.ninja.variable('ar', '$ar_host')
self.ninja.variable('cc', '$cc_host')
self.ninja.variable('cxx', '$cxx_host')
self.ninja.variable('ld', '$ld_host')
self.ninja.variable('ldxx', '$ldxx_host')
self.ninja.variable('nm', '$nm_host')
self.ninja.variable('readelf', '$readelf_host')
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteSourcesForArch(
self.ninja, config_name, config, sources, predepends,
precompiled_header, spec)
else:
return dict((arch, self.WriteSourcesForArch(
self.arch_subninjas[arch], config_name, config, sources, predepends,
precompiled_header, spec, arch=arch))
for arch in self.archs)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
                        predepends, precompiled_header, spec, arch=None):
  """Write build rules to compile all of |sources|.

  Emits per-target ninja variables (defines, includes, cflags, ...) and one
  build statement per compilable source file.  Returns the list of object
  file outputs.  |arch| is only set for per-arch mac subninjas.
  """
  extra_defines = []
  if self.flavor == 'mac':
    cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
    cflags_c = self.xcode_settings.GetCflagsC(config_name)
    cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
    # ObjC(++) flags extend the C/C++ flags through ninja variable
    # references; '$cflags_c'/'$cflags_cc' are expanded by ninja itself.
    cflags_objc = ['$cflags_c'] + \
                  self.xcode_settings.GetCflagsObjC(config_name)
    cflags_objcc = ['$cflags_cc'] + \
                   self.xcode_settings.GetCflagsObjCC(config_name)
  elif self.flavor == 'win':
    asmflags = self.msvs_settings.GetAsmflags(config_name)
    cflags = self.msvs_settings.GetCflags(config_name)
    cflags_c = self.msvs_settings.GetCflagsC(config_name)
    cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
    extra_defines = self.msvs_settings.GetComputedDefines(config_name)
    # See comment at cc_command for why there's two .pdb files.
    pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
        config_name, self.ExpandSpecial)
    if not pdbpath_c:
      # No explicit compiler pdb configured: derive one from the object
      # directory and target name so parallel targets don't clobber it.
      obj = 'obj'
      if self.toolset != 'target':
        obj += '.' + self.toolset
      pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
      pdbpath_c = pdbpath + '.c.pdb'
      pdbpath_cc = pdbpath + '.cc.pdb'
    self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
    self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
    self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
  else:
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cc = config.get('cflags_cc', [])

  # Respect environment variables related to build, but target-specific
  # flags can still override them.
  if self.toolset == 'target':
    cflags_c = (os.environ.get('CPPFLAGS', '').split() +
                os.environ.get('CFLAGS', '').split() + cflags_c)
    cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
                 os.environ.get('CXXFLAGS', '').split() + cflags_cc)
  elif self.toolset == 'host':
    cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
                os.environ.get('CFLAGS_host', '').split() + cflags_c)
    cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
                 os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)

  defines = config.get('defines', []) + extra_defines
  self.WriteVariableList(ninja_file, 'defines',
                         [Define(d, self.flavor) for d in defines])
  if self.flavor == 'win':
    self.WriteVariableList(ninja_file, 'asmflags',
                           map(self.ExpandSpecial, asmflags))
    self.WriteVariableList(ninja_file, 'rcflags',
        [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
         for f in self.msvs_settings.GetRcflags(config_name,
                                                self.GypPathToNinja)])

  include_dirs = config.get('include_dirs', [])

  env = self.GetToolchainEnv()
  if self.flavor == 'win':
    include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                        config_name)
  self.WriteVariableList(ninja_file, 'includes',
      [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
       for i in include_dirs])

  if self.flavor == 'win':
    midl_include_dirs = config.get('midl_include_dirs', [])
    midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
        midl_include_dirs, config_name)
    self.WriteVariableList(ninja_file, 'midl_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in midl_include_dirs])

  pch_commands = precompiled_header.GetPchBuildCommands(arch)
  if self.flavor == 'mac':
    # Most targets use no precompiled headers, so only write these if needed.
    for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
                     ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
      include = precompiled_header.GetInclude(ext, arch)
      if include: ninja_file.variable(var, include)

  arflags = config.get('arflags', [])

  self.WriteVariableList(ninja_file, 'cflags',
                         map(self.ExpandSpecial, cflags))
  self.WriteVariableList(ninja_file, 'cflags_c',
                         map(self.ExpandSpecial, cflags_c))
  self.WriteVariableList(ninja_file, 'cflags_cc',
                         map(self.ExpandSpecial, cflags_cc))
  if self.flavor == 'mac':
    self.WriteVariableList(ninja_file, 'cflags_objc',
                           map(self.ExpandSpecial, cflags_objc))
    self.WriteVariableList(ninja_file, 'cflags_objcc',
                           map(self.ExpandSpecial, cflags_objcc))
  self.WriteVariableList(ninja_file, 'arflags',
                         map(self.ExpandSpecial, arflags))
  ninja_file.newline()
  outputs = []
  has_rc_source = False
  for source in sources:
    # Pick the ninja rule by file extension; unhandled extensions are
    # silently skipped (headers, data files, ...).
    filename, ext = os.path.splitext(source)
    ext = ext[1:]
    obj_ext = self.obj_ext
    if ext in ('cc', 'cpp', 'cxx'):
      command = 'cxx'
      self.uses_cpp = True
    elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
      command = 'cc'
    elif ext == 's' and self.flavor != 'win':  # Doesn't generate .o.d files.
      command = 'cc_s'
    elif (self.flavor == 'win' and ext == 'asm' and
          not self.msvs_settings.HasExplicitAsmRules(spec)):
      command = 'asm'
      # Add the _asm suffix as msvs is capable of handling .cc and
      # .asm files of the same name without collision.
      obj_ext = '_asm.obj'
    elif self.flavor == 'mac' and ext == 'm':
      command = 'objc'
    elif self.flavor == 'mac' and ext == 'mm':
      command = 'objcxx'
      self.uses_cpp = True
    elif self.flavor == 'win' and ext == 'rc':
      command = 'rc'
      obj_ext = '.res'
      has_rc_source = True
    else:
      # Ignore unhandled extensions.
      continue
    input = self.GypPathToNinja(source)
    output = self.GypPathToUniqueOutput(filename + obj_ext)
    if arch is not None:
      output = AddArch(output, arch)
    implicit = precompiled_header.GetObjDependencies([input], [output], arch)
    variables = []
    if self.flavor == 'win':
      variables, output, implicit = precompiled_header.GetFlagsModifications(
          input, output, implicit, command, cflags_c, cflags_cc,
          self.ExpandSpecial)
    ninja_file.build(output, command, input,
                     implicit=[gch for _, _, gch in implicit],
                     order_only=predepends, variables=variables)
    outputs.append(output)

  if has_rc_source:
    resource_include_dirs = config.get('resource_include_dirs', include_dirs)
    self.WriteVariableList(ninja_file, 'resource_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in resource_include_dirs])

  self.WritePchTargets(ninja_file, pch_commands)

  ninja_file.newline()
  return outputs
def WritePchTargets(self, ninja_file, pch_commands):
  """Writes ninja rules to compile prefix headers."""
  if not pch_commands:
    return
  # Per-language ninja variable holding the pch compile flags, and the
  # compile rule used for that language.
  lang_to_var = {
      'c': 'cflags_pch_c',
      'cc': 'cflags_pch_cc',
      'm': 'cflags_pch_objc',
      'mm': 'cflags_pch_objcc',
  }
  lang_to_rule = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx'}
  for gch, lang_flag, lang, input in pch_commands:
    ninja_file.build(gch, lang_to_rule.get(lang), input,
                     variables=[(lang_to_var[lang], lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
  """Write out a link step. Fills out target.binary. """
  # Single-arch (and non-mac) targets link directly.  Multi-arch mac
  # targets link each architecture in its own subninja and then combine
  # the per-arch binaries with lipo below.
  if self.flavor != 'mac' or len(self.archs) == 1:
    return self.WriteLinkForArch(
        self.ninja, spec, config_name, config, link_deps)
  else:
    output = self.ComputeOutput(spec)
    inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
                                    config_name, config, link_deps[arch],
                                    arch=arch)
              for arch in self.archs]
    extra_bindings = []
    build_output = output
    if not self.is_mac_bundle:
      self.AppendPostbuildVariable(extra_bindings, spec, output, output)

    # TODO(yyanagisawa): more work needed to fix:
    # https://code.google.com/p/gyp/issues/detail?id=411
    if (spec['type'] in ('shared_library', 'loadable_module') and
        not self.is_mac_bundle):
      extra_bindings.append(('lib', output))
      # 'solipo' additionally maintains the .TOC file so dependents only
      # relink when the library's exported interface changes.
      self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
                       variables=extra_bindings)
    else:
      self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
    return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
                     link_deps, arch=None):
  """Write out a link step. Fills out target.binary.

  Returns the linked binary path (not the full build-statement output list,
  which may also contain .TOC / import-lib / pdb side outputs).
  """
  command = {
    'executable':      'link',
    'loadable_module': 'solink_module',
    'shared_library':  'solink',
  }[spec['type']]
  command_suffix = ''

  implicit_deps = set()
  solibs = set()
  order_deps = set()

  if 'dependencies' in spec:
    # Two kinds of dependencies:
    # - Linkable dependencies (like a .a or a .so): add them to the link line.
    # - Non-linkable dependencies (like a rule that generates a file
    #   and writes a stamp file): add them to implicit_deps
    extra_link_deps = set()
    for dep in spec['dependencies']:
      target = self.target_outputs.get(dep)
      if not target:
        continue
      linkable = target.Linkable()
      if linkable:
        new_deps = []
        if (self.flavor == 'win' and
            target.component_objs and
            self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
          # Link against the dependency's object files directly instead of
          # its import library.
          new_deps = target.component_objs
          if target.compile_deps:
            order_deps.add(target.compile_deps)
        elif self.flavor == 'win' and target.import_lib:
          new_deps = [target.import_lib]
        elif target.UsesToc(self.flavor):
          # Depend on the .TOC (exported-interface summary) so we only
          # relink when the interface changes, not on every rebuild.
          solibs.add(target.binary)
          implicit_deps.add(target.binary + '.TOC')
        else:
          new_deps = [target.binary]
        for new_dep in new_deps:
          if new_dep not in extra_link_deps:
            extra_link_deps.add(new_dep)
            link_deps.append(new_dep)

      final_output = target.FinalOutput()
      if not linkable or final_output != target.binary:
        implicit_deps.add(final_output)

  extra_bindings = []
  if self.uses_cpp and self.flavor != 'win':
    # Any C++ in the target means we must link with the C++ driver.
    extra_bindings.append(('ld', '$ldxx'))

  output = self.ComputeOutput(spec, arch)
  if arch is None and not self.is_mac_bundle:
    self.AppendPostbuildVariable(extra_bindings, spec, output, output)

  is_executable = spec['type'] == 'executable'
  # The ldflags config key is not used on mac or win. On those platforms
  # linker flags are set via xcode_settings and msvs_settings, respectively.
  env_ldflags = os.environ.get('LDFLAGS', '').split()
  if self.flavor == 'mac':
    ldflags = self.xcode_settings.GetLdflags(config_name,
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.GypPathToNinja, arch)
    ldflags = env_ldflags + ldflags
  elif self.flavor == 'win':
    manifest_base_name = self.GypPathToUniqueOutput(
        self.ComputeOutputFileName(spec))
    ldflags, intermediate_manifest, manifest_files = \
        self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
                                      self.ExpandSpecial, manifest_base_name,
                                      output, is_executable,
                                      self.toplevel_build)
    ldflags = env_ldflags + ldflags
    self.WriteVariableList(ninja_file, 'manifests', manifest_files)
    implicit_deps = implicit_deps.union(manifest_files)
    if intermediate_manifest:
      self.WriteVariableList(
          ninja_file, 'intermediatemanifest', [intermediate_manifest])
    command_suffix = _GetWinLinkRuleNameSuffix(
        self.msvs_settings.IsEmbedManifest(config_name))
    def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
    if def_file:
      implicit_deps.add(def_file)
  else:
    # Respect environment variables related to build, but target-specific
    # flags can still override them.
    ldflags = env_ldflags + config.get('ldflags', [])
    if is_executable and len(solibs):
      # Executables linking against shared libs get an $ORIGIN-relative
      # rpath so they find the libs in the build output's lib/ dir.
      rpath = 'lib/'
      if self.toolset != 'target':
        rpath += self.toolset
      ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
      ldflags.append('-Wl,-rpath-link=%s' % rpath)
  self.WriteVariableList(ninja_file, 'ldflags',
                         map(self.ExpandSpecial, ldflags))

  library_dirs = config.get('library_dirs', [])
  if self.flavor == 'win':
    library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
                    for l in library_dirs]
    library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
                                                     self.flavor)
                    for l in library_dirs]
  else:
    library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
                                       self.flavor)
                    for l in library_dirs]

  libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                     spec.get('libraries', [])))
  if self.flavor == 'mac':
    libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
  elif self.flavor == 'win':
    libraries = self.msvs_settings.AdjustLibraries(libraries)

  self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)

  linked_binary = output

  if command in ('solink', 'solink_module'):
    extra_bindings.append(('soname', os.path.split(output)[1]))
    extra_bindings.append(('lib',
                          gyp.common.EncodePOSIXShellArgument(output)))
    if self.flavor != 'win':
      link_file_list = output
      if self.is_mac_bundle:
        # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
        # 'Dependency Framework.framework.rsp'
        link_file_list = self.xcode_settings.GetWrapperName()
      if arch:
        link_file_list += '.' + arch
      link_file_list += '.rsp'
      # If an rspfile contains spaces, ninja surrounds the filename with
      # quotes around it and then passes it to open(), creating a file with
      # quotes in its name (and when looking for the rsp file, the name
      # makes it through bash which strips the quotes) :-/
      link_file_list = link_file_list.replace(' ', '_')
      extra_bindings.append(
        ('link_file_list',
          gyp.common.EncodePOSIXShellArgument(link_file_list)))
    if self.flavor == 'win':
      extra_bindings.append(('binary', output))
      if ('/NOENTRY' not in ldflags and
          not self.msvs_settings.GetNoImportLibrary(config_name)):
        self.target.import_lib = output + '.lib'
        extra_bindings.append(('implibflag',
                               '/IMPLIB:%s' % self.target.import_lib))
        pdbname = self.msvs_settings.GetPDBName(
            config_name, self.ExpandSpecial, output + '.pdb')
        output = [output, self.target.import_lib]
        if pdbname:
          output.append(pdbname)
    elif not self.is_mac_bundle:
      output = [output, output + '.TOC']
    else:
      command = command + '_notoc'
  elif self.flavor == 'win':
    extra_bindings.append(('binary', output))
    pdbname = self.msvs_settings.GetPDBName(
        config_name, self.ExpandSpecial, output + '.pdb')
    if pdbname:
      output = [output, pdbname]

  if len(solibs):
    extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))

  ninja_file.build(output, command + command_suffix, link_deps,
                   implicit=list(implicit_deps),
                   order_only=list(order_deps),
                   variables=extra_bindings)
  return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
  """Write the final build step for the target and fill in target.binary.

  Dispatches on target type: stamp for 'none'/empty targets, alink(_thin)
  for static libraries, WriteLink for everything else.
  """
  extra_link_deps = any(self.target_outputs.get(dep).Linkable()
                        for dep in spec.get('dependencies', [])
                        if dep in self.target_outputs)
  if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
    # TODO(evan): don't call this function for 'none' target types, as
    # it doesn't do anything, and we fake out a 'binary' with a stamp file.
    self.target.binary = compile_deps
    self.target.type = 'none'
  elif spec['type'] == 'static_library':
    self.target.binary = self.ComputeOutput(spec)
    if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
        self.is_standalone_static_library):
      # Thin archives are cheaper to produce; standalone libraries must be
      # real archives because they may be shipped outside the build dir.
      self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                       order_only=compile_deps)
    else:
      variables = []
      if self.xcode_settings:
        libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
        if libtool_flags:
          variables.append(('libtool_flags', libtool_flags))
      if self.msvs_settings:
        libflags = self.msvs_settings.GetLibFlags(config_name,
                                                  self.GypPathToNinja)
        variables.append(('libflags', libflags))

      if self.flavor != 'mac' or len(self.archs) == 1:
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', link_deps,
                         order_only=compile_deps, variables=variables)
      else:
        # Multi-arch mac: archive each arch separately, then combine the
        # single-arch .a files into a fat archive.
        inputs = []
        for arch in self.archs:
          output = self.ComputeOutput(spec, arch)
          self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
                                          order_only=compile_deps,
                                          variables=variables)
          inputs.append(output)
        # TODO: It's not clear if libtool_flags should be passed to the alink
        # call that combines single-arch .a files into a fat .a file.
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', inputs,
                         # FIXME: test proving order_only=compile_deps isn't
                         # needed.
                         variables=variables)
  else:
    self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
  return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
  """Write the bundle-packaging step and fill in target.bundle."""
  assert self.is_mac_bundle
  package_framework = spec['type'] in ('shared_library', 'loadable_module')
  output = self.ComputeMacBundleOutput()
  if is_empty:
    output += '.stamp'
  variables = []
  self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                               is_command_start=not package_framework)
  # Frameworks get a versioned-layout packaging step; everything else
  # (including empty bundles) is just stamped.
  if package_framework and not is_empty:
    variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
    rule = 'package_framework'
  else:
    rule = 'stamp'
  self.ninja.build(output, rule, mac_bundle_depends, variables=variables)
  self.target.bundle = output
  return output
def GetToolchainEnv(self, additional_settings=None):
  """Returns the variables toolchain would set for build steps."""
  env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
  if self.flavor != 'win':
    return env
  # On Windows the MSVS macro environment replaces the Xcode one.
  return self.GetMsvsToolchainEnv(additional_settings=additional_settings)
def GetMsvsToolchainEnv(self, additional_settings=None):
  """Returns the variables Visual Studio would set for build steps."""
  # |additional_settings| is accepted for interface symmetry with
  # GetToolchainEnv; the MSVS macro environment does not consume it.
  macro_env = self.msvs_settings.GetVSMacroEnv(
      '$!PRODUCT_DIR', config=self.config_name)
  return macro_env
def GetSortedXcodeEnv(self, additional_settings=None):
  """Returns the variables Xcode would set for build steps."""
  assert self.abs_build_dir
  # The gyp file's directory, resolved against the absolute build dir.
  srcroot = os.path.join(self.abs_build_dir, self.build_to_base)
  return gyp.xcode_emulation.GetSortedXcodeEnv(
      self.xcode_settings, self.abs_build_dir, srcroot, self.config_name,
      additional_settings)
def GetSortedXcodePostbuildEnv(self):
  """Returns the variables Xcode would set for postbuild steps."""
  # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
  # TODO(thakis): It would be nice to have some general mechanism instead.
  strip_save_file = self.xcode_settings.GetPerTargetSetting(
      'CHROMIUM_STRIP_SAVE_FILE')
  extra = {'CHROMIUM_STRIP_SAVE_FILE': strip_save_file} if strip_save_file \
      else {}
  return self.GetSortedXcodeEnv(additional_settings=extra)
def AppendPostbuildVariable(self, variables, spec, output, binary,
                            is_command_start=False):
  """Adds a 'postbuild' variable if there is a postbuild for |output|."""
  commands = self.GetPostbuildCommand(spec, output, binary, is_command_start)
  if not commands:
    return
  variables.append(('postbuilds', commands))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
  """Returns a shell command that runs all the postbuilds, and removes
  |output| if any of them fails. If |is_command_start| is False, then the
  returned string will start with ' && '."""
  # Postbuilds only exist for targets with Xcode settings; 'none' targets
  # and targets without an output have nothing to run.
  if not self.xcode_settings or spec['type'] == 'none' or not output:
    return ''
  output = QuoteShellArgument(output, self.flavor)
  postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
  if output_binary is not None:
    postbuilds = self.xcode_settings.AddImplicitPostbuilds(
        self.config_name,
        os.path.normpath(os.path.join(self.base_to_build, output)),
        QuoteShellArgument(
            os.path.normpath(os.path.join(self.base_to_build, output_binary)),
            self.flavor),
        postbuilds, quiet=True)

  if not postbuilds:
    return ''
  # Postbuilds expect to be run in the gyp file's directory, so insert an
  # implicit postbuild to cd to there.
  postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
      ['cd', self.build_to_base]))
  env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
  # G will be non-null if any postbuild fails. Run all postbuilds in a
  # subshell.  '$$' survives ninja's variable expansion as a literal '$'.
  commands = env + ' (' + \
      ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
  command_string = (commands + '); G=$$?; '
                    # Remove the final output if any postbuild failed.
                    '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
  if is_command_start:
    return '(' + command_string + ' && '
  else:
    # '$ ' is ninja's escape for a literal space at the start.
    return '$ && (' + command_string
def ComputeExportEnvString(self, env):
  """Given an environment, returns a string looking like
  'export FOO=foo; export BAR="${FOO} bar;'
  that exports |env| to the shell."""
  return ' '.join(
      'export %s=%s;' % (name, ninja_syntax.escape(
          gyp.common.EncodePOSIXShellArgument(value)))
      for name, value in env)
def ComputeMacBundleOutput(self):
  """Return the 'output' (full output path) to a bundle output directory."""
  assert self.is_mac_bundle
  product_dir = generator_default_variables['PRODUCT_DIR']
  wrapper = self.xcode_settings.GetWrapperName()
  return self.ExpandSpecial(os.path.join(product_dir, wrapper))
def ComputeOutputFileName(self, spec, type=None):
  """Compute the filename of the final output for the current target."""
  if not type:
    type = spec['type']

  default_variables = copy.copy(generator_default_variables)
  CalculateVariables(default_variables, {'flavor': self.flavor})

  # Per-type filename prefix (e.g. 'lib') and extension; the spec may
  # override either via 'product_prefix' / 'product_extension'.
  default_prefixes = {
      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
      'shared_library': default_variables['SHARED_LIB_PREFIX'],
      'static_library': default_variables['STATIC_LIB_PREFIX'],
      'executable': default_variables['EXECUTABLE_PREFIX'],
  }
  default_extensions = {
      'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
      'shared_library': default_variables['SHARED_LIB_SUFFIX'],
      'static_library': default_variables['STATIC_LIB_SUFFIX'],
      'executable': default_variables['EXECUTABLE_SUFFIX'],
  }
  prefix = spec.get('product_prefix', default_prefixes.get(type, ''))

  extension = spec.get('product_extension')
  extension = ('.' + extension) if extension \
      else default_extensions.get(type, '')

  # An explicit 'product_name' wins; otherwise derive from the target name.
  target = spec['product_name'] if 'product_name' in spec \
      else spec['target_name']
  if prefix == 'lib':
    # Snip out an extra 'lib' from libs if appropriate.
    target = StripPrefix(target, 'lib')

  if type in ('static_library', 'loadable_module', 'shared_library',
              'executable'):
    return '%s%s%s' % (prefix, target, extension)
  if type == 'none':
    return '%s.stamp' % target
  raise Exception('Unhandled output type %s' % type)
def ComputeOutput(self, spec, arch=None):
  """Compute the path for the final output of the spec."""
  type = spec['type']

  if self.flavor == 'win':
    # MSVS settings may explicitly override the output name.
    override = self.msvs_settings.GetOutputName(self.config_name,
                                                self.ExpandSpecial)
    if override:
      return override

  if arch is None and self.flavor == 'mac' and type in (
      'static_library', 'executable', 'shared_library', 'loadable_module'):
    filename = self.xcode_settings.GetExecutablePath()
  else:
    filename = self.ComputeOutputFileName(spec, type)

  if arch is None and 'product_dir' in spec:
    path = os.path.join(spec['product_dir'], filename)
    return self.ExpandSpecial(path)

  # Some products go into the output root, libraries go into shared library
  # dir, and everything else goes into the normal place.
  type_in_output_root = ['executable', 'loadable_module']
  if self.flavor == 'mac' and self.toolset == 'target':
    type_in_output_root += ['shared_library', 'static_library']
  elif self.flavor == 'win' and self.toolset == 'target':
    type_in_output_root += ['shared_library']

  if arch is not None:
    # Make sure partial executables don't end up in a bundle or the regular
    # output directory.
    archdir = 'arch'
    if self.toolset != 'target':
      archdir = os.path.join('arch', '%s' % self.toolset)
    return os.path.join(archdir, AddArch(filename, arch))
  elif type in type_in_output_root or self.is_standalone_static_library:
    return filename
  elif type == 'shared_library':
    libdir = 'lib'
    if self.toolset != 'target':
      # Non-default toolsets get their own lib subdirectory.
      libdir = os.path.join('lib', '%s' % self.toolset)
    return os.path.join(libdir, filename)
  else:
    return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
  """Emit |var| into |ninja_file| as the space-joined |values|.

  |values| may be None (treated as empty) but must not be a bare string.
  """
  assert not isinstance(values, str)
  ninja_file.variable(var, ' '.join(values or []))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
                      depfile=None):
  """Write out a new ninja "rule" statement for a given command.

  Returns the name of the new rule, and a copy of |args| with variables
  expanded.

  Args:
    name: short name of the gyp rule/action this command came from.
    args: the command argv; VS macros (win) or Xcode env vars (mac) are
        expanded per flavor before writing.
    is_cygwin: Windows only; wrap the command in a cygwin bash sub-shell.
    env: environment pairs to export (empty list on non-mac).
    pool: optional ninja pool name limiting this rule's concurrency.
    depfile: optional depfile the generated rule should declare.
  """
  if self.flavor == 'win':
    args = [self.msvs_settings.ConvertVSMacros(
                arg, self.base_to_build, config=self.config_name)
            for arg in args]
    description = self.msvs_settings.ConvertVSMacros(
        description, config=self.config_name)
  elif self.flavor == 'mac':
    # |env| is an empty list on non-mac.
    args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
    description = gyp.xcode_emulation.ExpandEnvVars(description, env)

  # TODO: we shouldn't need to qualify names; we do it because
  # currently the ninja rule namespace is global, but it really
  # should be scoped to the subninja.
  rule_name = self.name
  # Only non-default toolsets qualify the rule name with the toolset; the
  # default 'target' toolset stays unsuffixed.  (The previous '==' check
  # inverted this, suffixing default-toolset rules and leaving host-toolset
  # rules to collide with them in ninja's global rule namespace.)
  if self.toolset != 'target':
    rule_name += '.' + self.toolset
  rule_name += '.' + name
  rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)

  # Remove variable references, but not if they refer to the magic rule
  # variables. This is not quite right, as it also protects these for
  # actions, not just for rules where they are valid. Good enough.
  protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
  protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
  description = re.sub(protect + r'\$', '_', description)

  # gyp dictates that commands are run from the base directory.
  # cd into the directory before running, and adjust paths in
  # the arguments to point to the proper locations.
  rspfile = None
  rspfile_content = None
  args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
  if self.flavor == 'win':
    rspfile = rule_name + '.$unique_name.rsp'
    # The cygwin case handles this inside the bash sub-shell.
    run_in = '' if is_cygwin else ' ' + self.build_to_base
    if is_cygwin:
      rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
          args, self.build_to_base)
    else:
      rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
    command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
               rspfile + run_in)
  else:
    env = self.ComputeExportEnvString(env)
    command = gyp.common.EncodePOSIXShellList(args)
    command = 'cd %s; ' % self.build_to_base + env + command

  # GYP rules/actions express being no-ops by not touching their outputs.
  # Avoid executing downstream dependencies in this case by specifying
  # restat=1 to ninja.
  self.ninja.rule(rule_name, command, description, depfile=depfile,
                  restat=True, pool=pool,
                  rspfile=rspfile, rspfile_content=rspfile_content)
  self.ninja.newline()

  return rule_name, args
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Fills flavor-specific defaults into |default_variables| and, for mac/win,
  mirrors extra generator configuration from the Xcode/MSVS generators into
  this module's globals.
  """
  global generator_additional_non_configuration_keys
  global generator_additional_path_sections
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])

    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    exts = gyp.MSVSUtil.TARGET_TYPE_EXT
    default_variables.setdefault('OS', 'win')
    # Windows has no lib prefixes and uses MSVS-style extensions.
    default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']

    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
  """Returns the path from the toplevel_dir to the build output directory."""
  # generator_dir: where the 'make' generator would put its files; ninja
  # writes nothing there, but honoring it eases migration from make.
  generator_dir = os.path.relpath(params['options'].generator_output or '.')

  # output_dir: build directory relative to |generator_dir|, e.g. "out".
  flags = params.get('generator_flags', {})
  return os.path.normpath(
      os.path.join(generator_dir, flags.get('output_dir', 'out')))
def CalculateGeneratorInputInfo(params):
  """Called by __init__ to initialize generator values based on params."""
  toplevel = params['options'].toplevel_dir
  # Fully qualified gyp-file staging dir, e.g. "out/gypfiles".
  out_dir = os.path.normpath(
      os.path.join(toplevel, ComputeOutputDir(params), 'gypfiles'))

  global generator_filelist_paths
  generator_filelist_paths = {
      'toplevel': toplevel,
      'qualified_out_dir': out_dir,
  }
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating directories if necessary."""
  # Ensure the parent directory exists before open(), which won't create it.
  gyp.common.EnsureDirExists(path)
  return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
  """Return |prog| prefixed by the wrapper configured for |cmd|, if any."""
  wrapper = wrappers.get(cmd, '')
  return '%s %s' % (wrapper, prog) if wrapper else prog
def GetDefaultConcurrentLinks():
  """Returns a best-guess for a number of concurrent links.

  Link steps are memory-hungry, so the pool size is derived from physical
  RAM rather than CPU count.  GYP_LINK_CONCURRENCY overrides the guess;
  GYP_LINK_CONCURRENCY_MAX caps it (Windows path only).  Always returns an
  int >= 1.
  """
  pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
  if pool_size:
    return pool_size

  if sys.platform in ('win32', 'cygwin'):
    import ctypes

    class MEMORYSTATUSEX(ctypes.Structure):
      _fields_ = [
        ("dwLength", ctypes.c_ulong),
        ("dwMemoryLoad", ctypes.c_ulong),
        ("ullTotalPhys", ctypes.c_ulonglong),
        ("ullAvailPhys", ctypes.c_ulonglong),
        ("ullTotalPageFile", ctypes.c_ulonglong),
        ("ullAvailPageFile", ctypes.c_ulonglong),
        ("ullTotalVirtual", ctypes.c_ulonglong),
        ("ullAvailVirtual", ctypes.c_ulonglong),
        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
      ]

    stat = MEMORYSTATUSEX()
    stat.dwLength = ctypes.sizeof(stat)
    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))

    # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
    # on a 64 GB machine.  Use floor division so the result stays an int on
    # Python 3 (true division '/' would produce a float pool size).
    mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30)))  # total / 5GB
    hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2 ** 32)))
    return min(mem_limit, hard_cap)
  elif sys.platform.startswith('linux'):
    if os.path.exists("/proc/meminfo"):
      with open("/proc/meminfo") as meminfo:
        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
        for line in meminfo:
          match = memtotal_re.match(line)
          if not match:
            continue
          # Allow 8Gb per link on Linux because Gold is quite memory hungry
          return max(1, int(match.group(1)) // (8 * (2 ** 20)))
    return 1
  elif sys.platform == 'darwin':
    try:
      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
      # 4GB per ld process allows for some more bloat.
      return max(1, avail_bytes // (4 * (2 ** 30)))  # total / 4GB
    except (OSError, subprocess.CalledProcessError, ValueError):
      # sysctl missing, failing, or emitting garbage: fall back to serial
      # linking rather than crashing (previously a bare 'except:', which
      # also swallowed KeyboardInterrupt/SystemExit).
      return 1
  else:
    # TODO(scottmg): Implement this for other platforms.
    return 1
def _GetWinLinkRuleNameSuffix(embed_manifest):
  """Returns the suffix used to select an appropriate linking rule depending on
  whether the manifest embedding is enabled."""
  if embed_manifest:
    return '_embed'
  return ''
def _AddWinLinkRules(master_ninja, embed_manifest):
  """Adds link rules for Windows platform to |master_ninja|.

  Writes 'link', 'solink' and 'solink_module' rules (suffixed with '_embed'
  when |embed_manifest| is set) that route through gyp-win-tool so manifest
  generation/embedding happens as part of the link.
  """
  def FullLinkCommand(ldcmd, out, binary_type):
    # Wrap |ldcmd| in gyp-win-tool's link-with-manifests driver; the
    # resource id distinguishes exe (1) from dll (2) manifests.
    resource_name = {
      'exe': '1',
      'dll': '2',
    }[binary_type]
    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
           '$manifests' % {
      'python': sys.executable,
      'out': out,
      'ldcmd': ldcmd,
      'resname': resource_name,
      'embed': embed_manifest }
  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
  use_separate_mspdbsrv = (
      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
            '$ld /nologo $implibflag /DLL /OUT:$binary '
            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
  master_ninja.rule('solink' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  master_ninja.rule('solink_module' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  # Note that ldflags goes at the end so that it has the option of
  # overriding default settings earlier in the command line.
  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
             '$ld /nologo /OUT:$binary @$binary.rsp' %
             (sys.executable, use_separate_mspdbsrv))
  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
  master_ninja.rule('link' + rule_name_suffix,
                    description='LINK%s $binary' % rule_name_suffix.upper(),
                    command=exe_cmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$in_newline $libs $ldflags',
                    pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Writes build.ninja plus per-target .ninja files for one configuration.

  Emits toolchain variables and compile/link rules appropriate for the
  flavor (win/mac/posix), then writes one subninja per non-empty target,
  phony short names, and the default 'all' target.
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(
      os.path.join(ComputeOutputDir(params), config_name))

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)

  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)

  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, toplevel_build)

  # Grab make settings for CC/CXX.
  # The rules are
  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
  #   gyp, the environment variable.
  # - If there is no 'make_global_settings' for CC.host/CXX.host or
  #   'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
  #   to cc/cxx.
  if flavor == 'win':
    ar = 'lib.exe'
    # cc and cxx must be set to the correct architecture by overriding with one
    # of cl_x86 or cl_x64 below.
    cc = 'UNSET'
    cxx = 'UNSET'
    ld = 'link.exe'
    ld_host = '$ld'
  else:
    ar = 'ar'
    cc = 'cc'
    cxx = 'c++'
    ld = '$cc'
    ldxx = '$cxx'
    ld_host = '$cc_host'
    ldxx_host = '$cxx_host'

  ar_host = 'ar'
  cc_host = None
  cxx_host = None
  cc_host_global_setting = None
  cxx_host_global_setting = None
  clang_cl = None
  nm = 'nm'
  nm_host = 'nm'
  readelf = 'readelf'
  readelf_host = 'readelf'

  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = gyp.common.InvertRelativePath(build_dir,
                                                options.toplevel_dir)
  wrappers = {}
  for key, value in make_global_settings:
    if key == 'AR':
      ar = os.path.join(build_to_root, value)
    if key == 'AR.host':
      ar_host = os.path.join(build_to_root, value)
    if key == 'CC':
      cc = os.path.join(build_to_root, value)
      if cc.endswith('clang-cl'):
        clang_cl = cc
    if key == 'CXX':
      cxx = os.path.join(build_to_root, value)
    if key == 'CC.host':
      cc_host = os.path.join(build_to_root, value)
      cc_host_global_setting = value
    if key == 'CXX.host':
      cxx_host = os.path.join(build_to_root, value)
      cxx_host_global_setting = value
    if key == 'LD':
      ld = os.path.join(build_to_root, value)
    if key == 'LD.host':
      ld_host = os.path.join(build_to_root, value)
    if key == 'NM':
      nm = os.path.join(build_to_root, value)
    if key == 'NM.host':
      nm_host = os.path.join(build_to_root, value)
    if key == 'READELF':
      readelf = os.path.join(build_to_root, value)
    if key == 'READELF.host':
      readelf_host = os.path.join(build_to_root, value)
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)

  # Support wrappers from environment variables too.
  # NOTE: .items() instead of the Python-2-only .iteritems() so the file
  # also runs under Python 3; behavior is identical here.
  for key, value in os.environ.items():
    if key.lower().endswith('_wrapper'):
      key_prefix = key[:-len('_wrapper')]
      key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
      wrappers[key_prefix] = os.path.join(build_to_root, value)

  if flavor == 'win':
    configs = [target_dicts[qualified_target]['configurations'][config_name]
               for qualified_target in target_list]
    shared_system_includes = None
    if not generator_flags.get('ninja_use_custom_environment_files', 0):
      shared_system_includes = \
          gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
              configs, generator_flags)
    cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
        toplevel_build, generator_flags, shared_system_includes, OpenOutput)
    for arch, path in cl_paths.items():
      if clang_cl:
        # If we have selected clang-cl, use that instead.
        path = clang_cl
      command = CommandWithWrapper('CC', wrappers,
                                   QuoteShellArgument(path, 'win'))
      if clang_cl:
        # Use clang-cl to cross-compile for x86 or x86_64.
        command += (' -m32' if arch == 'x86' else ' -m64')
      master_ninja.variable('cl_' + arch, command)

  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))

  if flavor == 'win':
    master_ninja.variable('ld', ld)
    master_ninja.variable('idl', 'midl.exe')
    master_ninja.variable('ar', ar)
    master_ninja.variable('rc', 'rc.exe')
    master_ninja.variable('ml_x86', 'ml.exe')
    master_ninja.variable('ml_x64', 'ml64.exe')
    master_ninja.variable('mt', 'mt.exe')
  else:
    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
    if flavor != 'mac':
      # Mac does not use readelf/nm for .TOC generation, so avoiding polluting
      # the master ninja with extra unused variables.
      master_ninja.variable(
          'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
      master_ninja.variable(
          'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))

  if generator_supports_multiple_toolsets:
    if not cc_host:
      cc_host = cc
    if not cxx_host:
      cxx_host = cxx

    master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
    master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
    master_ninja.variable('readelf_host',
                          GetEnvironFallback(['READELF_host'], readelf_host))
    cc_host = GetEnvironFallback(['CC_host'], cc_host)
    cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)

    # The environment variable could be used in 'make_global_settings', like
    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
    if '$(CC)' in cc_host and cc_host_global_setting:
      cc_host = cc_host_global_setting.replace('$(CC)', cc)
    if '$(CXX)' in cxx_host and cxx_host_global_setting:
      cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
    master_ninja.variable('cc_host',
                          CommandWithWrapper('CC.host', wrappers, cc_host))
    master_ninja.variable('cxx_host',
                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
    if flavor == 'win':
      master_ninja.variable('ld_host', ld_host)
    else:
      master_ninja.variable('ld_host', CommandWithWrapper(
          'LINK', wrappers, ld_host))
      master_ninja.variable('ldxx_host', CommandWithWrapper(
          'LINK', wrappers, ldxx_host))

  master_ninja.newline()
  master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
  master_ninja.newline()

  deps = 'msvc' if flavor == 'win' else 'gcc'

  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
              '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'cc_s',
      description='CC $out',
      command=('$cc $defines $includes $cflags $cflags_c '
              '$cflags_pch_c -c $in -o $out'))
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
              '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
  else:
    # TODO(scottmg) Separate pdb names is a test to see if it works around
    # http://crbug.com/142362. It seems there's a race between the creation of
    # the .pdb by the precompiled header step for .cc and the compilation of
    # .c files. This should be handled by mspdbsrv, but rarely errors out with
    #   c1xx : fatal error C1033: cannot open program database
    # By making the rules target separate pdb files this might be avoided.
    cc_command = ('ninja -t msvc -e $arch ' +
                  '-- '
                  '$cc /nologo /showIncludes /FC '
                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
    cxx_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cxx /nologo /showIncludes /FC '
                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=cc_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_c',
      deps=deps)
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=cxx_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_cc',
      deps=deps)
    master_ninja.rule(
      'idl',
      description='IDL $in',
      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
               '$tlb $h $dlldata $iid $proxy $in '
               '$midl_includes $idlflags' % sys.executable))
    master_ninja.rule(
      'rc',
      description='RC $in',
      # Note: $in must be last otherwise rc.exe complains.
      command=('%s gyp-win-tool rc-wrapper '
               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
               sys.executable))
    master_ninja.rule(
      'asm',
      description='ASM $out',
      command=('%s gyp-win-tool asm-wrapper '
               '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
               sys.executable))

  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && $ar rcs $arflags $out $in')
    master_ninja.rule(
      'alink_thin',
      description='AR $out',
      command='rm -f $out && $ar rcsT $arflags $out $in')

    # This allows targets that only need to depend on $lib's API to declare an
    # order-only dependency on $lib.TOC and avoid relinking such downstream
    # dependencies when $lib changes only in non-public ways.
    # The resulting string leaves an uninterpolated %{suffix} which
    # is used in the final substitution below.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
        'fi; fi'
        % { 'solink':
              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
            'extract_toc':
              ('{ $readelf -d $lib | grep SONAME ; '
               '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})

    master_ninja.rule(
      'solink',
      description='SOLINK $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content=
          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
      pool='link_pool')
  elif flavor == 'win':
    master_ninja.rule(
        'alink',
        description='LIB $out',
        command=('%s gyp-win-tool link-wrapper $arch False '
                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                 sys.executable),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libflags')
    _AddWinLinkRules(master_ninja, embed_manifest=True)
    _AddWinLinkRules(master_ninja, embed_manifest=False)
  else:
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool $libtool_flags '
              '-static -o $out $in'
              '$postbuilds')
    master_ninja.rule(
      'lipo',
      description='LIPO $out, POSTBUILDS',
      command='rm -f $out && lipo -create $in -output $out$postbuilds')
    master_ninja.rule(
      'solipo',
      description='SOLIPO $out, POSTBUILDS',
      command=(
          'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
          '%(extract_toc)s > $lib.TOC'
          % { 'extract_toc':
                '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
                'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))

    # Record the public interface of $lib in $lib.TOC. See the corresponding
    # comment in the posix section above for details.
    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
        # Always force dependent targets to relink if this library
        # reexports something. Handling this correctly would require
        # recursive TOC dumping but this is rare in practice, so punt.
        'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; '
        'else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then '
        'mv $lib.tmp $lib.TOC ; '
        'fi; '
        'fi'
        % { 'solink': solink_base,
            'extract_toc':
              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})

    solink_suffix = '@$link_file_list$postbuilds'
    master_ninja.rule(
      'solink',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_notoc',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module_notoc',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $solibs $libs$postbuilds'),
      pool='link_pool')
    master_ninja.rule(
      'preprocess_infoplist',
      description='PREPROCESS INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'copy_infoplist',
      description='COPY INFOPLIST $in',
      command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
    master_ninja.rule(
      'merge_infoplist',
      description='MERGE INFOPLISTS $in',
      command='$env ./gyp-mac-tool merge-info-plist $out $in')
    master_ninja.rule(
      'compile_xcassets',
      description='COMPILE XCASSETS $in',
      command='$env ./gyp-mac-tool compile-xcassets $keys $in')
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='./gyp-mac-tool package-framework $out $version$postbuilds '
              '&& touch $out')
  if flavor == 'win':
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='%s gyp-win-tool stamp $out' % sys.executable)
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
  else:
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='${postbuilds}touch $out')
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='rm -rf $out && cp -af $in $out')
  master_ninja.newline()

  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list,
                                        target_dicts,
                                        os.path.normpath(build_file)):
      all_targets.add(target)
  all_outputs = set()

  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # target_short_names is a map from target short name to a list of Target
  # objects.
  target_short_names = {}

  # short name of targets that were skipped because they didn't contain anything
  # interesting.
  # NOTE: there may be overlap between this an non_empty_target_names.
  empty_target_names = set()

  # Set of non-empty short target names.
  # NOTE: there may be overlap between this an empty_target_names.
  non_empty_target_names = set()

  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets. %s vs. %s" %
        (this_make_global_settings, make_global_settings))

    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)

    # If build_file is a symlink, we must not follow it because there's a chance
    # it could point to a path above toplevel_dir, and we cannot correctly deal
    # with that case at the moment.
    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
                                         False)

    qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
                                                           toolset)
    # Encode before hashing: md5 requires bytes on Python 3; on Python 2 the
    # digest of the encoded ASCII string is identical to hashing the str.
    hash_for_rules = hashlib.md5(
        qualified_target_for_hash.encode('utf-8')).hexdigest()

    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')

    ninja_output = StringIO()
    writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
                         ninja_output,
                         toplevel_build, output_file,
                         flavor, toplevel_dir=options.toplevel_dir)

    target = writer.WriteSpec(spec, config_name, generator_flags)

    if ninja_output.tell() > 0:
      # Only create files for ninja files that actually have contents.
      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
        ninja_file.write(ninja_output.getvalue())
      ninja_output.close()
      master_ninja.subninja(output_file)

    if target:
      if name != target.FinalOutput() and spec['toolset'] == 'target':
        target_short_names.setdefault(name, []).append(target)
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())
      non_empty_target_names.add(name)
    else:
      empty_target_names.add(name)

  if target_short_names:
    # Write a short name to build this target.  This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    master_ninja.newline()
    master_ninja.comment('Short names for targets.')
    for short_name in target_short_names:
      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
                                               target_short_names[short_name]])

  # Write phony targets for any empty targets that weren't written yet. As
  # short names are not necessarily unique only do this for short names that
  # haven't already been output for another target.
  empty_target_names = empty_target_names - non_empty_target_names
  if empty_target_names:
    master_ninja.newline()
    master_ninja.comment('Empty targets (output for completeness).')
    for name in sorted(empty_target_names):
      master_ninja.build(name, 'phony')

  if all_outputs:
    master_ninja.newline()
    master_ninja.build('all', 'phony', list(all_outputs))
    master_ninja.default(generator_flags.get('default_target', 'all'))

  master_ninja_file.close()
def PerformBuild(data, configurations, params):
  """Runs ninja in the output directory of each requested configuration.

  Raises subprocess.CalledProcessError if any ninja invocation fails.
  """
  options = params['options']
  for config in configurations:
    builddir = os.path.join(options.toplevel_dir, 'out', config)
    arguments = ['ninja', '-C', builddir]
    # Parenthesized print works identically on Python 2 and 3; the original
    # 'print x' statement form is a syntax error under Python 3.
    print('Building [%s]: %s' % (config, arguments))
    subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Multiprocessing worker: unpack one arglist and generate that config."""
  # Ignore the interrupt signal so that the parent process catches it and
  # kills all multiprocessing children.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  # arglist is (target_list, target_dicts, data, params, config_name).
  GenerateOutputForConfig(*arglist)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: writes ninja files for all configurations.

  Runs one configuration (generator flag 'config'), or all configurations
  either serially or via a multiprocessing pool when params['parallel'].
  """
  # Update target_dicts for iOS device builds.
  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
      target_dicts)

  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
        target_list, target_dicts, generator_default_variables)

  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      try:
        pool = multiprocessing.Pool(len(config_names))
        arglists = []
        for config_name in config_names:
          arglists.append(
              (target_list, target_dicts, data, params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt:
        # 'except E, e' is Python-2-only syntax; also use a bare 'raise'
        # instead of 'raise e' so the original traceback is preserved.
        pool.terminate()
        raise
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                config_name)
| gpl-3.0 |
gem/sidd | sidd/operator/loaders/ms.py | 1 | 3048 | # Copyright (c) 2011-2013, ImageCat Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
module contains class for loading mapping scheme files
mapping scheme format defined in ms module
"""
from os.path import exists
from sidd.constants import logAPICall
from sidd.operator import Operator,OperatorError, OperatorDataError
from sidd.operator.data import OperatorDataTypes
from sidd.taxonomy import get_taxonomy, Taxonomy
from sidd.ms.ms import MappingScheme
class MappingSchemeLoader(Operator):
    """ operator loading mapping scheme from XML """

    def __init__(self, options=None, name='Mapping Scheme Loaded'):
        """ constructor """
        Operator.__init__(self, options, name)
        # options['taxonomy'] may be an already-built Taxonomy instance or
        # the name of a taxonomy to look up.
        if isinstance(options['taxonomy'], Taxonomy):
            self._taxonomy = options['taxonomy']
        else:
            self._taxonomy = get_taxonomy(options['taxonomy'])

    # self documenting method override
    ###########################
    @property
    def input_types(self):
        return [OperatorDataTypes.File]

    @property
    def input_names(self):
        return ["Mapping Scheme File"]

    input_descriptions = input_names

    @property
    def output_types(self):
        return [OperatorDataTypes.MappingScheme]

    @property
    def output_names(self):
        return ["Mapping Scheme"]

    # public method override
    ###########################
    @logAPICall
    def do_operation(self):
        """ perform ms loading """
        # verify that input/output data is correctly set
        in_file = self.inputs[0].value

        # load data
        ms = MappingScheme(self._taxonomy)
        ms.read(in_file)

        # verify that input data is loaded correctly
        if not ms.is_valid:
            # BUG FIX: the original message ('Error Loading data file' %
            # in_file) had no %s placeholder, so formatting raised a
            # TypeError instead of the intended OperatorError.
            raise OperatorError('Error Loading data file %s' % in_file,
                                self.__class__)
        self.outputs[0].value = ms

    # protected method override
    ###########################
    def _verify_inputs(self, inputs):
        """ perform operator specific input validation """
        if not exists(inputs[0].value):
            raise OperatorDataError(
                "input file %s does not exist" % (inputs[0].value))

    def _verify_outputs(self, outputs):
        """ perform operator specific output validation """
        pass
| agpl-3.0 |
AlmostBetterNetwork/pinecast | pinecast/settings.py | 3 | 9211 | """
Django settings for pinecast project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import logging
import os
import sys
import mimetypes
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET', 'p)r2w-c!m^znb%2ppj0rxp9uu$+$q928w#*$41y5(eu$friqqv')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEBUG', 'True') == 'True'
DEBUG_TOOLBAR = os.environ.get('DEBUG_TOOLBAR', 'False') == 'True'
STAGING = os.environ.get('STAGING') == 'True'
TESTING = len(sys.argv) > 1 and sys.argv[1] == 'test'
if DEBUG:
ALLOWED_HOSTS = ['*']
else:
ALLOWED_HOSTS = ['pinecast.herokuapp.com', 'pinecast.com', 'pinecast.co', '.pinecast.co', 'tips.pinecast.com']
if STAGING:
ALLOWED_HOSTS.append('pinecast-staging.herokuapp.com')
ALLOWED_HOSTS.append('next.pinecast.com')
ALLOWED_HOSTS.append('tips.next.pinecast.com')
if os.environ.get('ADMIN_IP'):
INTERNAL_IPS = [os.environ.get('ADMIN_IP')]
else:
INTERNAL_IPS = []
if DEBUG_TOOLBAR:
print('Loading Django Debug Toolbar')
mimetypes.add_type("image/svg+xml", ".svg", True)
DATA_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 * 5 # Required for RSS feed submission on import
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'whitenoise.runserver_nostatic',
'django.contrib.staticfiles',
'accounts',
'assets',
'analytics',
'dashboard',
'feedback',
'notifications',
'payments',
'podcasts',
'sites',
)
MIDDLEWARE_CLASSES = (
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'payments.middleware.tips_site.TipsSubdomainMiddleware',
'sites.middleware.subdomains.SubdomainMiddleware',
'pinecast.middleware.perf.PerfMiddleware',
'pinecast.middleware.hnredirect.HostnameRedirect',
'pinecast.middleware.tsredirect.TrailingSlashRedirect',
)
ROOT_URLCONF = 'pinecast.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.jinja2.Jinja2',
'APP_DIRS': True,
'DIRS': [
os.path.join(BASE_DIR, 'templates', 'jinja2'),
],
'OPTIONS': {
'environment': 'pinecast.jinja2_helper.environment',
},
},
# This is needed for the admin app
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
SILENCED_SYSTEM_CHECKS = ['urls.W002']
WSGI_APPLICATION = 'pinecast.wsgi.application'
ADMINS = [
('basta', 'mattbasta@gmail.com'),
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
'propagate': True,
},
},
}
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {}
try:
import dj_database_url
prod_db = dj_database_url.config()
assert prod_db, 'No DB config found...'
print('Using prod database')
DATABASES['default'] = prod_db
DATABASES['default']['CONN_MAX_AGE'] = 500
except Exception:
print('Using SQLite db')
DATABASES['default'] = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'OPTIONS': {'timeout': 5},
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_THOUSAND_SEPARATOR = True
USE_TZ = False
def show_debug_toolbar(req):
    """Decide whether the Django debug toolbar should render for a request."""
    # Never inject the toolbar into AJAX responses.
    if req.is_ajax():
        return False
    # Show it when no IP allow-list is configured, or when either the direct
    # remote address or the Cloudflare-forwarded client IP is allow-listed.
    meta = req.META
    return (not INTERNAL_IPS
            or meta.get('REMOTE_ADDR') in INTERNAL_IPS
            or meta.get('HTTP_CF_CONNECTING_IP') in INTERNAL_IPS)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar,
}
TS_OMNIBUS = {
'hostname': os.environ.get('TS_HOSTNAME', 'pinecast-js.s3.amazonaws.com'),
'version': os.environ.get('TS_VERSION', '1'),
}
SITE_BUILDER = {
'hostname': os.environ.get('SB_HOSTNAME', 'pinecast-js.s3.amazonaws.com'),
'version': os.environ.get('SB_VERSION', '1'),
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = STATIC_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = STATIC_DIRS[0] + 'root'
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
S3_BUCKET = os.environ.get('S3_BUCKET')
S3_LOGS_BUCKET = os.environ.get('S3_LOGS_BUCKET')
S3_ACCESS_ID = os.environ.get('S3_ACCESS_ID')
S3_SECRET_KEY = os.environ.get('S3_SECRET_KEY')
SES_ACCESS_ID = os.environ.get('SES_ACCESS_ID')
SES_SECRET_KEY = os.environ.get('SES_SECRET_KEY')
KINESIS_ACCESS_ID = os.environ.get('KINESIS_ACCESS_ID')
KINESIS_SECRET_KEY = os.environ.get('KINESIS_SECRET_KEY')
UPLOAD_HOST = 'd34bo1v665hk6e.cloudfront.net'
RECAPTCHA_KEY = os.environ.get('RECAPTCHA_KEY')
RECAPTCHA_SECRET = os.environ.get('RECAPTCHA_SECRET')
STRIPE_API_KEY = os.environ.get('STRIPE_API_KEY')
STRIPE_PUBLISHABLE_KEY = os.environ.get('STRIPE_PUBLISHABLE_KEY')
LAMBDA_ACCESS_SECRET = os.environ.get('LAMBDA_ACCESS_SECRET')
RSS_FETCH_ENDPOINT = os.environ.get('RSS_FETCH_ENDPOINT')
DEPLOY_SLACKBOT_URL = os.environ.get('DEPLOY_SLACKBOT_URL')
INTERCOM_SECRET = os.environ.get('INTERCOM_SECRET')
INTERCOM_ACCESS_TOKEN = os.environ.get('INTERCOM_ACCESS_TOKEN')
SPOTIFY_TOKEN = os.environ.get('SPOTIFY_TOKEN')
MAX_FILE_SIZE = 1024 * 1024 * 256
EMAIL_CONFIRMATION_MAX_AGE = 3600 * 24 * 2 # Two days
SUPPORT_URL = 'https://help.pinecast.com'
SUPPORT_EMAIL = 'support@pinecast.zendesk.com'
SENDER_EMAIL = 'Matt@pinecast.com'
# Analytics backend (Postgres-based "apg" provider) configuration.
ANALYTICS_PROVIDER = os.environ.get('ANALYTICS_PROVIDER', 'apg')
APG_CONNSTRING_READ = os.environ.get('APG_CONNSTRING_READ')
APG_CONNSTRING_WRITE = os.environ.get('APG_CONNSTRING_WRITE')
APG_CONN_POOL_SIZE = int(os.environ.get('APG_CONN_POOL_SIZE', '20'))
APG_DB_SUBSCRIPTION = os.environ.get('APG_DB_SUBSCRIPTION', 'subscriptions.subscriptions')
# BUG FIX: this previously read the APG_DB_SUBSCRIPTION env var (copy-paste),
# which made APG_DB_LISTEN impossible to override independently and set it to
# the subscriptions table whenever APG_DB_SUBSCRIPTION was defined.
APG_DB_LISTEN = os.environ.get('APG_DB_LISTEN', 'listens.listens')
APG_CONDITION_OVERRIDES = {}
FORCE_EPISODE_SPARKLINE_OVERRIDE = False
CHALLENGE_URL = os.environ.get('CHALLENGE_URL')
CHALLENGE_RESPONSE = os.environ.get('CHALLENGE_RESPONSE')
REFERRAL_DISCOUNT = 40 # percent off
REFERRAL_DISCOUNT_DURATION = 4 # months
NPLUSONE_LOGGER = logging.getLogger('nplusone')
NPLUSONE_LOG_LEVEL = logging.ERROR
DISABLE_CONCURRENCY = os.environ.get('DISABLE_CONCURRENCY') == 'True'
FEED_GZIP = os.environ.get('FEED_GZIP') == 'True'
FEED_STREAMING = os.environ.get('FEED_STREAMING') == 'True'
# Allow a machine-local settings module to override anything defined above;
# absence of the module is expected in most environments.
try:
    from .settings_local import *
except ImportError:
    pass
# Rollbar error-reporting configuration. Note DEBUG/STAGING come from earlier
# in this settings module (or from settings_local above).
ROLLBAR = {
    'access_token': os.environ.get('ROLLBAR_ACCESS_TOKEN'),
    'environment': 'development' if DEBUG else ('staging' if STAGING else 'production'),
    'exception_level_filters': [
        # 404s are routine; do not report them.
        ('django.http.Http404', 'ignored'),
    ],
    'branch': 'master',
    'root': os.getcwd(),
    'capture_email': True,
}
ROLLBAR_POST_CLIENT_ITEM = os.environ.get('ROLLBAR_POST_CLIENT_ITEM')
# Development-only tooling: nose test runner and the n+1 query detector.
if DEBUG:
    INSTALLED_APPS = INSTALLED_APPS + ('django_nose', 'nplusone.ext.django', )
    MIDDLEWARE_CLASSES = ('nplusone.ext.django.NPlusOneMiddleware', ) + MIDDLEWARE_CLASSES
    TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
    if DEBUG_TOOLBAR:
        INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar', )
        MIDDLEWARE_CLASSES = ('debug_toolbar.middleware.DebugToolbarMiddleware', ) + MIDDLEWARE_CLASSES
        if DISABLE_CONCURRENCY:
            # Flamegraph profiling only works single-threaded.
            DEBUG_TOOLBAR_PANELS = (
                'djdt_flamegraph.FlamegraphPanel',
            )
# In production, append the Rollbar middleware so unhandled exceptions are reported.
if not DEBUG:
    MIDDLEWARE_CLASSES = MIDDLEWARE_CLASSES + ('rollbar.contrib.django.middleware.RollbarNotifierMiddleware', )
| apache-2.0 |
vadimtk/chrome4sdp | build/android/pylib/uirobot/uirobot_test_instance.py | 42 | 2064 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import json
import logging
from pylib import constants
from pylib.base import test_instance
from pylib.utils import apk_helper
class UirobotTestInstance(test_instance.TestInstance):
  """Describes a single uirobot run against an Android or iOS app."""

  def __init__(self, args, error_func):
    """Constructor.

    Args:
      args: Command line arguments.
      error_func: Callable invoked with an error message on fatal
          argument problems.
    """
    super(UirobotTestInstance, self).__init__()
    if not args.app_under_test:
      error_func('Must set --app-under-test.')
    self._app_under_test = args.app_under_test
    self._minutes = args.minutes

    # Remote-device config is optional; fall back to an empty dict.
    device_json = {}
    if args.remote_device_file:
      with open(args.remote_device_file) as config_file:
        device_json = json.load(config_file)

    device_type = device_json.get('device_type', 'Android')
    requested_type = args.device_type
    if requested_type:
      # A command-line device type wins over the config file's value.
      if device_type and device_type != requested_type:
        logging.info('Overriding device_type from %s to %s',
                     device_type, requested_type)
      device_type = requested_type

    if device_type == 'Android':
      self._suite = 'Android Uirobot'
      self._package_name = apk_helper.GetPackageName(self._app_under_test)
    elif device_type == 'iOS':
      self._suite = 'iOS Uirobot'
      # iOS has no APK to inspect; use the app identifier directly.
      self._package_name = self._app_under_test

  #override
  def TestType(self):
    """Returns type of test."""
    return 'uirobot'

  #override
  def SetUp(self):
    """Setup for test."""
    pass

  #override
  def TearDown(self):
    """Teardown for test."""
    pass

  @property
  def app_under_test(self):
    """Returns the app to run the test on."""
    return self._app_under_test

  @property
  def minutes(self):
    """Returns the number of minutes to run the uirobot for."""
    return self._minutes

  @property
  def package_name(self):
    """Returns the name of the package in the APK."""
    return self._package_name

  @property
  def suite(self):
    """Returns the suite label ('Android Uirobot' or 'iOS Uirobot')."""
    return self._suite
| bsd-3-clause |
ConnorGBrewster/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/metadata.py | 3 | 14183 | import os
import shutil
import tempfile
import uuid
from mozlog import reader
from mozlog import structuredlog
import expected
import manifestupdate
import testloader
import wptmanifest
import wpttest
from vcs import git
manifest = None # Module that will be imported relative to test_root
manifestitem = None
logger = structuredlog.StructuredLogger("web-platform-tests")
try:
import ujson
except ImportError:
pass
else:
reader.json = ujson
def load_test_manifests(serve_root, test_paths):
    """Perform the delayed manifest imports, then load every test manifest.

    :param serve_root: Root directory the manifest module is imported relative to.
    :param test_paths: Mapping of url prefix to test/metadata paths.
    :returns: Whatever ManifestLoader.load() produces (manifest -> paths data).
    """
    do_delayed_imports(serve_root)
    loader = testloader.ManifestLoader(test_paths, False)
    return loader.load()
def update_expected(test_paths, serve_root, log_file_names,
                    rev_old=None, rev_new="HEAD", ignore_existing=False,
                    sync_root=None, property_order=None, boolean_properties=None,
                    stability=None):
    """Update the metadata files for web-platform-tests based on
    the results obtained in a previous run or runs.

    If stability is not None, assume log_file_names refers to logs from repeated
    test jobs, and disable tests that don't behave as expected on all runs.

    Returns the list of files whose results changed unexpectedly (i.e. without
    a corresponding change in the test file itself between rev_old and rev_new).
    """
    manifests = load_test_manifests(serve_root, test_paths)
    change_data = {}
    # When a sync root is provided, resolve the revisions and collect which
    # test files changed between them, so result changes can be correlated
    # with test changes.
    if sync_root is not None:
        if rev_old is not None:
            rev_old = git("rev-parse", rev_old, repo=sync_root).strip()
        rev_new = git("rev-parse", rev_new, repo=sync_root).strip()
    if rev_old is not None:
        change_data = load_change_data(rev_old, rev_new, repo=sync_root)
    # Replay the logs into per-manifest expectation trees.
    expected_map_by_manifest = update_from_logs(manifests,
                                                *log_file_names,
                                                ignore_existing=ignore_existing,
                                                property_order=property_order,
                                                boolean_properties=boolean_properties,
                                                stability=stability)
    # Write each manifest's updated expectations back under its metadata path.
    for test_manifest, expected_map in expected_map_by_manifest.iteritems():
        url_base = manifests[test_manifest]["url_base"]
        metadata_path = test_paths[url_base]["metadata_path"]
        write_changes(metadata_path, expected_map)
        if stability is not None:
            # Report every test/subtest that was disabled for flakiness.
            for tree in expected_map.itervalues():
                for test in tree.iterchildren():
                    for subtest in test.iterchildren():
                        if subtest.new_disabled:
                            print "disabled: %s" % os.path.dirname(subtest.root.test_path) + "/" + subtest.name
                    if test.new_disabled:
                        print "disabled: %s" % test.root.test_path
    results_changed = [item.test_path for item in expected_map.itervalues() if item.modified]
    return unexpected_changes(manifests, change_data, results_changed)
def do_delayed_imports(serve_root):
    """Populate the module-level `manifest` and `manifestitem` globals.

    NOTE(review): serve_root is currently unused here; per the module comment
    the import is expected to resolve relative to the test root, presumably
    via sys.path set up by the caller -- confirm before relying on it.
    """
    global manifest, manifestitem
    from manifest import manifest, item as manifestitem
def files_in_repo(repo_root):
    """Return the list of paths tracked at HEAD in *repo_root*.

    Bug fix: the repo_root argument was previously ignored, so the listing
    always ran against the current working directory; pass it through to
    git() like every other helper in this module does.
    """
    return git("ls-tree", "-r", "--name-only", "HEAD", repo=repo_root).split("\n")
def rev_range(rev_old, rev_new, symmetric=False):
    """Build a git revision-range string.

    Uses the two-dot form "old..new" by default, or the symmetric-difference
    three-dot form "old...new" when symmetric is True.
    """
    if symmetric:
        separator = "..."
    else:
        separator = ".."
    return rev_old + separator + rev_new
def paths_changed(rev_old, rev_new, repo):
    """Return the set of (status, path) tuples changed between two revisions.

    Status is git's one-letter code (M/A/D/...); path is the file path.
    """
    diff_output = git("diff", "--name-status", rev_range(rev_old, rev_new), repo=repo)
    changed = set()
    for line in diff_output.split("\n"):
        line = line.strip()
        if not line:
            continue
        # Each diff line is "<status>\t<path>"; keep at most one split so
        # paths containing tabs are preserved intact.
        changed.add(tuple(part.strip() for part in line.split("\t", 1)))
    return changed
def load_change_data(rev_old, rev_new, repo):
    """Map each path changed between two revisions to a human-readable status.

    Returns a dict of path -> "modified" | "new" | "deleted".
    """
    status_names = {"M": "modified",
                    "A": "new",
                    "D": "deleted"}
    # TODO: deal with renames
    rv = {}
    for item in paths_changed(rev_old, rev_new, repo):
        # item is (status_letter, path)
        rv[item[1]] = status_names[item[0]]
    return rv
def unexpected_changes(manifests, change_data, files_changed):
    """Return test files whose results changed without the file being modified.

    Only considers the manifest rooted at url_base "/"; returns [] when no
    such manifest exists.
    """
    changed_set = set(files_changed)
    root_manifest = None
    for candidate, paths in manifests.iteritems():
        if paths["url_base"] == "/":
            root_manifest = candidate
            break
    if root_manifest is None:
        return []
    unexpected = []
    for _, fn, _ in root_manifest:
        # A result change is "unexpected" unless the file itself was modified.
        if fn in changed_set and change_data.get(fn) != "M":
            unexpected.append(fn)
    return unexpected
# For each testrun
# Load all files and scan for the suite_start entry
# Build a hash of filename: properties
# For each different set of properties, gather all chunks
# For each chunk in the set of chunks, go through all tests
# for each test, make a map of {conditionals: [(platform, new_value)]}
# Repeat for each platform
# For each test in the list of tests:
# for each conditional:
# If all the new values match (or there aren't any) retain that conditional
# If any new values mismatch:
# If stability and any repeated values don't match, disable the test
# else mark the test as needing human attention
# Check if all the RHS values are the same; if so collapse the conditionals
def update_from_logs(manifests, *log_filenames, **kwargs):
    """Replay one or more structured-log files into expectation trees.

    Builds an expectation tree per test manifest, feeds every log file
    through an ExpectedUpdater, then coalesces per-(sub)test properties.

    Keyword args: ignore_existing, property_order, boolean_properties,
    stability (see update_expected).

    Returns a dict of test manifest -> {test path -> expectation tree}.
    """
    ignore_existing = kwargs.get("ignore_existing", False)
    property_order = kwargs.get("property_order")
    boolean_properties = kwargs.get("boolean_properties")
    stability = kwargs.get("stability")
    expected_map = {}
    id_test_map = {}
    # Build the (empty or disk-loaded) expectation trees for every manifest.
    for test_manifest, paths in manifests.iteritems():
        expected_map_manifest, id_path_map_manifest = create_test_tree(
            paths["metadata_path"],
            test_manifest,
            property_order=property_order,
            boolean_properties=boolean_properties)
        expected_map[test_manifest] = expected_map_manifest
        id_test_map.update(id_path_map_manifest)
    updater = ExpectedUpdater(manifests, expected_map, id_test_map,
                              ignore_existing=ignore_existing)
    # Apply each log in turn; later logs add further observed results.
    for log_filename in log_filenames:
        with open(log_filename) as f:
            updater.update_from_log(f)
    # Collapse the per-run results into conditional expectation values.
    for manifest_expected in expected_map.itervalues():
        for tree in manifest_expected.itervalues():
            for test in tree.iterchildren():
                for subtest in test.iterchildren():
                    subtest.coalesce_properties(stability=stability)
                test.coalesce_properties(stability=stability)
    return expected_map
def directory_manifests(metadata_path):
    """Collect relative paths of every __dir__.ini below *metadata_path*."""
    found = []
    for dirpath, _dirnames, filenames in os.walk(metadata_path):
        if "__dir__.ini" not in filenames:
            continue
        rel_dir = os.path.relpath(dirpath, metadata_path)
        found.append(os.path.join(rel_dir, "__dir__.ini"))
    return found
def write_changes(metadata_path, expected_map):
    """Atomically-ish replace the metadata directory with updated expectations.

    Writes the new .ini files to a sibling temp directory, copies over the
    files that must be preserved, then swaps the directories via rename so a
    partially written tree is never left at metadata_path.
    """
    # First write the new manifest files to a temporary directory
    temp_path = tempfile.mkdtemp(dir=os.path.split(metadata_path)[0])
    write_new_expected(temp_path, expected_map)
    # Keep all __dir__.ini files (these are not in expected_map because they
    # aren't associated with a specific test)
    keep_files = directory_manifests(metadata_path)
    # Copy all files in the root to the temporary location since
    # these cannot be ini files
    keep_files.extend(item for item in os.listdir(metadata_path) if
                      not os.path.isdir(os.path.join(metadata_path, item)))
    for item in keep_files:
        dest_dir = os.path.dirname(os.path.join(temp_path, item))
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.copyfile(os.path.join(metadata_path, item),
                        os.path.join(temp_path, item))
    # Then move the old manifest files to a new location
    temp_path_2 = metadata_path + str(uuid.uuid4())
    os.rename(metadata_path, temp_path_2)
    # Move the new files to the destination location and remove the old files
    os.rename(temp_path, metadata_path)
    shutil.rmtree(temp_path_2)
def write_new_expected(metadata_path, expected_map):
    """Serialize every non-empty expectation tree to its .ini file."""
    # Serialize the data back to a file
    for tree in expected_map.itervalues():
        if tree.is_empty:
            continue
        manifest_str = wptmanifest.serialize(tree.node, skip_empty_data=True)
        assert manifest_str != ""
        path = expected.expected_path(metadata_path, tree.test_path)
        parent_dir = os.path.split(path)[0]
        if not os.path.exists(parent_dir):
            os.makedirs(parent_dir)
        with open(path, "wb") as f:
            f.write(manifest_str)
class ExpectedUpdater(object):
    """Consumes structured-log events and records observed results on the
    expectation trees built by create_test_tree.

    One instance may replay several log files; state (tests_visited,
    test_cache) carries between test_start/test_status/test_end events.
    """
    def __init__(self, test_manifests, expected_tree, id_path_map, ignore_existing=False):
        self.test_manifests = test_manifests
        self.expected_tree = expected_tree
        self.id_path_map = id_path_map
        # When True, previously stored expectations are discarded the first
        # time a test is seen, so only logged results remain.
        self.ignore_existing = ignore_existing
        self.run_info = None
        # Dispatch table from log action name to handler method.
        self.action_map = {"suite_start": self.suite_start,
                           "test_start": self.test_start,
                           "test_status": self.test_status,
                           "test_end": self.test_end,
                           "assertion_count": self.assertion_count}
        self.tests_visited = {}
        # test id -> expectation node, populated between test_start and test_end.
        self.test_cache = {}
        # test path -> wpttest test class, used to build result objects.
        self.types_by_path = {}
        for manifest in self.test_manifests.iterkeys():
            for test_type, path, _ in manifest:
                if test_type in wpttest.manifest_test_cls:
                    self.types_by_path[path] = wpttest.manifest_test_cls[test_type]
    def update_from_log(self, log_file):
        """Replay every event in *log_file* through the action_map handlers."""
        self.run_info = None
        log_reader = reader.read(log_file)
        reader.each_log(log_reader, self.action_map)
    def suite_start(self, data):
        # run_info identifies the configuration (platform etc.) this log
        # was produced on; subsequent results are keyed on it.
        self.run_info = data["run_info"]
    def test_start(self, data):
        """Locate the expectation node for the starting test and cache it."""
        test_id = data["test"]
        try:
            test_manifest, test = self.id_path_map[test_id]
            expected_node = self.expected_tree[test_manifest][test].get_test(test_id)
        except KeyError:
            print "Test not found %s, skipping" % test_id
            return
        self.test_cache[test_id] = expected_node
        if test_id not in self.tests_visited:
            if self.ignore_existing:
                # Drop stored expectations the first time this test is seen.
                expected_node.clear("expected")
            self.tests_visited[test_id] = set()
    def test_status(self, data):
        """Record one subtest result against the cached test node."""
        test_id = data["test"]
        test = self.test_cache.get(test_id)
        if test is None:
            return
        test_cls = self.types_by_path[test.root.test_path]
        subtest = test.get_subtest(data["subtest"])
        self.tests_visited[test_id].add(data["subtest"])
        result = test_cls.subtest_result_cls(
            data["subtest"],
            data["status"],
            None)
        subtest.set_result(self.run_info, result)
    def test_end(self, data):
        """Record the harness-level test result and release the cached node."""
        test_id = data["test"]
        test = self.test_cache.get(test_id)
        if test is None:
            return
        test_cls = self.types_by_path[test.root.test_path]
        # SKIP carries no expectation information; leave the node untouched.
        if data["status"] == "SKIP":
            return
        result = test_cls.result_cls(
            data["status"],
            None)
        test.set_result(self.run_info, result)
        del self.test_cache[test_id]
    def assertion_count(self, data):
        """Record the debug-assertion count observed for the test."""
        test_id = data["test"]
        test = self.test_cache.get(test_id)
        if test is None:
            return
        test.set_asserts(self.run_info, data["count"])
def create_test_tree(metadata_path, test_manifest, property_order=None,
                     boolean_properties=None):
    """Build the expectation tree for every runnable test in a manifest.

    Loads existing expectation files from metadata_path where present,
    otherwise creates fresh (empty) ones.

    Returns (expected_map, id_test_map) where expected_map maps each test
    to its expectation manifest and id_test_map maps test id to
    (test_manifest, test).
    """
    expected_map = {}
    id_test_map = {}
    # Item types that are never executed directly and so need no expectations.
    exclude_types = frozenset(["stub", "helper", "manual", "support", "conformancechecker"])
    # Discover all manifest item types by reflecting over the manifestitem
    # module (populated by do_delayed_imports).
    all_types = [item.item_type for item in manifestitem.__dict__.itervalues()
                 if type(item) == type and
                 issubclass(item, manifestitem.ManifestItem) and
                 item.item_type is not None]
    include_types = set(all_types) - exclude_types
    for _, test_path, tests in test_manifest.itertypes(*include_types):
        expected_data = load_expected(test_manifest, metadata_path, test_path, tests,
                                      property_order=property_order,
                                      boolean_properties=boolean_properties)
        if expected_data is None:
            # No stored expectations for this file yet; start from scratch.
            expected_data = create_expected(test_manifest,
                                            test_path,
                                            tests,
                                            property_order=property_order,
                                            boolean_properties=boolean_properties)
        for test in tests:
            id_test_map[test.id] = (test_manifest, test)
            expected_map[test] = expected_data
    return expected_map, id_test_map
def create_expected(test_manifest, test_path, tests, property_order=None,
                    boolean_properties=None):
    """Build a fresh expectation manifest with one node per test in *tests*."""
    expected_manifest = manifestupdate.ExpectedManifest(
        None, test_path, test_manifest.url_base,
        property_order=property_order,
        boolean_properties=boolean_properties)
    for test in tests:
        expected_manifest.append(manifestupdate.TestNode.create(test.id))
    return expected_manifest
def load_expected(test_manifest, metadata_path, test_path, tests, property_order=None,
                  boolean_properties=None):
    """Load stored expectations for *test_path*, syncing them with *tests*.

    Returns None when no expectation file exists on disk. Otherwise removes
    nodes for tests that no longer exist and adds empty nodes for tests that
    have no stored data yet.
    """
    expected_manifest = manifestupdate.get_manifest(metadata_path,
                                                    test_path,
                                                    test_manifest.url_base,
                                                    property_order=property_order,
                                                    boolean_properties=boolean_properties)
    if expected_manifest is None:
        return None
    known_ids = set(item.id for item in tests)
    # Remove expected data for tests that no longer exist
    for test in expected_manifest.iterchildren():
        if test.id not in known_ids:
            test.remove()
    # Add tests that don't have expected data
    for test in tests:
        if not expected_manifest.has_test(test.id):
            expected_manifest.append(manifestupdate.TestNode.create(test.id))
    return expected_manifest
| mpl-2.0 |
beniwohli/apm-agent-python | elasticapm/instrumentation/packages/django/__init__.py | 40 | 1591 | # BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| bsd-3-clause |
fuzeman/trakt.py | trakt/interfaces/users/history.py | 2 | 3409 | from __future__ import absolute_import, division, print_function
from trakt.core.helpers import clean_username, dictfilter, to_iso8601_datetime
from trakt.core.pagination import PaginationIterator
from trakt.interfaces.base import Interface, authenticated
from trakt.mapper import SyncMapper
import requests
class UsersHistoryInterface(Interface):
    """Interface for the Trakt "users/:username/history" endpoint.

    Retrieves a user's watch history, optionally filtered by media type,
    item id and date range. Results are mapped into sync objects flagged
    as watched.
    """
    path = 'users/*/history'
    # Items returned from the history endpoint are, by definition, watched.
    flags = {'is_watched': True}
    def get(self, username, media=None, id=None, start_at=None, end_at=None, store=None,
            extended=None, page=None, per_page=None, **kwargs):
        """Fetch history items for *username*.

        :param media: Optional media type filter ('movies', 'shows', ...).
        :param id: Optional item id; only valid together with `media`.
        :param start_at: Optional datetime lower bound (sent as ISO 8601).
        :param end_at: Optional datetime upper bound (sent as ISO 8601).
        :param store: Optional dict-like store the mapper fills in.
        :param extended: Trakt "extended" info level.
        :param page: Page number for paginated requests.
        :param per_page: Items per page (sent as the "limit" query param).
        :raises ValueError: if `id` is given without `media`.
        """
        if not media and id:
            raise ValueError('The "id" parameter also requires the "media" parameter to be defined')
        # Build parameters (extra URL path segments: /history/<media>/<id>)
        params = []
        if media:
            params.append(media)
        if id:
            params.append(id)
        # Build query
        query = {
            'extended': extended,
            'page': page,
            'limit': per_page
        }
        if start_at:
            query['start_at'] = to_iso8601_datetime(start_at)
        if end_at:
            query['end_at'] = to_iso8601_datetime(end_at)
        # Send request
        response = self.http.get(
            '/users/%s/history' % (clean_username(username)),
            params=params,
            query=query,
            **dictfilter(kwargs, get=[
                'exceptions'
            ], pop=[
                'authenticated',
                'pagination',
                'validate_token'
            ])
        )
        # Parse response
        items = self.get_data(response, **kwargs)
        # Paginated responses map lazily, page by page.
        if isinstance(items, PaginationIterator):
            return items.with_mapper(lambda items: SyncMapper.process(
                self.client, store, items,
                media=media,
                flat=True,
                **self.flags
            ))
        # A raw Response is passed straight through (e.g. parse=False).
        if isinstance(items, requests.Response):
            return items
        if type(items) is not list:
            return None
        return SyncMapper.process(
            self.client, store, items,
            media=media,
            flat=True,
            **self.flags
        )
    #
    # Shortcut methods
    #
    @authenticated
    def movies(self, username, id=None, start_at=None, end_at=None, store=None, **kwargs):
        """Shortcut for get() with media='movies'."""
        return self.get(
            username, 'movies',
            id=id,
            start_at=start_at,
            end_at=end_at,
            store=store,
            **kwargs
        )
    @authenticated
    def shows(self, username, id=None, start_at=None, end_at=None, store=None, **kwargs):
        """Shortcut for get() with media='shows'."""
        return self.get(
            username, 'shows',
            id=id,
            start_at=start_at,
            end_at=end_at,
            store=store,
            **kwargs
        )
    @authenticated
    def seasons(self, username, id=None, start_at=None, end_at=None, store=None, **kwargs):
        """Shortcut for get() with media='seasons'."""
        return self.get(
            username, 'seasons',
            id=id,
            start_at=start_at,
            end_at=end_at,
            store=store,
            **kwargs
        )
    @authenticated
    def episodes(self, username, id=None, start_at=None, end_at=None, store=None, **kwargs):
        """Shortcut for get() with media='episodes'."""
        return self.get(
            username, 'episodes',
            id=id,
            start_at=start_at,
            end_at=end_at,
            store=store,
            **kwargs
        )
| mit |
jumoconnect/openjumo | jumodjango/org/models.py | 1 | 14137 | from action.models import Action
from django.conf import settings
from django.contrib.contenttypes import generic
from django.db import models
from entity_items.models import Advocate, ContentItem, MediaItem, TimelineItem
from entity_items.models.location import Location
from etc import cache
from etc.templatetags.tags import _create_static_url
from issue.models import Issue
from lib.image_upload import ImageSize, ImageType, S3EnabledImageField
from users.models import User
from commitment.models import Commitment
# Choice tuples for Org.revenue — annual revenue bands offered in forms/admin.
REVENUE_CHOICES = (
    ("less than $100,000","less than $100,000",),
    ("$100,000 - $1,000,000","$100,000 - $1,000,000",),
    ("$1m - $5m","$1m - $5m",),
    ("$5m - $20m","$5m - $20m",),
    ("more than $20m","more than $20m",),
)
# Choice tuples for Org.size — employee-count bands offered in forms/admin.
SIZE_CHOICES = (
    ("1-10","1-10"),
    ("10-50","10-50",),
    ("51-100","51-100",),
    ("100+","100+",),
)
class Org(models.Model):
    """A nonprofit organization profile: public details, media/content items,
    followers, related orgs and the issues/locations it works on.

    Heavily cached via the `etc.cache` helpers; mutating methods bust the
    cache explicitly.
    """
    #Public Properties
    id = models.AutoField(db_column='org_id', primary_key=True)
    name = models.CharField(max_length=200, verbose_name="Organization Name")
    summary = models.CharField(max_length=255, verbose_name="Vision Statement")
    handle = models.CharField(max_length=210, unique=True, verbose_name="Organization Handle",
                              help_text="Your organization's unique handle used for your public Jumo page: www.jumo.com/<b>HANDLE</b>")
    ein = models.CharField(max_length=12, blank=True, verbose_name="EIN",
                           help_text="*Not required, but must be provided for 501(c)(3)'s that wish to receive donations on Jumo. Find your organization's EIN <a target='_blank' href='http://nccsdataweb.urban.org/PubApps/990search.php?a=a&bmf=1'>here</a>.")
    email = models.EmailField(blank=True)
    phone_number = models.CharField(max_length=50, blank=True, verbose_name="Phone")
    img_small_url = S3EnabledImageField(image_type=ImageType.ORG, image_size=ImageSize.SMALL, blank=True)
    img_large_url = S3EnabledImageField(image_type=ImageType.ORG, image_size=ImageSize.LARGE, blank=True)
    year_founded = models.IntegerField(max_length=4, blank=True, null=True, verbose_name="Year Founded")
    revenue = models.CharField(max_length=32, blank=True, choices=REVENUE_CHOICES, verbose_name="Revenue Size")
    size = models.CharField(max_length=32, blank=True, choices=SIZE_CHOICES, verbose_name="# of Employees")
    blog_url = models.URLField(verify_exists = False, blank=True, verbose_name="Blog")
    site_url = models.URLField(verify_exists = False, blank=True, verbose_name="Website")
    facebook_id = models.BigIntegerField(max_length=41, blank=True, null=True, verbose_name="Facebook ID")
    twitter_id = models.CharField(max_length=64, blank=True, verbose_name="Twitter Username")
    youtube_id = models.CharField(max_length=64, blank=True, verbose_name="YouTube Username")
    flickr_id = models.CharField(max_length=64, blank=True, verbose_name="Flickr Username")
    location = models.ForeignKey(Location, null=True, blank=True, related_name='location', verbose_name="Headquarters")
    #Internal Properties
    is_vetted = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True, verbose_name="Is Active") #Replaces the old ignore field.
    donation_enabled = models.BooleanField(default=False, verbose_name="Is Donation Enabled")
    claim_token = models.CharField(max_length = 32, blank = True, verbose_name="Claim Token")
    is_claimed = models.BooleanField(default=False, verbose_name="Is Claimed")
    date_created = models.DateTimeField(auto_now_add=True, verbose_name="Date Created")
    date_updated = models.DateTimeField(auto_now=True, verbose_name="Date Updated")
    facebook_last_fetched = models.CharField(max_length=24, null=True, blank=True, default=None, verbose_name='Facebook Last Fetched')
    twitter_last_status_id = models.BigIntegerField(null=True, verbose_name='Twitter Last Status ID')
    #Relationship Properties
    admins = models.ManyToManyField(User, related_name = 'admins', db_table='org_org_admins')
    content = generic.GenericRelation(ContentItem, related_name='content')
    actions = generic.GenericRelation(Action, related_name='org_actions')
    advocates = generic.GenericRelation(Advocate, related_name='advocates')
    timeline = generic.GenericRelation(TimelineItem, related_name='timeline')
    media = generic.GenericRelation(MediaItem, related_name='media')
    followers = models.ManyToManyField(User, symmetrical=False, through='UserToOrgFollow', related_name='followed_orgs')
    related_orgs = models.ManyToManyField('self', symmetrical = False, through='RelatedOrg', related_name="orgrelatedorgs")
    working_locations = models.ManyToManyField(Location, null=True, symmetrical=False, related_name="working_locations",
                                               db_table="org_working_locations", verbose_name="Working In")
    issues = models.ManyToManyField(Issue, through='OrgIssueRelationship', verbose_name="Working On")
    commitments = generic.GenericRelation(Commitment)

    class Meta:
        verbose_name = "Org"
        verbose_name_plural = "Orgs"
        db_table = "orgs"

    def __unicode__(self):
        return self.name

    def save(self):
        """Save the org, working around S3 image fields needing the model id
        in the filename, then bust the cache.

        NOTE(review): signature omits Django's usual *args/**kwargs; callers
        passing e.g. force_insert would break -- confirm before changing.
        """
        #Note: I want to move all this img stuff to the forms that set them...
        #not here on the model. This is a hack so we ensure the model id is
        #used in the filename.
        if not self.id and not self.img_large_url._committed:
            #most likely you need to watch small img too
            # Bug fix: this previously read self.img_url, which is not a field
            # on this model and raised AttributeError for new orgs with an
            # uncommitted large image.
            small_url_comm = self.img_small_url._committed
            self.img_small_url._committed = True
            self.img_large_url._committed = True
            super(Org, self).save()
            self.img_large_url._committed = False
            self.img_small_url._committed = small_url_comm
        if not self.id and not self.img_small_url._committed:
            self.img_small_url._committed = True
            super(Org, self).save()
            self.img_small_url._committed = False
        self.img_large_url.storage.inst_id = self.id
        self.img_small_url.storage.inst_id = self.id
        super(Org, self).save()
        cache.bust(self)

    @models.permalink
    def get_absolute_url(self):
        return ('entity_url', [self.handle])

    @classmethod
    def get(cls, id, force_db=False):
        """Fetch one org by id, via cache unless force_db is set."""
        if force_db:
            org = Org.objects.get(id=id)
            cache.bust(org)
            return org
        return cache.get(cls, id)

    @classmethod
    def multiget(cls, ids, force_db=False):
        """Fetch several orgs by id, via cache unless force_db is set."""
        if force_db:
            return Org.objects.filter(id__in=ids)
        return cache.get(cls, ids)

    @property
    def get_image_small(self):
        # Fall back to the Facebook profile picture when no image is uploaded.
        if self.img_small_url:
            return self.img_small_url.url
        if self.facebook_id:
            return 'http://graph.facebook.com/%s/picture?type=square' % self.facebook_id
        return ''

    @property
    def get_image_large(self):
        if self.img_large_url:
            return self.img_large_url.url
        if self.facebook_id:
            return 'http://graph.facebook.com/%s/picture?type=large' % self.facebook_id
        return ''

    @property
    def get_url(self):
        return '/%s' % self.handle

    @property
    def get_name(self):
        return self.name

    @property
    @cache.collection_cache(Action, '_all_actions')
    def get_all_actions(self):
        return self.actions.all().order_by('rank')

    @property
    @cache.collection_cache(Advocate, '_all_advocates')
    def get_all_advocates(self):
        return self.advocates.all()

    @property
    @cache.collection_cache(TimelineItem, '_all_timeline_items')
    def get_all_timeline_items(self):
        return self.timeline.all().order_by('year')

    @property
    @cache.collection_cache(MediaItem, '_all_media_items')
    def get_all_media_items(self):
        return self.media.all().order_by('position')

    @property
    @cache.collection_cache(MediaItem, '_photo_media_items')
    def get_all_photos(self):
        return self.media.filter(media_type="photo").order_by('position')

    @property
    @cache.collection_cache(ContentItem, '_all_content')
    def get_all_content(self):
        return self.content.all().order_by('position')

    @property
    def get_sub_heading_text(self):
        """Build the ' // '-separated profile sub-heading string.

        Bug fix: removed two leftover debug `print` statements that wrote the
        partial string to stdout on every render.
        """
        t = ""
        if self.year_founded:
            t += "Since %s" % self.year_founded
        if self.get_location:
            if self.year_founded:
                t += " // "
            t += str(self.get_location)
        if self.size:
            if self.year_founded or self.get_location:
                t += " // "
            t += "%s employees" % self.size
        if self.revenue:
            if self.year_founded or self.size or self.get_location:
                t += " // "
            t += "%s revenue" % self.revenue
        if self.site_url:
            if self.year_founded or self.revenue or self.get_location or self.size:
                t += " // "
            t += self.site_url
        return t

    @property
    def get_left_section_content(self):
        return [item for item in self.get_all_content if item.section == ContentItem.ContentSection.LEFT]

    @property
    def get_center_section_content(self):
        return [item for item in self.get_all_content if item.section == ContentItem.ContentSection.CENTER]

    # Memoized headquarters location; cached on the handle key once resolved.
    _location = None

    @property
    def get_location(self):
        if self._location is not None:
            return self._location
        self._location = self.location
        cache.put_on_handle(self, self.handle)
        return self._location

    @property
    @cache.collection_cache(Location, '_working_locations')
    def get_working_locations(self):
        return self.working_locations.all()

    @property
    @cache.collection_cache(User, '_admins')
    def get_admins(self):
        return self.admins.all()

    @property
    @cache.collection_cache(User, '_all_followers')
    def get_all_followers(self):
        commitments = self.commitments.active().select_related()
        return [c.user for c in commitments]

    @property
    def get_all_follower_ids(self):
        return self.usertoorgfollow_set.filter(following = True).values_list('user', flat=True)

    @property
    def get_num_followers(self):
        return self.commitments.active().count()

    @property
    def get_sample_followers(self):
        # A small sample (16) for display purposes only.
        commitments = self.commitments.active()[:16].select_related()
        return [c.user for c in commitments]

    @property
    @cache.collection_cache(Issue, '_all_issues')
    def get_all_issues(self):
        return Issue.objects.filter(id__in = self.get_all_issues_ids)

    @property
    def get_all_issues_ids(self):
        return self.orgissuerelationship_set.values_list('issue', flat = True)

    @property
    @cache.collection_cache('org.Org', '_all_related_orgs')
    def get_all_related_orgs(self):
        return self.related_orgs.all()

    def get_related_orgs_for_user(self, user):
        """Return up to 5 related orgs the given user does not already follow,
        preserving the precomputed relevance order."""
        if not hasattr(self, '_all_related_orgs') or getattr(self, '_all_related_orgs') is None:
            # Property access populates the _all_related_orgs cache attribute.
            self.get_all_related_orgs
        pos = dict((id, idx) for idx, id in enumerate(self._all_related_orgs['ids']))
        orgs = sorted(list(set(self._all_related_orgs['ids']).difference(user.get_orgs_following_ids)), key=lambda id: pos[id])
        return list(cache.get(Org, orgs[0:5]))

    def delete(self):
        """Delete the org after busting its handle cache entry."""
        cache.bust_on_handle(self, self.handle, False)
        # Bug fix: was super(self.__class__, ...), which recurses infinitely
        # if this method is inherited by a subclass.
        return super(Org, self).delete()

    def is_editable_by(self, user):
        """Vetted orgs are locked; otherwise staff and org admins may edit."""
        return not self.is_vetted and (user.is_staff or user in self.admins.all())
class Alias(models.Model):
    """
    Another name an org might be known as.
    """
    org = models.ForeignKey(Org)
    alias = models.CharField(max_length=200)
    date_created = models.DateTimeField(auto_now_add=True, verbose_name="Date Created")
    date_updated = models.DateTimeField(auto_now=True, verbose_name="Date Updated")

    class Meta:
        # Each alias string may only be attached to a given org once.
        unique_together = (("org", "alias"),)
        db_table = 'org_alias'

    def __unicode__(self):
        return self.alias
class UserToOrgFollow(models.Model):
    """Through-table for Org.followers: records whether a user currently
    follows an org and when the follow started/stopped."""
    following = models.BooleanField(default = True, db_index = True)
    started_following = models.DateTimeField(auto_now_add = True)
    stopped_following = models.DateTimeField(blank = True, null = True)
    user = models.ForeignKey(User)
    org = models.ForeignKey(Org)

    class Meta:
        unique_together = (("user", "org"),)
        verbose_name = "User Following Org"
        # Bug fix: verbose_name was assigned twice; the second assignment was
        # clearly meant to be the plural form.
        verbose_name_plural = "Users Following Orgs"
        db_table = 'org_usertoorgfollow'

    def __unicode__(self):
        return "User '%s' following Org '%s'" % (self.user, self.org)
class RelatedOrg(models.Model):
    """Through-table for Org.related_orgs, carrying an algorithmic rank."""
    org = models.ForeignKey(Org, related_name="org")
    related_org = models.ForeignKey(Org, related_name="related_org")
    rank = models.FloatField() #Value determined by magic algo that generated this item.
    date_created = models.DateTimeField(auto_now_add=True, verbose_name="Date Created")
    date_updated = models.DateTimeField(auto_now=True, verbose_name="Date Updated")

    class Meta:
        db_table = 'related_orgs'
        # Default ordering is by relevance rank (ascending).
        ordering = ['rank']
        unique_together = (("org", "rank"),)
        verbose_name = "Org's Related Org"
        verbose_name_plural = "Org's Related Orgs"

    def __unicode__(self):
        return "%s" % self.related_org
class OrgIssueRelationship(models.Model):
    """Through-table for Org.issues, with a manually managed display rank."""
    org = models.ForeignKey(Org)
    issue = models.ForeignKey(Issue)
    rank = models.IntegerField(default=0) #This is manually managed for each org:issues relations.
    date_created = models.DateTimeField(auto_now_add=True, verbose_name="Date Created")
    date_updated = models.DateTimeField(auto_now=True, verbose_name="Date Updated")

    class Meta:
        ordering = ['rank']
        unique_together = (("org", "issue"),)
        verbose_name = "Org's Issue"
        verbose_name_plural = "Org's Issues"
        db_table = 'org_orgissuerelationship'

    def __unicode__(self):
        return "%s" % self.issue
| mit |
dezelin/vbox | src/libs/xpcom18a4/python/test/regrtest.py | 25 | 3618 | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Python XPCOM language bindings.
#
# The Initial Developer of the Original Code is
# ActiveState Tool Corp.
# Portions created by the Initial Developer are Copyright (C) 2000
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Hammond <mhammond@skippinet.com.au> (original author)
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
# regrtest.py
#
# The Regression Tests for the xpcom package.
import os
import sys
import unittest
# A little magic to create a single "test suite" from all test_ files
# in this dir. A single suite makes for prettier output test :)
def suite():
    """Build one TestSuite from every test_*.py module in this directory."""
    # Locate this script on disk; __file__ is absent under some loaders.
    try:
        script = __file__
    except NameError:
        script = sys.argv[0]
    test_dir = os.path.dirname(os.path.abspath(script))
    result = unittest.TestSuite()
    for entry in os.listdir(test_dir):
        base, ext = os.path.splitext(entry)
        if ext != '.py' or not os.path.basename(base).startswith("test_"):
            continue
        mod = __import__(base)
        # A test module may provide its own suite() factory; otherwise
        # discover its TestCase classes with the default loader.
        if hasattr(mod, "suite"):
            result.addTest(mod.suite())
        else:
            result.addTest(unittest.defaultTestLoader.loadTestsFromModule(mod))
    return result
class CustomLoader(unittest.TestLoader):
    # Loader that ignores the module it is handed and always returns the
    # aggregate suite built from all test_*.py files in this directory, so
    # unittest.TestProgram runs every test module instead of just this one.
    def loadTestsFromModule(self, module):
        return suite()
# Run the aggregated suite via CustomLoader, then shut XPCOM down even if the
# tests raise, so leak statistics are always reported.
try:
    unittest.TestProgram(testLoader=CustomLoader())(argv=sys.argv)
finally:
    from xpcom import _xpcom
    _xpcom.NS_ShutdownXPCOM() # To get leak stats and otherwise ensure life is good.
    # Any interface/gateway counts remaining after shutdown indicate leaked
    # XPCOM objects.
    ni = _xpcom._GetInterfaceCount()
    ng = _xpcom._GetGatewayCount()
    if ni or ng:
        # The old 'regrtest' that was not based purely on unittest did not
        # do this check at the end - it relied on each module doing it itself.
        # Thus, these leaks are not new, just newly noticed :) Likely to be
        # something silly like module globals.
        if ni == 6 and ng == 1:
            print "Sadly, there are 6/1 leaks, but these appear normal and benign"
        else:
            print "********* WARNING - Leaving with %d/%d objects alive" % (ni,ng)
    else:
        print "yay! Our leaks have all vanished!"
| gpl-2.0 |
yannrouillard/weboob | weboob/capabilities/housing.py | 4 | 3614 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import IBaseCap, CapBaseObject, Field, IntField, DecimalField, \
StringField, BytesField, DateField
__all__ = ['HousingPhoto', 'Housing', 'Query', 'City', 'ICapHousing']
class HousingPhoto(CapBaseObject):
    """
    Photo of a housing.
    """
    url = StringField('Direct URL to photo')
    data = BytesField('Data of photo')

    def __init__(self, url):
        # The object id is the last path component of the URL (the file name).
        CapBaseObject.__init__(self, url.split('/')[-1])
        self.url = url

    def __iscomplete__(self):
        # The photo is considered complete once its binary data is loaded.
        return self.data

    def __str__(self):
        return self.url

    def __repr__(self):
        # "%do" renders e.g. "data=1024o" (octets); 0 when data is absent.
        return u'<HousingPhoto "%s" data=%do>' % (self.id, len(self.data) if self.data else 0)
class Housing(CapBaseObject):
    """
    Content of a housing.

    Every attribute is a weboob capability field; the string passed to each
    field constructor serves as its documentation.
    """
    title = StringField('Title of housing')
    area = DecimalField('Area of housing, in m2')
    cost = DecimalField('Cost of housing')
    currency = StringField('Currency of cost')
    date = DateField('Date when the housing has been published')
    location = StringField('Location of housing')
    station = StringField('What metro/bus station next to housing')
    text = StringField('Text of the housing')
    phone = StringField('Phone number to contact')
    photos = Field('List of photos', list)
    details = Field('Key/values of details', dict)
class Query(CapBaseObject):
    """
    Query to find housings.
    """
    # Transaction types for the 'type' field.
    TYPE_RENT = 0
    TYPE_SALE = 1

    type = IntField('Type of housing to find (TYPE_* constants)')
    cities = Field('List of cities to search in', list, tuple)
    area_min = IntField('Minimal area (in m2)')
    area_max = IntField('Maximal area (in m2)')
    cost_min = IntField('Minimal cost')
    cost_max = IntField('Maximal cost')
    nb_rooms = IntField('Number of rooms')

    def __init__(self):
        # A query is not a backend object, so it carries an empty id.
        CapBaseObject.__init__(self, '')
class City(CapBaseObject):
    """
    City, as returned by :meth:`ICapHousing.search_city`.
    """
    name = StringField('Name of city')
class ICapHousing(IBaseCap):
    """
    Capability of websites to search housings.

    Backends implementing this capability must override all three methods.
    """

    def search_housings(self, query):
        """
        Search housings.

        :param query: search query
        :type query: :class:`Query`
        :rtype: iter[:class:`Housing`]
        """
        raise NotImplementedError()

    def get_housing(self, housing):
        """
        Get an housing from an ID.

        :param housing: ID of the housing
        :type housing: str
        :rtype: :class:`Housing` or None if not found.
        """
        raise NotImplementedError()

    def search_city(self, pattern):
        """
        Search a city from a pattern.

        :param pattern: pattern to search
        :type pattern: str
        :rtype: iter[:class:`City`]
        """
        raise NotImplementedError()
| agpl-3.0 |
jendap/tensorflow | tensorflow/contrib/eager/python/saver.py | 9 | 6931 | """Saver for eager mode TensorFlow."""
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import saver as _saver
def _init_from_checkpoint(self, *args, **kwargs):
  """Overrides default init by loading value from checkpoint.

  Monkey-patched in place of ResourceVariable._init_from_args by
  restore_variables_on_create(); `self` is the variable being created.
  The _old_init/_map_func/_ckpt_var_cache attributes are installed on the
  class by that context manager.
  """
  # pylint: disable=protected-access
  self._old_init(*args, **kwargs)
  ckpt_name = self._map_func(self._shared_name)
  if ckpt_name not in self._ckpt_var_cache:
    raise errors.NotFoundError(None, None,
                               "%s not found in checkpoint" % ckpt_name)

  val = self._ckpt_var_cache.get(ckpt_name, None)
  if val is not None:
    self.assign(val)
    # Avoid assigning for the second time.
    self._ckpt_var_cache[ckpt_name] = None
  # pylint: enable=protected-access
@contextlib.contextmanager
def restore_variables_on_create(save_path, map_func=None):
  """ContextManager that restores variables on creation.

  When save_path is None (e.g. No checkpoint), does nothing.
  Otherwise, it preloads all values from checkpoint. When the
  corresponding variable is first created, it assigns the checkpoint
  value to the variable.

  ```python
  with restore_variables_on_create(
      tf.train.latest_checkpoint(checkpoint_dir)):
  ```

  Args:
    save_path: The checkpoint file prefix.
    map_func: A function that given the variable name as argument
        and returns a variable name in checkpoint for restore. If
        None, use the variable with the same name in checkpoint to restore.
        It's an error that the mapped variable name doesn't exist in
        checkpoint.

  Yields:
    Nothing.

  Raises:
    NotFoundError: If the variable is not found in checkpoint.
    ValueError: If not used in eager mode or map_func is not callable.
  """
  if not context.executing_eagerly():
    raise ValueError(
        "Currently, restore_variables_on_create can only be used with "
        "eager execution enabled.")
  if save_path:
    if map_func is None:
      map_func_wrapper = lambda self, x: x
    else:
      if not callable(map_func):
        raise ValueError("map_func must be callable.")
      map_func_wrapper = lambda self, x: map_func(x)

    # Preload every tensor from the checkpoint so variable creation can be
    # satisfied without re-reading the file.
    ckpt_var_cache = dict()
    reader = checkpoint_utils.load_checkpoint(save_path)
    for k, _ in checkpoint_utils.list_variables(save_path):
      ckpt_var_cache[k] = reader.get_tensor(k)

    old_init = getattr(resource_variable_ops.ResourceVariable,
                       "_init_from_args", None)
    assert old_init, "ResourceVariable misses _init_from_args method."
    # Monkey-patch ResourceVariable so newly created variables initialize
    # from the cached checkpoint values (see _init_from_checkpoint).
    setattr(resource_variable_ops.ResourceVariable, "_init_from_args",
            _init_from_checkpoint)
    setattr(resource_variable_ops.ResourceVariable, "_old_init", old_init)
    setattr(resource_variable_ops.ResourceVariable, "_map_func",
            map_func_wrapper)
    setattr(resource_variable_ops.ResourceVariable, "_ckpt_var_cache",
            ckpt_var_cache)
  try:
    yield
  finally:
    # The previous "except Exception as e: raise e" clause was a no-op
    # re-raise that only rewrote the traceback origin; this finally block
    # alone already restores the patched class attributes on every exit path.
    if save_path:
      setattr(resource_variable_ops.ResourceVariable, "_init_from_args",
              old_init)
      setattr(resource_variable_ops.ResourceVariable, "_old_init", None)
      setattr(resource_variable_ops.ResourceVariable, "_map_func", None)
      setattr(resource_variable_ops.ResourceVariable, "_ckpt_var_cache", None)
class Saver(object):
  """A tf.train.Saver adapter for use when eager execution is enabled.

  `Saver`'s name-based checkpointing strategy is fragile. Please switch to
  `tf.train.Checkpoint` or `tf.keras.Model.save_weights`, which perform a more
  robust object-based saving. These APIs will load checkpoints written by
  `Saver`.
  """

  def __init__(self, var_list):
    """A tf.train.Saver adapter for use when eager execution is enabled.

    The API, and on-disk format, mimic tf.train.Saver except that no
    Session is needed.

    Args:
      var_list: The list of variables that will be saved and restored. Either a
        list of `tf.Variable` objects, or a dictionary mapping names to
        `tf.Variable` objects.

    Raises:
      RuntimeError: if invoked when eager execution has not been enabled.
    """
    if not context.executing_eagerly():
      raise RuntimeError("tfe.Saver can only be used when eager "
                         "execution is enabled. Use tf.train.Saver when "
                         "building graphs.")
    # Delegate (de)serialization to the graph-mode Saver; save()/restore()
    # below pass None in place of the Session argument.
    self._saver = _saver.Saver(var_list=var_list)

  def save(self, file_prefix, global_step=None):
    """Saves variables.

    Args:
      file_prefix: Path prefix of files created for the checkpoint.
      global_step: If provided the global step number is appended to file_prefix
        to create the checkpoint filename. The optional argument can be a
        Tensor, a Variable, or an integer.

    Returns:
      A string: prefix of filenames created for the checkpoint. This may be
        an extension of file_prefix that is suitable to pass as an argument
        to a subsequent call to `restore()`.
    """
    # Save ops are pinned to CPU.
    with ops.device("/device:CPU:0"):
      return self._saver.save(
          None, file_prefix, write_meta_graph=False, global_step=global_step)

  def restore(self, file_prefix):
    """Restores previously saved variables.

    Args:
      file_prefix: Path prefix where parameters were previously saved.
        Typically obtained from a previous `save()` call, or from
        `tf.train.latest_checkpoint`.
    """
    # Restore ops are pinned to CPU as well.
    with ops.device("/device:CPU:0"):
      self._saver.restore(None, file_prefix)
def get_optimizer_variables(optimizer):
  """Returns a list of variables for the given `tf.train.Optimizer`.

  Equivalent to `optimizer.variables()`.

  Args:
    optimizer: An instance of `tf.train.Optimizer` which has created variables
      (typically after a call to `Optimizer.minimize`).

  Returns:
    A list of variables which have been created by the `Optimizer`.
  """
  optimizer_variables = optimizer.variables()
  return optimizer_variables
| apache-2.0 |
RobbieNor/amble | src/amble_lib/helpers.py | 2 | 3121 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# This file is in the public domain
### END LICENSE
### DO NOT EDIT THIS FILE ###
"""Helpers for an Ubuntu application."""
import logging
import os
from . ambleconfig import get_data_file
from . Builder import Builder
from locale import gettext as _
def get_builder(builder_file_name):
    """Return a fully-instantiated Gtk.Builder for the named ui file.

    :param builder_file_name: name of the builder file, without extension,
        looked up in the 'ui' directory under the data path.
    """
    # Resolve the .ui file under the data directory; fall back to None so the
    # builder reports the missing file itself.
    ui_path = get_data_file('ui', '%s.ui' % (builder_file_name,))
    if not os.path.exists(ui_path):
        ui_path = None

    ui_builder = Builder()
    ui_builder.set_translation_domain('amble')
    ui_builder.add_from_file(ui_path)
    return ui_builder
# Owais Lone : To get quick access to icons and stuff.
def get_media_file(media_file_name):
    """Return a file:// URI for the named file in the 'media' data directory.

    Returns None when the file does not exist. (The previous code set the
    path to None and then crashed with a TypeError by concatenating
    "file:///" + None.)
    """
    media_filename = get_data_file('media', '%s' % (media_file_name,))
    if not os.path.exists(media_filename):
        return None
    return "file:///" + media_filename
class NullHandler(logging.Handler):
    # Handler that discards every record; attached to the root logger in
    # set_up_logging so nothing triggers logging.basicConfig implicitly.
    # (Equivalent to logging.NullHandler, available since Python 2.7.)
    def emit(self, record):
        pass
def set_up_logging(opts):
    """Attach stream handlers for the 'amble' and 'amble_lib' loggers and
    raise their verbosity according to opts.verbose (-v / -vv)."""
    # Park a no-op handler on the root logger so that nothing ever calls
    # logging.basicConfig implicitly.
    logging.getLogger().addHandler(NullHandler())

    log_format = logging.Formatter(
        "%(levelname)s:%(name)s: %(funcName)s() '%(message)s'")

    app_logger = logging.getLogger('amble')
    app_stream = logging.StreamHandler()
    app_stream.setFormatter(log_format)
    app_logger.addHandler(app_stream)

    lib_logger = logging.getLogger('amble_lib')
    lib_stream = logging.StreamHandler()
    lib_stream.setFormatter(log_format)
    lib_logger.addHandler(lib_stream)

    # -v enables application debug output, -vv additionally enables
    # library debug output.
    if opts.verbose:
        app_logger.setLevel(logging.DEBUG)
        app_logger.debug('logging enabled')
        if opts.verbose > 1:
            lib_logger.setLevel(logging.DEBUG)
def get_help_uri(page=None):
    """Return the help URI, optionally anchored at *page*.

    Uses the in-tree help files (default language) when present, otherwise
    falls back to the installed GNOME help tree ('amble', user's language).
    """
    source_help = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'help', 'C'))
    help_uri = source_help if os.path.exists(source_help) else 'amble'
    # An unspecified page means the index.page.
    if page is None:
        return help_uri
    return '%s#%s' % (help_uri, page)
def show_uri(parent, link):
    """Open *link* with the default handler on *parent*'s screen."""
    # Imported lazily so merely importing this module does not require Gtk.
    from gi.repository import Gtk # pylint: disable=E0611
    screen = parent.get_screen()
    Gtk.show_uri(screen, link, Gtk.get_current_event_time())
def alias(alternative_function_name):
    """Decorator recording an alternative public name for a function.

    Each application appends the name to the function's ``aliases`` list
    attribute (see http://www.drdobbs.com/web-development/184406073#l9).
    """
    def decorator(function):
        """Attach alternative_function_name to function's alias list."""
        existing = getattr(function, 'aliases', None)
        if existing is None:
            function.aliases = [alternative_function_name]
        else:
            existing.append(alternative_function_name)
        return function
    return decorator
| gpl-3.0 |
gtklocker/ting | API/chat/tests/message/test_delete_view.py | 3 | 1608 | from chat.tests.message.common import *
class MessageViewDELETETests(ChatTests):
    # Use the project's ChatClient so privileged_operation can authenticate.
    client_class = ChatClient

    def test_delete_message(self):
        """
        The view should delete the message with the
        specified id and respond with a 204(No Content)
        code.
        """
        # Fixed millisecond-scale timestamp -- presumably ms since epoch;
        # the exact value is irrelevant to this test.
        timestamp = 10 ** 11
        message = create_message(
            message_content='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp,
            message_type='text'
        )
        response = self.privileged_operation(
            reverse('chat:message', args=(message.id,)),
            {},
            'delete'
        )
        # QuerySet is lazy; it is evaluated by len() after the delete ran.
        messages = Message.objects.filter(username='vitsalis')
        self.assertEqual(response.status_code, 204)
        self.assertEqual(len(messages), 0)

    def test_delete_message_that_does_not_exist(self):
        """
        When a message with the specified id doesn't exist
        the view should respond with a 404(Not Found) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            message_content='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp,
            message_type='text'
        )
        # Request deletion of an id other than the one just created.
        response = self.privileged_operation(
            reverse('chat:message', args=(message.id + 1,)),
            {},
            'delete'
        )
        self.assertEqual(response.status_code, 404)
        # The existing message must be untouched.
        messages = Message.objects.filter(username='vitsalis')
        self.assertEqual(len(messages), 1)
| mit |
bigswitch/horizon | openstack_dashboard/utils/settings.py | 3 | 7585 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from importlib import import_module
import logging
import os
import pkgutil
import six
from horizon.utils import file_discovery as fd
def import_submodules(module):
    """Import every direct submodule of *module*.

    Returns a dict mapping the unqualified submodule name to the imported
    module object; submodules that fail to import are logged and skipped.
    """
    imported = {}
    prefix = module.__name__ + '.'
    for _loader, qualified_name, _ispkg in pkgutil.iter_modules(module.__path__,
                                                                prefix):
        try:
            submodule = import_module(qualified_name)
        except ImportError as exc:
            # FIXME: Make the errors non-fatal (do we want that?).
            logging.warning("Error importing %s" % qualified_name)
            logging.exception(exc)
            continue
        short_name = qualified_name.rsplit('.', 1)[1]
        imported[short_name] = submodule
    return imported
def import_dashboard_config(modules):
    """Imports configuration from all the modules and merges it.

    Returns (key, config-dict) pairs sorted by the base file name of the
    module that most recently contributed to each entry.
    """
    merged = collections.defaultdict(dict)
    for module in modules:
        for _name, submodule in six.iteritems(import_submodules(module)):
            if hasattr(submodule, 'DASHBOARD'):
                # Dashboard entries accumulate across files, keyed by slug.
                merged[submodule.DASHBOARD].update(submodule.__dict__)
            elif (hasattr(submodule, 'PANEL')
                  or hasattr(submodule, 'PANEL_GROUP')
                  or hasattr(submodule, 'FEATURE')):
                merged[submodule.__name__] = submodule.__dict__
            else:
                logging.warning("Skipping %s because it doesn't have DASHBOARD"
                                ", PANEL, PANEL_GROUP, or FEATURE defined.",
                                submodule.__name__)

    def _contributing_file(entry):
        return entry[1]['__name__'].rsplit('.', 1)[1]

    return sorted(six.iteritems(merged), key=_contributing_file)
def update_dashboards(modules, horizon_config, installed_apps):
    """Imports dashboard and panel configuration from modules and applies it.

    The submodules from specified modules are imported, and the configuration
    for the specific dashboards is merged, with the later modules overriding
    settings from the former. Then the configuration is applied to
    horizon_config and installed_apps, in alphabetical order of files from
    which the configurations were imported.

    For example, given this setup:

        | foo/__init__.py
        | foo/_10_baz.py
        | foo/_20_qux.py

        | bar/__init__.py
        | bar/_30_baz_.py

    and being called with ``modules=[foo, bar]``, we will first have the
    configuration from ``_10_baz`` and ``_30_baz`` merged, then the
    configurations will be applied in order ``qux``, ``baz`` (``baz`` is
    second, because the most recent file which contributed to it, ``_30_baz``,
    comes after ``_20_qux``).

    Panel specific configurations are stored in horizon_config. Dashboards
    from both plugin-based and openstack_dashboard must be registered before
    the panel configuration can be applied. Making changes to the panel is
    deferred until the horizon autodiscover is completed, configurations are
    applied in alphabetical order of files where it was imported.
    """
    config_dashboards = horizon_config.get('dashboards', [])
    if config_dashboards or horizon_config.get('default_dashboard'):
        logging.warning(
            '"dashboards" and "default_dashboard" in (local_)settings is '
            'DEPRECATED now and may be unsupported in some future release. '
            'The preferred way to specify the order of dashboards and the '
            'default dashboard is the pluggable dashboard mechanism (in %s).',
            ', '.join([os.path.abspath(module.__path__[0])
                       for module in modules])
        )

    # Accumulators for everything the enabled files may contribute; they are
    # merged into horizon_config in one pass at the end.
    enabled_dashboards = []
    disabled_dashboards = []
    exceptions = horizon_config.get('exceptions', {})
    apps = []
    angular_modules = []
    js_files = []
    js_spec_files = []
    scss_files = []
    panel_customization = []
    update_horizon_config = {}
    for key, config in import_dashboard_config(modules):
        if config.get('DISABLED', False):
            if config.get('DASHBOARD'):
                disabled_dashboards.append(config.get('DASHBOARD'))
            continue

        _apps = config.get('ADD_INSTALLED_APPS', [])
        apps.extend(_apps)

        if config.get('AUTO_DISCOVER_STATIC_FILES', False):
            for _app in _apps:
                module = import_module(_app)
                base_path = os.path.join(module.__path__[0], 'static/')
                fd.populate_horizon_config(horizon_config, base_path)

        # Exception lists are deduplicated per category across files.
        add_exceptions = six.iteritems(config.get('ADD_EXCEPTIONS', {}))
        for category, exc_list in add_exceptions:
            exceptions[category] = tuple(set(exceptions.get(category, ())
                                             + exc_list))

        angular_modules.extend(config.get('ADD_ANGULAR_MODULES', []))
        # avoid pulling in dashboard javascript dependencies multiple times
        existing = set(js_files)
        js_files.extend([f for f in config.get('ADD_JS_FILES', [])
                         if f not in existing])
        js_spec_files.extend(config.get('ADD_JS_SPEC_FILES', []))
        scss_files.extend(config.get('ADD_SCSS_FILES', []))
        update_horizon_config.update(
            config.get('UPDATE_HORIZON_CONFIG', {}))

        if config.get('DASHBOARD'):
            dashboard = key
            enabled_dashboards.append(dashboard)
            if config.get('DEFAULT', False):
                horizon_config['default_dashboard'] = dashboard
        elif config.get('PANEL') or config.get('PANEL_GROUP'):
            config.pop("__builtins__", None)
            panel_customization.append(config)
    # Preserve the dashboard order specified in settings
    dashboards = ([d for d in config_dashboards
                   if d not in disabled_dashboards] +
                  [d for d in enabled_dashboards
                   if d not in config_dashboards])

    horizon_config['panel_customization'] = panel_customization
    horizon_config['dashboards'] = tuple(dashboards)
    horizon_config.setdefault('exceptions', {}).update(exceptions)
    horizon_config.update(update_horizon_config)
    horizon_config.setdefault('angular_modules', []).extend(angular_modules)
    horizon_config.setdefault('js_files', []).extend(js_files)
    horizon_config.setdefault('js_spec_files', []).extend(js_spec_files)
    horizon_config.setdefault('scss_files', []).extend(scss_files)

    # apps contains reference to applications declared in the enabled folder
    # basically a list of applications that are internal and external plugins
    # installed_apps contains reference to applications declared in settings
    # such as django.contribe.*, django_pyscss, compressor, horizon, etc...
    # for translation, we are only interested in the list of external plugins
    # so we save the reference to it before we append to installed_apps
    horizon_config.setdefault('plugins', []).extend(apps)
    installed_apps[0:0] = apps
| apache-2.0 |
cdecker/bitcoin | contrib/signet/getcoins.py | 29 | 1634 | #!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import argparse
import subprocess
import requests
import sys
# Command-line interface: all unknown positional args are forwarded to
# bitcoin-cli (defaulting to -signet when an address must be generated).
parser = argparse.ArgumentParser(description='Script to get coins from a faucet.', epilog='You may need to start with double-dash (--) when providing bitcoin-cli arguments.')
parser.add_argument('-c', '--cmd', dest='cmd', default='bitcoin-cli', help='bitcoin-cli command to use')
parser.add_argument('-f', '--faucet', dest='faucet', default='https://signetfaucet.com/claim', help='URL of the faucet')
parser.add_argument('-a', '--addr', dest='addr', default='', help='Bitcoin address to which the faucet should send')
parser.add_argument('-p', '--password', dest='password', default='', help='Faucet password, if any')
parser.add_argument('bitcoin_cli_args', nargs='*', help='Arguments to pass on to bitcoin-cli (default: -signet)')

args = parser.parse_args()

if args.addr == '':
    if args.bitcoin_cli_args == []:
        args.bitcoin_cli_args = ['-signet']
    # get address for receiving coins; decode because check_output returns
    # bytes on Python 3 (previously the raw bytes were posted to the faucet)
    try:
        args.addr = subprocess.check_output(
            [args.cmd] + args.bitcoin_cli_args +
            ['getnewaddress', 'faucet', 'bech32']).strip().decode()
    except FileNotFoundError:
        print('The binary', args.cmd, 'could not be found.')
        exit()

data = {'address': args.addr, 'password': args.password}
try:
    res = requests.post(args.faucet, data=data)
except Exception:
    # A bare "except:" here would also swallow KeyboardInterrupt/SystemExit.
    print('Unexpected error when contacting faucet:', sys.exc_info()[0])
    exit()
print(res.text)
| mit |
ThePletch/ansible | lib/ansible/modules/system/crypttab.py | 32 | 11852 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Steve <yo@groks.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: crypttab
short_description: Encrypted Linux block devices
description:
- Control Linux encrypted block devices that are set up during system boot in C(/etc/crypttab).
version_added: "1.9"
options:
name:
description:
- Name of the encrypted block device as it appears in the C(/etc/crypttab) file, or
optionaly prefixed with C(/dev/mapper/), as it appears in the filesystem. I(/dev/mapper/)
will be stripped from I(name).
required: true
default: null
aliases: []
state:
description:
- Use I(present) to add a line to C(/etc/crypttab) or update it's definition
if already present. Use I(absent) to remove a line with matching I(name).
Use I(opts_present) to add options to those already present; options with
different values will be updated. Use I(opts_absent) to remove options from
the existing set.
required: true
choices: [ "present", "absent", "opts_present", "opts_absent"]
default: null
backing_device:
description:
- Path to the underlying block device or file, or the UUID of a block-device
prefixed with I(UUID=)
required: false
default: null
password:
description:
- Encryption password, the path to a file containing the password, or
'none' or '-' if the password should be entered at boot.
required: false
default: "none"
opts:
description:
- A comma-delimited list of options. See C(crypttab(5) ) for details.
required: false
path:
description:
- Path to file to use instead of C(/etc/crypttab). This might be useful
in a chroot environment.
required: false
default: /etc/crypttab
notes: []
requirements: []
author: "Steve (@groks)"
'''
EXAMPLES = '''
# Since column is a special character in YAML, if your string contains a column, it's better to use quotes around the string
- name: Set the options explicitly a device which must already exist
crypttab:
name: luks-home
state: present
opts: 'discard,cipher=aes-cbc-essiv:sha256'
- name: Add the 'discard' option to any existing options for all devices
crypttab:
name: '{{ item.device }}'
state: opts_present
opts: discard
with_items: '{{ ansible_mounts }}'
when: '/dev/mapper/luks-' in {{ item.device }}
'''
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
def main():
    """Entry point: apply the requested crypttab state and exit via the
    Ansible module JSON protocol (exit_json / fail_json)."""
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            state = dict(required=True, choices=['present', 'absent', 'opts_present', 'opts_absent']),
            backing_device = dict(default=None),
            password = dict(default=None, type='path'),
            opts = dict(default=None),
            path = dict(default='/etc/crypttab', type='path')
        ),
        supports_check_mode = True
    )

    backing_device = module.params['backing_device']
    password = module.params['password']
    opts = module.params['opts']
    state = module.params['state']
    path = module.params['path']

    # Accept the mapped-device path as well as the bare mapping name.
    name = module.params['name']
    if name.startswith('/dev/mapper/'):
        name = name[len('/dev/mapper/'):]

    if state != 'absent' and backing_device is None and password is None and opts is None:
        module.fail_json(msg="expected one or more of 'backing_device', 'password' or 'opts'",
                         **module.params)

    if 'opts' in state and (backing_device is not None or password is not None):
        module.fail_json(msg="cannot update 'backing_device' or 'password' when state=%s" % state,
                         **module.params)

    # Crypttab fields are whitespace-separated, so no field value may
    # contain white space or be empty.
    for arg_name, arg in (('name', name),
                          ('backing_device', backing_device),
                          ('password', password),
                          ('opts', opts)):
        if (arg is not None
                and (' ' in arg or '\t' in arg or arg == '')):
            module.fail_json(msg="invalid '%s': contains white space or is empty" % arg_name,
                             **module.params)

    try:
        crypttab = Crypttab(path)
        existing_line = crypttab.match(name)
    except Exception:
        e = get_exception()
        module.fail_json(msg="failed to open and parse crypttab file: %s" % e,
                         **module.params)

    if 'present' in state and existing_line is None and backing_device is None:
        module.fail_json(msg="'backing_device' required to add a new entry",
                         **module.params)

    changed, reason = False, '?'

    if state == 'absent':
        if existing_line is not None:
            changed, reason = existing_line.remove()

    elif state == 'present':
        if existing_line is not None:
            changed, reason = existing_line.set(backing_device, password, opts)
        else:
            changed, reason = crypttab.add(Line(None, name, backing_device, password, opts))

    elif state == 'opts_present':
        if existing_line is not None:
            changed, reason = existing_line.opts.add(opts)
        else:
            changed, reason = crypttab.add(Line(None, name, backing_device, password, opts))

    elif state == 'opts_absent':
        if existing_line is not None:
            changed, reason = existing_line.opts.remove(opts)

    if changed and not module.check_mode:
        # Write in text mode: Crypttab renders to str, so the previous
        # open(path, 'wb') + write(str(...)) broke on Python 3, and its
        # try/finally raised NameError when open() itself failed.
        with open(path, 'w') as f:
            f.write(str(crypttab))

    module.exit_json(changed=changed, msg=reason, **module.params)
class Crypttab(object):
    """In-memory representation of an /etc/crypttab file."""

    def __init__(self, path):
        self.path = path
        # Per-instance line list. This used to be a class attribute, so every
        # Crypttab instance shared (and kept accumulating) the same parsed
        # lines across instantiations.
        self._lines = []
        if not os.path.exists(path):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            # Create an empty file so the read below always succeeds.
            open(path, 'a').close()

        # "with" avoids the old try/finally, which raised NameError on
        # f.close() if open() itself had failed.
        with open(path, 'r') as f:
            for line in f:
                self._lines.append(Line(line))

    def add(self, line):
        """Append a Line object; returns (changed, reason)."""
        self._lines.append(line)
        return True, 'added line'

    def lines(self):
        """Yield only the valid (parseable) crypttab entries."""
        for line in self._lines:
            if line.valid():
                yield line

    def match(self, name):
        """Return the Line whose mapped name equals *name*, or None."""
        for line in self.lines():
            if line.name == name:
                return line
        return None

    def __str__(self):
        # Render every line (comments and blanks included) and make sure the
        # result always ends with exactly one trailing newline.
        crypttab = '\n'.join(str(line) for line in self._lines)
        if len(crypttab) == 0:
            crypttab += '\n'
        if crypttab[-1] != '\n':
            crypttab += '\n'
        return crypttab
class Line(object):
    """One crypttab entry: mapped name, backing device, password, options."""

    def __init__(self, line=None, name=None, backing_device=None, password=None, opts=None):
        self.line = line
        self.name = name
        self.backing_device = backing_device
        self.password = password
        self.opts = Options(opts)
        # When constructed from a raw file line, parse it; invalid lines
        # (comments, blanks, wrong field count) keep name/backing_device as
        # None and are rendered back verbatim by __str__.
        if line is not None:
            if self._line_valid(line):
                self.name, backing_device, password, opts = self._split_line(line)
                self.set(backing_device, password, opts)

    def set(self, backing_device, password, opts):
        """Update any non-None field; returns (changed, reason)."""
        changed = False

        if backing_device is not None and self.backing_device != backing_device:
            self.backing_device = backing_device
            changed = True

        if password is not None and self.password != password:
            self.password = password
            changed = True

        if opts is not None:
            opts = Options(opts)
            if opts != self.opts:
                self.opts = opts
                changed = True

        return changed, 'updated line'

    def _line_valid(self, line):
        # A valid entry has 2-4 whitespace-separated fields and is neither
        # blank nor a comment.
        if not line.strip() or line.startswith('#') or len(line.split()) not in (2, 3, 4):
            return False
        return True

    def _split_line(self, line):
        # Password and options columns are optional; missing ones are None.
        fields = line.split()
        try:
            field2 = fields[2]
        except IndexError:
            field2 = None
        try:
            field3 = fields[3]
        except IndexError:
            field3 = None

        return (fields[0],
                fields[1],
                field2,
                field3)

    def remove(self):
        """Blank out the entry so it renders as an empty line."""
        self.line, self.name, self.backing_device = '', None, None
        return True, 'removed line'

    def valid(self):
        """A line is a usable entry once it has a name and a device."""
        if self.name is not None and self.backing_device is not None:
            return True
        return False

    def __str__(self):
        if self.valid():
            fields = [self.name, self.backing_device]
            if self.password is not None or self.opts:
                if self.password is not None:
                    fields.append(self.password)
                else:
                    # Options present but no password: crypttab's third
                    # column must then read 'none'. The old code called
                    # self.password('none') -- a TypeError at render time.
                    fields.append('none')
            if self.opts:
                fields.append(str(self.opts))
            return ' '.join(fields)
        return self.line
class Options(dict):
    """Insertion-ordered mapping of crypttab options.

    Parsed from / serialized to a string like 'discard,foo=bar,baz=greeble';
    flag options (no '=') map to None. Insertion order is tracked in
    self.itemlist (this module predates reliably ordered dicts).
    """
    def __init__(self, opts_string):
        super(Options, self).__init__()
        self.itemlist = []
        if opts_string is not None:
            for opt in opts_string.split(','):
                # Split on the first '=' only, so values may themselves
                # contain '=' (the old split('=') silently truncated them).
                kv = opt.split('=', 1)
                if len(kv) > 1:
                    k, v = (kv[0], kv[1])
                else:
                    k, v = (kv[0], None)
                self[k] = v

    def add(self, opts_string):
        """Merge options from *opts_string*; returns (changed, reason)."""
        changed = False
        for k, v in Options(opts_string).items():
            if k in self:
                if self[k] != v:
                    changed = True
            else:
                changed = True
            self[k] = v
        return changed, 'updated options'

    def remove(self, opts_string):
        """Delete the named options; returns (changed, reason)."""
        changed = False
        for k in Options(opts_string):
            if k in self:
                del self[k]
                changed = True
        return changed, 'removed options'

    def keys(self):
        return self.itemlist

    def values(self):
        return [self[key] for key in self]

    def items(self):
        return [(key, self[key]) for key in self]

    def __iter__(self):
        return iter(self.itemlist)

    def __setitem__(self, key, value):
        if key not in self:
            self.itemlist.append(key)
        super(Options, self).__setitem__(key, value)

    def __delitem__(self, key):
        self.itemlist.remove(key)
        super(Options, self).__delitem__(key)

    def __ne__(self, obj):
        # Order-insensitive comparison of key/value pairs.
        return not (isinstance(obj, Options)
                    and sorted(self.items()) == sorted(obj.items()))

    def __str__(self):
        ret = []
        for k, v in self.items():
            if v is None:
                ret.append(k)
            else:
                ret.append('%s=%s' % (k, v))
        return ','.join(ret)
# Script entry point; main() is defined earlier in this module (outside
# this chunk).
if __name__ == '__main__':
    main()
| gpl-3.0 |
karies/root | tutorials/tmva/keras/ApplicationRegressionKeras.py | 6 | 1042 | #!/usr/bin/env python
from ROOT import TMVA, TFile, TString
from array import array
from subprocess import call
from os.path import isfile
# Setup TMVA
TMVA.Tools.Instance()
TMVA.PyMethodBase.PyInitialize()
reader = TMVA.Reader("Color:!Silent")
# Load data
# Fetch the example file once if it is not already present locally.
if not isfile('tmva_reg_example.root'):
    call(['curl', '-O', 'http://root.cern.ch/files/tmva_reg_example.root'])
data = TFile.Open('tmva_reg_example.root')
tree = data.Get('TreeR')
branches = {}
for branch in tree.GetListOfBranches():
    branchName = branch.GetName()
    # Bind each branch to a one-element float buffer that ROOT fills in
    # place on every tree.GetEntry() call.
    branches[branchName] = array('f', [-999])
    tree.SetBranchAddress(branchName, branches[branchName])
    # 'fvalue' is the regression target here, so it is not registered as
    # an input variable of the reader.
    if branchName != 'fvalue':
        reader.AddVariable(branchName, branches[branchName])
# Book methods
reader.BookMVA('PyKeras', TString('dataset/weights/TMVARegression_PyKeras.weights.xml'))
# Print some example regressions
print('Some example regressions:')
for i in range(20):
    tree.GetEntry(i)
    print('True/MVA value: {}/{}'.format(branches['fvalue'][0],reader.EvaluateMVA('PyKeras')))
| lgpl-2.1 |
2014c2g3/0623exam | static/Brython3.1.1-20150328-091302/Lib/_strptime_1.py | 518 | 21683 | """Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
import time
import locale
import calendar
from re import compile as re_compile
from re import IGNORECASE
from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []  # export nothing: all helpers here are private (_strptime, _strptime_time, ...)
def _getlang():
# Figure out what the current language is set to.
return locale.getlocale(locale.LC_TIME)
class LocaleTime(object):
    """Stores and handles locale-specific information related to time.

    ATTRIBUTES:
        f_weekday -- full weekday names (7-item list)
        a_weekday -- abbreviated weekday names (7-item list)
        f_month -- full month names (13-item list; dummy value in [0],
                   which is added by code)
        a_month -- abbreviated month names (13-item list, dummy value in
                   [0], which is added by code)
        am_pm -- AM/PM representation (2-item list)
        LC_date_time -- format string for date/time representation (string)
        LC_date -- format string for date representation (string)
        LC_time -- format string for time representation (string)
        timezone -- daylight- and non-daylight-savings timezone
                    representation (2-item tuple of frozensets)
        lang -- Language used by instance (2-item tuple)
    """
    def __init__(self):
        """Set all attributes.

        Order of methods called matters for dependency reasons.

        The locale language is set at the offset and then checked again
        before exiting.  This is to make sure that the attributes were not
        set with a mix of information from more than one locale.  This
        would most likely happen when using threads where one thread calls
        a locale-dependent function while another thread changes the
        locale while the function in the other thread is still running.
        Proper coding would call for locks to prevent changing the locale
        while locale-dependent code is running.  The check here is done in
        case someone does not think about doing this.

        Only other possible issue is if someone changed the timezone and
        did not call tz.tzset.  That is an issue for the programmer,
        though, since changing the timezone is worthless without that
        call.
        """
        self.lang = _getlang()
        self.__calc_weekday()
        self.__calc_month()
        self.__calc_am_pm()
        self.__calc_timezone()
        self.__calc_date_time()
        # Re-check: a locale switch mid-initialization would have mixed
        # data from two locales.
        if _getlang() != self.lang:
            raise ValueError("locale changed during initialization")
    def __pad(self, seq, front):
        # Add '' to seq to either the front (is True), else the back.
        # NOTE(review): not referenced elsewhere in this chunk; presumably
        # kept for compatibility with other _strptime variants.
        seq = list(seq)
        if front:
            seq.insert(0, '')
        else:
            seq.append('')
        return seq
    def __calc_weekday(self):
        # Set self.a_weekday and self.f_weekday using the calendar
        # module.  Names are lower-cased for case-insensitive matching.
        a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
        f_weekday = [calendar.day_name[i].lower() for i in range(7)]
        self.a_weekday = a_weekday
        self.f_weekday = f_weekday
    def __calc_month(self):
        # Set self.f_month and self.a_month using the calendar module.
        # Index 0 is calendar's empty dummy entry, so months are 1-based.
        a_month = [calendar.month_abbr[i].lower() for i in range(13)]
        f_month = [calendar.month_name[i].lower() for i in range(13)]
        self.a_month = a_month
        self.f_month = f_month
    def __calc_am_pm(self):
        # Set self.am_pm by using time.strftime().
        # The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
        # magical; just happened to have used it everywhere else where a
        # static date was needed.
        am_pm = []
        for hour in (1, 22):
            time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
            am_pm.append(time.strftime("%p", time_tuple).lower())
        self.am_pm = am_pm
    def __calc_date_time(self):
        # Set self.date_time, self.date, & self.time by using
        # time.strftime().
        # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
        # overloaded numbers is minimized.  The order in which searches for
        # values within the format string is very important; it eliminates
        # possible ambiguity for what something represents.
        time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
        date_time = [None, None, None]
        date_time[0] = time.strftime("%c", time_tuple).lower()
        date_time[1] = time.strftime("%x", time_tuple).lower()
        date_time[2] = time.strftime("%X", time_tuple).lower()
        replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
                    (self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
                    (self.a_month[3], '%b'), (self.am_pm[1], '%p'),
                    ('1999', '%Y'), ('99', '%y'), ('22', '%H'),
                    ('44', '%M'), ('55', '%S'), ('76', '%j'),
                    ('17', '%d'), ('03', '%m'), ('3', '%m'),
                    # '3' needed for when no leading zero.
                    ('2', '%w'), ('10', '%I')]
        replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
                                                for tz in tz_values])
        for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
            current_format = date_time[offset]
            for old, new in replacement_pairs:
                # Must deal with possible lack of locale info
                # manifesting itself as the empty string (e.g., Swedish's
                # lack of AM/PM info) or a platform returning a tuple of empty
                # strings (e.g., MacOS 9 having timezone as ('','')).
                if old:
                    current_format = current_format.replace(old, new)
            # If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
            # 2005-01-03 occurs before the first Monday of the year.  Otherwise
            # %U is used.
            time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
            if '00' in time.strftime(directive, time_tuple):
                U_W = '%W'
            else:
                U_W = '%U'
            date_time[offset] = current_format.replace('11', U_W)
        self.LC_date_time = date_time[0]
        self.LC_date = date_time[1]
        self.LC_time = date_time[2]
    def __calc_timezone(self):
        # Set self.timezone by using time.tzname.
        # Do not worry about possibility of time.tzname[0] == timetzname[1]
        # and time.daylight; handle that in strptime .
        # The tzset() call is deliberately disabled here (this port's time
        # module may not provide it).
        #try:
            #time.tzset()
        #except AttributeError:
            #pass
        no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()])
        if time.daylight:
            has_saving = frozenset([time.tzname[1].lower()])
        else:
            has_saving = frozenset()
        self.timezone = (no_saving, has_saving)
class TimeRE(dict):
    """Handle conversion from format directives to regexes.

    Maps each strptime directive character (e.g. 'd', 'Y') to a named-group
    regex fragment; pattern()/compile() assemble these into a full regex
    for a format string.
    """
    def __init__(self, locale_time=None):
        """Create keys/values.

        Order of execution is important for dependency reasons ('W', 'c',
        'x', 'X' are derived from entries created first).
        """
        if locale_time:
            self.locale_time = locale_time
        else:
            self.locale_time = LocaleTime()
        base = super()
        base.__init__({
            # The " \d" part of the regex is to make %c from ANSI C work
            'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
            'f': r"(?P<f>[0-9]{1,6})",
            'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
            'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
            'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
            'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
            'M': r"(?P<M>[0-5]\d|\d)",
            'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
            'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
            'w': r"(?P<w>[0-6])",
            # W is set below by using 'U'
            'y': r"(?P<y>\d\d)",
            #XXX: Does 'Y' need to worry about having less or more than
            #     4 digits?
            'Y': r"(?P<Y>\d\d\d\d)",
            'z': r"(?P<z>[+-]\d\d[0-5]\d)",
            'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
            'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
            'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
            'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
            'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
            'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
                                        for tz in tz_names),
                                'Z'),
            '%': '%'})
        base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
        base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
        base.__setitem__('x', self.pattern(self.locale_time.LC_date))
        base.__setitem__('X', self.pattern(self.locale_time.LC_time))
    def __seqToRE(self, to_convert, directive):
        """Convert a list to a regex string for matching a directive.

        Want possible matching values to be from longest to shortest.  This
        prevents the possibility of a match occurring for a value that also
        a substring of a larger value that should have matched (e.g., 'abc'
        matching when 'abcdef' should have been the match).
        """
        to_convert = sorted(to_convert, key=len, reverse=True)
        # All-empty input (e.g. a missing locale field) yields no regex.
        for value in to_convert:
            if value != '':
                break
        else:
            return ''
        regex = '|'.join(re_escape(stuff) for stuff in to_convert)
        regex = '(?P<%s>%s' % (directive, regex)
        return '%s)' % regex
    def pattern(self, format):
        """Return regex pattern for the format string.

        Need to make sure that any characters that might be interpreted as
        regex syntax are escaped.
        """
        processed_format = ''
        # The sub() call escapes all characters that might be misconstrued
        # as regex syntax.  Cannot use re.escape since we have to deal with
        # format directives (%m, etc.).
        regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
        format = regex_chars.sub(r"\\\1", format)
        # Bug fix: both strings below must be raw.  The old code passed
        # '\s+' as the sub() replacement; since Python 3.7 a lone
        # backslash-s in a replacement template is rejected ("bad escape
        # \s").  r'\\s+' emits a literal backslash followed by 's+', i.e.
        # the intended \s+ token in the generated pattern.
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
        while '%' in format:
            directive_index = format.index('%')+1
            processed_format = "%s%s%s" % (processed_format,
                                           format[:directive_index-1],
                                           self[format[directive_index]])
            format = format[directive_index+1:]
        return "%s%s" % (processed_format, format)
    def compile(self, format):
        """Return a compiled re object for the format string."""
        return re_compile(self.pattern(format), IGNORECASE)
# Module-level caches shared by every _strptime() call.
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
# _TimeRE_cache is rebuilt (and _regex_cache cleared) inside _strptime()
# whenever the LC_TIME locale changes; _regex_cache maps format strings to
# their compiled regexes.
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
"""Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)."""
first_weekday = datetime_date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a 2-tuple consisting of a time struct and an int containing
    the number of microseconds based on the input string and the
    format string.

    The first element is an 11-tuple of
    (year, month, day, hour, minute, second, weekday, julian, tz,
    tzname, gmtoff); the second is the fractional-second part in
    microseconds.
    """
    for index, arg in enumerate([data_string, format]):
        if not isinstance(arg, str):
            msg = "strptime() argument {} must be str, not {}"
            raise TypeError(msg.format(index, type(arg)))
    global _TimeRE_cache, _regex_cache
    # Everything touching the module-level caches happens under the lock.
    with _cache_lock:
        # A locale change invalidates every cached regex.
        if _getlang() != _TimeRE_cache.locale_time.lang:
            _TimeRE_cache = TimeRE()
            _regex_cache.clear()
        if len(_regex_cache) > _CACHE_MAX_SIZE:
            _regex_cache.clear()
        locale_time = _TimeRE_cache.locale_time
        format_regex = _regex_cache.get(format)
        if not format_regex:
            try:
                format_regex = _TimeRE_cache.compile(format)
            # KeyError raised when a bad format is found; can be specified as
            # \\, in which case it was a stray % but with a space after it
            except KeyError as err:
                bad_directive = err.args[0]
                if bad_directive == "\\":
                    bad_directive = "%"
                del err
                raise ValueError("'%s' is a bad directive in format '%s'" %
                                    (bad_directive, format)) from None
            # IndexError only occurs when the format string is "%"
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format) from None
            _regex_cache[format] = format_regex
    found = format_regex.match(data_string)
    if not found:
        raise ValueError("time data %r does not match format %r" %
                         (data_string, format))
    # The whole input must be consumed; trailing text is an error.
    if len(data_string) != found.end():
        raise ValueError("unconverted data remains: %s" %
                          data_string[found.end():])
    year = None
    month = day = 1
    hour = minute = second = fraction = 0
    tz = -1
    tzoffset = None
    # Default to -1 to signify that values not known; not critical to have,
    # though
    week_of_year = -1
    week_of_year_start = -1
    # weekday and julian defaulted to -1 so as to signal need to calculate
    # values
    weekday = julian = -1
    found_dict = found.groupdict()
    for group_key in found_dict.keys():
        # Directives not explicitly handled below:
        #   c, x, X
        #      handled by making out of other directives
        #   U, W
        #      worthless without day of the week
        if group_key == 'y':
            year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            #value in the range of [00, 68] is in the century 2000, while
            #[69,99] is in the century 1900
            if year <= 68:
                year += 2000
            else:
                year += 1900
        elif group_key == 'Y':
            year = int(found_dict['Y'])
        elif group_key == 'm':
            month = int(found_dict['m'])
        elif group_key == 'B':
            month = locale_time.f_month.index(found_dict['B'].lower())
        elif group_key == 'b':
            month = locale_time.a_month.index(found_dict['b'].lower())
        elif group_key == 'd':
            day = int(found_dict['d'])
        elif group_key == 'H':
            hour = int(found_dict['H'])
        elif group_key == 'I':
            hour = int(found_dict['I'])
            ampm = found_dict.get('p', '').lower()
            # If there was no AM/PM indicator, we'll treat this like AM
            if ampm in ('', locale_time.am_pm[0]):
                # We're in AM so the hour is correct unless we're
                # looking at 12 midnight.
                # 12 midnight == 12 AM == hour 0
                if hour == 12:
                    hour = 0
            elif ampm == locale_time.am_pm[1]:
                # We're in PM so we need to add 12 to the hour unless
                # we're looking at 12 noon.
                # 12 noon == 12 PM == hour 12
                if hour != 12:
                    hour += 12
        elif group_key == 'M':
            minute = int(found_dict['M'])
        elif group_key == 'S':
            second = int(found_dict['S'])
        elif group_key == 'f':
            s = found_dict['f']
            # Pad to always return microseconds.
            s += "0" * (6 - len(s))
            fraction = int(s)
        elif group_key == 'A':
            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
        elif group_key == 'a':
            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
        elif group_key == 'w':
            # %w is Sunday==0; internal convention is Monday==0.
            weekday = int(found_dict['w'])
            if weekday == 0:
                weekday = 6
            else:
                weekday -= 1
        elif group_key == 'j':
            julian = int(found_dict['j'])
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            if group_key == 'U':
                # U starts week on Sunday.
                week_of_year_start = 6
            else:
                # W starts week on Monday.
                week_of_year_start = 0
        elif group_key == 'z':
            # UTC offset like +0130 / -0500, converted to signed minutes.
            z = found_dict['z']
            tzoffset = int(z[1:3]) * 60 + int(z[3:5])
            if z.startswith("-"):
                tzoffset = -tzoffset
        elif group_key == 'Z':
            # Since -1 is default value only need to worry about setting tz if
            # it can be something other than -1.
            found_zone = found_dict['Z'].lower()
            for value, tz_values in enumerate(locale_time.timezone):
                if found_zone in tz_values:
                    # Deal with bad locale setup where timezone names are the
                    # same and yet time.daylight is true; too ambiguous to
                    # be able to tell what timezone has daylight savings
                    if (time.tzname[0] == time.tzname[1] and
                       time.daylight and found_zone not in ("utc", "gmt")):
                        break
                    else:
                        tz = value
                        break
    leap_year_fix = False
    if year is None and month == 2 and day == 29:
        year = 1904  # 1904 is first leap year of 20th century
        leap_year_fix = True
    elif year is None:
        year = 1900
    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year.
    if julian == -1 and week_of_year != -1 and weekday != -1:
        week_starts_Mon = True if week_of_year_start == 0 else False
        julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                            week_starts_Mon)
    # Cannot pre-calculate datetime_date() since can change in Julian
    # calculation and thus could have different value for the day of the week
    # calculation.
    if julian == -1:
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
                  datetime_date(year, 1, 1).toordinal() + 1
    else:  # Assume that if they bothered to include Julian day it will
           # be accurate.
        datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday == -1:
        weekday = datetime_date(year, month, day).weekday()
    # Add timezone info
    tzname = found_dict.get("Z")
    if tzoffset is not None:
        gmtoff = tzoffset * 60
    else:
        gmtoff = None
    if leap_year_fix:
        # the caller didn't supply a year but asked for Feb 29th.  We couldn't
        # use the default of 1900 for computations.  We set it back to ensure
        # that February 29th is smaller than March 1st.
        year = 1900
    return (year, month, day,
            hour, minute, second,
            weekday, julian, tz, tzname, gmtoff), fraction
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a time struct based on the input string and the
    format string (the fractional-second part is discarded)."""
    parsed, _fraction = _strptime(data_string, format)
    return time.struct_time(parsed[:time._STRUCT_TM_ITEMS])
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a class cls instance based on the input string and the
    format string.  cls is typically datetime.datetime; a tzinfo argument
    is appended only when the parsed string carried a UTC offset."""
    tt, fraction = _strptime(data_string, format)
    tzname, gmtoff = tt[-2:]
    args = tt[:6] + (fraction,)
    if gmtoff is not None:
        delta = datetime_timedelta(seconds=gmtoff)
        if tzname:
            args += (datetime_timezone(delta, tzname),)
        else:
            args += (datetime_timezone(delta),)
    return cls(*args)
| gpl-3.0 |
mgraupe/acq4 | acq4/analysis/scripts/eventExplorer.py | 4 | 15462 | from PyQt4 import QtCore, QtGui
import acq4.Manager
import acq4.pyqtgraph as pg
import acq4.pyqtgraph.opengl as gl
import numpy as np
import acq4.util.functions as fn
import re
man = acq4.Manager.getManager()
## update DB field to reflect dir meta info
#for i in db.select('Cell', ['rowid']):
    #d = db.getDir('Cell', i[0])
    #typ = d.info().get('type', '')
    #db.update('Cell', {'type': typ}, rowid=i[0])
    #print d, typ
# Names of the database views joining events/sites with cell metadata.
global eventView, siteView, cells
eventView = 'events_view'
siteView = 'sites_view'
# 'events' survives re-execution of this script in the same interpreter;
# firstRun guards one-time setup (signal connections in init()).
firstRun = False
if 'events' not in locals():
    global events
    events = {}
    firstRun = True
## Build the main window: a control bar on top, plots/tabs below.
win = QtGui.QMainWindow()
#cw = QtGui.QWidget()
layout = pg.LayoutWidget()
#layout = QtGui.QGridLayout()
#layout.setContentsMargins(0,0,0,0)
#layout.setSpacing(0)
#cw.setLayout(layout)
win.setCentralWidget(layout)
cellCombo = QtGui.QComboBox()
cellCombo.setSizeAdjustPolicy(cellCombo.AdjustToContents)
layout.addWidget(cellCombo)
reloadBtn = QtGui.QPushButton('reload')
layout.addWidget(reloadBtn)
separateCheck = QtGui.QCheckBox("color pre/post")
layout.addWidget(separateCheck)
colorCheck = QtGui.QCheckBox("color y position")
layout.addWidget(colorCheck)
# Fit-quality cut-offs used by select().
errLimitSpin = pg.SpinBox(value=0.7, step=0.1)
layout.addWidget(errLimitSpin)
lengthRatioLimitSpin = pg.SpinBox(value=1.5, step=0.1)
layout.addWidget(lengthRatioLimitSpin)
# Post-stimulus analysis window boundaries (seconds).
postRgnStartSpin = pg.SpinBox(value=0.500, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStartSpin)
postRgnStopSpin = pg.SpinBox(value=0.700, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStopSpin)
spl1 = QtGui.QSplitter()
spl1.setOrientation(QtCore.Qt.Vertical)
layout.addWidget(spl1, row=1, col=0, rowspan=1, colspan=8)
# pw1: amplitude-vs-decay scatter; pw2: raw trace of a clicked event.
pw1 = pg.PlotWidget()
spl1.addWidget(pw1)
pw1.setLabel('left', 'Amplitude', 'A')
pw1.setLabel('bottom', 'Decay Tau', 's')
spl2 = QtGui.QSplitter()
spl2.setOrientation(QtCore.Qt.Horizontal)
spl1.addWidget(spl2)
pw2 = pg.PlotWidget(labels={'bottom': ('time', 's')})
spl2.addWidget(pw2)
tab = QtGui.QTabWidget()
spl2.addWidget(tab)
## For viewing cell morphology
gv = pg.GraphicsView()
gv.setBackgroundBrush(pg.mkBrush('w'))
image = pg.ImageItem()
gv.addItem(image)
gv.enableMouse()
gv.setAspectLocked(True)
tab.addTab(gv, 'Morphology')
## 3D atlas
import acq4.analysis.atlas.CochlearNucleus as CN
atlas = CN.CNAtlasDisplayWidget()
atlas.showLabel('DCN')
atlas.showLabel('AVCN')
atlas.showLabel('PVCN')
tab.addTab(atlas, 'Atlas')
atlasPoints = gl.GLScatterPlotItem()
atlas.addItem(atlasPoints)
win.show()
win.resize(1000,800)
# Scatter layers: sp1 excitatory, sp2 inhibitory, sp3 post-stim overlay,
# sp4 position-colored combined plot.
sp1 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(200,200,255,70), identical=True, size=8)
sp2 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(255,200,200,70), identical=True, size=8)
sp3 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(100,255,100,70), identical=True, size=8)
sp4 = pw1.scatterPlot([], pen=pg.mkPen(None), size=8)
print "Reading cell list..."
#import os, pickle
#md = os.path.abspath(os.path.split(__file__)[0])
#cacheFile = os.path.join(md, 'eventCache.p')
#if os.path.isfile(cacheFile):
    #print "Read from cache..."
    #ev = pickle.load(open(cacheFile, 'r'))
#else:
    #pickle.dump(ev, open(cacheFile, 'w'))
## create views that link cell information to events/sites
db = man.getModule('Data Manager').currentDatabase()
if not db.hasTable(siteView):
    print "Creating DB views."
    db.createView(siteView, ['photostim_sites', 'DirTable_Protocol', 'DirTable_Cell'])  ## seems to be unused.
if not db.hasTable(eventView):
    db.createView(eventView, ['photostim_events', 'DirTable_Protocol', 'DirTable_Cell'])
cells = db.select(siteView, ['CellDir'], distinct=True)
cells = [c['CellDir'] for c in cells]
cells.sort(lambda a,b: cmp(a.name(), b.name()))
cellCombo.addItem('')
for c in cells:
    cellCombo.addItem(c.name(relativeTo=man.baseDir))
#cellSpin.setMaximum(len(cells)-1)
print "Done."
def loadCell(cell, reloadData=False):
    """Load (and cache in the module-level `events` dict) every photostim
    event for *cell*, augmented with holding level and 3D position, plus
    counts of excitatory/inhibitory stimulation sites."""
    global events
    if reloadData:
        events.pop(cell, None)
    if cell in events:
        return
    db = man.getModule('Data Manager').currentDatabase()
    mod = man.dataModel
    allEvents = []
    hvals = {}          # ProtocolSequenceDir -> clamp holding level
    nEv = 0
    positionCache = {}  # protocol dir -> (right, anterior, dorsal)
    tcache = {}
    print "Loading all events for cell", cell
    tot = db.select(eventView, 'count()', where={'CellDir': cell})[0]['count()']
    print tot, "total events.."
    with pg.ProgressDialog('Loading event data...', maximum=tot, wait=0) as dlg:
        for ev in db.iterSelect(eventView, ['ProtocolSequenceDir', 'SourceFile', 'fitAmplitude', 'fitTime', 'fitDecayTau', 'fitRiseTau', 'fitTimeToPeak', 'fitLengthOverDecay', 'fitFractionalError', 'userTransform', 'CellType', 'CellDir', 'ProtocolDir'], where={'CellDir': cell}, toArray=True, chunkSize=200):
            # Extra per-event columns appended to the DB record array.
            extra = np.empty(ev.shape, dtype=[('right', float), ('anterior', float), ('dorsal', float), ('holding', float)])
            ## insert holding levels
            for i in range(len(ev)):
                sd = ev[i]['ProtocolSequenceDir']
                if sd not in hvals:
                    cf = ev[i]['SourceFile']
                    hvals[sd] = mod.getClampHoldingLevel(cf)
                    #print hvals[sd], cf
                extra[i]['holding'] = hvals[sd]
            ## insert positions
            for i in range(len(ev)):
                protoDir = ev[i]['SourceFile'].parent()
                key = protoDir
                #key = (ev[i]['ProtocolSequenceDir'], ev[i]['SourceFile'])
                if key not in positionCache:
                    #try:
                        #dh = ev[i]['ProtocolDir']
                        #p1 = pg.Point(dh.info()['Scanner']['position'])
                        #if key[0] not in tcache:
                            #tr = pg.SRTTransform()
                            #tr.restoreState(dh.parent().info()['userTransform'])
                            #tcache[key[0]] = tr
                        #trans = tcache[key[0]]
                        #p2 = trans.map(p1)
                        #pcache[key] = (p2.x(),p2.y())
                    #except:
                        #print key
                        #raise
                    rec = db.select('CochlearNucleus_Protocol', where={'ProtocolDir': protoDir})
                    if len(rec) == 0:
                        pos = (None, None, None)
                    elif len(rec) == 1:
                        pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
                    elif len(rec) == 2:
                        raise Exception("Multiple position records for %s!" % str(protoDir))
                    positionCache[key] = pos
                extra[i]['right'] = positionCache[key][0]
                extra[i]['anterior'] = positionCache[key][1]
                extra[i]['dorsal'] = positionCache[key][2]
            ev = fn.concatenateColumns([ev, extra])
            allEvents.append(ev)
            nEv += len(ev)
            dlg.setValue(nEv)
            if dlg.wasCanceled():
                raise Exception('Canceled by user.')
    ev = np.concatenate(allEvents)
    # Classify stimulation sites by holding level: near 0 mV -> inhibitory
    # recording condition, strongly negative -> excitatory.
    numExSites = 0
    numInSites = 0
    for site in db.select(siteView, 'ProtocolSequenceDir', where={'CellDir': cell}):
        h = hvals.get(site['ProtocolSequenceDir'],None)
        if h is None:
            continue
        if h > -0.02:
            numInSites += 1
        elif h < -0.04:
            numExSites += 1
    events[cell] = (ev, numExSites, numInSites)
def init():
    """Wire up the GUI signal handlers.  Runs only on the first execution
    of this script in the interpreter (guarded by the firstRun flag)."""
    if not firstRun:
        return
    for signal, handler in [
            (cellCombo.currentIndexChanged, showCell),
            (separateCheck.toggled, showCell),
            (colorCheck.toggled, showCell),
            (errLimitSpin.valueChanged, showCell),
            (lengthRatioLimitSpin.valueChanged, showCell),
            (reloadBtn.clicked, reloadCell),
            ]:
        signal.connect(handler)
    for scatter in (sp1, sp2, sp3, sp4):
        scatter.sigPointsClicked.connect(plotClicked)
def plotClicked(plt, pts):
    """Scatter-click handler: plot the clicked event's raw trace in pw2
    and overlay the PSP fit around the event time."""
    pt = pts[0]
    #(id, fn, time) = pt.data
    #[['SourceFile', 'ProtocolSequenceDir', 'fitTime']]
    #fh = db.getDir('ProtocolSequence', id)[fn]
    fh = pt.data()['SourceFile']
    id = pt.data()['ProtocolSequenceDir']
    time = pt.data()['fitTime']
    # Low-pass filter the primary channel before display.
    data = fh.read()['Channel':'primary']
    data = fn.besselFilter(data, 8e3)
    p = pw2.plot(data, clear=True)
    pos = time / data.xvals('Time')[-1]
    arrow = pg.CurveArrow(p, pos=pos)
    # Pan the view so the event sits near the left fifth of the window.
    xr = pw2.viewRect().left(), pw2.viewRect().right()
    if time < xr[0] or time > xr[1]:
        w = xr[1]-xr[0]
        pw2.setXRange(time-w/5., time+4*w/5., padding=0)
    # Reconstruct and overlay the fitted PSP curve (blue).
    fitLen = pt.data()['fitDecayTau']*pt.data()['fitLengthOverDecay']
    x = np.linspace(time, time+fitLen, fitLen * 50e3)
    v = [pt.data()['fitAmplitude'], pt.data()['fitTime'], pt.data()['fitRiseTau'], pt.data()['fitDecayTau']]
    y = fn.pspFunc(v, x, risePower=2.0) + data[np.argwhere(data.xvals('Time')>time)[0]-1]
    pw2.plot(x, y, pen='b')
    #plot.addItem(arrow)
def select(ev, ex=True):
    """Filter the event record array down to plausible synaptic events.

    ex=True keeps excitatory candidates (strongly hyperpolarized holding,
    negative fit amplitude); ex=False keeps inhibitory candidates (holding
    near zero, positive amplitude).  Both paths then apply shared cuts on
    the decay-time fit and fit quality using the GUI spin-box limits.
    """
    if ex:
        # Excitatory branch: holding < -40 mV, amplitude in (-2e-10, 0).
        keep = ev['holding'] < -0.04
        ev = ev[keep]
        keep = (ev['fitAmplitude'] < 0) * (ev['fitAmplitude'] > -2e-10)
        ev = ev[keep]
    else:
        # Inhibitory branch: holding in [-20 mV, +10 mV], amplitude in (0, 2e-10).
        keep = (ev['holding'] >= -0.02) * (ev['holding'] <= 0.01)
        ev = ev[keep]
        keep = (ev['fitAmplitude'] > 0) * (ev['fitAmplitude'] < 2e-10)
        ev = ev[keep]
    # Shared cuts: sane decay constant, then fit-quality thresholds.
    keep = (0 < ev['fitDecayTau']) * (ev['fitDecayTau'] < 0.2)
    ev = ev[keep]
    ev = ev[ev['fitFractionalError'] < errLimitSpin.value()]
    ev = ev[ev['fitLengthOverDecay'] > lengthRatioLimitSpin.value()]
    return ev
def reloadCell():
    # Drop the cached events for the current cell and re-query the database.
    showCell(reloadData=True)
def showCell(**kwds):
    """Refresh every display for the cell currently selected in the combo
    box: morphology image, amplitude/decay scatter plots, summary title,
    and atlas event positions.  kwds: reloadData=True forces a DB reload."""
    pw2.clear()
    reloadData = kwds.get('reloadData', False)
    #global lock
    #if lock:
        #return
    #lock = True
    QtGui.QApplication.processEvents() ## prevents double-spin
    #lock = False
    # Index 0 of the combo is the blank placeholder entry.
    cell = cells[cellCombo.currentIndex()-1]
    dh = cell #db.getDir('Cell', cell)
    loadCell(dh, reloadData=reloadData)
    try:
        image.setImage(dh['morphology.png'].read())
        gv.setRange(image.sceneBoundingRect())
    except:
        # No morphology image available; show an empty placeholder.
        image.setImage(np.zeros((2,2)))
        pass
    ev, numExSites, numInSites = events[cell]
    ev2 = select(ev, ex=True)
    ev3 = select(ev, ex=False)
    if colorCheck.isChecked():
        # Single combined scatter, colored by dorsal (y) position.
        sp1.hide()
        sp2.hide()
        sp3.hide()
        sp4.show()
        start = postRgnStart()
        stop = postRgnStop()
        ev2post = ev2[(ev2['fitTime']>start) * (ev2['fitTime']<stop)]
        ev3post = ev3[(ev3['fitTime']>start) * (ev3['fitTime']<stop)]
        ev4 = np.concatenate([ev2post, ev3post])
        yMax = ev4['dorsal'].max()
        yMin = ev4['dorsal'].min()
        brushes = []
        for i in range(len(ev4)):
            hue = 0.6*((ev4[i]['dorsal']-yMin) / (yMax-yMin))
            brushes.append(pg.hsvColor(hue, 1.0, 1.0, 0.3))
            #pts.append({
                #'pos': (ev4[i]['fitDecayTau'], ev4[i]['fitAmplitude']),
                #'brush': pg.hsvColor(hue, 1, 1, 0.3),
                #'data': ev4[i]
            #})
        sp4.setData(x=ev4['fitDecayTau'], y=ev4['fitAmplitude'], symbolBrush=brushes, data=ev4)
    else:
        # Separate excitatory/inhibitory scatters, optionally splitting
        # pre-stimulus (spontaneous) from post-stimulus events.
        sp1.show()
        sp2.show()
        #sp3.show()
        sp4.hide()
        ## excitatory
        if separateCheck.isChecked():
            pre = ev2[ev2['fitTime']< preRgnStop()]
            post = ev2[(ev2['fitTime'] > postRgnStart()) * (ev2['fitTime'] < postRgnStop())]
        else:
            pre = ev2
        sp1.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
        #print "Cell ", cell
        #print "  excitatory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
        ## inhibitory
        if separateCheck.isChecked():
            pre = ev3[ev3['fitTime']< preRgnStop()]
            post2 = ev3[(ev3['fitTime'] > postRgnStart()) * (ev3['fitTime'] < postRgnStop())]
            post = np.concatenate([post, post2])
        else:
            pre = ev3
        sp2.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
        #print "  inhibitory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
        if separateCheck.isChecked():
            sp3.setData(x=post['fitDecayTau'], y=post['fitAmplitude'], data=post)
            sp3.show()
        else:
            sp3.hide()
    # Cell type: fall back to the inhibitory set when there are no
    # excitatory events.
    try:
        typ = ev2[0]['CellType']
    except:
        typ = ev3[0]['CellType']
    sr = spontRate(ev2, numExSites)
    sri = spontRate(ev3, numInSites)
    title = "%s -- %s --- <span style='color: #99F;'>ex:</span> %s %s %s %0.1fHz --- <span style='color: #F99;'>in:</span> %s %s %s %0.1fHz" % (
        dh.name(relativeTo=dh.parent().parent().parent()),
        typ,
        pg.siFormat(np.median(ev2['fitTimeToPeak']), error=np.std(ev2['fitTimeToPeak']), space=False, suffix='s'),
        pg.siFormat(np.median(ev2['fitDecayTau']), error=np.std(ev2['fitDecayTau']), space=False, suffix='s'),
        pg.siFormat(np.median(ev2['fitAmplitude']), error=np.std(ev2['fitAmplitude']), space=False, suffix='A'),
        sr,
        pg.siFormat(np.median(ev3['fitTimeToPeak']), error=np.std(ev3['fitTimeToPeak']), space=False, suffix='s'),
        pg.siFormat(np.median(ev3['fitDecayTau']), error=np.std(ev3['fitDecayTau']), space=False, suffix='s'),
        pg.siFormat(np.median(ev3['fitAmplitude']), error=np.std(ev3['fitAmplitude']), space=False, suffix='A'),
        sri)
    # Strip the HTML span tags for the console copy of the title.
    print re.sub(r'<[^>]+>', '', title)
    pw1.setTitle(title)
    ### show cell in atlas
    #rec = db.select('CochlearNucleus_Cell', where={'CellDir': cell})
    #pts = []
    #if len(rec) > 0:
        #pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
        #pts = [{'pos': pos, 'size': 100e-6, 'color': (0.7, 0.7, 1.0, 1.0)}]
    ### show event positions
    # De-duplicate event positions via dict keys before plotting.
    evSpots = {}
    for rec in ev:
        p = (rec['right'], rec['anterior'], rec['dorsal'])
        evSpots[p] = None
    pos = np.array(evSpots.keys())
    atlasPoints.setData(pos=pos, )
def spontRate(ev, n):
    """Spontaneous event rate: events per second per site within the
    pre-stimulus window.

    NOTE (inherited limitation): recordings that produced no events are
    not represented in *ev*, so this estimate is biased — it does not
    take into account recordings that had no events.
    """
    window = preRgnStop()
    pre_events = ev[ev['fitTime'] < window]
    if n == 0:
        return 0
    return len(pre_events) / (window * n)
def preRgnStop():
    # End of the spontaneous-activity (pre-stimulus) window: 2 ms before
    # the configured post-region start.
    return postRgnStartSpin.value() - 0.002
def postRgnStart():
    # Start of the post-stimulus analysis window: 2 ms after the spin-box
    # value, leaving a gap around the stimulus time.
    return postRgnStartSpin.value() + 0.002
def postRgnStop():
    # End of the post-stimulus analysis window (taken directly from the GUI).
    return postRgnStopSpin.value()
init() | mit |
danellecline/stoqs | stoqs/loaders/MarMenor/loadMarMenor_nov2011.py | 4 | 1224 | #!/usr/bin/env python
__author__ = 'Mike McCann'
__copyright__ = '2011'
__license__ = 'GPL v3'
__contact__ = 'mccann at mbari.org'
__doc__ = '''
Master loader for all CANON activities
Mike McCann
MBARI 22 April 2012
@var __date__: Date of last svn commit
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import os
import sys
# Django settings must be configured before importing the loader.
os.environ['DJANGO_SETTINGS_MODULE']='settings'
project_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) # settings.py is one dir up
from MarMenor import MarMenorLoader
import timing
# Optional CLI arguments: argv[1] = stride (int), argv[2] = database alias.
try:
    stride = int(sys.argv[1])
except IndexError:
    stride = 100
try:
    dbAlias = sys.argv[2]
except IndexError:
    dbAlias = 'stoqs_marmenor_nov2011_s100'
# ----------------------------------------------------------------------------------
mml = MarMenorLoader(dbAlias, 'MarMenor - October 2011')
##mml.sparus_base='http://odss.mbari.org/thredds/dodsC/'
##mml.sparus_files='marmenor/insitu/UniversityOfGirona/'
##mml.sparus_parms=['
# Castaway CTD aggregate data served over OPeNDAP.
mml.castaway_base='http://odss.mbari.org/thredds/dodsC/'
mml.castaway_files=['agg/Castaway/20111110']
mml.castaway_parms=['temperature', 'salinity']
mml.stride = stride
mml.loadAll()
| gpl-3.0 |
adngdb/socorro | webapp-django/crashstats/api/views.py | 4 | 15177 | import json
import re
import datetime
import inspect
from django import http
from django.shortcuts import render
from django.contrib.auth.models import Permission
from django.contrib.sites.requests import RequestSite
from django.core.urlresolvers import reverse
from django.conf import settings
from django import forms
from django.views.decorators.csrf import csrf_exempt
# explicit import because django.forms has an __all__
from django.forms.forms import DeclarativeFieldsMetaclass
from ratelimit.decorators import ratelimit
from socorro.lib import BadArgumentError, MissingArgumentError
from socorro.external.crashstorage_base import CrashIDNotFound
import crashstats
from crashstats.crashstats.decorators import track_api_pageview
from crashstats.crashstats import models
from crashstats.crashstats import utils
from crashstats.tokens.models import Token
from .cleaner import Cleaner
# List of all modules that contain models we want to expose.
MODELS_MODULES = (
models,
crashstats.tools.models,
crashstats.supersearch.models,
crashstats.symbols.models,
)
BAD_REQUEST_EXCEPTIONS = (
BadArgumentError,
MissingArgumentError,
models.RequiredParameterError,
)
NOT_FOUND_EXCEPTIONS = (
CrashIDNotFound,
)
class APIWhitelistError(Exception):
    """Raised when a model exposed through the API has no API_WHITELIST
    defined (i.e. nobody decided which result fields are safe to serve)."""
    pass
class MultipleStringField(forms.TypedMultipleChoiceField):
    """Multiple-choice field that deliberately skips the membership check
    against ``self.choices`` -- any list of strings is accepted."""

    def validate(self, value):
        """Only enforce ``required``; no choice validation on purpose."""
        if self.required and not value:
            raise forms.ValidationError(self.error_messages['required'])
# Maps the Python types declared in a model's annotated parameters to the
# django form field class used to validate incoming request values.
# NOTE: `basestring` makes this module Python 2 only.
TYPE_MAP = {
    basestring: forms.CharField,
    list: MultipleStringField,
    datetime.date: forms.DateField,
    datetime.datetime: forms.DateTimeField,
    int: forms.IntegerField,
    bool: forms.BooleanField,
}
def fancy_init(self, model, *args, **kwargs):
    """Replacement ``__init__`` injected by FormWrapperMeta.

    Instantiates the wrapped middleware *model* and builds one django form
    field per annotated parameter, so the form validates exactly the
    parameters that model accepts.  Raises NotImplementedError for a
    parameter type not covered by TYPE_MAP.
    """
    self.model = model
    self.__old_init__(*args, **kwargs)
    for parameter in model().get_annotated_params():
        required = parameter['required']
        name = parameter['name']
        if parameter['type'] not in TYPE_MAP:
            raise NotImplementedError(parameter['type'])
        field_class = TYPE_MAP[parameter['type']]
        self.fields[name] = field_class(required=required)
class FormWrapperMeta(DeclarativeFieldsMetaclass):
    """Metaclass that swaps in ``fancy_init`` as ``__init__`` so every
    FormWrapper subclass grows fields matching its model's parameters."""
    def __new__(cls, name, bases, attrs):
        # keep a handle on the parent's __init__ so fancy_init can chain to it
        attrs['__old_init__'] = bases[0].__init__
        attrs['__init__'] = fancy_init
        return super(FormWrapperMeta, cls).__new__(cls, name, bases, attrs)
class FormWrapper(forms.Form):
    """Dynamically-built form (see FormWrapperMeta) that validates the
    query/body parameters of one SocorroMiddleware model."""
    __metaclass__ = FormWrapperMeta

    def clean(self):
        cleaned_data = super(FormWrapper, self).clean()
        for field in self.fields:
            # Because the context for all of this is the API,
            # and we're using django forms there's a mismatch to how
            # boolean fields should be handled.
            # Django forms are meant for HTML forms. A key principle
            # functionality of a HTML form and a checkbox is that
            # if the user choses to NOT check a checkbox, the browser
            # will not send `mybool=false` or `mybool=''`. It will simply
            # not send anything and then the server has to assume the user
            # chose to NOT check it because it was offerend.
            # On a web API, however, the user doesn't use checkboxes.
            # He uses `?mybool=truthy` or `&mybool=falsy`.
            # Therefore, for our boolean fields, if the value is not
            # present at all, we have to assume it to be None.
            # That makes it possible to actually set `mybool=false`
            if isinstance(self.fields[field], forms.BooleanField):
                if field not in self.data:
                    self.cleaned_data[field] = None
        return cleaned_data
# Names of models we don't want to serve at all
BLACKLIST = (
# only used for doing posts
'Releases',
# because it's only used for the admin
'Field',
'SuperSearchMissingFields',
# because it's very sensitive and we don't want to expose it
'Query',
# because it's an internal thing only
'GraphicsReport',
'Priorityjob',
)
def has_permissions(user, permissions):
    """Return True if *user* has every permission name in *permissions*.

    An empty permission list is vacuously satisfied (returns True), which
    matches the original loop-based behavior.
    """
    return all(user.has_perm(permission) for permission in permissions)
def is_valid_model_class(model):
    """True for concrete SocorroMiddleware subclasses that may be exposed
    through the API; the two abstract base classes are excluded."""
    return (
        issubclass(model, models.SocorroMiddleware) and
        model is not models.SocorroMiddleware and
        model is not crashstats.supersearch.models.ESSocorroMiddleware
    )
@csrf_exempt
@ratelimit(
    key='ip',
    method=['GET', 'POST', 'PUT'],
    rate=utils.ratelimit_rate,
    block=True
)
@track_api_pageview
@utils.add_CORS_header  # must be before `utils.json_view`
@utils.json_view
def model_wrapper(request, model_name):
    """Generic API endpoint.

    Looks up *model_name* in MODELS_MODULES, validates the request
    parameters with a dynamically-built FormWrapper, enforces the model's
    permission requirements, runs the model and returns its result as JSON
    (optionally PII-scrubbed), as a binary attachment, or as an error
    response with the appropriate status code.
    """
    if model_name in BLACKLIST:
        raise http.Http404("Don't know what you're talking about!")
    # first module that defines the name wins
    for source in MODELS_MODULES:
        try:
            model = getattr(source, model_name)
            break
        except AttributeError:
            pass
    else:
        raise http.Http404('no service called `%s`' % model_name)

    if not is_valid_model_class(model):
        raise http.Http404('no service called `%s`' % model_name)

    required_permissions = getattr(model(), 'API_REQUIRED_PERMISSIONS', None)
    if isinstance(required_permissions, basestring):
        required_permissions = [required_permissions]
    if (
        required_permissions and
        (
            not request.user.is_active or
            not has_permissions(request.user, required_permissions)
        )
    ):
        # resolve permission codenames to human-readable names for the error
        permission_names = []
        for permission in required_permissions:
            codename = permission.split('.', 1)[1]
            try:
                permission_names.append(
                    Permission.objects.get(
                        codename=codename
                    ).name
                )
            except Permission.DoesNotExist:
                permission_names.append(codename)
        # you're not allowed to use this model
        return http.JsonResponse({
            'error': "Use of this endpoint requires the '%s' permission" % (
                ', '.join(permission_names),
            )
        }, status=403)

    # it being set to None means it's been deliberately disabled
    if getattr(model, 'API_WHITELIST', False) is False:
        raise APIWhitelistError('No API_WHITELIST defined for %r' % model)

    instance = model()

    # Any additional headers we intend to set on the response
    headers = {}

    # Certain models need to know who the user is to be able to
    # internally use that to determine its output.
    instance.api_user = request.user

    if request.method == 'POST':
        function = instance.post
    else:
        function = instance.get
    if not function:
        return http.HttpResponseNotAllowed([request.method])

    # assume first that it won't need a binary response
    binary_response = False

    request_data = request.method == 'GET' and request.GET or request.POST
    form = FormWrapper(model, request_data)
    if form.is_valid():
        try:
            result = function(**form.cleaned_data)
        except ValueError as e:
            # NOTE(review): `'...' in e` only works on Python 2, where
            # containment falls back to iterating the exception's args;
            # on Python 3 this would raise TypeError. Probably meant
            # `in str(e)` -- confirm before porting.
            if (
                # built in json module ValueError
                'No JSON object could be decoded' in e or
                # ujson module ValueError
                'Expected object or value' in e
            ):
                return http.HttpResponseBadRequest(
                    json.dumps({'error': 'Not a valid JSON response'}),
                    content_type='application/json; charset=UTF-8'
                )
            raise
        except NOT_FOUND_EXCEPTIONS as exception:
            return http.HttpResponseNotFound(
                json.dumps({'error': unicode(exception)}),
                content_type='application/json; charset=UTF-8'
            )
        except BAD_REQUEST_EXCEPTIONS as exception:
            return http.HttpResponseBadRequest(
                json.dumps({'error': unicode(exception)}),
                content_type='application/json; charset=UTF-8'
            )

        # Some models allows to return a binary reponse. It does so based on
        # the models `BINARY_RESPONSE` dict in which all keys and values
        # need to be in the valid query. For example, if the query is
        # `?foo=bar&other=thing&bar=baz` and the `BINARY_RESPONSE` dict is
        # exactly: {'foo': 'bar', 'bar': 'baz'} it will return a binary
        # response with content type `application/octet-stream`.
        for key, value in model.API_BINARY_RESPONSE.items():
            if form.cleaned_data.get(key) == value:
                binary_response = True
            else:
                binary_response = False
                break

        if binary_response:
            # if you don't have all required permissions, you'll get a 403
            required_permissions = model.API_BINARY_PERMISSIONS
            if isinstance(required_permissions, basestring):
                required_permissions = [required_permissions]
            if (
                required_permissions and
                not has_permissions(request.user, required_permissions)
            ):
                permission_names = []
                for permission in required_permissions:
                    codename = permission.split('.', 1)[1]
                    try:
                        permission_names.append(
                            Permission.objects.get(
                                codename=codename
                            ).name
                        )
                    except Permission.DoesNotExist:
                        permission_names.append(codename)
                # you're not allowed to get the binary response
                return http.HttpResponseForbidden(
                    "Binary response requires the '%s' permission\n" %
                    (', '.join(permission_names))
                )
        elif not request.user.has_perm('crashstats.view_pii'):
            # user may not see personally-identifiable info: scrub the
            # result down to the whitelisted fields
            clean_scrub = getattr(model, 'API_CLEAN_SCRUB', None)

            if callable(model.API_WHITELIST):
                whitelist = model.API_WHITELIST()
            else:
                whitelist = model.API_WHITELIST

            if result and whitelist:
                cleaner = Cleaner(
                    whitelist,
                    clean_scrub=clean_scrub,
                    # if True, uses warnings.warn() to show fields
                    # not whitelisted
                    debug=settings.DEBUG,
                )
                cleaner.start(result)

    else:
        # custom override of the status code
        return {'errors': dict(form.errors)}, 400

    if binary_response:
        assert model.API_BINARY_FILENAME, 'No API_BINARY_FILENAME set on model'
        response = http.HttpResponse(
            result,
            content_type='application/octet-stream'
        )
        filename = model.API_BINARY_FILENAME % form.cleaned_data
        response['Content-Disposition'] = (
            'attachment; filename="%s"' % filename
        )
        return response

    if (
        getattr(model, 'deprecation_warning', False)
    ):
        if isinstance(result, dict):
            result['DEPRECATION_WARNING'] = model.deprecation_warning
        # If you return a tuple of two dicts, the second one becomes
        # the extra headers.
        # return result, {
        headers['DEPRECATION-WARNING'] = (
            model.deprecation_warning.replace('\n', ' ')
        )

    if model.cache_seconds:
        # We can set a Cache-Control header.
        # We say 'private' because the content can depend on the user
        # and we don't want the response to be collected in HTTP proxies
        # by mistake.
        headers['Cache-Control'] = 'private, max-age={}'.format(
            model.cache_seconds,
        )

    return result, headers
def documentation(request):
    """Render the API documentation page.

    Collects every servable model across MODELS_MODULES (deduplicated by
    name, blacklist and permission filtered) and passes their descriptions,
    the site base URL and the user's active API token count to the template.
    """
    endpoints = []

    all_models = []
    unique_model_names = set()
    for source in MODELS_MODULES:
        for name, value in inspect.getmembers(source):
            if name in unique_model_names:
                # model potentially in multiple modules
                continue
            if inspect.isclass(value):
                all_models.append(value)
                unique_model_names.add(name)

    for model in all_models:
        try:
            if not is_valid_model_class(model):
                continue
            if model.__name__ in BLACKLIST:
                continue
        except TypeError:
            # most likely a builtin class or something
            continue

        model_inst = model()
        # hide endpoints the current user would not be allowed to call anyway
        if (
            model_inst.API_REQUIRED_PERMISSIONS and
            not has_permissions(
                request.user,
                model_inst.API_REQUIRED_PERMISSIONS
            )
        ):
            continue
        endpoints.append(_describe_model(model))

    base_url = (
        '%s://%s' % (request.is_secure() and 'https' or 'http',
                     RequestSite(request).domain)
    )
    if request.user.is_active:
        your_tokens = Token.objects.active().filter(user=request.user)
    else:
        your_tokens = Token.objects.none()

    context = {
        'endpoints': endpoints,
        'base_url': base_url,
        'count_tokens': your_tokens.count()
    }
    return render(request, 'api/documentation.html', context)
def _describe_model(model):
    """Build the documentation dict (name, URL, parameters, methods,
    docstring, permissions...) for one middleware model."""
    model_inst = model()
    params = list(model_inst.get_annotated_params())
    # required parameters first, then alphabetical
    params.sort(key=lambda x: (not x['required'], x['name']))
    methods = []
    # NOTE(review): the elif means a model defining both `get` and `post`
    # is documented as GET only -- confirm that is intentional.
    if model.get:
        methods.append('GET')
    elif model.post:
        methods.append('POST')

    docstring = model.__doc__
    if docstring:
        docstring = dedent_left(docstring.rstrip(), 4)

    required_permissions = []
    if model_inst.API_REQUIRED_PERMISSIONS:
        permissions = model_inst.API_REQUIRED_PERMISSIONS
        if isinstance(permissions, basestring):
            permissions = [permissions]
        for permission in permissions:
            codename = permission.split('.', 1)[1]
            required_permissions.append(
                Permission.objects.get(codename=codename).name
            )

    data = {
        'name': model.__name__,
        'url': reverse('api:model_wrapper', args=(model.__name__,)),
        'parameters': params,
        'defaults': getattr(model, 'defaults', {}),
        'methods': methods,
        'docstring': docstring,
        'required_permissions': required_permissions,
        'deprecation_warning': getattr(model, 'deprecation_warning', None),
    }
    return data
def dedent_left(text, spaces):
    """Remove exactly *spaces* leading whitespace characters from each line.

    Lines with fewer than *spaces* leading whitespace characters are left
    unchanged (the anchored pattern simply does not match).  Any whitespace
    counts (tabs as well as spaces).  A trailing newline in *text* is
    dropped, per str.splitlines semantics.

    Example with spaces=2::

        '    One'  ->  '  One'
        '  Two'    ->  'Two'
        'Three'    ->  'Three'
    """
    # raw string: '\s' is an invalid escape in a plain string literal on
    # modern Python; compile once instead of re-matching with a plain sub
    regex = re.compile(r'^\s{%s}' % spaces)
    return '\n'.join(regex.sub('', line) for line in text.splitlines())
| mpl-2.0 |
yelizariev/tkobr-addons | tko_l10n_br_account_invoice/__openerp__.py | 2 | 1886 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest: adds CNPJ/CPF, IE and telephone number to the
# Brazilian invoice report layout (see the module description below).
{
    'name': 'Layout Brasileiro da fatura com CNPJ/CPF',
    'version': '0.001',
    'category': 'Localisation',
    'sequence': 7,
    'complexity': 'normal',
    'description': '''Este módulo adiciona o CNPJ/CPF, IE e telefone no relatório da fatura.''',
    'author': 'ThinkOpen Solutions Brasil',
    'license': 'AGPL-3',
    'website': 'http://www.tkobr.com',
    'depends': [
        'base',
        'account',
    ],
    'data': [
        'views/report_invoice.xml',
    ],
    'init': [],
    'demo': [],
    'update': [],
    'test': [], # YAML files with tests
    'installable': True,
    'application': False,
    'auto_install': False, # If it's True, the modules will be auto-installed when all dependencies are installed
    'certificate': '',
}
| agpl-3.0 |
rkmaddox/mne-python | examples/visualization/topo_compare_conditions.py | 20 | 1828 | """
=================================================
Compare evoked responses for different conditions
=================================================
In this example, an Epochs object for visual and auditory responses is created.
Both conditions are then accessed by their respective names to create a sensor
layout plot of the related evoked responses.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD (3-clause)
import matplotlib.pyplot as plt

import mne
from mne.viz import plot_evoked_topo
from mne.datasets import sample

print(__doc__)

data_path = sample.data_path()

###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
# Epoch window in seconds relative to each event onset.
tmin = -0.2
tmax = 0.5

# Setup for reading the raw data
raw = mne.io.read_raw_fif(raw_fname)
events = mne.read_events(event_fname)

# Set up amplitude-peak rejection values for MEG channels
# (gradiometers in T/m, magnetometers in T)
reject = dict(grad=4000e-13, mag=4e-12)

# Create epochs including different events
event_id = {'audio/left': 1, 'audio/right': 2,
            'visual/left': 3, 'visual/right': 4}
epochs = mne.Epochs(raw, events, event_id, tmin, tmax,
                    picks='meg', baseline=(None, 0), reject=reject)

# Generate list of evoked objects from conditions names
# ('left' / 'right' match both the audio and visual sub-conditions)
evokeds = [epochs[name].average() for name in ('left', 'right')]

###############################################################################
# Show topography for two different conditions
colors = 'blue', 'red'
title = 'MNE sample data\nleft vs right (A/V combined)'
plot_evoked_topo(evokeds, color=colors, title=title, background_color='w')

plt.show()
| bsd-3-clause |
qpython-android/QPYPI | docs/2x/scripts/rterminal.py | 1 | 3854 | #coding=utf-8
# -------------------------------------
#$PYTHONHOME/bin/online2.py
onlinePy='''
#coding=utf-8
import os,sys
from fabric.api import env,run,put,output
env.hosts=['{}']
env.password='{}'
output['running']=False
output['status']=False
output['aborts']=True
env.output_prefix=False
pyhome=os.popen('echo $PYTHONHOME').read().strip()
os.chdir(pyhome+'/bin')
def shell():run('{}')
def file(sfile):
dfile=sfile.split('/')[-1]
put(sfile,dfile)
run('{} %s'%dfile)
if __name__ == '__main__':
argv=[i for i in sys.argv if i]
if len(argv) < 2:
os.system('fab -f online2.py shell')
else:
os.system('fab -f online2.py file:%s'%argv[1])
'''
#$PYTHONHOME/bin/qpython-android5.sh A
qpython_android5='''
#!/system/bin/sh
DIR=${0%/*}
. $DIR/init.sh && $DIR/python-android5 "$@" && $DIR/end.sh
'''
#$PYTHONHOME/bin/qpython-android5.sh B
qpython_android6='''
#!/system/bin/sh
DIR=${0%/*}
. $DIR/init.sh && $DIR/python-android5 $DIR/online2.py "$@" && $DIR/end.sh
'''
# -------------------------------------
import os,sys,re
write=sys.stdout.write
class Rterminal(object):
    """Interactive installer/configurator for a QPython "remote terminal".

    Rewrites $PYTHONHOME/bin/online2.py (a fabric script holding host,
    password and command) and $PYTHONHOME/bin/qpython-android5.sh so that
    python invocations are routed through ssh to a remote host.

    Python 2 only (uses raw_input); running it under Python 3 is the
    documented way to switch the feature OFF (see __init__/retconfig).
    """
    # last known remote connection settings, parsed from online2.py
    hostname=''
    password=''
    command=''
    def __init__(self):
        pyhome = os.popen('echo $PYTHONHOME').read().strip()
        self.online=os.path.join(pyhome,'bin','online2.py')
        self.android5=os.path.join(pyhome,'bin','qpython-android5.sh')
        # running under Python 3 restores the stock launcher and exits
        if sys.version[0]=='3':
            self.retconfig()
        try:
            import fabric
        except ImportError:
            print('\nHello, please install Fabric on QPYPI, and run again rterminal.py')
            sys.exit()
        if not os.path.exists(self.online):
            # seed online2.py with placeholder defaults on first run
            with open(self.online,'w') as f:
                f.write(onlinePy.format('pi@127.0.0.1:22','12345678','python','python'))
        self.getconfig()
        self.welcome()
        self.setconfig()
    def welcome(self):
        """Clear the screen and show the currently-configured settings."""
        from fabric.colors import yellow,green
        os.system('clear')
        print('\nRemote Terminal for QPython')
        print('rterminal is running Python on server by ssh(fabric)')
        print(yellow('You should enter the following information:'))
        write('user@hostname:port')
        write(green(' --> '))
        print(yellow(self.hostname))
        write(' '*10+'password')
        write(green(' --> '))
        print(yellow(self.password))
        write(' '*11+'command')
        write(green(' --> '))
        print(yellow(self.command))
        print('')
    def getconfig(self):
        """Parse hostname/password/command out of the generated online2.py."""
        with open(self.online,'r') as f:r=f.read()
        self.hostname=re.findall("env\.hosts=\['(.*?)'\]",r)[0]
        self.password=re.findall("env\.password='(.*?)'",r)[0]
        self.command=re.findall("def shell\(\)\:run\('(.*?)'\)",r)[0]
    def setconfig(self):
        """Optionally prompt for new settings, then (re)write both scripts
        and enable the remote terminal.

        NOTE(review): indentation reconstructed -- the two file writes are
        taken to run in both branches (keep or re-enter settings); confirm.
        """
        from fabric.colors import yellow,green
        if raw_input("do you want to save the information(Enter or n): ")!='n':
            pass
        else:
            print(yellow('please enter the following information:'))
            self.hostname=raw_input('user@hostname:port --> ')
            self.password=raw_input(' '*10+'password --> ')
            self.command=raw_input(' '*11+'command --> ')
        with open(self.online,'w') as f:f.write(onlinePy.format(self.hostname,self.password,self.command,self.command))
        with open(self.android5,'w') as f:f.write(qpython_android6)
        print('\nok, rterminal is now on.')
        print(yellow('if you want to off rterminal,please switch python3 and run rterminal.py'))
        sys.exit()
    def retconfig(self):
        """Restore the stock qpython-android5.sh launcher (feature OFF)."""
        with open(self.android5,'w') as f:
            f.write(qpython_android5)
        print('\nok, rterminal is now off.')
        sys.exit()
def main():
    """Entry point: all interactive work happens inside Rterminal.__init__."""
    r=Rterminal()
if __name__ == '__main__':
main()
| apache-2.0 |
fullfanta/mxnet | example/model-parallel/lstm/lstm.py | 13 | 22558 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint:skip-file
import sys
sys.path.insert(0, "../../python")
import mxnet as mx
import numpy as np
from collections import namedtuple
import time
import math
# Lightweight containers: one layer's recurrent state (cell c, hidden h),
# one layer's learnable parameters, and a fully-bound unrolled model.
LSTMState = namedtuple("LSTMState", ["c", "h"])
LSTMParam = namedtuple("LSTMParam", ["i2h_weight", "i2h_bias",
                                     "h2h_weight", "h2h_bias"])
LSTMModel = namedtuple("LSTMModel", ["rnn_exec", "symbol",
                                     "init_states", "last_states",
                                     "seq_data", "seq_labels", "seq_outputs",
                                     "param_blocks"])
def lstm(num_hidden, indata, prev_state, param, seqidx, layeridx, dropout=0.):
    """Build the symbol graph for one LSTM cell step and return the new
    LSTMState.  seqidx/layeridx only feed the symbol names (t%d_l%d_...)."""
    if dropout > 0.:
        indata = mx.sym.Dropout(data=indata, p=dropout)
    # Input and hidden projections compute all four gates at once
    # (hence num_hidden * 4); SliceChannel splits them apart below.
    i2h = mx.sym.FullyConnected(data=indata,
                                weight=param.i2h_weight,
                                bias=param.i2h_bias,
                                num_hidden=num_hidden * 4,
                                name="t%d_l%d_i2h" % (seqidx, layeridx))
    h2h = mx.sym.FullyConnected(data=prev_state.h,
                                weight=param.h2h_weight,
                                bias=param.h2h_bias,
                                num_hidden=num_hidden * 4,
                                name="t%d_l%d_h2h" % (seqidx, layeridx))
    gates = i2h + h2h
    slice_gates = mx.sym.SliceChannel(gates, num_outputs=4,
                                      name="t%d_l%d_slice" % (seqidx, layeridx))
    # standard LSTM gating: input, candidate transform, forget, output
    in_gate = mx.sym.Activation(slice_gates[0], act_type="sigmoid")
    in_transform = mx.sym.Activation(slice_gates[1], act_type="tanh")
    forget_gate = mx.sym.Activation(slice_gates[2], act_type="sigmoid")
    out_gate = mx.sym.Activation(slice_gates[3], act_type="sigmoid")
    next_c = (forget_gate * prev_state.c) + (in_gate * in_transform)
    next_h = out_gate * mx.sym.Activation(next_c, act_type="tanh")
    return LSTMState(c=next_c, h=next_h)
def lstm_unroll(num_lstm_layer, seq_len, input_size,
                num_hidden, num_embed, num_label, dropout=0.,
                concat_decode=True, use_loss=False):
    """Unrolled multi-layer LSTM language-model graph.

    Each time step embeds t%d_data, runs it through the LSTM stack and
    decodes to num_label classes.  concat_decode decodes all steps with one
    concatenated FC/softmax (single "label" input); otherwise one decoder
    per step (t%d_label inputs).  use_loss swaps SoftmaxOutput for an
    explicit cross-entropy MakeLoss.  ctx_group attributes ('embed',
    'decode', 'layer%d') support model-parallel placement via group2ctx.
    Returns a Group of [outputs..., last c states..., last h states...].
    """
    # initialize the parameter symbols
    with mx.AttrScope(ctx_group='embed'):
        embed_weight=mx.sym.Variable("embed_weight")
    with mx.AttrScope(ctx_group='decode'):
        cls_weight = mx.sym.Variable("cls_weight")
        cls_bias = mx.sym.Variable("cls_bias")
    param_cells = []
    last_states = []
    for i in range(num_lstm_layer):
        with mx.AttrScope(ctx_group='layer%d' % i):
            param_cells.append(LSTMParam(i2h_weight = mx.sym.Variable("l%d_i2h_weight" % i),
                                         i2h_bias = mx.sym.Variable("l%d_i2h_bias" % i),
                                         h2h_weight = mx.sym.Variable("l%d_h2h_weight" % i),
                                         h2h_bias = mx.sym.Variable("l%d_h2h_bias" % i)))
            state = LSTMState(c=mx.sym.Variable("l%d_init_c" % i),
                              h=mx.sym.Variable("l%d_init_h" % i))
        last_states.append(state)
    assert(len(last_states) == num_lstm_layer)

    last_hidden = []
    for seqidx in range(seq_len):
        # embedding layer
        with mx.AttrScope(ctx_group='embed'):
            data = mx.sym.Variable("t%d_data" % seqidx)
            hidden = mx.sym.Embedding(data=data, weight=embed_weight,
                                      input_dim=input_size,
                                      output_dim=num_embed,
                                      name="t%d_embed" % seqidx)
        # stack LSTM (no dropout on the first layer's input)
        for i in range(num_lstm_layer):
            if i==0:
                dp=0.
            else:
                dp = dropout
            with mx.AttrScope(ctx_group='layer%d' % i):
                next_state = lstm(num_hidden, indata=hidden,
                                  prev_state=last_states[i],
                                  param=param_cells[i],
                                  seqidx=seqidx, layeridx=i, dropout=dp)
                hidden = next_state.h
                last_states[i] = next_state
        # decoder
        if dropout > 0.:
            hidden = mx.sym.Dropout(data=hidden, p=dropout)
        last_hidden.append(hidden)

    out_prob = []
    if not concat_decode:
        for seqidx in range(seq_len):
            with mx.AttrScope(ctx_group='decode'):
                fc = mx.sym.FullyConnected(data=last_hidden[seqidx],
                                           weight=cls_weight,
                                           bias=cls_bias,
                                           num_hidden=num_label,
                                           name="t%d_cls" % seqidx)
                label = mx.sym.Variable("t%d_label" % seqidx)
                if use_loss:
                    # Currently softmax_cross_entropy fails https://github.com/apache/incubator-mxnet/issues/6874
                    # So, workaround for now to fix this example
                    out = mx.symbol.softmax(data=fc)
                    label = mx.sym.Reshape(label, shape=(-1,1))
                    ce = - mx.sym.broadcast_add(mx.sym.broadcast_mul(label, mx.sym.log(out)),
                                                mx.sym.broadcast_mul((1 - label), mx.sym.log(1 - out)))
                    sm = mx.sym.MakeLoss(ce, name="t%d_sm" % seqidx)
                else:
                    sm = mx.sym.SoftmaxOutput(data=fc, label=label, name="t%d_sm" % seqidx)
                out_prob.append(sm)
    else:
        with mx.AttrScope(ctx_group='decode'):
            concat = mx.sym.Concat(*last_hidden, dim = 0)
            fc = mx.sym.FullyConnected(data=concat,
                                       weight=cls_weight,
                                       bias=cls_bias,
                                       num_hidden=num_label)
            label = mx.sym.Variable("label")
            if use_loss:
                # Currently softmax_cross_entropy fails https://github.com/apache/incubator-mxnet/issues/6874
                # So, workaround for now to fix this example
                out = mx.symbol.softmax(data=fc)
                label = mx.sym.Reshape(label, shape=(-1, 1))
                # NOTE(review): unlike the per-step branch above, this `ce`
                # has no leading minus -- looks like a sign bug; confirm.
                ce = mx.sym.broadcast_add(mx.sym.broadcast_mul(label, mx.sym.log(out)),
                                          mx.sym.broadcast_mul((1 - label), mx.sym.log(1 - out)))
                sm = mx.sym.MakeLoss(ce, name="sm")
            else:
                sm = mx.sym.SoftmaxOutput(data=fc, label=label, name="sm")
            out_prob = [sm]

    # expose the final states (gradient-blocked) so they can be carried over
    for i in range(num_lstm_layer):
        state = last_states[i]
        state = LSTMState(c=mx.sym.BlockGrad(state.c, name="l%d_last_c" % i),
                          h=mx.sym.BlockGrad(state.h, name="l%d_last_h" % i))
        last_states[i] = state

    unpack_c = [state.c for state in last_states]
    unpack_h = [state.h for state in last_states]
    list_all = out_prob + unpack_c + unpack_h
    return mx.sym.Group(list_all)
def is_param_name(name):
    """Return True if *name* denotes a learnable-parameter array (weight,
    bias, batch-norm gamma/beta), as opposed to data/label/state arrays,
    judged by its conventional suffix."""
    # str.endswith accepts a tuple of suffixes -- one call instead of four
    return name.endswith(("weight", "bias", "gamma", "beta"))
def setup_rnn_model(default_ctx,
                    num_lstm_layer, seq_len,
                    num_hidden, num_embed, num_label,
                    batch_size, input_size,
                    initializer, dropout=0.,
                    group2ctx=None, concat_decode=True,
                    use_loss=False, buckets=None):
    """Build and bind one executor per bucket length, sharing memory with
    the largest bucket's executor.  Returns {bucket_key: LSTMModel}.

    group2ctx maps the ctx_group names from lstm_unroll to devices for
    model-parallel placement.  NOTE(review): lstm_unroll is called with
    seq_len=seq_len, not seq_len=bucket_key -- every bucket gets the same
    unroll length, which looks unintended; confirm.
    """
    max_len = max(buckets)
    max_rnn_exec = None
    models = {}
    # process the largest bucket first so its executor can be shared
    buckets.reverse()
    for bucket_key in buckets:
        # bind max_len first
        rnn_sym = lstm_unroll(num_lstm_layer=num_lstm_layer,
                              num_hidden=num_hidden,
                              seq_len=seq_len,
                              input_size=input_size,
                              num_embed=num_embed,
                              num_label=num_label,
                              dropout=dropout,
                              concat_decode=concat_decode,
                              use_loss=use_loss)
        arg_names = rnn_sym.list_arguments()
        internals = rnn_sym.get_internals()

        # shapes for data/label/state inputs; parameter shapes are inferred
        input_shapes = {}
        for name in arg_names:
            if name.endswith("init_c") or name.endswith("init_h"):
                input_shapes[name] = (batch_size, num_hidden)
            elif name.endswith("data"):
                input_shapes[name] = (batch_size, )
            elif name == "label":
                input_shapes[name] = (batch_size * seq_len, )
            elif name.endswith("label"):
                input_shapes[name] = (batch_size, )
            else:
                pass
        arg_shape, out_shape, aux_shape = rnn_sym.infer_shape(**input_shapes)
        # bind arrays, honoring each symbol's ctx_group placement
        arg_arrays = []
        args_grad = {}
        for shape, name in zip(arg_shape, arg_names):
            group = internals[name].attr("__ctx_group__")
            ctx = group2ctx[group] if group is not None else default_ctx
            arg_arrays.append(mx.nd.zeros(shape, ctx))
            if is_param_name(name):
                args_grad[name] = mx.nd.zeros(shape, ctx)
            if not name.startswith("t"):
                print("%s group=%s, ctx=%s" % (name, group, str(ctx)))

        # bind with shared executor
        rnn_exec = None
        if max_len == bucket_key:
            rnn_exec = rnn_sym.bind(default_ctx, args=arg_arrays,
                                    args_grad=args_grad,
                                    grad_req="add", group2ctx=group2ctx)
            max_rnn_exec = rnn_exec
        else:
            assert max_rnn_exec is not None
            rnn_exec = rnn_sym.bind(default_ctx, args=arg_arrays,
                                    args_grad=args_grad,
                                    grad_req="add", group2ctx=group2ctx,
                                    shared_exec = max_rnn_exec)

        param_blocks = []
        arg_dict = dict(zip(arg_names, rnn_exec.arg_arrays))
        for i, name in enumerate(arg_names):
            if is_param_name(name):
                initializer(name, arg_dict[name])
                param_blocks.append((i, arg_dict[name], args_grad[name], name))
            else:
                assert name not in args_grad
        out_dict = dict(zip(rnn_sym.list_outputs(), rnn_exec.outputs))

        init_states = [LSTMState(c=arg_dict["l%d_init_c" % i],
                                 h=arg_dict["l%d_init_h" % i]) for i in range(num_lstm_layer)]
        seq_data = [rnn_exec.arg_dict["t%d_data" % i] for i in range(seq_len)]
        # we don't need to store the last state
        last_states = None
        if concat_decode:
            seq_outputs = [out_dict["sm_output"]]
            seq_labels = [rnn_exec.arg_dict["label"]]
        else:
            seq_outputs = [out_dict["t%d_sm_output" % i] for i in range(seq_len)]
            seq_labels = [rnn_exec.arg_dict["t%d_label" % i] for i in range(seq_len)]
        model = LSTMModel(rnn_exec=rnn_exec, symbol=rnn_sym,
                          init_states=init_states, last_states=last_states,
                          seq_data=seq_data, seq_labels=seq_labels, seq_outputs=seq_outputs,
                          param_blocks=param_blocks)
        models[bucket_key] = model
    # restore caller's bucket ordering
    buckets.reverse()
    return models
def set_rnn_inputs(m, X, begin):
    """Copy a window of rows from X (starting at *begin*, wrapping around)
    into the model's per-step data arrays; each step's label is the next
    row (next-token prediction)."""
    seq_len = len(m.seq_data)
    batch_size = m.seq_data[0].shape[0]
    for seqidx in range(seq_len):
        idx = (begin + seqidx) % X.shape[0]
        next_idx = (begin + seqidx + 1) % X.shape[0]
        x = X[idx, :]
        y = X[next_idx, :]
        mx.nd.array(x).copyto(m.seq_data[seqidx])
        # single label array means concat_decode: labels for all steps are
        # packed into one flat array, one batch_size slice per step
        if len(m.seq_labels) == 1:
            m.seq_labels[0][seqidx*batch_size : seqidx*batch_size+batch_size] = y
        else:
            m.seq_labels[seqidx][:] = y
def set_rnn_inputs_from_batch(m, batch, batch_seq_length, batch_size):
    """Like set_rnn_inputs, but sourcing from a bucketing data batch whose
    data already spans exactly one sequence (labels wrap to step 0)."""
    X = batch.data
    for seqidx in range(batch_seq_length):
        idx = seqidx
        next_idx = (seqidx + 1) % batch_seq_length
        x = X[idx, :]
        y = X[next_idx, :]
        mx.nd.array(x).copyto(m.seq_data[seqidx])
        # single label array means concat_decode layout (see set_rnn_inputs)
        if len(m.seq_labels) == 1:
            m.seq_labels[0][seqidx*batch_size : seqidx*batch_size+batch_size] = y
        else:
            m.seq_labels[seqidx][:] = y
def calc_nll_concat(seq_label_probs, batch_size):
    """Negative log-likelihood per sequence for the concat-decode layout:
    sum of -log(p) over every (step, batch) entry, divided by batch_size."""
    log_probs = np.log(seq_label_probs.asnumpy())
    return -np.sum(log_probs) / batch_size
def calc_nll(seq_label_probs, batch_size, seq_len):
    """Negative log-likelihood per sequence for the per-step layout:
    accumulate each step's mean -log(p), clipping probabilities at eps
    so that zero probability does not produce -inf."""
    eps = 1e-10
    nll = 0.
    for step in range(seq_len):
        probs = seq_label_probs[step].asnumpy()
        clipped = np.maximum(probs, eps)
        nll += -np.sum(np.log(clipped)) / batch_size
    return nll
def train_lstm(model, X_train_batch, X_val_batch,
               num_round, update_period, concat_decode, batch_size, use_loss,
               optimizer='sgd', half_life=2,max_grad_norm = 5.0, **kwargs):
    """Train the bucketed LSTM models with gradient clipping.

    *model* is the {bucket_key: LSTMModel} dict from setup_rnn_model.
    Parameters are updated every *update_period* batches (gradients
    accumulate via grad_req="add"); the global gradient norm is clipped at
    *max_grad_norm*.  The learning rate is halved whenever validation
    perplexity stops improving by more than 1.
    NOTE(review): *half_life* is accepted but never used -- confirm.
    """
    opt = mx.optimizer.create(optimizer,
                              **kwargs)

    updater = mx.optimizer.get_updater(opt)
    epoch_counter = 0
    #log_period = max(1000 / seq_len, 1)
    log_period = 28
    last_perp = 10000000.0

    for iteration in range(num_round):
        nbatch = 0
        train_nll = 0
        tic = time.time()
        for data_batch in X_train_batch:
            batch_seq_length = data_batch.bucket_key
            m = model[batch_seq_length]
            # reset init state
            for state in m.init_states:
                state.c[:] = 0.0
                state.h[:] = 0.0

            head_grad = []
            if use_loss:
                # MakeLoss outputs need explicit unit head gradients
                ctx = m.seq_outputs[0].context
                head_grad = [mx.nd.ones((1,), ctx) for x in m.seq_outputs]

            set_rnn_inputs_from_batch(m, data_batch, batch_seq_length, batch_size)

            m.rnn_exec.forward(is_train=True)
            # probability of each label class, used to evaluate nll
            # Change back to individual ops to see if fine grained scheduling helps.
            if not use_loss:
                if concat_decode:
                    seq_label_probs = mx.nd.choose_element_0index(m.seq_outputs[0], m.seq_labels[0])
                else:
                    seq_label_probs = [mx.nd.choose_element_0index(out, label).copyto(mx.cpu())
                                       for out, label in zip(m.seq_outputs, m.seq_labels)]
                m.rnn_exec.backward()
            else:
                seq_loss = [x.copyto(mx.cpu()) for x in m.seq_outputs]
                m.rnn_exec.backward(head_grad)

            # update epoch counter
            epoch_counter += 1
            if epoch_counter % update_period == 0:
                # update parameters: global-norm gradient clipping then SGD step
                norm = 0.
                for idx, weight, grad, name in m.param_blocks:
                    grad /= batch_size
                    l2_norm = mx.nd.norm(grad).asscalar()
                    norm += l2_norm*l2_norm
                norm = math.sqrt(norm)
                for idx, weight, grad, name in m.param_blocks:
                    if norm > max_grad_norm:
                        grad *= (max_grad_norm / norm)
                    updater(idx, grad, weight)
                    # reset gradient to zero
                    grad[:] = 0.0
            if not use_loss:
                if concat_decode:
                    train_nll += calc_nll_concat(seq_label_probs, batch_size)
                else:
                    train_nll += calc_nll(seq_label_probs, batch_size, batch_seq_length)
            else:
                train_nll += sum([x.sum().asscalar() for x in seq_loss]) / batch_size

            nbatch += batch_size
            toc = time.time()
            if epoch_counter % log_period == 0:
                print("Iter [%d] Train: Time: %.3f sec, NLL=%.3f, Perp=%.3f" % (
                    epoch_counter, toc - tic, train_nll / nbatch, np.exp(train_nll / nbatch)))
        # end of training loop
        toc = time.time()
        print("Iter [%d] Train: Time: %.3f sec, NLL=%.3f, Perp=%.3f" % (
            iteration, toc - tic, train_nll / nbatch, np.exp(train_nll / nbatch)))

        val_nll = 0.0
        nbatch = 0
        for data_batch in X_val_batch:
            batch_seq_length = data_batch.bucket_key
            m = model[batch_seq_length]
            # validation set, reset states
            for state in m.init_states:
                state.h[:] = 0.0
                state.c[:] = 0.0

            set_rnn_inputs_from_batch(m, data_batch, batch_seq_length, batch_size)

            # validation: forward only, no gradients
            m.rnn_exec.forward(is_train=False)

            # probability of each label class, used to evaluate nll
            if not use_loss:
                if concat_decode:
                    seq_label_probs = mx.nd.choose_element_0index(m.seq_outputs[0], m.seq_labels[0])
                else:
                    seq_label_probs = [mx.nd.choose_element_0index(out, label).copyto(mx.cpu())
                                       for out, label in zip(m.seq_outputs, m.seq_labels)]
            else:
                seq_loss = [x.copyto(mx.cpu()) for x in m.seq_outputs]

            if not use_loss:
                if concat_decode:
                    val_nll += calc_nll_concat(seq_label_probs, batch_size)
                else:
                    val_nll += calc_nll(seq_label_probs, batch_size, batch_seq_length)
            else:
                val_nll += sum([x.sum().asscalar() for x in seq_loss]) / batch_size
            nbatch += batch_size

        perp = np.exp(val_nll / nbatch)
        print("Iter [%d] Val: NLL=%.3f, Perp=%.3f" % (
            iteration, val_nll / nbatch, np.exp(val_nll / nbatch)))
        # halve the learning rate when validation perplexity plateaus
        if last_perp - 1.0 < perp:
            opt.lr *= 0.5
            print("Reset learning rate to %g" % opt.lr)
        last_perp = perp
        X_val_batch.reset()
        X_train_batch.reset()
def setup_rnn_sample_model(ctx,
                           params,
                           num_lstm_layer,
                           num_hidden, num_embed, num_label,
                           batch_size, input_size,
                           concat_decode=True):
    """Build a one-step (seq_len=1) inference executor for sampling from a
    trained LSTM language model.

    *params* maps parameter names to trained NDArrays; their values are
    copied into the freshly-bound executor.  Returns an LSTMModel wired for
    single-step inference (no gradients).

    Fixes relative to the original:
    - `concat_decode` was read but never defined anywhere (NameError at
      runtime); it is now a backward-compatible keyword argument, and is
      also forwarded to lstm_unroll so the generated graph has the label
      inputs this function looks up.
    - per-step softmax outputs are looked up as "t%d_sm_output", matching
      the Group output naming used in setup_rnn_model.
    """
    seq_len = 1
    rnn_sym = lstm_unroll(num_lstm_layer=num_lstm_layer,
                          input_size=input_size,
                          num_hidden=num_hidden,
                          seq_len=seq_len,
                          num_embed=num_embed,
                          num_label=num_label,
                          concat_decode=concat_decode)
    arg_names = rnn_sym.list_arguments()

    # shapes for data/state inputs; parameter shapes are inferred
    input_shapes = {}
    for name in arg_names:
        if name.endswith("init_c") or name.endswith("init_h"):
            input_shapes[name] = (batch_size, num_hidden)
        elif name.endswith("data"):
            input_shapes[name] = (batch_size, )
        else:
            pass
    arg_shape, out_shape, aux_shape = rnn_sym.infer_shape(**input_shapes)
    arg_arrays = [mx.nd.zeros(s, ctx) for s in arg_shape]
    arg_dict = dict(zip(arg_names, arg_arrays))
    # copy the trained parameter values into the bound arrays
    for name, arr in params.items():
        arg_dict[name][:] = arr
    # inference only: no gradients requested
    rnn_exec = rnn_sym.bind(ctx=ctx, args=arg_arrays, args_grad=None, grad_req="null")
    out_dict = dict(zip(rnn_sym.list_outputs(), rnn_exec.outputs))
    param_blocks = []
    params_array = list(params.items())
    for i in range(len(params)):
        param_blocks.append((i, params_array[i][1], None, params_array[i][0]))
    init_states = [LSTMState(c=arg_dict["l%d_init_c" % i],
                             h=arg_dict["l%d_init_h" % i]) for i in range(num_lstm_layer)]

    if concat_decode:
        seq_labels = [rnn_exec.arg_dict["label"]]
        seq_outputs = [out_dict["sm_output"]]
    else:
        seq_labels = [rnn_exec.arg_dict["t%d_label" % i] for i in range(seq_len)]
        seq_outputs = [out_dict["t%d_sm_output" % i] for i in range(seq_len)]

    seq_data = [rnn_exec.arg_dict["t%d_data" % i] for i in range(seq_len)]
    last_states = [LSTMState(c=out_dict["l%d_last_c_output" % i],
                             h=out_dict["l%d_last_h_output" % i]) for i in range(num_lstm_layer)]

    return LSTMModel(rnn_exec=rnn_exec, symbol=rnn_sym,
                     init_states=init_states, last_states=last_states,
                     seq_data=seq_data, seq_labels=seq_labels, seq_outputs=seq_outputs,
                     param_blocks=param_blocks)
# Under Python 3, np.random.choice is strict about float probabilities summing
# exactly to 1, so we use our own sampling helper instead.
import random
import bisect
import collections
def _cdf(weights):
total = sum(weights)
result = []
cumsum = 0
for w in weights:
cumsum += w
result.append(cumsum / total)
return result
def _choice(population, weights):
    """Draw one element of `population` with probability proportional to `weights`."""
    assert len(population) == len(weights)
    cutoff = random.random()
    pos = bisect.bisect(_cdf(weights), cutoff)
    return population[pos]
def sample_lstm(model, X_input_batch, seq_len, temperature=1., sample=True):
    """Autoregressively sample `seq_len` symbols from a bound LSTM model.

    Feeds `X_input_batch` through the single-step executor, then feeds each
    step's output back in as the next input.  With sample=True the next
    symbol is drawn from the temperature-rescaled distribution; otherwise
    the argmax is taken.  Returns a list of per-step output arrays.

    NOTE(review): treats m.seq_outputs as a single (batch, vocab) array,
    i.e. assumes the model was built in concat-decode / single-step form --
    confirm against setup_rnn_sample_model.
    """
    m = model
    vocab = m.seq_outputs.shape[1]
    batch_size = m.seq_data[0].shape[0]
    outputs_ndarray = mx.nd.zeros(m.seq_outputs.shape)
    outputs_batch = []
    tmp = [i for i in range(vocab)]
    # Pre-allocate one output buffer per generated step.
    for i in range(seq_len):
        outputs_batch.append(np.zeros(X_input_batch.shape))
    for i in range(seq_len):
        set_rnn_inputs(m, X_input_batch, 0)
        m.rnn_exec.forward(is_train=False)
        outputs_ndarray[:] = m.seq_outputs
        # Carry hidden state forward: last states become next step's init states.
        for init, last in zip(m.init_states, m.last_states):
            last.c.copyto(init.c)
            last.h.copyto(init.h)
        # Clip so log() below is finite.
        prob = np.clip(outputs_ndarray.asnumpy(), 1e-6, 1 - 1e-6)
        if sample:
            # Temperature rescaling: softmax sharpened/flattened, then renormalized.
            rescale = np.exp(np.log(prob) / temperature)
            for j in range(batch_size):
                p = rescale[j, :]
                p[:] /= p.sum()
                outputs_batch[i][j] = _choice(tmp, p)
        else:
            outputs_batch[i][:] = np.argmax(prob, axis=1)
        # Feed the sampled symbols back in as the next input.
        X_input_batch[:] = outputs_batch[i]
    return outputs_batch
| apache-2.0 |
sauliusl/scipy | benchmarks/run.py | 73 | 4077 | #!/usr/bin/env python
"""
run.py [options] ASV_COMMAND..
Convenience wrapper around the ``asv`` command; just sets environment
variables and chdirs to the correct place etc.
"""
from __future__ import division, absolute_import, print_function
import os
import sys
import subprocess
import json
import shutil
import argparse
import sysconfig
EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
'/usr/local/lib/ccache', '/usr/local/lib/f90cache']
from benchmarks.common import set_mem_rlimit
def main():
    """Parse wrapper options and hand everything else to ``asv``."""
    class ASVHelpAction(argparse.Action):
        # Zero-argument flag: immediately forward --help to asv and exit.
        nargs = 0
        def __call__(self, parser, namespace, values, option_string=None):
            sys.exit(run_asv(['--help']))

    parser = argparse.ArgumentParser(usage=__doc__.strip())
    parser.add_argument('--help-asv', nargs=0, action=ASVHelpAction,
                        help="""show ASV help""")
    parser.add_argument("--current-repo", action="store_true",
                        help="""use current repository as the upstream repository,
                        rather than cloning it from the internet; enables running
                        benchmarks on e.g. your own branches""")
    parser.add_argument('asv_command', nargs=argparse.REMAINDER)
    options = parser.parse_args()
    sys.exit(run_asv(options.asv_command, current_repo=options.current_repo))
def run_asv(args, current_repo=False):
    """Run the ``asv`` command from the benchmarks directory.

    Sets up environment (ccache/f90cache paths, single-threaded BLAS,
    sanitized CFLAGS, memory rlimit) and executes ``asv`` with `args`.

    Parameters
    ----------
    args : list of str
        Arguments forwarded to ``asv``.
    current_repo : bool, optional
        If True, rewrite the asv config so benchmarks run against the local
        checkout instead of the upstream GitHub URL.

    Returns
    -------
    int
        Exit code suitable for ``sys.exit``.
    """
    import errno
    cwd = os.path.abspath(os.path.dirname(__file__))
    if current_repo:
        try:
            from asv.util import load_json, write_json
            conf = load_json(os.path.join(cwd, 'asv.conf.json'))
            conf['repo'] = os.path.normpath(os.path.join(cwd, '..'))
            cfg_fn = os.path.join(cwd, '.asvconf.tmp')
            write_json(cfg_fn, conf)
            args = ['--config', cfg_fn] + args
        except ImportError:
            pass
    repo_dir = os.path.join(cwd, 'scipy')
    if is_git_repo_root(repo_dir):
        # Point the cloned benchmark repo at the requested upstream.
        if current_repo:
            url = os.path.normpath(os.path.join(cwd, '..'))
        else:
            url = "https://github.com/scipy/scipy.git"
        subprocess.call(['git', 'remote', 'set-url', "origin", url],
                        cwd=repo_dir)
    cmd = ['asv'] + list(args)
    env = dict(os.environ)
    # Inject ccache/f90cache paths
    if sys.platform.startswith('linux'):
        env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
    # Control BLAS and CFLAGS
    env['OPENBLAS_NUM_THREADS'] = '1'
    env['CFLAGS'] = drop_bad_flags(sysconfig.get_config_var('CFLAGS'))
    # Limit memory usage
    try:
        set_mem_rlimit()
    except (ImportError, RuntimeError):
        pass
    # Check scipy version if in dev mode; otherwise clone and setup results
    # repository
    if args and (args[0] == 'dev' or '--python=same' in args):
        import scipy
        print("Running benchmarks for Scipy version %s at %s" % (scipy.__version__, scipy.__file__))
    # Override gh-pages
    if 'gh-pages' in args:
        print("gh-pages command is disabled")
        return 1
    # Run
    try:
        return subprocess.call(cmd, env=env, cwd=cwd)
    except OSError as err:
        # Use the symbolic constant instead of the magic number 2.
        if err.errno == errno.ENOENT:
            print("Error when running '%s': %s\n" % (" ".join(cmd), str(err),))
            print("You need to install Airspeed Velocity https://spacetelescope.github.io/asv/")
            print("to run Scipy benchmarks")
            return 1
        raise
def is_git_repo_root(path):
    """Return True if `path` is the top level of a git working tree.

    Runs ``git rev-parse --git-dir`` in `path`; at the repository root this
    prints exactly ``.git``.  Returns False when git is unavailable, the
    command fails, or `path` is a subdirectory of a repository.
    """
    try:
        p = subprocess.Popen(['git', '-C', path, 'rev-parse', '--git-dir'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            return False
        # communicate() returns bytes (no text mode requested); comparing
        # against the str '.git' was always False under Python 3.
        return out.strip() == b'.git'
    except OSError:
        return False
def drop_bad_flags(flags):
    """
    Drop flags that are problematic for compiling old scipy versions
    """
    if not flags:
        return flags
    kept = []
    for flag in flags.split():
        if flag.startswith("-Werror") or flag in ("-pedantic-errors",):
            continue
        kept.append(flag)
    return " ".join(kept)
# Script entry point: propagate main()'s exit status to the shell.
if __name__ == "__main__":
    sys.exit(main())
| bsd-3-clause |
michaelni/audacity | locale/smartmsgmerge.py | 33 | 7285 | #!/usr/bin/python
#
# smartmsgmerge.py
#
# Written by Dominic Mazzoni, 2006
# GNU General Public License 2.0
#
# This is a replacement for the GNU gettext "msgmerge" program, which
# is typically used to update a .po file (def) to the latest .pot file
# (ref). This program is not command-line compatible; it takes no
# flags but simply the def, ref, and output file names.
#
# It uses a much faster and also much stricter policy for finding new
# fuzzy matches: the edit-distance must be no more than 4%, or for very
# short strings, no more than 1 character.
#
# This makes it safe for you to enable fuzzy strings in your .mo file
# without worrying that they'll be too terrible.
#
# It also fixes translations where the beginning and ending newlines
# do not match the original string.
#
import sys, os
# Command-line handling: expect exactly three arguments --
# the existing translations (.po), the new template (.pot), and the output.
if len(sys.argv) != 4:
    print "Usage: %s def.po ref.pot out.po" % sys.argv[0]
    sys.exit()
def_filename = sys.argv[1]  # existing translations ("def")
ref_filename = sys.argv[2]  # up-to-date template ("ref")
out_filename = sys.argv[3]  # merged output
# Each object will contain the comments, msgid (untranslated),
# msgstr (translated), and a fuzzy flag. For simplicity this is
# not a typechecked class, just a dummy dynamic container class.
# Attributes assigned ad hoc: comments, msgid, msgstr, fuzzy, line_no.
class obj:
    pass
# Compute the edit-distance between str1 and str2, taking a couple
# of shortcuts such that it returns 999 quickly if the edit-distance
# is clearly not going to be less than 10 percent.
def edit_distance(str1, str2):
    """Return the edit distance between str1 and str2, or 999 early-out.

    Shortcuts: strings whose lengths differ by 10% or more return 999
    immediately, as do pairs still looking bad after 30 rows.  The dynamic
    program is restricted to a diagonal beam, so true distances outside
    the beam also come back inflated (up to 999).
    """
    l1 = len(str1)
    l2 = len(str2)
    # Guard the ratio test below against division by zero: an empty
    # string only matches another empty string.
    if l2 == 0:
        if l1 == 0:
            return 0
        return 999
    # Exit if the difference in the string lengths is 10% or more
    if l1*1.0/l2 < 0.9 or l1*1.0/l2 > 1.1:
        return 999
    # Compute a beam width of +/- 5% - the path through the matrix cannot
    # go outside the main diagonal +/- the beam.
    beam = int(0.5 + 0.1 * ((l1 + l2) / 2))
    # Create a 2D array, prefilled with the "too far" sentinel.
    d = [None]*(l1+1)
    for i in range(l1+1):
        d[i] = [999]*(l2+1)
    # Initialize the first row and column
    for i in range(l1+1):
        d[i][0] = i
    for j in range(l2+1):
        d[0][j] = j
    # Dynamic programming
    for i in range(1, l1+1):
        # Quick short-circuit after 30 rows; stop if things are
        # looking really bad
        if i==30 and l2>=30 and d[29][29] > 20:
            return 999
        for j in range(max(1, i-beam), min(l2+1, i+beam+1)):
            if str1[i-1] == str2[j-1]:
                cost = 0
            else:
                cost = 1
            d[i][j] = min(
                d[i-1][j] + 1,       # deletion
                d[i][j-1] + 1,       # insertion
                d[i-1][j-1] + cost   # substitution
                )
    return d[l1][l2]
# Take a string and format it on a bunch of separate lines in quotes
def quote(str):
    """Format a msgid/msgstr as quoted .po lines, one per literal \\n escape."""
    if str=="":
        return "\"\"\n"
    pieces = str.split("\\n")
    # Every piece but the last was followed by an escaped newline in the
    # original text, so it gets its own quoted line ending in \n.
    quoted = ""
    for piece in pieces[:-1]:
        quoted += "\"%s\\n\"\n" % piece
    if len(pieces[-1]) > 0:
        quoted += "\"%s\"\n" % pieces[-1]
    return quoted
# Take a bunch of separate lines in quotes and turn them into a single string
def unquote(str):
    """Join a run of quoted .po lines back into one unquoted string.

    Lines shorter than two characters are skipped; a line that is not
    fully quoted is a fatal parse error.
    """
    joined = ""
    for raw in str.split("\n"):
        stripped = raw.strip()
        if len(stripped) < 2:
            continue
        if stripped[0]=='"' and stripped[-1]=='"':
            joined += stripped[1:-1]
        else:
            print("Error with:")
            print('**%s**' % stripped)
            sys.exit()
    return joined
# Parse one file in the .po / .pot format, returning a hash of all
# msgids and a list of all msgids in order.
def parse(fname):
    """Parse one .po/.pot file.

    Returns (h, l): h maps each msgid to an obj carrying its comments,
    msgstr, fuzzy flag and line number; l lists the msgids in file order.
    Exits the program if a msgid appears twice.
    """
    h = {}
    l = []
    # Accumulators for the entry currently being read.
    msgid = ""
    msgstr = ""
    comments = ""
    fuzzy = False
    first = True
    line_no = 0
    # Read the lines of the file and make sure it always ends in a
    # blank line
    lines = open(fname).readlines()
    lines.append("\n")
    for line in (lines + ["\n"]):
        # Handle DOS line endings
        if len(line)>=2 and line[-2]=='\r' and line[-1]=='\n':
            line = line[:-2]+'\n'
        line_no += 1
        if line=="\n":
            if len(msgid)==0 and not first:
                # We found a blank line or comments in the middle of nowhere
                comments = ""
                fuzzy = False
                msgstr = ""
                continue
            # Otherwise, a blank line in the middle of the file
            # signifies the end of a translation
            msgid = unquote(msgid)
            msgstr = unquote(msgstr)
            if msgid in h:
                print "Duplicate msgid in %s:" % (fname)
                print quote(msgid)
                print "Found on line %d, previously defined on line %d" % \
                      (line_no, h[msgid].line_no)
                sys.exit()
            # Record the finished entry and reset the accumulators.
            o = obj()
            o.comments = comments
            o.msgid = msgid
            o.msgstr = msgstr
            o.fuzzy = fuzzy
            o.line_no = line_no
            h[msgid] = o
            l.append(msgid)
            comments = ""
            msgstr = ""
            msgid = ""
            fuzzy = False
            first = False
        elif len(line)>=8 and line[:8] == "#, fuzzy":
            fuzzy = True
            comments += line
        elif line[0] == '#':
            comments += line
        elif len(line)>6 and line[:6]=="msgid ":
            msgid += line[6:]
        elif len(line)>7 and line[:7]=="msgstr ":
            msgstr += line[7:]
        else:
            # Continuation line: belongs to msgstr once one has started,
            # otherwise to msgid.
            if len(msgstr):
                msgstr += line
            else:
                msgid += line
    return (h, l)
# Parse both catalogs: "def" holds the existing translations, "ref" the
# up-to-date template to merge against.
(def_h, def_l) = parse(def_filename)
(ref_h, ref_l) = parse(ref_filename)
# Handle the exact matches
final_h = {}
for msgid in ref_l:
    if msgid in def_h:
        final_h[msgid] = def_h[msgid]
# Try for fuzzy matches: for each still-untranslated template string, find
# the closest unused translated string and reuse its translation when the
# edit distance is at most 1 character or 4%.
for ref_msgid in [x for x in ref_l if x not in final_h]:
    min_ed = 999
    min_msgid = None
    for def_msgid in [x for x in def_l if x not in final_h]:
        # Skip entries with trivially short / empty translations.
        if len(def_h[def_msgid].msgstr) < 3:
            continue
        ed = edit_distance(ref_msgid, def_msgid)
        if ed < min_ed:
            min_ed = ed
            min_msgid = def_msgid
    if min_msgid is not None:
        pct = min_ed * 100.0 / min(len(ref_msgid), len(min_msgid))
        if min_ed == 1 or pct <= 4.0:
            # Truncate long strings for the progress report only.
            refstr = ref_msgid
            if len(refstr)>40:
                refstr = refstr[:37]+"..."
            minstr = min_msgid
            if len(minstr)>40:
                minstr = minstr[:37]+"..."
            print("Found fuzzy match:")
            print("  %s" % refstr)
            print("  %s" % minstr)
            print("  def_len=%d, ref_len=%d, edit_distance=%d" %
                  (len(min_msgid), len(ref_msgid), min_ed))
            # (A stray no-op statement `def_h[min_msgid]` was removed here.)
            o = obj()
            o.msgid = ref_msgid
            o.comments = ref_h[ref_msgid].comments
            o.msgstr = def_h[min_msgid].msgstr
            o.fuzzy = True
            final_h[ref_msgid] = o
# Generate output file
translated = 0
fuzzy = 0
empty = 0
out_fp = open(out_filename, "w")
for msgid in ref_l:
    if msgid in final_h:
        o = final_h[msgid]
        if o.fuzzy:
            fuzzy += 1
        elif msgid != "":
            # msgid "" is the header entry; do not count it as translated.
            translated += 1
    else:
        # No translation found: emit the template entry with empty msgstr.
        o = ref_h[msgid]
        empty += 1
    msgstr = o.msgstr
    # Fix leading and trailing newlines
    if len(msgid)>4 and len(msgstr)>4:
        # Add newline if missing
        if msgid[:2]=="\\n" and msgstr[:2]!="\\n":
            msgstr = "\\n" + msgstr
        if msgid[-2:]=="\\n" and msgstr[-2:]!="\\n":
            msgstr = msgstr + "\\n"
        # Remove newline if extraneous
        if msgid[:2]!="\\n" and msgstr[:2]=="\\n":
            msgstr = msgstr[2:]
        if msgid[-2:]!="\\n" and msgstr[-2:]=="\\n":
            msgstr = msgstr[:-2]
    # Write the entry
    out_fp.write(o.comments)
    # Ensure the fuzzy marker is present if the comments don't already carry it.
    if o.fuzzy and o.comments.find("fuzzy")==-1:
        out_fp.write("#, fuzzy\n")
    out_fp.write("msgid " + quote(msgid))
    out_fp.write("msgstr " + quote(msgstr))
    out_fp.write("\n")
# Print stats
print "Translated: %d Fuzzy: %d Empty: %d" % (translated, fuzzy, empty)
print "Wrote output to %s" % out_filename
| gpl-2.0 |
amenonsen/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_subnet.py | 33 | 23367 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: ec2_vpc_subnet
short_description: Manage subnets in AWS virtual private clouds
description:
- Manage subnets in AWS virtual private clouds
version_added: "2.0"
author:
- Robert Estelle (@erydo)
- Brad Davidson (@brandond)
requirements: [ boto3 ]
options:
az:
description:
- "The availability zone for the subnet."
cidr:
description:
- "The CIDR block for the subnet. E.g. 192.0.2.0/24."
ipv6_cidr:
description:
- "The IPv6 CIDR block for the subnet. The VPC must have a /56 block assigned and this value must be a valid IPv6 /64 that falls in the VPC range."
- "Required if I(assign_instances_ipv6=true)"
version_added: "2.5"
tags:
description:
- "A dict of tags to apply to the subnet. Any tags currently applied to the subnet and not present here will be removed."
aliases: [ 'resource_tags' ]
state:
description:
- "Create or remove the subnet"
default: present
choices: [ 'present', 'absent' ]
vpc_id:
description:
- "VPC ID of the VPC in which to create or delete the subnet."
required: true
map_public:
description:
- "Specify C(yes) to indicate that instances launched into the subnet should be assigned public IP address by default."
type: bool
default: 'no'
version_added: "2.4"
assign_instances_ipv6:
description:
- "Specify C(yes) to indicate that instances launched into the subnet should be automatically assigned an IPv6 address."
type: bool
default: 'no'
version_added: "2.5"
wait:
description:
- "When specified, I(state=present) module will wait for subnet to be in available state before continuing."
type: bool
default: 'yes'
version_added: "2.5"
wait_timeout:
description:
- "Number of seconds to wait for subnet to become available when I(wait=True)."
default: 300
version_added: "2.5"
purge_tags:
description:
- Whether or not to remove tags that do not appear in the I(tags) list.
type: bool
default: 'yes'
version_added: "2.5"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Create subnet for database servers
ec2_vpc_subnet:
state: present
vpc_id: vpc-123456
cidr: 10.0.1.16/28
resource_tags:
Name: Database Subnet
register: database_subnet
- name: Remove subnet for database servers
ec2_vpc_subnet:
state: absent
vpc_id: vpc-123456
cidr: 10.0.1.16/28
- name: Create subnet with IPv6 block assigned
ec2_vpc_subnet:
state: present
vpc_id: vpc-123456
cidr: 10.1.100.0/24
ipv6_cidr: 2001:db8:0:102::/64
- name: Remove IPv6 block assigned to subnet
ec2_vpc_subnet:
state: present
vpc_id: vpc-123456
cidr: 10.1.100.0/24
ipv6_cidr: ''
'''
RETURN = '''
subnet:
description: Dictionary of subnet values
returned: I(state=present)
type: complex
contains:
id:
description: Subnet resource id
returned: I(state=present)
type: str
sample: subnet-b883b2c4
cidr_block:
description: The IPv4 CIDR of the Subnet
returned: I(state=present)
type: str
sample: "10.0.0.0/16"
ipv6_cidr_block:
description: The IPv6 CIDR block actively associated with the Subnet
returned: I(state=present)
type: str
sample: "2001:db8:0:102::/64"
availability_zone:
description: Availability zone of the Subnet
returned: I(state=present)
type: str
sample: us-east-1a
state:
description: state of the Subnet
returned: I(state=present)
type: str
sample: available
tags:
description: tags attached to the Subnet, includes name
returned: I(state=present)
type: dict
sample: {"Name": "My Subnet", "env": "staging"}
map_public_ip_on_launch:
description: whether public IP is auto-assigned to new instances
returned: I(state=present)
type: bool
sample: false
assign_ipv6_address_on_creation:
description: whether IPv6 address is auto-assigned to new instances
returned: I(state=present)
type: bool
sample: false
vpc_id:
description: the id of the VPC where this Subnet exists
returned: I(state=present)
type: str
sample: vpc-67236184
available_ip_address_count:
description: number of available IPv4 addresses
returned: I(state=present)
type: str
sample: 251
default_for_az:
description: indicates whether this is the default Subnet for this Availability Zone
returned: I(state=present)
type: bool
sample: false
ipv6_association_id:
description: The IPv6 association ID for the currently associated CIDR
returned: I(state=present)
type: str
sample: subnet-cidr-assoc-b85c74d2
ipv6_cidr_block_association_set:
description: An array of IPv6 cidr block association set information.
returned: I(state=present)
type: complex
contains:
association_id:
description: The association ID
returned: always
type: str
ipv6_cidr_block:
description: The IPv6 CIDR block that is associated with the subnet.
returned: always
type: str
ipv6_cidr_block_state:
description: A hash/dict that contains a single item. The state of the cidr block association.
returned: always
type: dict
contains:
state:
description: The CIDR block association state.
returned: always
type: str
'''
import time
import traceback
try:
import botocore
except ImportError:
pass # caught by AnsibleAWSModule
from ansible.module_utils._text import to_text
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.aws.waiters import get_waiter
from ansible.module_utils.ec2 import (ansible_dict_to_boto3_filter_list, ansible_dict_to_boto3_tag_list,
ec2_argument_spec, camel_dict_to_snake_dict, get_aws_connection_info,
boto3_conn, boto3_tag_list_to_ansible_dict, compare_aws_tags, AWSRetry)
def get_subnet_info(subnet):
    """Normalize a boto3 DescribeSubnets/CreateSubnet response to snake_case.

    Handles list responses recursively, flattens tag lists into a dict,
    renames subnet_id to id, and surfaces the active IPv6 association
    (if any) as ipv6_cidr_block / ipv6_association_id.
    """
    if 'Subnets' in subnet:
        return [get_subnet_info(s) for s in subnet['Subnets']]
    if 'Subnet' in subnet:
        info = camel_dict_to_snake_dict(subnet['Subnet'])
    else:
        info = camel_dict_to_snake_dict(subnet)
    info['tags'] = boto3_tag_list_to_ansible_dict(info['tags']) if 'tags' in info else dict()
    if 'subnet_id' in info:
        info['id'] = info.pop('subnet_id')
    info['ipv6_cidr_block'] = ''
    info['ipv6_association_id'] = ''
    for assoc in (info.get('ipv6_cidr_block_association_set') or []):
        if assoc.get('ipv6_cidr_block_state', {}).get('state') in ('associated', 'associating'):
            info['ipv6_cidr_block'] = assoc['ipv6_cidr_block']
            info['ipv6_association_id'] = assoc['association_id']
    return info
@AWSRetry.exponential_backoff()
def describe_subnets_with_backoff(client, **params):
    """DescribeSubnets with automatic exponential-backoff retries on throttling."""
    return client.describe_subnets(**params)
def waiter_params(module, params, start_time):
    """Attach a custom WaiterConfig to `params` when botocore supports it.

    botocore >= 1.7.0 accepts a per-call WaiterConfig, which lets us honor
    the user-supplied wait_timeout (minus time already elapsed since
    `start_time`).  On older botocore the waiter's defaults apply, which
    main() warns about.  Returns `params` (mutated in place).
    """
    # Fixed: the condition was inverted ("if not ..."), which attached
    # WaiterConfig exactly when botocore could not accept it.
    if module.botocore_at_least("1.7.0"):
        remaining_wait_timeout = int(module.params['wait_timeout'] + start_time - time.time())
        params['WaiterConfig'] = {'Delay': 5, 'MaxAttempts': remaining_wait_timeout // 5}
    return params
def handle_waiter(conn, module, waiter_name, params, start_time):
    """Run the named EC2 waiter, converting waiter/AWS errors into module failures."""
    try:
        get_waiter(conn, waiter_name).wait(
            **waiter_params(module, params, start_time)
        )
    except botocore.exceptions.WaiterError as e:
        module.fail_json_aws(e, "Failed to wait for updates to complete")
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, "An exception happened while trying to wait for updates")
def create_subnet(conn, module, vpc_id, cidr, ipv6_cidr=None, az=None, start_time=None):
    """Create a subnet and, if requested, wait until it is available.

    Returns the new subnet in snake_case form (see get_subnet_info); fails
    the module on AWS errors or wait timeout.
    """
    wait = module.params['wait']
    # (Removed an unused local: wait_timeout was read from module.params but
    # never referenced; waiter_params reads it directly.)
    params = dict(VpcId=vpc_id,
                  CidrBlock=cidr)
    if ipv6_cidr:
        params['Ipv6CidrBlock'] = ipv6_cidr
    if az:
        params['AvailabilityZone'] = az
    try:
        subnet = get_subnet_info(conn.create_subnet(**params))
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't create subnet")
    # Sometimes AWS takes its time to create a subnet and so using
    # new subnets's id to do things like create tags results in
    # exception.
    if wait and subnet.get('state') != 'available':
        handle_waiter(conn, module, 'subnet_exists', {'SubnetIds': [subnet['id']]}, start_time)
        try:
            conn.get_waiter('subnet_available').wait(
                **waiter_params(module, {'SubnetIds': [subnet['id']]}, start_time)
            )
            subnet['state'] = 'available'
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, "Create subnet action timed out waiting for subnet to become available")
    return subnet
def ensure_tags(conn, module, subnet, tags, purge_tags, start_time):
    """Reconcile the subnet's tags with `tags`; returns True if changes were made.

    With purge_tags, tags not listed in `tags` are removed.  In check mode
    the changed flag is still computed but no API writes occur.
    """
    changed = False
    filters = ansible_dict_to_boto3_filter_list({'resource-id': subnet['id'], 'resource-type': 'subnet'})
    try:
        cur_tags = conn.describe_tags(Filters=filters)
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't describe tags")
    to_update, to_delete = compare_aws_tags(boto3_tag_list_to_ansible_dict(cur_tags.get('Tags')), tags, purge_tags)
    if to_update:
        try:
            if not module.check_mode:
                # Retry on InvalidSubnetID.NotFound: the subnet may not be
                # visible to the tagging API immediately after creation.
                AWSRetry.exponential_backoff(
                    catch_extra_error_codes=['InvalidSubnetID.NotFound']
                )(conn.create_tags)(
                    Resources=[subnet['id']],
                    Tags=ansible_dict_to_boto3_tag_list(to_update)
                )
            changed = True
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, msg="Couldn't create tags")
    if to_delete:
        try:
            if not module.check_mode:
                # delete_tags only needs the keys; omitting 'Value' removes
                # the tag regardless of its current value.
                tags_list = []
                for key in to_delete:
                    tags_list.append({'Key': key})
                AWSRetry.exponential_backoff(
                    catch_extra_error_codes=['InvalidSubnetID.NotFound']
                )(conn.delete_tags)(Resources=[subnet['id']], Tags=tags_list)
            changed = True
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, msg="Couldn't delete tags")
    if module.params['wait'] and not module.check_mode:
        # Wait for tags to be updated
        filters = [{'Name': 'tag:{0}'.format(k), 'Values': [v]} for k, v in tags.items()]
        handle_waiter(conn, module, 'subnet_exists',
                      {'SubnetIds': [subnet['id']], 'Filters': filters}, start_time)
    return changed
def ensure_map_public(conn, module, subnet, map_public, check_mode, start_time):
    """Set the subnet's MapPublicIpOnLaunch attribute (no-op in check mode)."""
    if check_mode:
        return
    try:
        conn.modify_subnet_attribute(SubnetId=subnet['id'], MapPublicIpOnLaunch={'Value': map_public})
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't modify subnet attribute")
def ensure_assign_ipv6_on_create(conn, module, subnet, assign_instances_ipv6, check_mode, start_time):
    """Set the subnet's AssignIpv6AddressOnCreation attribute (no-op in check mode)."""
    if check_mode:
        return
    try:
        conn.modify_subnet_attribute(SubnetId=subnet['id'], AssignIpv6AddressOnCreation={'Value': assign_instances_ipv6})
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't modify subnet attribute")
def disassociate_ipv6_cidr(conn, module, subnet, start_time):
    """Remove the subnet's current IPv6 CIDR association.

    Auto-assignment of IPv6 addresses must be disabled first, otherwise
    the disassociation is rejected.
    """
    if subnet.get('assign_ipv6_address_on_creation'):
        ensure_assign_ipv6_on_create(conn, module, subnet, False, False, start_time)
    try:
        conn.disassociate_subnet_cidr_block(AssociationId=subnet['ipv6_association_id'])
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't disassociate ipv6 cidr block id {0} from subnet {1}"
                             .format(subnet['ipv6_association_id'], subnet['id']))
    # Wait for cidr block to be disassociated
    if module.params['wait']:
        filters = ansible_dict_to_boto3_filter_list(
            {'ipv6-cidr-block-association.state': ['disassociated'],
             'vpc-id': subnet['vpc_id']}
        )
        handle_waiter(conn, module, 'subnet_exists',
                      {'SubnetIds': [subnet['id']], 'Filters': filters}, start_time)
def ensure_ipv6_cidr_block(conn, module, subnet, ipv6_cidr, check_mode, start_time):
    """Associate/disassociate the subnet's IPv6 CIDR to match `ipv6_cidr`.

    An empty `ipv6_cidr` removes any existing association.  Fails the
    module if the requested CIDR is already associated with a different
    subnet.  Returns True when something changed.
    """
    wait = module.params['wait']
    changed = False
    if subnet['ipv6_association_id'] and not ipv6_cidr:
        if not check_mode:
            disassociate_ipv6_cidr(conn, module, subnet, start_time)
        changed = True
    if ipv6_cidr:
        filters = ansible_dict_to_boto3_filter_list({'ipv6-cidr-block-association.ipv6-cidr-block': ipv6_cidr,
                                                     'vpc-id': subnet['vpc_id']})
        try:
            check_subnets = get_subnet_info(describe_subnets_with_backoff(conn, Filters=filters))
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, msg="Couldn't get subnet info")
        if check_subnets and check_subnets[0]['ipv6_cidr_block']:
            module.fail_json(msg="The IPv6 CIDR '{0}' conflicts with another subnet".format(ipv6_cidr))
        if subnet['ipv6_association_id']:
            # Replace the existing association with the requested one.
            if not check_mode:
                disassociate_ipv6_cidr(conn, module, subnet, start_time)
            changed = True
        # Fixed: associate_resp was previously unbound in check mode and
        # dereferenced below, raising NameError.
        associate_resp = None
        try:
            if not check_mode:
                associate_resp = conn.associate_subnet_cidr_block(SubnetId=subnet['id'], Ipv6CidrBlock=ipv6_cidr)
                changed = True
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, msg="Couldn't associate ipv6 cidr {0} to {1}".format(ipv6_cidr, subnet['id']))
        else:
            if not check_mode and wait:
                filters = ansible_dict_to_boto3_filter_list(
                    {'ipv6-cidr-block-association.state': ['associated'],
                     'vpc-id': subnet['vpc_id']}
                )
                handle_waiter(conn, module, 'subnet_exists',
                              {'SubnetIds': [subnet['id']], 'Filters': filters}, start_time)
            # Reflect the new association in the in-memory subnet dict.
            if associate_resp and associate_resp.get('Ipv6CidrBlockAssociation', {}).get('AssociationId'):
                subnet['ipv6_association_id'] = associate_resp['Ipv6CidrBlockAssociation']['AssociationId']
                subnet['ipv6_cidr_block'] = associate_resp['Ipv6CidrBlockAssociation']['Ipv6CidrBlock']
                if subnet['ipv6_cidr_block_association_set']:
                    subnet['ipv6_cidr_block_association_set'][0] = camel_dict_to_snake_dict(associate_resp['Ipv6CidrBlockAssociation'])
                else:
                    subnet['ipv6_cidr_block_association_set'].append(camel_dict_to_snake_dict(associate_resp['Ipv6CidrBlockAssociation']))
    return changed
def get_matching_subnet(conn, module, vpc_id, cidr):
    """Return the subnet with the given VPC id and IPv4 CIDR, or None."""
    filters = ansible_dict_to_boto3_filter_list({'vpc-id': vpc_id, 'cidr-block': cidr})
    try:
        subnets = get_subnet_info(describe_subnets_with_backoff(conn, Filters=filters))
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't get matching subnet")
    if subnets:
        return subnets[0]
    return None
def ensure_subnet_present(conn, module):
    """Create/update the subnet so it matches the module parameters.

    Returns an ansible result dict: {'changed': bool, 'subnet': dict}.
    In check mode on a missing subnet, an empty subnet dict is returned.
    """
    subnet = get_matching_subnet(conn, module, module.params['vpc_id'], module.params['cidr'])
    changed = False
    # Initialize start so max time does not exceed the specified wait_timeout for multiple operations
    start_time = time.time()
    if subnet is None:
        if not module.check_mode:
            subnet = create_subnet(conn, module, module.params['vpc_id'], module.params['cidr'],
                                   ipv6_cidr=module.params['ipv6_cidr'], az=module.params['az'], start_time=start_time)
        changed = True
        # Subnet will be None when check_mode is true
        if subnet is None:
            return {
                'changed': changed,
                'subnet': {}
            }
    if module.params['wait']:
        handle_waiter(conn, module, 'subnet_exists', {'SubnetIds': [subnet['id']]}, start_time)
    if module.params['ipv6_cidr'] != subnet.get('ipv6_cidr_block'):
        if ensure_ipv6_cidr_block(conn, module, subnet, module.params['ipv6_cidr'], module.check_mode, start_time):
            changed = True
    if module.params['map_public'] != subnet['map_public_ip_on_launch']:
        ensure_map_public(conn, module, subnet, module.params['map_public'], module.check_mode, start_time)
        changed = True
    if module.params['assign_instances_ipv6'] != subnet.get('assign_ipv6_address_on_creation'):
        ensure_assign_ipv6_on_create(conn, module, subnet, module.params['assign_instances_ipv6'], module.check_mode, start_time)
        changed = True
    if module.params['tags'] != subnet['tags']:
        # Tag keys/values are compared as text, so normalize both sides.
        stringified_tags_dict = dict((to_text(k), to_text(v)) for k, v in module.params['tags'].items())
        if ensure_tags(conn, module, subnet, stringified_tags_dict, module.params['purge_tags'], start_time):
            changed = True
    # Re-read to pick up the attributes modified above.
    subnet = get_matching_subnet(conn, module, module.params['vpc_id'], module.params['cidr'])
    if not module.check_mode and module.params['wait']:
        # GET calls are not monotonic for map_public_ip_on_launch and assign_ipv6_address_on_creation
        # so we only wait for those if necessary just before returning the subnet
        subnet = ensure_final_subnet(conn, module, subnet, start_time)
    return {
        'changed': changed,
        'subnet': subnet
    }
def ensure_final_subnet(conn, module, subnet, start_time):
    """Poll until map_public / assign_ipv6 attributes reflect the requested values.

    DescribeSubnets is eventually consistent for these two attributes, so
    re-check (with the matching waiter) up to 30 times, sleeping 5s between
    attempts.  Returns the last subnet snapshot read.
    """
    for rewait in range(0, 30):
        map_public_correct = False
        assign_ipv6_correct = False
        if module.params['map_public'] == subnet['map_public_ip_on_launch']:
            map_public_correct = True
        else:
            if module.params['map_public']:
                handle_waiter(conn, module, 'subnet_has_map_public', {'SubnetIds': [subnet['id']]}, start_time)
            else:
                handle_waiter(conn, module, 'subnet_no_map_public', {'SubnetIds': [subnet['id']]}, start_time)
        if module.params['assign_instances_ipv6'] == subnet.get('assign_ipv6_address_on_creation'):
            assign_ipv6_correct = True
        else:
            if module.params['assign_instances_ipv6']:
                handle_waiter(conn, module, 'subnet_has_assign_ipv6', {'SubnetIds': [subnet['id']]}, start_time)
            else:
                handle_waiter(conn, module, 'subnet_no_assign_ipv6', {'SubnetIds': [subnet['id']]}, start_time)
        if map_public_correct and assign_ipv6_correct:
            break
        time.sleep(5)
        subnet = get_matching_subnet(conn, module, module.params['vpc_id'], module.params['cidr'])
    return subnet
def ensure_subnet_absent(conn, module):
    """Delete the matching subnet if it exists; returns an ansible result dict."""
    subnet = get_matching_subnet(conn, module, module.params['vpc_id'], module.params['cidr'])
    if subnet is None:
        return {'changed': False}
    try:
        if not module.check_mode:
            conn.delete_subnet(SubnetId=subnet['id'])
            if module.params['wait']:
                handle_waiter(conn, module, 'subnet_deleted', {'SubnetIds': [subnet['id']]}, time.time())
        return {'changed': True}
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't delete subnet")
def main():
    """Module entry point: build the argument spec, connect, and dispatch on state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            az=dict(default=None, required=False),
            cidr=dict(default=None, required=True),
            ipv6_cidr=dict(default='', required=False),
            state=dict(default='present', choices=['present', 'absent']),
            tags=dict(default={}, required=False, type='dict', aliases=['resource_tags']),
            vpc_id=dict(default=None, required=True),
            map_public=dict(default=False, required=False, type='bool'),
            assign_instances_ipv6=dict(default=False, required=False, type='bool'),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(type='int', default=300, required=False),
            purge_tags=dict(default=True, type='bool')
        )
    )
    # assign_instances_ipv6=True only makes sense with an ipv6_cidr.
    required_if = [('assign_instances_ipv6', True, ['ipv6_cidr'])]
    module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True, required_if=required_if)
    # required_if does not catch an explicit empty string, so re-check here.
    if module.params.get('assign_instances_ipv6') and not module.params.get('ipv6_cidr'):
        module.fail_json(msg="assign_instances_ipv6 is True but ipv6_cidr is None or an empty string")
    if not module.botocore_at_least("1.7.0"):
        module.warn("botocore >= 1.7.0 is required to use wait_timeout for custom wait times")
    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
    connection = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_params)
    state = module.params.get('state')
    try:
        if state == 'present':
            result = ensure_subnet_present(connection, module)
        elif state == 'absent':
            result = ensure_subnet_absent(connection, module)
    except botocore.exceptions.ClientError as e:
        module.fail_json_aws(e)
    module.exit_json(**result)
# Entry point when executed directly by Ansible.
if __name__ == '__main__':
    main()
| gpl-3.0 |
chylex/Discord-History-Tracker | build.py | 1 | 7236 | # Python 3
import fileinput
import glob
import shutil
import sys
import os
import re
import distutils.dir_util
# Version identifiers substituted into the build outputs.
VERSION_SHORT = "v.31"
VERSION_FULL = VERSION_SHORT + ", released 3 April 2021"

# UglifyJS command templates.  Format arguments:
#   {0} = input file, {1} = output file, {2} = working directory,
#   {3} = comma-separated property names that must not be mangled.
EXEC_UGLIFYJS_WIN = "{2}/lib/uglifyjs.cmd --parse bare_returns --compress --mangle toplevel --mangle-props keep_quoted,reserved=[{3}] --output \"{1}\" \"{0}\""
EXEC_UGLIFYJS_AUTO = "uglifyjs --parse bare_returns --compress --mangle toplevel --mangle-props keep_quoted,reserved=[{3}] --output \"{1}\" \"{0}\""

# Command-line flags controlling this build run.
USE_UGLIFYJS = "--nominify" not in sys.argv      # minify the JS output
USE_MINIFICATION = "--nominify" not in sys.argv  # minify the CSS output
BUILD_WEBSITE = "--website" in sys.argv          # also assemble the website
CLIPBOARD_TRACKER = "--copytracker" in sys.argv  # copy tracker JS to clipboard
WORKING_DIR = os.getcwd()

# UglifyJS Setup
if os.name == "nt":
    # Windows needs the bundled .cmd wrapper; elsewhere rely on PATH.
    EXEC_UGLIFYJS = EXEC_UGLIFYJS_WIN
else:
    EXEC_UGLIFYJS = EXEC_UGLIFYJS_AUTO

if USE_UGLIFYJS and shutil.which("uglifyjs") is None:
    USE_UGLIFYJS = False
    print("Could not find 'uglifyjs', JS minification will be disabled")

if USE_UGLIFYJS:
    # Property names the minifier must keep verbatim (one per line in the file).
    with open("reserve.txt", "r") as reserved:
        RESERVED_PROPS = ",".join(line.strip() for line in reserved.readlines())
# File Utilities
def combine_files(input_pattern, output_file):
    """Concatenate all files matching *input_pattern* (sorted) into *output_file*.

    Consecutive files are separated by a blank line, and every occurrence
    of the "{{{version:full}}}" token is replaced with the full version
    string.
    """
    files_seen = 0
    with fileinput.input(sorted(glob.glob(input_pattern))) as stream:
        for line in stream:
            if stream.isfirstline():
                files_seen += 1
                # Separate files with a newline, but not before the first one.
                if files_seen > 1:
                    output_file.write("\n")
            output_file.write(line.replace("{{{version:full}}}", VERSION_FULL))
def minify_css(input_file, output_file):
    """Minify a CSS file with simple regex passes.

    When minification is disabled the input is just copied to the output
    (or left alone when input and output are the same path).  The regex
    substitutions below are order-sensitive.
    """
    if not USE_MINIFICATION:
        if input_file != output_file:
            shutil.copyfile(input_file, output_file)
        return

    with open(input_file, "r") as fin:
        css = fin.read()

    css = re.sub(r"^\s+(.+?):\s*(.+?)(?:\s*(!important))?;\n", r"\1:\2\3;", css, flags = re.M) # remove spaces after colons
    css = re.sub(r"\{\n", r"{", css, flags = re.M) # remove new lines after {
    css = re.sub(r"\n\}", r"}", css, flags = re.M) # remove new lines before }
    css = re.sub(r"\n\n", r"\n", css, flags = re.M) # remove empty lines
    css = re.sub(r";\}$", r"}", css, flags = re.M) # remove last semicolons
    css = re.sub(r"rgb\((.*?),\s*(.*?),\s*(.*?)\)", r"rgb(\1,\2,\3)", css, flags = re.M) # remove spaces after commas in rgb()
    css = re.sub(r"rgba\((.*?),\s*(.*?),\s*(.*?),\s*(.*?)\)", r"rgba(\1,\2,\3,\4)", css, flags = re.M) # remove spaces after commas in rgba()

    with open(output_file, "w") as out:
        out.write(css)
# Build System
def build_tracker_html():
    """Build the tracker bookmarklet (bld/track.js) and its HTML-escaped
    copy (bld/track.html)."""
    output_file_raw = "bld/track.js"
    output_file_html = "bld/track.html"
    output_file_tmp = "bld/track.tmp.js"
    input_pattern = "src/tracker/*.js"

    with open(output_file_raw, "w") as out:
        # Without the minifier the IIFE wrapper is added here; the minified
        # path adds its own "javascript:" wrapper below.
        if not USE_UGLIFYJS:
            out.write("(function(){\n")
        combine_files(input_pattern, out)
        if not USE_UGLIFYJS:
            out.write("})()")

    if USE_UGLIFYJS:
        os.system(EXEC_UGLIFYJS.format(output_file_raw, output_file_tmp, WORKING_DIR, RESERVED_PROPS))

        # Rewrite the raw file as a single-line javascript: bookmarklet.
        with open(output_file_raw, "w") as out:
            out.write("javascript:(function(){")
            with open(output_file_tmp, "r") as minified:
                out.write(minified.read().replace("\n", " ").replace("\r", ""))
            out.write("})()")

        os.remove(output_file_tmp)

    # Escape HTML special characters so the script can be embedded in a page.
    # Fixed: the replacements must produce HTML entities ("&amp;", "&quot;",
    # ...); the previous identity replacements left the output unescaped
    # (and the bare '"""' was not even valid Python).
    with open(output_file_raw, "r") as raw:
        script_contents = raw.read().replace("&", "&amp;").replace('"', "&quot;").replace("'", "&#39;").replace("<", "&lt;").replace(">", "&gt;")

    with open(output_file_html, "w") as out:
        out.write(script_contents)
def build_tracker_userscript():
    """Build the userscript variant of the tracker into bld/track.user.js."""
    target = "bld/track.user.js"
    sources = "src/tracker/*.js"
    template = "src/base/track.user.js"

    # The template carries the userscript metadata; the combined tracker
    # sources are spliced in at the {{{contents}}} marker.
    with open(template, "r") as base:
        parts = base.read().replace("{{{version}}}", VERSION_SHORT).split("{{{contents}}}")

    with open(target, "w") as out:
        out.write(parts[0])
        combine_files(sources, out)
        out.write(parts[1])
def build_viewer():
    """Build the standalone viewer page (bld/viewer.html) with the combined
    CSS and JS embedded in place of placeholder comments."""
    output_file = "bld/viewer.html"
    input_html = "src/viewer/index.html"

    # Combine and minify the stylesheets into a temporary file.
    input_css_pattern = "src/viewer/styles/*.css"
    tmp_css_file_combined = "bld/viewer.tmp.css"
    tmp_css_file_minified = "bld/viewer.min.css"

    with open(tmp_css_file_combined, "w") as out:
        combine_files(input_css_pattern, out)

    minify_css(tmp_css_file_combined, tmp_css_file_minified)
    os.remove(tmp_css_file_combined)

    # Combine and (optionally) minify the scripts into a temporary file.
    input_js_pattern = "src/viewer/scripts/*.js"
    tmp_js_file_combined = "bld/viewer.tmp.js"
    tmp_js_file_minified = "bld/viewer.min.js"

    with open(tmp_js_file_combined, "w") as out:
        combine_files(input_js_pattern, out)

    if USE_UGLIFYJS:
        os.system(EXEC_UGLIFYJS.format(tmp_js_file_combined, tmp_js_file_minified, WORKING_DIR, RESERVED_PROPS))
    else:
        shutil.copyfile(tmp_js_file_combined, tmp_js_file_minified)

    os.remove(tmp_js_file_combined)

    # Placeholder comment -> temp file whose contents replace it; each
    # embedded temp file is deleted right after being written out.
    tokens = {
        "/*{js}*/": tmp_js_file_minified,
        "/*{css}*/": tmp_css_file_minified
    }

    with open(output_file, "w") as out:
        with open(input_html, "r") as fin:
            for line in fin:
                token = None
                # NOTE: a template line containing a placeholder is replaced
                # entirely by the embedded file; anything else on that line
                # is dropped.
                for token in (token for token in tokens if token in line):
                    with open(tokens[token], "r") as token_file:
                        embedded = token_file.read()
                        out.write(embedded)
                    os.remove(tokens[token])
                if token is None:
                    out.write(line)
def build_website():
    """Assemble the website into bld/web and embed the freshly built
    tracker and viewer artifacts."""
    tracker_file_html = "bld/track.html"
    tracker_file_userscript = "bld/track.user.js"
    viewer_file = "bld/viewer.html"
    web_style_file = "bld/web/style.css"

    # Copy the static website template first.
    distutils.dir_util.copy_tree("web", "bld/web")

    # Inject the version string into the index page; a non-breaking space
    # keeps the version from wrapping.  (Fixed: the replacement must be the
    # HTML entity "&nbsp;" -- the previous space-for-space replace was a
    # no-op left over from entity-decoding corruption.)
    index_file = "bld/web/index.php"
    with open(index_file, "r") as index:
        index_contents = index.read()
    with open(index_file, "w") as index:
        index.write(index_contents.replace("{{{version:web}}}", VERSION_SHORT.replace(" ", "&nbsp;")))

    # Publish the built artifacts into the website's download folder.
    shutil.copyfile(tracker_file_html, "bld/web/build/track.html")
    shutil.copyfile(tracker_file_userscript, "bld/web/build/track.user.js")
    shutil.copyfile(viewer_file, "bld/web/build/viewer.html")

    # Minify the site stylesheet in place.
    minify_css(web_style_file, web_style_file)
# Build Process
os.makedirs("bld", exist_ok = True)  # ensure the output folder exists

print("Building tracker html...")
build_tracker_html()

print("Building tracker userscript...")
build_tracker_userscript()

print("Building viewer...")
build_viewer()

if BUILD_WEBSITE:
    print("Building website...")
    build_website()

if CLIPBOARD_TRACKER:
    if os.name == "nt":
        print("Copying to clipboard...")
        # 'clip' is a Windows-only utility that puts stdin on the clipboard.
        os.system("clip < bld/track.js")
    else:
        print("Clipboard is only supported on Windows")

print("Done")
| mit |
baggioss/hadoop-with-transparentcompress | src/contrib/failmon/bin/scheduler.py | 118 | 6683 | #!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Schedule FailMon execution for nodes of file hosts.list, according to
# the properties file conf/global.config.
import time
import ConfigParser
import subprocess
import threading
import random
# Shared scheduler state; defaults are overwritten by getJobs().
jobs = []            # list of Job instances to schedule
username = "user"    # ssh login name for remote execution
connections = 10     # maximum number of concurrent ssh threads
failmonDir = ""      # FailMon installation directory on the remote nodes
maxFiles = 100       # maximum number of HDFS files before a merge is needed
# This class represents a thread that connects to a set of cluster
# nodes to locally execute monitoring jobs. These jobs are specified
# as a shell command in the constructor.
class sshThread (threading.Thread):
    """Worker thread that runs the monitoring command on its assigned
    hosts, one after another, over ssh."""

    def __init__(self, threadname, username, command, failmonDir):
        threading.Thread.__init__(self)
        self.name = threadname          # thread identifier, used in log output
        self.username = username        # ssh login name
        self.command = command          # shell command to execute remotely
        self.failmonDir = failmonDir    # remote FailMon installation directory
        self.hosts = []                 # hosts this thread is responsible for

    def addHost(self, host):
        """Queue another host for this thread to service."""
        self.hosts.append(host)

    def run (self):
        # Sequentially ssh into each assigned host and run the command from
        # the FailMon directory; check_call raises if the command fails.
        for host in self.hosts:
            toRun = ["ssh", self.username + "@" + host, "cd " + self.failmonDir + " ; " + self.command]
            print "Thread", self.name, "invoking command on", host, ":\t", toRun, "...",
            subprocess.check_call(toRun)
            print "Done!"
# This class represents a monitoring job. The param member is a string
# that can be passed in the '--only' list of jobs given to the Java
# class org.apache.hadoop.contrib.failmon.RunOnce for execution on a
# node.
class Job:
    """A scheduled monitoring job.

    'param' is the job name passed in the '--only' list to failmon.sh;
    'counter' counts down (in seconds) until the job is due again.
    """

    def __init__(self, param, interval):
        self.param = param        # job name understood by failmon.sh --only
        self.interval = interval  # scheduling period in seconds
        self.counter = interval   # seconds remaining until the next run
        return

    def reset(self):
        """Restart the countdown after the job has been scheduled."""
        self.counter = self.interval
# This function reads the configuration file to get the values of the
# configuration parameters.
def getJobs(file):
    """Read the configuration file and populate the global settings and
    the list of monitoring jobs.

    A Job is registered for every '*.interval' key whose value is
    non-zero; a zero interval disables that job.
    """
    global username
    global connections
    global jobs
    global failmonDir
    global maxFiles

    conf = ConfigParser.SafeConfigParser()
    conf.read(file)

    username = conf.get("Default", "ssh.username")
    connections = int(conf.get("Default", "max.connections"))
    failmonDir = conf.get("Default", "failmon.dir")
    maxFiles = conf.get("Default", "hdfs.files.max")

    # (config key, job parameter) pairs, kept in the original order.
    # This replaces seven copy-pasted stanzas (one of which carried a
    # wrong duplicated "# CPU" comment for the disks job).
    sources = [
        ("log.hadoop.interval", "hadoopLog"),   # Hadoop log
        ("log.system.interval", "systemLog"),   # system log
        ("nics.interval", "nics"),              # network interfaces
        ("cpu.interval", "cpu"),                # CPU
        ("disks.interval", "disks"),            # disks
        ("sensors.interval", "sensors"),        # hardware sensors
        ("upload.interval", "upload"),          # HDFS upload
    ]

    for key, param in sources:
        interval = int(conf.get("Default", key))
        if interval != 0:
            jobs.append(Job(param, interval))

    return
# Compute the gcd (Greatest Common Divisor) of two integerss
def GCD(a, b):
    """Return the greatest common divisor of two integers."""
    assert isinstance(a, int)
    assert isinstance(b, int)
    # Euclid's algorithm: repeatedly replace (x, y) with (y mod x, x).
    x, y = a, b
    while x != 0:
        x, y = y % x, x
    return y
# Compute the gcd (Greatest Common Divisor) of a list of integers
def listGCD(joblist):
    """Return the GCD of the intervals of all jobs in *joblist*."""
    assert isinstance(joblist, list)
    intervals = [job.interval for job in joblist]
    if len(intervals) == 1:
        return intervals[0]
    # Fold GCD over the intervals, left to right.
    g = intervals[0]
    for value in intervals[1:]:
        g = GCD(g, value)
    return g
# Merge all failmon files created on the HDFS into a single file
def mergeFiles():
    """Ask one randomly chosen cluster node to merge the FailMon files
    stored on HDFS into a single file."""
    global username
    global failmonDir

    hostList = []
    hosts = open('./conf/hosts.list', 'r')
    for host in hosts:
        hostList.append(host.strip().rstrip())

    # Any node can perform the merge; pick one at random to spread load.
    randomHost = random.sample(hostList, 1)

    mergeCommand = "bin/failmon.sh --mergeFiles"
    toRun = ["ssh", username + "@" + randomHost[0], "cd " + failmonDir + " ; " + mergeCommand]
    print "Invoking command on", randomHost, ":\t", mergeCommand, "...",
    subprocess.check_call(toRun)
    print "Done!"

    return
# The actual scheduling is done here
def main():
    """Main scheduling loop.

    Sleeps for the GCD of all job intervals, collects the jobs whose
    countdown has expired, and runs them on every cluster host using a
    fixed-size pool of ssh threads.
    """
    getJobs("./conf/global.config")

    for job in jobs:
        print "Configuration: ", job.param, "every", job.interval, "seconds"

    # Wake up at the coarsest granularity that can still hit every interval.
    globalInterval = listGCD(jobs)

    while True :
        time.sleep(globalInterval)
        params = []

        # Decrement each job's countdown and collect the ones now due.
        for job in jobs:
            job.counter -= globalInterval
            if (job.counter <= 0):
                params.append(job.param)
                job.reset()

        if (len(params) == 0):
            continue;

        # Build the comma-separated '--only' list for failmon.sh.
        onlyStr = "--only " + params[0]
        for i in range(1, len(params)):
            onlyStr += ',' + params[i]

        command = "bin/failmon.sh " + onlyStr

        # execute on all nodes
        hosts = open('./conf/hosts.list', 'r')
        threadList = []

        # create a thread for every connection
        for i in range(0, connections):
            threadList.append(sshThread(i, username, command, failmonDir))

        # assign some hosts/connections hosts to every thread (round-robin)
        cur = 0;
        for host in hosts:
            threadList[cur].addHost(host.strip().rstrip())
            cur += 1
            if (cur == len(threadList)):
                cur = 0

        for ready in threadList:
            ready.start()

        # Wait for every ssh thread to finish before the next tick.
        for ssht in threading.enumerate():
            if ssht != threading.currentThread():
                ssht.join()

        # if an upload has been done, then maybe we need to merge the
        # HDFS files
        if "upload" in params:
            mergeFiles()

    # NOTE(review): unreachable -- the while True loop above never breaks.
    return
# Entry point: run the scheduler loop.
if __name__ == '__main__':
    main()
| apache-2.0 |
dmlc/tvm | vta/python/vta/top/vta_conv2d_transpose.py | 3 | 7429 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Conv2D_transpose operator declaration and schedule registration for VTA."""
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from tvm.topi.utils import get_const_tuple
from tvm.topi.nn.utils import get_pad_tuple
from ..environment import get_env
@autotvm.register_topi_compute("conv2d_transpose_packed.vta")
def conv2d_transpose_packed(cfg, data, kernel, strides, padding, out_dtype, output_padding=(0, 0)):
    """Packed conv2d_transpose compute declaration for VTA.

    Parameters
    ----------
    cfg : autotvm config entity
        Tuning configuration; used here only to record the FLOP count.
    data : te.Tensor
        6-D packed input; unpacked below as
        (batch, in_channel, height, width, batch_block, in_channel_block).
    kernel : te.Tensor
        6-D packed weights; unpacked below as
        (out_channel, in_channel, k_h, k_w, out_channel_block, in_channel_block).
    strides : tuple of two ints
        (stride_h, stride_w) of the transposed convolution.
    padding : int or tuple
        Forward-convolution padding, normalized via get_pad_tuple.
    out_dtype : str
        Accumulation/output dtype.
    output_padding : tuple of two ints
        Extra output size; must be (0, 0) on VTA (asserted below).

    Returns
    -------
    te.Tensor
        6-D packed output tagged "packed_conv2d_transpose".
    """
    ishape = get_const_tuple(data.shape)
    kshape = get_const_tuple(kernel.shape)
    b, c_i, i_h, i_w, t_b, t_ci = ishape
    c_o, _, k_h, k_w, t_co, t_ci = kshape
    stride_h, stride_w = strides
    opad_h, opad_w = output_padding
    # FIXME(tmoreau89): currently IR pass breaks when output padding != (0,0)
    assert opad_h == 0 and opad_w == 0, "VTA does not support output padding for now"

    # derive padding parameters: the transposed convolution is computed as a
    # regular convolution over the dilated input, so forward padding is turned
    # into its "backward" counterpart (kernel - 1 - pad) on each side.
    fpad_top, fpad_left, fpad_bottom, fpad_right = get_pad_tuple(padding, (k_h, k_w))
    bpad_top = k_h - 1 - fpad_top
    bpad_bottom = k_h - 1 - fpad_bottom + opad_h
    bpad_left = k_w - 1 - fpad_left
    bpad_right = k_w - 1 - fpad_right + opad_w

    # padding stage: dilate spatially by the stride, then zero-pad the borders
    dilated_input = topi.nn.dilate(data, [1, 1, stride_h, stride_w, 1, 1])
    data_pad = topi.nn.pad(
        dilated_input, [0, 0, bpad_top, bpad_left, 0, 0], [0, 0, bpad_bottom, bpad_right, 0, 0]
    )

    # convolution transpose stage
    out_h = (i_h - 1) * stride_h - fpad_top - fpad_bottom + k_h + opad_h
    out_w = (i_w - 1) * stride_w - fpad_left - fpad_right + k_w + opad_w
    oshape = (b, c_o, out_h, out_w, t_b, t_co)
    d_c = te.reduce_axis((0, c_i), name="d_c")
    d_h = te.reduce_axis((0, k_h), name="d_h")
    d_w = te.reduce_axis((0, k_w), name="d_w")
    d_ci = te.reduce_axis((0, t_ci), name="d_ci")
    out = te.compute(
        oshape,
        lambda i_n, i_c, i_h, i_w, j_n, j_c: te.sum(
            data_pad(i_n, d_c, i_h + d_h, i_w + d_w, j_n, d_ci).astype(out_dtype)
            * kernel[i_c, d_c, d_h, d_w, j_c, d_ci].astype(out_dtype),
            axis=[d_c, d_h, d_w, d_ci],
        ),
        tag="packed_conv2d_transpose",
        name="res",
    )

    # Record the multiply-accumulate count for autotvm (2 ops per MAC).
    cfg.add_flop(
        2
        * np.prod(topi.utils.get_const_tuple(oshape))
        * kshape[2]
        * kshape[3]
        * ishape[1]
        * ishape[-1]
    )
    return out
@autotvm.register_topi_schedule("conv2d_transpose_packed.vta")
def schedule_conv2d_transpose_packed(cfg, outs):
    """Schedule packed conv2d_transpose for VTA.

    Expects exactly one output whose producer chain contains a single
    "packed_conv2d_transpose" stage, optionally followed by broadcast /
    elementwise ops.  Returns the constructed te.Schedule.
    """
    assert len(outs) == 1
    output = outs[0]
    ewise_inputs = []
    ewise_ops = []
    conv2d_res = []
    # VTA outputs int8 results from int32 accumulators.
    assert output.dtype == "int8"
    assert output.op.input_tensors[0].dtype == "int32"

    def _traverse(op):
        # Walk the producer chain, separating elementwise ops (and their
        # placeholder inputs) from the single conv2d_transpose stage.
        if topi.tag.is_broadcast(op.tag):
            if not op.same_as(output.op):
                ewise_ops.append(op)
            for tensor in op.input_tensors:
                if isinstance(tensor.op, tvm.te.PlaceholderOp):
                    ewise_inputs.append((op, tensor))
                else:
                    _traverse(tensor.op)
        else:
            assert op.tag == "packed_conv2d_transpose"
            conv2d_res.append(op)

    _traverse(output.op)
    assert len(conv2d_res) == 1
    conv2d_stage = conv2d_res[0].output(0)
    s = te.create_schedule(output.op)

    ##### space definition begin #####
    b, c_o, x_i, x_j, _, c_i = s[conv2d_stage].op.axis
    c_i, _, _, _ = s[conv2d_stage].op.reduce_axis
    cfg.define_split("tile_b", b, num_outputs=2)
    cfg.define_split("tile_h", x_i, num_outputs=2)
    cfg.define_split("tile_w", x_j, num_outputs=2)
    cfg.define_split("tile_ci", c_i, num_outputs=2)
    cfg.define_split("tile_co", c_o, num_outputs=2)
    cfg.define_knob("oc_nthread", [1, 2])
    cfg.define_knob("h_nthread", [1, 2])
    ###### space definition end ######

    data, kernel = conv2d_stage.op.input_tensors
    # Peel off the padding stage (if any) so the real input can be cached.
    if isinstance(data.op, tvm.te.ComputeOp) and "pad" in data.op.tag:
        temp = data.op.input_tensors[0]
        pad_data = data
        data = temp
    else:
        pad_data = None

    env = get_env()

    # setup pad
    if pad_data is not None:
        cdata = pad_data
        s[pad_data].set_scope(env.inp_scope)
    else:
        cdata = s.cache_read(data, env.inp_scope, [conv2d_stage])
    ckernel = s.cache_read(kernel, env.wgt_scope, [conv2d_stage])
    s[conv2d_stage].set_scope(env.acc_scope)

    # cache read input
    cache_read_ewise = []
    for consumer, tensor in ewise_inputs:
        cache_read_ewise.append(s.cache_read(tensor, env.acc_scope, [consumer]))

    # set ewise scope: elementwise ops run on the ALU in the accumulator scope
    for op in ewise_ops:
        s[op].set_scope(env.acc_scope)
        s[op].pragma(s[op].op.axis[0], env.alu)

    # tile
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[output].op.axis
    x_co0, x_co1 = cfg["tile_co"].apply(s, output, x_co)
    x_i0, x_i1 = cfg["tile_h"].apply(s, output, x_i)
    x_j0, x_j1 = cfg["tile_w"].apply(s, output, x_j)
    s[output].reorder(x_bo, x_i0, x_co0, x_j0, x_co1, x_i1, x_j1, x_bi, x_ci)
    store_pt = x_j0

    # set all compute scopes
    s[conv2d_stage].compute_at(s[output], store_pt)
    for op in ewise_ops:
        s[op].compute_at(s[output], store_pt)
    for tensor in cache_read_ewise:
        s[tensor].compute_at(s[output], store_pt)
        s[tensor].pragma(s[tensor].op.axis[0], env.dma_copy)

    # virtual threading along output channel axes
    if cfg["oc_nthread"].val > 1:
        _, v_t = s[output].split(x_co0, factor=cfg["oc_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))

    # virtual threading along spatial rows
    if cfg["h_nthread"].val > 1:
        _, v_t = s[output].split(x_i0, factor=cfg["h_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))

    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[conv2d_stage].op.axis
    k_o, d_i, d_j, k_i = s[conv2d_stage].op.reduce_axis
    x_i, x_ii = s[conv2d_stage].split(x_i, 4)
    x_j, x_jj = s[conv2d_stage].split(x_j, 2)
    s[conv2d_stage].reorder(x_bo, k_o, x_j, x_co, x_i, x_jj, d_j, d_i, x_ii, x_bi, x_ci, k_i)

    # Unroll the small fixed-extent loops so the GEMM body is flat.
    for axis in [d_j, d_i, x_ii, x_jj]:
        s[conv2d_stage].unroll(axis)

    k_o, _ = cfg["tile_ci"].apply(s, conv2d_stage, k_o)
    s[cdata].compute_at(s[conv2d_stage], k_o)
    s[ckernel].compute_at(s[conv2d_stage], k_o)

    # Use VTA instructions: DMA loads for data/kernel, the transpose GEMM
    # pragma on the batch-block axis, and a DMA store for the result.
    s[cdata].pragma(s[cdata].op.axis[0], env.dma_copy)
    s[ckernel].pragma(s[ckernel].op.axis[0], env.dma_copy)
    s[conv2d_stage].pragma(x_bi, "conv2d_transpose_gemm")
    s[output].pragma(x_co1, env.dma_copy)
    return s
| apache-2.0 |
benoitsteiner/tensorflow-opencl | tensorflow/contrib/receptive_field/python/util/graph_compute_order.py | 25 | 2454 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library to compute order of computations in a graph.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
class GraphDefHelper(object):
  """Indexes the nodes of a GraphDef by the name they produce.

  Example:
    helper = GraphDefHelper(graph_def)
    # Node that produces given output.
    node_def = helper.output_of['conv/foo/bar']
  """

  def __init__(self, gd):
    # Map: node name -> NodeDef that produces it.
    self.output_of = {node.name: node for node in gd.node}
# pylint: disable=invalid-name
# (order, node): position in the topological order plus the NodeDef itself
# (node is None for names not produced by any node in the graph).
_NodeEntry = collections.namedtuple('NodeEntry', field_names=['order', 'node'])
def _get_computed_nodes(g, output, seen):
  """Traverses the graph in topological order.

  Args:
    g: GraphDefHelper object.
    output: current node.
    seen: map of nodes we've already traversed.
  Returns:
    order in topological sort for 'output'.
  """
  if output in seen:
    return seen[output].order

  node_def = g.output_of.get(output, None)
  if node_def is None:
    # Not produced by any node in this graph (e.g. an external input);
    # it sits at depth 0 of the topological order.
    seen[output] = _NodeEntry(0, None)
    return 0

  r = 0
  for each in node_def.input:
    # Parses name of input node: strip the leading '^' (control inputs)
    # and the ':<output index>' suffix to recover the producer's name.
    if each.startswith('^'):
      each = each[1:]
    each = each.split(':')[0]
    # Recursively computes ordering.
    new_v = _get_computed_nodes(g, each, seen)
    r = max(r, new_v + 1)

  seen[output] = _NodeEntry(r, node_def)
  return seen[output].order
def get_compute_order(graph_def):
  """Computes order of computation for a given graph.

  Args:
    graph_def: GraphDef object.
  Returns:
    map: name -> _NodeEntry(order, node)
  """
  helper = GraphDefHelper(graph_def)
  # A plain dict suffices: _get_computed_nodes always checks membership
  # before reading and assigns every entry explicitly.  The previous
  # defaultdict(_NodeEntry) was misleading -- its default factory would
  # raise TypeError if it were ever invoked, since the namedtuple
  # requires two arguments.
  seen = {}
  for each in graph_def.node:
    _get_computed_nodes(helper, each.name, seen)
  return seen
| apache-2.0 |
protomouse/Flexget | flexget/plugins/metainfo/imdb_url.py | 6 | 1287 | from __future__ import unicode_literals, division, absolute_import
import re
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('metainfo_imdb_url')
class MetainfoImdbUrl(object):
    """
    Scan entry information for imdb url.
    """

    schema = {'type': 'boolean'}

    def on_task_metainfo(self, task, config):
        """Set entry['imdb_url'] when exactly one distinct IMDB title url
        appears in the entry's description."""
        # check if disabled (value set to false)
        if 'scan_imdb' in task.config:
            if not task.config['scan_imdb']:
                return

        for entry in task.entries:
            if 'description' not in entry:
                continue
            # Dot is escaped so 'imdb.com' cannot match e.g. 'imdbXcom'.
            urls = re.findall(r'\bimdb\.com/title/tt\d+\b', entry['description'])
            if not urls:
                continue
            # Uniquify the list of urls.
            urls = list(set(urls))
            if len(urls) > 1:
                # Ambiguous: more than one distinct title mentioned, so use none.
                log.debug('Found multiple imdb urls; not using any of: %s' %
                          ' '.join(urls))
                continue
            url = ''.join(['http://www.', urls[0]])
            entry['imdb_url'] = url
            log.debug('Found imdb url in description %s' % url)
@event('plugin.register')
def register_plugin():
    # Registered as a builtin, so it runs on every task unless the task
    # config explicitly sets 'scan_imdb' to false.
    plugin.register(MetainfoImdbUrl, 'scan_imdb', builtin=True, api_ver=2)
| mit |
bryback/quickseq | genescript/Bio/Emboss/Applications.py | 1 | 53527 | # Copyright 2001-2009 Brad Chapman.
# Revisions copyright 2009-2010 by Peter Cock.
# Revisions copyright 2009 by David Winter.
# Revisions copyright 2009-2010 by Leighton Pritchard.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Code to interact with and run various EMBOSS programs.
These classes follow the AbstractCommandline interfaces for running
programs.
"""
from Bio.Application import _Option, _Switch, AbstractCommandline
class _EmbossMinimalCommandLine(AbstractCommandline):
    """Base Commandline object for EMBOSS wrappers (PRIVATE).

    This is provided for subclassing, it deals with shared options
    common to all the EMBOSS tools:

     - auto               Turn off prompts
     - stdout             Write standard output
     - filter             Read standard input, write standard output
     - options            Prompt for standard and additional values
     - debug              Write debug output to program.dbg
     - verbose            Report some/full command line options
     - help               Report command line options. More
                          information on associated and general
                          qualifiers can be found with -help -verbose
     - warning            Report warnings
     - error              Report errors
     - fatal              Report fatal errors
     - die                Report dying program messages
    """
    def __init__(self, cmd=None, **kwargs):
        assert cmd is not None
        # Switches shared by every EMBOSS command line tool.
        extra_parameters = [
            _Switch(["-auto","auto"],
                    """Turn off prompts.

                    Automatic mode disables prompting, so we recommend you set
                    this argument all the time when calling an EMBOSS tool from
                    Biopython.
                    """),
            _Switch(["-stdout","stdout"],
                    "Write standard output."),
            _Switch(["-filter","filter"],
                    "Read standard input, write standard output."),
            _Switch(["-options","options"],
                    """Prompt for standard and additional values.

                    If you are calling an EMBOSS tool from within Biopython,
                    we DO NOT recommend using this option.
                    """),
            _Switch(["-debug","debug"],
                    "Write debug output to program.dbg."),
            _Switch(["-verbose","verbose"],
                    "Report some/full command line options"),
            _Switch(["-help","help"],
                    """Report command line options.

                    More information on associated and general qualifiers can
                    be found with -help -verbose
                    """),
            _Switch(["-warning","warning"],
                    "Report warnings."),
            _Switch(["-error","error"],
                    "Report errors."),
            # Added for consistency with the class docstring, which documents
            # a 'fatal' switch supported by the EMBOSS tools.
            _Switch(["-fatal","fatal"],
                    "Report fatal errors."),
            _Switch(["-die","die"],
                    "Report dying program messages."),
            ]
        try:
            #Insert extra parameters - at the start just in case there
            #are any arguments which must come last:
            self.parameters = extra_parameters + self.parameters
        except AttributeError:
            #Should we raise an error? The subclass should have set this up!
            self.parameters = extra_parameters
        AbstractCommandline.__init__(self, cmd, **kwargs)
class _EmbossCommandLine(_EmbossMinimalCommandLine):
    """Base Commandline object for EMBOSS wrappers (PRIVATE).

    This is provided for subclassing, it deals with shared options
    common to all the EMBOSS tools plus:

     - outfile            Output filename
    """
    def __init__(self, cmd=None, **kwargs):
        assert cmd is not None
        # The only extra argument beyond the minimal set is -outfile.
        extra_parameters = [
            _Option(["-outfile","outfile"],
                    "Output filename",
                    filename=True),
            ]
        try:
            #Insert extra parameters - at the start just in case there
            #are any arguments which must come last:
            self.parameters = extra_parameters + self.parameters
        except AttributeError:
            #Should we raise an error? The subclass should have set this up!
            self.parameters = extra_parameters
        _EmbossMinimalCommandLine.__init__(self, cmd, **kwargs)

    def _validate(self):
        #Check the outfile, filter, or stdout option has been set.
        #We can't simply do this via the required flag for the outfile
        #output - this seems the simplest solution.
        if not (self.outfile or self.filter or self.stdout):
            raise ValueError("You must either set outfile (output filename), "
                             "or enable filter or stdout (output to stdout).")
        return _EmbossMinimalCommandLine._validate(self)
class Primer3Commandline(_EmbossCommandLine):
"""Commandline object for the Primer3 interface from EMBOSS.
The precise set of supported arguments depends on your version of EMBOSS.
This version accepts arguments current at EMBOSS 6.1.0, but in order to
remain backwards compatible also support the old argument names as well.
e.g. Using EMBOSS 6.1.0 or later,
>>> cline = Primer3Commandline(sequence="mysequence.fas", auto=True, hybridprobe=True)
>>> cline.explainflag = True
>>> cline.osizeopt=20
>>> cline.psizeopt=200
>>> cline.outfile = "myresults.out"
>>> cline.bogusparameter = 1967 # Invalid parameter
Traceback (most recent call last):
...
ValueError: Option name bogusparameter was not found.
>>> print cline
eprimer3 -auto -outfile=myresults.out -sequence=mysequence.fas -hybridprobe=True -psizeopt=200 -osizeopt=20 -explainflag=True
The equivalent for anyone still using an older version of EMBOSS would be:
>>> cline = Primer3Commandline(sequence="mysequence.fas", auto=True, hybridprobe=True)
>>> cline.explainflag = True
>>> cline.oligosize=20 # Old EMBOSS, instead of osizeopt
>>> cline.productosize=200 # Old EMBOSS, instead of psizeopt
>>> cline.outfile = "myresults.out"
>>> print cline
eprimer3 -auto -outfile=myresults.out -sequence=mysequence.fas -hybridprobe=True -productosize=200 -oligosize=20 -explainflag=True
"""
def __init__(self, cmd="eprimer3", **kwargs):
self.parameters = [
_Option(["-sequence","sequence"],
"Sequence to choose primers from.",
is_required=True),
_Option(["-task","task"],
"Tell eprimer3 what task to perform."),
_Option(["-hybridprobe","hybridprobe"],
"Find an internal oligo to use as a hyb probe."),
_Option(["-numreturn","numreturn"],
"Maximum number of primer pairs to return."),
_Option(["-includedregion","includedregion"],
"Subregion of the sequence in which to pick primers."),
_Option(["-target","target"],
"Sequence to target for flanking primers."),
_Option(["-excludedregion","excludedregion"],
"Regions to exclude from primer picking."),
_Option(["-forwardinput","forwardinput"],
"Sequence of a forward primer to check."),
_Option(["-reverseinput","reverseinput"],
"Sequence of a reverse primer to check."),
_Option(["-gcclamp","gcclamp"],
"The required number of Gs and Cs at the 3' of each primer."),
_Option(["-osize","osize"],
"Optimum length of a primer oligo."),
_Option(["-minsize","minsize"],
"Minimum length of a primer oligo."),
_Option(["-maxsize","maxsize"],
"Maximum length of a primer oligo."),
_Option(["-otm","otm"],
"Optimum melting temperature for a primer oligo."),
_Option(["-mintm","mintm"],
"Minimum melting temperature for a primer oligo."),
_Option(["-maxtm","maxtm"],
"Maximum melting temperature for a primer oligo."),
_Option(["-maxdifftm","maxdifftm"],
"Maximum difference in melting temperatures between "
"forward and reverse primers."),
_Option(["-ogcpercent","ogcpercent"],
"Optimum GC% for a primer."),
_Option(["-mingc","mingc"],
"Minimum GC% for a primer."),
_Option(["-maxgc","maxgc"],
"Maximum GC% for a primer."),
_Option(["-saltconc","saltconc"],
"Millimolar salt concentration in the PCR."),
_Option(["-dnaconc","dnaconc"],
"Nanomolar concentration of annealing oligos in the PCR."),
_Option(["-maxpolyx","maxpolyx"],
"Maximum allowable mononucleotide repeat length in a primer."),
#Primer length:
_Option(["-productosize","productosize"],
"""Optimum size for the PCR product (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -psizeopt
"""),
_Option(["-psizeopt", "psizeopt"],
"""Optimum size for the PCR product.
Option added in EMBOSS 6.1.0, replacing -productosize
"""),
_Option(["-productsizerange","productsizerange"],
"""Acceptable range of length for the PCR product (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -prange
"""),
_Option(["-prange", "prange"],
"""Acceptable range of length for the PCR product.
Option added in EMBOSS 6.1.0, replacing -productsizerange
"""),
#Primer temperature:
_Option(["-productotm","productotm"],
"""Optimum melting temperature for the PCR product (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -ptmopt
"""),
_Option(["-ptmopt", "ptmopt"],
"""Optimum melting temperature for the PCR product.
Option added in EMBOSS 6.1.0, replacing -productotm
"""),
_Option(["-productmintm","productmintm"],
"""Minimum allowed melting temperature for the amplicon (OBSOLETE)
Option replaced in EMBOSS 6.1.0 by -ptmmin
"""),
_Option(["-ptmmin", "ptmmin"],
"""Minimum allowed melting temperature for the amplicon."),
Option added in EMBOSS 6.1.0, replacing -productmintm
"""),
_Option(["-productmaxtm","productmaxtm"],
"""Maximum allowed melting temperature for the amplicon (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -ptmmax
"""),
_Option(["-ptmmax", "ptmmax"],
"""Maximum allowed melting temperature for the amplicon."),
Option added in EMBOSS 6.1.0, replacing -productmaxtm
"""),
#Note to self, should be -oexcludedregion not -oexcluderegion
_Option(["-oexcludedregion", "oexcludedregion"],
"""Do not pick internal oligos in this region."),
Option added in EMBOSS 6.1.0, replacing -oligoexcludedregion.
"""),
_Option(["-oligoexcludedregion", "oligoexcludedregion"],
"""Do not pick internal oligos in this region (OBSOLETE)."),
Option replaced in EMBOSS 6.1.0 by -oexcluderegion.
"""),
_Option(["-oligoinput","oligoinput"],
"Sequence of the internal oligo."),
#Oligo length:
_Option(["-oligosize","oligosize"],
"""Optimum length of internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -osizeopt.
"""),
_Option(["-osizeopt", "osizeopt"],
"""Optimum length of internal oligo.
Option added in EMBOSS 6.1.0, replaces -oligosize
"""),
_Option(["-oligominsize","oligominsize"],
"""Minimum length of internal oligo (OBSOLETE)."),
Option replaced in EMBOSS 6.1.0 by -ominsize.
"""),
_Option(["-ominsize", "ominsize"],
"""Minimum length of internal oligo."
Option added in EMBOSS 6.1.0, replaces -oligominsize
"""),
_Option(["-oligomaxsize","oligomaxsize"],
"""Maximum length of internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -omaxsize.
"""),
_Option(["-omaxsize", "omaxsize"],
"""Maximum length of internal oligo.
Option added in EMBOSS 6.1.0, replaces -oligomaxsize
"""),
#Oligo GC temperature:
_Option(["-oligotm","oligotm"],
"""Optimum melting temperature of internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -otmopt.
"""),
_Option(["-otmopt", "otmopt"],
"""Optimum melting temperature of internal oligo.
Option added in EMBOSS 6.1.0.
"""),
_Option(["-oligomintm","oligomintm"],
"""Minimum melting temperature of internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -otmmin.
"""),
_Option(["-otmmin", "otmmin"],
"""Minimum melting temperature of internal oligo.
Option added in EMBOSS 6.1.0, replacing -oligomintm
"""),
_Option(["-oligomaxtm","oligomaxtm"],
"""Maximum melting temperature of internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -otmmax.
"""),
_Option(["-otmmax", "otmmax"],
"""Maximum melting temperature of internal oligo.
Option added in EMBOSS 6.1.0, replacing -oligomaxtm
"""),
#Oligo GC percent:
_Option(["-oligoogcpercent","oligoogcpercent"],
"""Optimum GC% for internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -ogcopt.
"""),
_Option(["-ogcopt", "ogcopt"],
"""Optimum GC% for internal oligo."
Option added in EMBOSS 6.1.0, replacing -oligoogcpercent
"""),
_Option(["-oligomingc","oligomingc"],
"""Minimum GC% for internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -ogcmin.
"""),
_Option(["-ogcmin", "ogcmin"],
"""Minimum GC% for internal oligo.
Option added in EMBOSS 6.1.0, replacing -oligomingc
"""),
_Option(["-oligomaxgc","oligomaxgc"],
"""Maximum GC% for internal oligo.
Option replaced in EMBOSS 6.1.0 by -ogcmax
"""),
_Option(["-ogcmax", "ogcmax"],
"""Maximum GC% for internal oligo."),
Option added in EMBOSS 6.1.0, replacing -oligomaxgc
"""),
#Oligo salt concentration:
_Option(["-oligosaltconc","oligosaltconc"],
"""Millimolar concentration of salt in the hybridisation."),
Option replaced in EMBOSS 6.1.0 by -osaltconc
"""),
_Option(["-osaltconc", "osaltconc"],
"""Millimolar concentration of salt in the hybridisation."),
Option added in EMBOSS 6.1.0, replacing -oligosaltconc
"""),
_Option(["-oligodnaconc","oligodnaconc"],
"""Nanomolar concentration of internal oligo in the hybridisation.
Option replaced in EMBOSS 6.1.0 by -odnaconc
"""),
_Option(["-odnaconc", "odnaconc"],
"""Nanomolar concentration of internal oligo in the hybridisation.
Option added in EMBOSS 6.1.0, replacing -oligodnaconc
"""),
#Oligo self complementarity
_Option(["-oligoselfany","oligoselfany"],
"""Maximum allowable alignment score for self-complementarity (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -oanyself
"""),
_Option(["-oanyself", "oanyself"],
"""Maximum allowable alignment score for self-complementarity."),
Option added in EMBOSS 6.1.0, replacing -oligoselfany
"""),
_Option(["-oligoselfend","oligoselfend"],
"""Maximum allowable 3`-anchored global alignment score "
for self-complementarity (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -oendself
"""),
_Option(["-oendself", "oendself"],
"""Max 3`-anchored self-complementarity global alignment score.
Option added in EMBOSS 6.1.0, replacing -oligoselfend
"""),
_Option(["-oligomaxpolyx","oligomaxpolyx"],
"""Maximum length of mononucleotide repeat in internal oligo (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -opolyxmax
"""),
_Option(["-opolyxmax", "opolyxmax"],
"""Maximum length of mononucleotide repeat in internal oligo."),
Option added in EMBOSS 6.1.0, replacing -oligomaxpolyx
"""),
_Option(["-mispriminglibraryfile","mispriminglibraryfile"],
"File containing library of sequences to avoid amplifying"),
_Option(["-maxmispriming","maxmispriming"],
"Maximum allowed similarity of primers to sequences in "
"library specified by -mispriminglibrary"),
_Option(["-oligomaxmishyb","oligomaxmishyb"],
"""Maximum alignment score for hybridisation of internal oligo to
library specified by -oligomishyblibraryfile (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -omishybmax
"""),
_Option(["-omishybmax", "omishybmax"],
"""Maximum alignment score for hybridisation of internal oligo to
library specified by -mishyblibraryfile.
Option added in EMBOSS 6.1.0, replacing -oligomaxmishyb
"""),
_Option(["-oligomishyblibraryfile", "oligomishyblibraryfile"],
"""Library file of seqs to avoid internal oligo hybridisation (OBSOLETE).
Option replaced in EMBOSS 6.1.0 by -mishyblibraryfile
"""),
_Option(["-mishyblibraryfile", "mishyblibraryfile"],
"""Library file of seqs to avoid internal oligo hybridisation.
Option added in EMBOSS 6.1.0, replacing -oligomishyblibraryfile
"""),
_Option(["-explainflag","explainflag"],
"Produce output tags with eprimer3 statistics"),
]
_EmbossCommandLine.__init__(self, cmd, **kwargs)
class PrimerSearchCommandline(_EmbossCommandLine):
    """Commandline object for the primersearch program from EMBOSS.

    primersearch scans nucleotide sequences for matches to a set of
    primer pairs, allowing a configurable percentage of mismatches.
    """
    def __init__(self, cmd="primersearch", **kwargs):
        # Each _Option lists the EMBOSS flag first, followed by the Python
        # attribute alias(es) under which the value may be set; the older
        # flag spellings are kept as aliases for backward compatibility.
        self.parameters = [
            _Option(["-seqall","-sequences","sequences","seqall"],
                    "Sequence to look for the primer pairs in.",
                    is_required=True),
            #When this wrapper was written primersearch used -sequences
            #as the argument name. Since at least EMBOSS 5.0 (and
            #perhaps earlier) this has been -seqall instead.
            _Option(["-infile","-primers","primers","infile"],
                    "File containing the primer pairs to search for.",
                    filename=True,
                    is_required=True),
            #When this wrapper was written primersearch used -primers
            #as the argument name. Since at least EMBOSS 5.0 (and
            #perhaps earlier) this has been -infile instead.
            _Option(["-mismatchpercent","mismatchpercent"],
                    "Allowed percentage mismatch (any integer value, default 0).",
                    is_required=True),
            _Option(["-snucleotide","snucleotide"],
                    "Sequences are nucleotide (boolean)"),
            _Option(["-sprotein","sprotein"],
                    "Sequences are protein (boolean)"),
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FDNADistCommandline(_EmbossCommandLine):
    """Commandline object for the fdnadist program from EMBOSS.

    fdnadist is an EMBOSS wrapper for the PHYLIP program dnadist for
    calculating distance matrices from DNA sequence files.
    """
    def __init__(self, cmd = "fdnadist", **kwargs):
        # NOTE(review): several help strings below contain typos
        # ("catergories", "proportoin", "emprical"); they are runtime
        # strings shown to users, so they are left untouched here.
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "seq file to use (phylip)",
                    filename=True,
                    is_required=True),
            _Option(["-method", "method"],
                    "sub. model [f,k,j,l,s]",
                    is_required=True),
            _Option(["-gamma", "gamma"],
                    "gamma [g, i,n]"),
            _Option(["-ncategories", "ncategories"],
                    "number of rate catergories (1-9)"),
            _Option(["-rate", "rate"],
                    "rate for each category"),
            _Option(["-categories","categories"],
                    "File of substitution rate categories"),
            _Option(["-weights", "weights"],
                    "weights file"),
            _Option(["-gammacoefficient", "gammacoefficient"],
                    "value for gamma (> 0.001)"),
            _Option(["-invarfrac", "invarfrac"],
                    "proportoin of invariant sites"),
            _Option(["-ttratio", "ttratio"],
                    "ts/tv ratio"),
            _Option(["-freqsfrom", "freqsfrom"],
                    "use emprical base freqs"),
            _Option(["-basefreq", "basefreq"],
                    "specify basefreqs"),
            _Option(["-lower", "lower"],
                    "lower triangle matrix (y/N)")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FTreeDistCommandline(_EmbossCommandLine):
    """Commandline object for the ftreedist program from EMBOSS.

    ftreedist is an EMBOSS wrapper for the PHYLIP program treedist used for
    calculating distance measures between phylogenetic trees.
    """
    def __init__(self, cmd = "ftreedist", **kwargs):
        self.parameters = [
            _Option(["-intreefile", "intreefile"],
                    "tree file to score (phylip)",
                    filename=True,
                    is_required=True),
            _Option(["-dtype", "dtype"],
                    "distance type ([S]ymetric, [b]ranch score)"),
            _Option(["-pairing", "pairing"],
                    "tree pairing method ([A]djacent pairs, all [p]ossible pairs)"),
            _Option(["-style", "style"],
                    "output style - [V]erbose, [f]ill, [s]parse"),
            _Option(["-noroot", "noroot"],
                    "treat trees as rooted [N/y]"),
            _Option(["-outgrno", "outgrno"],
                    "which taxon to root the trees with (starts from 0)")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FNeighborCommandline(_EmbossCommandLine):
    """Commandline object for the fneighbor program from EMBOSS.

    fneighbor is an EMBOSS wrapper for the PHYLIP program neighbor used for
    calculating neighbor-joining or UPGMA trees from distance matrices.
    """
    def __init__(self, cmd = "fneighbor", **kwargs):
        self.parameters = [
            _Option(["-datafile", "datafile"],
                    "dist file to use (phylip)",
                    filename=True,
                    is_required=True),
            # NOTE(review): help string below has typos ("martrix", "pr");
            # left as-is because it is a runtime string shown to users.
            _Option(["-matrixtype", "matrixtype"],
                    "is martrix [S]quare pr [u]pper or [l]ower"),
            _Option(["-treetype", "treetype"],
                    "nj or UPGMA tree (n/u)"),
            _Option(["-outgrno","outgrno" ],
                    "taxon to use as OG"),
            _Option(["-jumble", "jumble"],
                    "randommise input order (Y/n)"),
            _Option(["-seed", "seed"],
                    "provide a random seed"),
            _Option(["-trout", "trout"],
                    "write tree (Y/n)"),
            _Option(["-outtreefile", "outtreefile"],
                    "filename for output tree"),
            _Option(["-progress", "progress"],
                    "print progress (Y/n)"),
            _Option(["-treeprint", "treeprint"],
                    "print tree (Y/n)")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FSeqBootCommandline(_EmbossCommandLine):
    """Commandline object for the fseqboot program from EMBOSS.

    fseqboot is an EMBOSS wrapper for the PHYLIP program seqboot used to
    pseudo-sample alignment files.
    """
    def __init__(self, cmd = "fseqboot", **kwargs):
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "seq file to sample (phylip)",
                    filename=True,
                    is_required=True),
            # NOTE(review): the attribute alias "catergories" is a typo for
            # "categories", but it is part of the public interface (users set
            # cline.catergories), so renaming it would break callers.
            _Option(["-categories", "catergories"],
                    "file of input categories"),
            _Option(["-weights", "weights"],
                    " weights file"),
            _Option(["-test", "test"],
                    "specify operation, default is bootstrap"),
            _Option(["-regular", "regular"],
                    "absolute number to resample"),
            _Option(["-fracsample", "fracsample"],
                    "fraction to resample"),
            _Option(["-rewriteformat", "rewriteformat"],
                    "output format ([P]hyilp, [n]exus, [x]ml"),
            _Option(["-seqtype", "seqtype"],
                    "output format ([D]na, [p]rotein, [r]na"),
            _Option(["-blocksize", "blocksize"],
                    "print progress (Y/n)"),
            _Option(["-reps", "reps"],
                    "how many replicates, defaults to 100)"),
            # NOTE(review): alias "jusweights" is likewise a typo for
            # "justweights"; kept for backward compatibility.
            _Option(["-justweights", "jusweights"],
                    "what to write out [D]atasets of just [w]eights"),
            _Option(["-seed", "seed"],
                    "specify random seed"),
            _Option(["-dotdiff", "dotdiff"],
                    "Use dot-differencing? [Y/n]"),]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FDNAParsCommandline(_EmbossCommandLine):
    """Commandline object for the fdnapars program from EMBOSS.

    fdnapars is an EMBOSS version of the PHYLIP program dnapars, for
    estimating trees from DNA sequences using parsimony. Calling this command
    without providing a value for the option "-intreefile" will invoke
    "interactive mode" (and as a result fail if called with subprocess) if
    "-auto" is not set to true.
    """
    def __init__(self, cmd = "fdnapars", **kwargs):
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "seq file to use (phylip)",
                    filename=True,
                    is_required=True),
            # Supplying -intreefile avoids PHYLIP's interactive mode (see
            # class docstring).
            _Option(["-intreefile", "intreefile"],
                    "Phylip tree file"),
            _Option(["-weights", "weights"],
                    "weights file"),
            _Option(["-maxtrees", "maxtrees"],
                    "max trees to save during run"),
            _Option(["-thorough", "thorough"],
                    "more thorough search (Y/n)"),
            _Option(["-rearrange", "rearrange"],
                    "Rearrange on jsut 1 best tree (Y/n)"),
            _Option(["-transversion", "transversion"],
                    "Use tranversion parsimony (y/N)"),
            _Option(["-njumble", "njumble"],
                    "number of times to randomise input order (default is 0)"),
            _Option(["-seed", "seed"],
                    "provde random seed"),
            _Option(["-outgrno", "outgrno"],
                    "Specify outgroup"),
            _Option(["-thresh", "thresh"],
                    "Use threshold parsimony (y/N)"),
            _Option(["-threshold", "threshold"],
                    "Threshold value"),
            _Option(["-trout", "trout"],
                    "Write trees to file (Y/n)"),
            _Option(["-outtreefile", "outtreefile"],
                    "filename for output tree"),
            _Option(["-dotdiff", "dotdiff"],
                    "Use dot-differencing? [Y/n]")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FProtParsCommandline(_EmbossCommandLine):
    """Commandline object for the fprotpars program from EMBOSS.

    fprotpars is an EMBOSS version of the PHYLIP program protpars, for
    estimating trees from protein sequences using parsimony. Calling this
    command without providing a value for the option "-intreefile" will invoke
    "interactive mode" (and as a result fail if called with subprocess) if
    "-auto" is not set to true.
    """
    def __init__(self, cmd = "fprotpars", **kwargs):
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "seq file to use (phylip)",
                    filename=True,
                    is_required=True),
            _Option(["-intreefile", "intreefile"],
                    "Phylip tree file to score"),
            # Unlike fdnapars, the output tree file is mandatory here.
            _Option(["-outtreefile", "outtreefile"],
                    "phylip tree output file",
                    filename=True,
                    is_required=True),
            _Option(["-weights", "weights"],
                    "weights file"),
            _Option(["-whichcode", "whichcode"],
                    "which genetic code, [U,M,V,F,Y]]"),
            _Option(["-njumble", "njumble"],
                    "number of times to randomise input order (default is 0)"),
            _Option(["-seed", "seed"],
                    "provde random seed"),
            _Option(["-outgrno", "outgrno"],
                    "Specify outgroup"),
            _Option(["-thresh", "thresh"],
                    "Use threshold parsimony (y/N)"),
            _Option(["-threshold", "threshold"],
                    "Threshold value"),
            _Option(["-trout", "trout"],
                    "Write trees to file (Y/n)"),
            _Option(["-dotdiff", "dotdiff"],
                    "Use dot-differencing? [Y/n]")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FProtDistCommandline(_EmbossCommandLine):
    """Commandline object for the fprotdist program from EMBOSS.

    fprotdist is an EMBOSS wrapper for the PHYLIP program protdist used to
    calculate distance matrices from protein sequences (the original
    docstring said "estimate trees ... using parsimony", which describes
    protpars, not protdist).
    """
    def __init__(self, cmd = "fprotdist", **kwargs):
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "seq file to use (phylip)",
                    filename=True,
                    is_required=True),
            _Option(["-ncategories", "ncategories"],
                    "number of rate catergories (1-9)"),
            _Option(["-rate", "rate"],
                    "rate for each category"),
            # NOTE(review): both the EMBOSS flag and the attribute alias are
            # misspelled ("catergories"); the real flag is presumably
            # -categories (as used by FDNADistCommandline) -- TODO confirm
            # against fprotdist's ACD definition before changing, since the
            # alias is part of this wrapper's public interface.
            _Option(["-catergories","catergories"],
                    "file of rates"),
            _Option(["-weights", "weights"],
                    "weights file"),
            _Option(["-method", "method"],
                    "sub. model [j,h,d,k,s,c]"),
            _Option(["-gamma", "gamma"],
                    "gamma [g, i,c]"),
            _Option(["-gammacoefficient", "gammacoefficient"],
                    "value for gamma (> 0.001)"),
            _Option(["-invarcoefficient", "invarcoefficient"],
                    "float for variation of substitution rate among sites"),
            _Option(["-aacateg", "aacateg"],
                    "Choose the category to use [G,C,H]"),
            _Option(["-whichcode", "whichcode"],
                    "genetic code [c,m,v,f,y]"),
            _Option(["-ease", "ease"],
                    "Pob change catergory (float between -0 and 1)"),
            _Option(["-ttratio", "ttratio"],
                    "Transition/transversion ratio (0-1)"),
            _Option(["-basefreq", "basefreq"],
                    "DNA base frequencies (space seperated list)")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FConsenseCommandline(_EmbossCommandLine):
    """Commandline object for the fconsense program from EMBOSS.

    fconsense is an EMBOSS wrapper for the PHYLIP program consense used to
    calculate consensus trees.
    """
    def __init__(self, cmd = "fconsense", **kwargs):
        self.parameters = [
            _Option(["-intreefile", "intreefile"],
                    "file with phylip trees to make consensus from",
                    filename=True,
                    is_required=True),
            _Option(["-method", "method"],
                    "consensus method [s, mr, MRE, ml]"),
            _Option(["-mlfrac", "mlfrac"],
                    "cut-off freq for a branch to appear in consensus (0.5-1.0)"),
            _Option(["-root", "root"],
                    "treat trees as rooted (YES, no)"),
            _Option(["-outgrno", "outgrno"],
                    "OTU to use as outgroup (starts from 0)"),
            # NOTE(review): this help string duplicates the one for -root;
            # -trout presumably means "write trees to output file" as in the
            # other PHYLIP wrappers above -- confirm before changing the
            # runtime string.
            _Option(["-trout", "trout"],
                    "treat trees as rooted (YES, no)"),
            _Option(["-outtreefile", "outtreefile"],
                    "Phylip tree output file (optional)")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class WaterCommandline(_EmbossCommandLine):
    """Commandline object for the water program from EMBOSS.

    Aligns two sequences (asequence/bsequence) using the supplied gap
    open and gap extension penalties; the gap penalties and both input
    sequences are mandatory.
    """
    def __init__(self, cmd="water", **kwargs):
        self.parameters = [
            _Option(["-asequence","asequence"],
                    "First sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-bsequence","bsequence"],
                    "Second sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-gapopen","gapopen"],
                    "Gap open penalty",
                    is_required=True),
            _Option(["-gapextend","gapextend"],
                    "Gap extension penalty",
                    is_required=True),
            _Option(["-datafile","datafile"],
                    "Matrix file",
                    filename=True),
            # -nobrief/-brief are true flags, hence _Switch not _Option.
            _Switch(["-nobrief", "nobrief"],
                    "Display extended identity and similarity"),
            _Switch(["-brief", "brief"],
                    "Display brief identity and similarity"),
            _Option(["-similarity","similarity"],
                    "Display percent identity and similarity"),
            _Option(["-snucleotide","snucleotide"],
                    "Sequences are nucleotide (boolean)"),
            _Option(["-sprotein","sprotein"],
                    "Sequences are protein (boolean)"),
            _Option(["-aformat","aformat"],
                    "Display output in a different specified output format")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class NeedleCommandline(_EmbossCommandLine):
    """Commandline object for the needle program from EMBOSS.

    Aligns two sequences (asequence/bsequence) with mandatory gap open
    and gap extension penalties, plus optional end-gap penalty settings
    (-endweight/-endopen/-endextend).
    """
    def __init__(self, cmd="needle", **kwargs):
        self.parameters = [
            _Option(["-asequence","asequence"],
                    "First sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-bsequence","bsequence"],
                    "Second sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-gapopen","gapopen"],
                    "Gap open penalty",
                    is_required=True),
            _Option(["-gapextend","gapextend"],
                    "Gap extension penalty",
                    is_required=True),
            _Option(["-datafile","datafile"],
                    "Matrix file",
                    filename=True),
            # NOTE(review): "Apply And gap penalties" is presumably a typo
            # for "Apply end gap penalties"; it is a runtime help string so
            # it is left unchanged here.
            _Option(["-endweight", "endweight"],
                    "Apply And gap penalties"),
            _Option(["-endopen", "endopen"],
                    "The score taken away when an end gap is created."),
            _Option(["-endextend", "endextend"],
                    "The score added to the end gap penality for each base or "
                    "residue in the end gap."),
            _Switch(["-nobrief", "nobrief"],
                    "Display extended identity and similarity"),
            _Switch(["-brief", "brief"],
                    "Display brief identity and similarity"),
            _Option(["-similarity","similarity"],
                    "Display percent identity and similarity"),
            _Option(["-snucleotide","snucleotide"],
                    "Sequences are nucleotide (boolean)"),
            _Option(["-sprotein","sprotein"],
                    "Sequences are protein (boolean)"),
            _Option(["-aformat","aformat"],
                    "Display output in a different specified output format")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class NeedleallCommandline(_EmbossCommandLine):
    """Commandline object for the needleall program from EMBOSS.

    Like needle, but aligns each sequence of one set against each of the
    other; adds -minscore filtering and an -errorfile destination on top
    of the needle-style options.
    """
    def __init__(self, cmd="needleall", **kwargs):
        self.parameters = [
            _Option(["-asequence","asequence"],
                    "First sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-bsequence","bsequence"],
                    "Second sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-gapopen","gapopen"],
                    "Gap open penalty",
                    is_required=True),
            _Option(["-gapextend","gapextend"],
                    "Gap extension penalty",
                    is_required=True),
            _Option(["-datafile","datafile"],
                    "Matrix file",
                    filename=True),
            _Option(["-minscore","minscore"],
                    "Exclude alignments with scores below this threshold score."),
            _Option(["-errorfile", "errorfile"],
                    "Error file to be written to."),
            # NOTE(review): "Apply And gap penalties" is presumably a typo
            # for "Apply end gap penalties" (same string as in
            # NeedleCommandline); left unchanged as it is a runtime string.
            _Option(["-endweight", "endweight"],
                    "Apply And gap penalties"),
            _Option(["-endopen", "endopen"],
                    "The score taken away when an end gap is created."),
            _Option(["-endextend", "endextend"],
                    "The score added to the end gap penality for each base or "
                    "residue in the end gap."),
            _Switch(["-nobrief", "nobrief"],
                    "Display extended identity and similarity"),
            _Switch(["-brief", "brief"],
                    "Display brief identity and similarity"),
            _Option(["-similarity","similarity"],
                    "Display percent identity and similarity"),
            _Option(["-snucleotide","snucleotide"],
                    "Sequences are nucleotide (boolean)"),
            _Option(["-sprotein","sprotein"],
                    "Sequences are protein (boolean)"),
            _Option(["-aformat","aformat"],
                    "Display output in a different specified output format")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class StretcherCommandline(_EmbossCommandLine):
    """Commandline object for the stretcher program from EMBOSS.

    Aligns two sequences; unlike water/needle, the gap penalties are
    validated as integers via checker_function.
    """
    def __init__(self, cmd="stretcher", **kwargs):
        self.parameters = [
            _Option(["-asequence","asequence"],
                    "First sequence to align",
                    filename=True,
                    is_required=True),
            _Option(["-bsequence","bsequence"],
                    "Second sequence to align",
                    filename=True,
                    is_required=True),
            # stretcher only accepts integer gap penalties, enforced here.
            _Option(["-gapopen","gapopen"],
                    "Gap open penalty",
                    is_required=True,
                    checker_function=lambda value: isinstance(value, int)),
            _Option(["-gapextend","gapextend"],
                    "Gap extension penalty",
                    is_required=True,
                    checker_function=lambda value: isinstance(value, int)),
            _Option(["-datafile","datafile"],
                    "Matrix file",
                    filename=True),
            _Option(["-snucleotide","snucleotide"],
                    "Sequences are nucleotide (boolean)"),
            _Option(["-sprotein","sprotein"],
                    "Sequences are protein (boolean)"),
            _Option(["-aformat","aformat"],
                    "Display output in a different specified output format")]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class FuzznucCommandline(_EmbossCommandLine):
    """Commandline object for the fuzznuc program from EMBOSS.

    fuzznuc searches nucleotide sequences for a pattern given in
    standard IUPAC one-letter codes, optionally allowing mismatches and
    searching the complementary strand.
    """

    def __init__(self, cmd="fuzznuc", **kwargs):
        # Mandatory options first: the sequence database, the pattern,
        # and the permitted mismatch count.
        mandatory = [
            _Option(["-sequence", "sequence"],
                    "Sequence database USA",
                    is_required=True),
            _Option(["-pattern", "pattern"],
                    "Search pattern, using standard IUPAC one-letter codes",
                    is_required=True),
            _Option(["-mismatch", "mismatch"],
                    "Number of mismatches",
                    is_required=True),
        ]
        # Optional search/report tweaks.
        optional = [
            _Option(["-complement", "complement"],
                    "Search complementary strand"),
            _Option(["-rformat", "rformat"],
                    "Specify the report format to output in."),
        ]
        self.parameters = mandatory + optional
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class Est2GenomeCommandline(_EmbossCommandLine):
    """Commandline object for the est2genome program from EMBOSS.

    Aligns an EST sequence against a genomic sequence, with explicit
    scoring for introns and donor/acceptor splice sites.
    """
    def __init__(self, cmd="est2genome", **kwargs):
        self.parameters = [
            _Option(["-est","est"],
                    "EST sequence(s)",
                    is_required=True),
            _Option(["-genome","genome"],
                    "Genomic sequence",
                    is_required=True),
            _Option(["-match","match"],
                    "Score for matching two bases"),
            _Option(["-mismatch","mismatch"],
                    "Cost for mismatching two bases"),
            _Option(["-gappenalty","gappenalty"],
                    "Cost for deleting a single base in either sequence, "
                    "excluding introns"),
            _Option(["-intronpenalty","intronpenalty"],
                    "Cost for an intron, independent of length."),
            _Option(["-splicepenalty","splicepenalty"],
                    "Cost for an intron, independent of length "
                    "and starting/ending on donor-acceptor sites"),
            _Option(["-minscore","minscore"],
                    "Exclude alignments with scores below this threshold score."),
            _Option(["-reverse","reverse"],
                    "Reverse the orientation of the EST sequence"),
            _Option(["-splice","splice"],
                    "Use donor and acceptor splice sites."),
            _Option(["-mode","mode"],
                    "This determines the comparion mode. 'both', 'forward' "
                    "'reverse'"),
            _Option(["-best","best"],
                    "You can print out all comparisons instead of just the best"),
            _Option(["-space","space"],
                    "for linear-space recursion."),
            _Option(["-shuffle","shuffle"],
                    "Shuffle"),
            _Option(["-seed","seed"],
                    "Random number seed"),
            _Option(["-align","align"],
                    "Show the alignment."),
            _Option(["-width","width"],
                    "Alignment width")
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class ETandemCommandline(_EmbossCommandLine):
    """Commandline object for the etandem program from EMBOSS.

    Searches a sequence for tandem repeats whose unit size lies within
    the mandatory -minrepeat/-maxrepeat window.
    """

    def __init__(self, cmd="etandem", **kwargs):
        # The input sequence and the repeat-size window are mandatory.
        params = [
            _Option(["-sequence", "sequence"],
                    "Sequence",
                    filename=True,
                    is_required=True),
            _Option(["-minrepeat", "minrepeat"],
                    "Minimum repeat size",
                    is_required=True),
            _Option(["-maxrepeat", "maxrepeat"],
                    "Maximum repeat size",
                    is_required=True),
        ]
        # Optional scoring and reporting settings.
        for names, description in (
                (["-threshold", "threshold"], "Threshold score"),
                (["-mismatch", "mismatch"], "Allow N as a mismatch"),
                (["-uniform", "uniform"], "Allow uniform consensus"),
                (["-rformat", "rformat"], "Output report format"),
        ):
            params.append(_Option(names, description))
        self.parameters = params
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class EInvertedCommandline(_EmbossCommandLine):
    """Commandline object for the einverted program from EMBOSS.

    Searches a sequence for inverted repeats using the supplied gap,
    match and mismatch scores against a minimum score threshold.
    """
    def __init__(self, cmd="einverted", **kwargs):
        self.parameters = [
            _Option(["-sequence","sequence"],
                    "Sequence",
                    filename=True,
                    is_required=True),
            # NOTE(review): filename=True on a numeric gap penalty looks like
            # a copy-paste error from the -sequence option above -- confirm
            # against the einverted ACD definition before removing it.
            _Option(["-gap","gap"],
                    "Gap penalty",
                    filename=True,
                    is_required=True),
            _Option(["-threshold","threshold"],
                    "Minimum score threshold",
                    is_required=True),
            _Option(["-match","match"],
                    "Match score",
                    is_required=True),
            _Option(["-mismatch","mismatch"],
                    "Mismatch score",
                    is_required=True),
            _Option(["-maxrepeat","maxrepeat"],
                    "Maximum separation between the start and end of repeat"),
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class PalindromeCommandline(_EmbossCommandLine):
    """Commandline object for the palindrome program from EMBOSS.

    Searches a sequence for palindromic (inverted) repeats; all of the
    length/gap/mismatch/overlap settings are mandatory.
    """
    def __init__(self, cmd="palindrome", **kwargs):
        self.parameters = [
            _Option(["-sequence","sequence"],
                    "Sequence",
                    filename=True,
                    is_required=True),
            _Option(["-minpallen","minpallen"],
                    "Minimum palindrome length",
                    is_required=True),
            _Option(["-maxpallen","maxpallen"],
                    "Maximum palindrome length",
                    is_required=True),
            _Option(["-gaplimit","gaplimit"],
                    "Maximum gap between repeats",
                    is_required=True),
            _Option(["-nummismatches","nummismatches"],
                    "Number of mismatches allowed",
                    is_required=True),
            _Option(["-overlap","overlap"],
                    "Report overlapping matches",
                    is_required=True),
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class TranalignCommandline(_EmbossCommandLine):
    """Commandline object for the tranalign program from EMBOSS.

    Produces an aligned set of nucleotide sequences guided by the
    corresponding protein sequence alignment.
    """

    def __init__(self, cmd="tranalign", **kwargs):
        # All three file options are mandatory; the genetic code table
        # is optional.
        nucleotide_input = _Option(["-asequence", "asequence"],
                                   "Nucleotide sequences to be aligned.",
                                   filename=True,
                                   is_required=True)
        protein_alignment = _Option(["-bsequence", "bsequence"],
                                    "Protein sequence alignment",
                                    filename=True,
                                    is_required=True)
        output_file = _Option(["-outseq", "outseq"],
                              "Output sequence file.",
                              filename=True,
                              is_required=True)
        genetic_code = _Option(["-table", "table"],
                               "Code to use")
        self.parameters = [nucleotide_input, protein_alignment,
                           output_file, genetic_code]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class DiffseqCommandline(_EmbossCommandLine):
    """Commandline object for the diffseq program from EMBOSS.

    Compares two closely related sequences and writes the differing
    regions as feature tables for each input (-aoutfeat/-boutfeat).
    """
    def __init__(self, cmd="diffseq", **kwargs):
        self.parameters = [
            _Option(["-asequence","asequence"],
                    "First sequence to compare",
                    filename=True,
                    is_required=True),
            _Option(["-bsequence","bsequence"],
                    "Second sequence to compare",
                    filename=True,
                    is_required=True),
            _Option(["-wordsize","wordsize"],
                    "Word size to use for comparisons (10 default)",
                    is_required=True),
            _Option(["-aoutfeat","aoutfeat"],
                    "File for output of first sequence's features",
                    filename=True,
                    is_required=True),
            _Option(["-boutfeat","boutfeat"],
                    "File for output of second sequence's features",
                    filename=True,
                    is_required=True),
            _Option(["-rformat","rformat"],
                    "Output report file format")
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
class IepCommandline(_EmbossCommandLine):
    """Commandline for EMBOSS iep: calculated isoelectric point and charge.

    Example:

    >>> from Bio.Emboss.Applications import IepCommandline
    >>> iep_cline = IepCommandline(sequence="proteins.faa",
    ...                            outfile="proteins.txt")
    >>> print iep_cline
    iep -outfile=proteins.txt -sequence=proteins.faa

    You would typically run the command line with iep_cline() or via the
    Python subprocess module, as described in the Biopython tutorial.
    """
    def __init__(self, cmd="iep", **kwargs):
        self.parameters = [
            _Option(["-sequence","sequence"],
                    "Protein sequence(s) filename",
                    filename=True,
                    is_required=True),
            _Option(["-amino","amino"],
                    """Number of N-termini

                    Integer 0 (default) or more.
                    """),
            _Option(["-carboxyl","carboxyl"],
                    """Number of C-termini

                    Integer 0 (default) or more.
                    """),
            _Option(["-lysinemodified","lysinemodified"],
                    """Number of modified lysines

                    Integer 0 (default) or more.
                    """),
            _Option(["-disulphides","disulphides"],
                    """Number of disulphide bridges

                    Integer 0 (default) or more.
                    """),
            #Should we implement the -termini switch as well?
            _Option(["-notermini","notermini"],
                    "Exclude (True) or include (False) charge at N and C terminus."),
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
#seqret uses -outseq, not -outfile, so use the base class:
class SeqretCommandline(_EmbossMinimalCommandLine):
    """Commandline object for the seqret program from EMBOSS.

    This tool allows you to interconvert between different sequence file
    formats (e.g. GenBank to FASTA). Combining Biopython's Bio.SeqIO module
    with seqret using a suitable intermediate file format can allow you to
    read/write to an even wider range of file formats.

    This wrapper currently only supports the core functionality, things like
    feature tables (in EMBOSS 6.1.0 onwards) are not yet included.
    """
    def __init__(self, cmd="seqret", **kwargs):
        self.parameters = [
            _Option(["-sequence","sequence"],
                    "Input sequence(s) filename",
                    filename=True),
            _Option(["-outseq","outseq"],
                    "Output sequence file.",
                    filename=True),
            _Option(["-sformat","sformat"],
                    "Input sequence(s) format (e.g. fasta, genbank)"),
            _Option(["-osformat","osformat"],
                    "Output sequence(s) format (e.g. fasta, genbank)"),
            ]
        _EmbossMinimalCommandLine.__init__(self, cmd, **kwargs)

    def _validate(self):
        """Check that an output and an input destination have been chosen.

        Neither -sequence nor -outseq can simply be flagged is_required,
        because the filter/stdout/stdin modes are valid alternatives, so
        the cross-option check is done here instead.
        """
        if not (self.outseq or self.filter or self.stdout):
            # Message fixed: the output option is outseq, not outfile.
            raise ValueError("You must either set outseq (output filename), "
                             "or enable filter or stdout (output to stdout).")
        # Bug fix: this previously read self.stdint (a typo for stdin),
        # which raised AttributeError whenever no input sequence was set.
        if not (self.sequence or self.filter or self.stdin):
            raise ValueError("You must either set sequence (input filename), "
                             "or enable filter or stdin (input from stdin).")
        return _EmbossMinimalCommandLine._validate(self)
class SeqmatchallCommandline(_EmbossCommandLine):
    """Commandline object for the seqmatchall program from EMBOSS.

    e.g.

    >>> cline = SeqmatchallCommandline(sequence="opuntia.fasta", outfile="opuntia.txt")
    >>> cline.auto = True
    >>> cline.wordsize = 18
    >>> cline.aformat = "pair"
    >>> print cline
    seqmatchall -auto -outfile=opuntia.txt -sequence=opuntia.fasta -wordsize=18 -aformat=pair
    """
    def __init__(self, cmd="seqmatchall", **kwargs):
        self.parameters = [
            _Option(["-sequence", "sequence"],
                    "Readable set of sequences",
                    filename=True,
                    is_required=True),
            _Option(["-wordsize", "wordsize"],
                    "Word size (Integer 2 or more, default 4)"),
            _Option(["-aformat","aformat"],
                    "Display output in a different specified output format"),
            ]
        _EmbossCommandLine.__init__(self, cmd, **kwargs)
def _test():
    """Run the Bio.Emboss.Applications module doctests.

    Returns the doctest.TestResults named tuple so callers can inspect
    the failure count; the previous version silently discarded it.
    """
    import doctest
    return doctest.testmod()
if __name__ == "__main__":
    # Run the module doctests when executed as a script.
    _test()
| mit |
interlegis/sigi | sigi/apps/contatos/migrations/0002_auto_20151104_0810.py | 1 | 2409 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import sigi.apps.utils
class Migration(migrations.Migration):
    """Introduce Mesorregiao/Microrregiao models keyed by IBGE codes.

    Replaces Municipio's raw integer region-code columns with a nullable
    foreign key to the new Microrregiao model.
    """

    dependencies = [
        ('contatos', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Mesorregiao',
            fields=[
                # The official IBGE code doubles as the primary key.
                ('codigo_ibge', models.PositiveIntegerField(help_text='C\xf3digo da mesorregi\xe3o segundo o IBGE', unique=True, serialize=False, verbose_name='C\xf3digo IBGE', primary_key=True)),
                ('nome', models.CharField(max_length=100, verbose_name='Nome mesorregi\xe3o')),
                # Denormalized search column maintained from 'nome'.
                ('search_text', sigi.apps.utils.SearchField(field_names=[b'nome'], editable=False)),
                ('uf', models.ForeignKey(verbose_name='UF', to='contatos.UnidadeFederativa')),
            ],
            options={
                'ordering': ('uf', 'nome'),
                'verbose_name': 'Mesorregi\xe3o',
                'verbose_name_plural': 'Mesorregi\xf5es',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Microrregiao',
            fields=[
                ('codigo_ibge', models.PositiveIntegerField(help_text='C\xf3digo da microrregi\xe3o segundo o IBGE', unique=True, serialize=False, verbose_name='C\xf3digo IBGE', primary_key=True)),
                ('nome', models.CharField(max_length=100, verbose_name='Nome microrregi\xe3o')),
                ('search_text', sigi.apps.utils.SearchField(field_names=[b'nome'], editable=False)),
                ('mesorregiao', models.ForeignKey(to='contatos.Mesorregiao')),
            ],
            options={
                'ordering': ('mesorregiao', 'nome'),
                'verbose_name': 'Microrregi\xe3o',
                'verbose_name_plural': 'Microrregi\xf5es',
            },
            bases=(models.Model,),
        ),
        # The raw code columns are superseded by the FK added below.
        migrations.RemoveField(
            model_name='municipio',
            name='codigo_mesorregiao',
        ),
        migrations.RemoveField(
            model_name='municipio',
            name='codigo_microrregiao',
        ),
        # blank/null=True: existing Municipio rows need not reference a
        # Microrregiao yet.
        migrations.AddField(
            model_name='municipio',
            name='microrregiao',
            field=models.ForeignKey(verbose_name='Microrregi\xe3o', blank=True, to='contatos.Microrregiao', null=True),
            preserve_default=True,
        ),
    ]
| gpl-2.0 |
frreiss/tensorflow-fred | tensorflow/python/data/kernel_tests/from_tensors_test.py | 2 | 11615 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.Dataset.from_tensors()."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.framework import combinations
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import test
class FromTensorsTest(test_base.DatasetTestBase, parameterized.TestCase):
  """Tests for datasets created with `tf.data.Dataset.from_tensors()`."""

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensors(self):
    """Test a dataset that represents a single tuple of tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0))

    dataset = dataset_ops.Dataset.from_tensors(components)

    # Static shapes of the single element must match the input arrays.
    self.assertEqual(
        [c.shape for c in components],
        nest.flatten(dataset_ops.get_legacy_output_shapes(dataset)))

    self.assertDatasetProduces(dataset, expected_output=[components])

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsDataset(self):
    """Test a dataset that represents a dataset."""
    # A dataset can itself be an element; flat_map unwraps it.
    dataset = dataset_ops.Dataset.from_tensors(dataset_ops.Dataset.range(10))
    dataset = dataset.flat_map(lambda x: x)
    self.assertDatasetProduces(dataset, expected_output=range(10))

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsTensorArray(self):
    """Test a dataset that represents a TensorArray."""
    components = (
        tensor_array_ops.TensorArray(dtypes.float32, element_shape=(), size=2)
        .unstack([1.0, 2.0]))

    dataset = dataset_ops.Dataset.from_tensors(components)

    self.assertDatasetProduces(
        dataset, expected_output=[[1.0, 2.0]], requires_initialization=True)

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsSparse(self):
    """Test a dataset that represents a single tuple of sparse tensors."""
    components = (sparse_tensor.SparseTensorValue(
        indices=np.array([[0]]),
        values=np.array([0]),
        dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))

    dataset = dataset_ops.Dataset.from_tensors(components)

    # For sparse values the legacy output shape is the dense shape.
    self.assertEqual(
        [tensor_shape.TensorShape(c.dense_shape) for c in components],
        [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    self.assertDatasetProduces(dataset, expected_output=[components])

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsMixed(self):
    """Test a dataset that represents a single tuple of dense and sparse tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0]]),
                      values=np.array([0]),
                      dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))

    dataset = dataset_ops.Dataset.from_tensors(components)

    self.assertEqual([
        tensor_shape.TensorShape(c.dense_shape)
        if sparse_tensor.is_sparse(c) else c.shape for c in components
    ], [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])

    self.assertDatasetProduces(dataset, expected_output=[components])

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsRagged(self):
    components = (
        ragged_factory_ops.constant_value([[[0]], [[1]], [[2]]]),
        ragged_factory_ops.constant_value([[[3]], [[4]], [[5]]]),
    )

    dataset = dataset_ops.Dataset.from_tensors(components)

    self.assertDatasetProduces(dataset, expected_output=[components])

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsMixedRagged(self):
    # Dense, sparse and ragged values combined in one element.
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0]]),
                      values=np.array([0]),
                      dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])),
                  ragged_factory_ops.constant_value([[[0]], [[1]], [[2]]]))

    dataset = dataset_ops.Dataset.from_tensors(components)

    self.assertDatasetProduces(dataset, expected_output=[components])

  # In graph mode the batch dimension is unknown (None); in eager mode the
  # single batch of size 1 gives a static leading dimension of 1.
  @combinations.generate(
      combinations.combine(
          tf_api_version=[1],
          mode=["graph"],
          components=(np.array([1, 2, 3], dtype=np.int64),
                      (np.array([4., 5.]), np.array(
                          [6., 7.])), np.array([8, 9, 10], dtype=np.int64)),
          expected_shapes=[[[None, 3], [None, 3], [None, 2], [None, 2]]]) +
      combinations.combine(
          tf_api_version=[1],
          mode=["eager"],
          components=(np.array([1, 2, 3], dtype=np.int64),
                      (np.array([4., 5.]), np.array(
                          [6., 7.])), np.array([8, 9, 10], dtype=np.int64)),
          expected_shapes=[[[1, 3], [1, 3], [1, 2], [1, 2]]]))
  def testNestedStructure(self, components, expected_shapes):
    dataset = dataset_ops.Dataset.from_tensors(components)
    dataset = dataset.map(lambda x, y, z: ((x, z), (y[0], y[1])))

    dataset = dataset.flat_map(
        lambda x, y: dataset_ops.Dataset.from_tensors(
            ((x[0], x[1]), (y[0], y[1])))).batch(32)

    get_next = self.getNext(dataset)
    (w, x), (y, z) = get_next()
    self.assertEqual(dtypes.int64, w.dtype)
    self.assertEqual(dtypes.int64, x.dtype)
    self.assertEqual(dtypes.float64, y.dtype)
    self.assertEqual(dtypes.float64, z.dtype)
    self.assertEqual(expected_shapes, [
        w.shape.as_list(),
        x.shape.as_list(),
        y.shape.as_list(),
        z.shape.as_list()
    ])

    # A freshly created iterator over the same dataset must report the same
    # dtypes and shapes.
    get_next = self.getNext(dataset)
    (w, x), (y, z) = get_next()
    self.assertEqual(dtypes.int64, w.dtype)
    self.assertEqual(dtypes.int64, x.dtype)
    self.assertEqual(dtypes.float64, y.dtype)
    self.assertEqual(dtypes.float64, z.dtype)
    self.assertEqual(expected_shapes, [
        w.shape.as_list(),
        x.shape.as_list(),
        y.shape.as_list(),
        z.shape.as_list()
    ])

  @combinations.generate(test_base.default_test_combinations())
  def testNestedDict(self):
    # Nested dict structure must be preserved in output types/shapes.
    components = {"a": {"aa": 1, "ab": [2.0, 2.0]}, "b": [3, 3, 3]}
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual(dtypes.int32,
                     dataset_ops.get_legacy_output_types(dataset)["a"]["aa"])
    self.assertEqual(dtypes.float32,
                     dataset_ops.get_legacy_output_types(dataset)["a"]["ab"])
    self.assertEqual(dtypes.int32,
                     dataset_ops.get_legacy_output_types(dataset)["b"])
    self.assertEqual([],
                     dataset_ops.get_legacy_output_shapes(dataset)["a"]["aa"])
    self.assertEqual([2],
                     dataset_ops.get_legacy_output_shapes(dataset)["a"]["ab"])
    self.assertEqual([3],
                     dataset_ops.get_legacy_output_shapes(dataset)["b"])

  @combinations.generate(test_base.default_test_combinations())
  def testNonSequenceNestedStructure(self):
    components = np.array([1, 2, 3], dtype=np.int64)

    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    dataset = dataset.filter(
        lambda x: math_ops.reduce_all(math_ops.equal(x, components)))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    dataset = dataset.map(lambda x: array_ops.stack([x, x]))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([2, 3], dataset_ops.get_legacy_output_shapes(dataset))

    dataset = dataset.flat_map(
        lambda x: dataset_ops.Dataset.from_tensor_slices(x))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    get_next = self.getNext(dataset)
    self.assertEqual(dtypes.int64, get_next().dtype)
    self.assertEqual([3], get_next().shape)

  # TODO(b/121264236): needs mechanism for multiple device in eager mode.
  @combinations.generate(test_base.graph_only_combinations())
  def testSplitPipeline(self):
    with session.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:

      dataset = dataset_ops.Dataset.from_tensors(0)

      # Define a pipeline that attempts to use variables on two
      # different devices.
      #
      # Initialize the variables before creating to iterator, to avoid the
      # placement algorithm overriding the DT_RESOURCE colocation constraints.
      with ops.device("/cpu:0"):
        var_0 = resource_variable_ops.ResourceVariable(initial_value=1)
        dataset = dataset.map(lambda x: x + var_0.read_value())
        sess.run(var_0.initializer)

      with ops.device("/cpu:1"):
        var_1 = resource_variable_ops.ResourceVariable(initial_value=1)
        dataset = dataset.map(lambda x: x + var_1.read_value())
        sess.run(var_1.initializer)

      iterator = dataset_ops.make_initializable_iterator(dataset)
      sess.run(iterator.initializer)

      # 0 + var_0 (1) + var_1 (1) == 2.
      self.assertEqual(sess.run(iterator.get_next()), 2)

  @combinations.generate(test_base.default_test_combinations())
  def testDatasetInputSerialization(self):
    # A dataset containing a dataset must survive a graph round trip.
    dataset = dataset_ops.Dataset.range(100)
    dataset = dataset_ops.Dataset.from_tensors(dataset).flat_map(lambda x: x)
    dataset = self.graphRoundTrip(dataset)
    self.assertDatasetProduces(dataset, range(100))
# Standard TensorFlow test runner entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
tacaswell/bokeh | tests/integration/webserver.py | 21 | 3888 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Taken from
# https://github.com/SeleniumHQ/selenium/blob/52e9d6407248bce5de2b6a73103a50bb0e670c1f/py/test/selenium/webdriver/common/webserver.py
# with small modifications
"""A simple web server for testing purpose.
It serves the testing html pages that are needed by the webdriver unit tests."""
import logging
import os
import socket
import threading
from io import open
try:
from urllib import request as urllib_request
except ImportError:
import urllib as urllib_request
try:
from http.server import BaseHTTPRequestHandler, HTTPServer
except ImportError:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
LOGGER = logging.getLogger(__name__)

# Directory that holds the HTML fixtures served during the tests; the pages
# are expected to live next to this module.  abspath() makes this robust
# when __file__ is a relative path (dirname of a bare filename is "").
HTML_ROOT = os.path.dirname(os.path.abspath(__file__))
if not os.path.isdir(HTML_ROOT):
    # Bug fix: the original message concatenated the undefined name
    # WEBDRIVER, so reporting the problem itself raised NameError; only
    # reference names that actually exist.
    message = ("Can't find 'common_web' directory, try setting WEBDRIVER"
               " environment variable. HTML_ROOT: " + HTML_ROOT)
    LOGGER.error(message)
    assert 0, message

DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 8000
class HtmlOnlyHandler(BaseHTTPRequestHandler):
    """Http handler that serves files below HTML_ROOT as text/html."""

    def do_GET(self):
        """GET method handler.

        Serves the requested file (relative to HTML_ROOT, query string
        stripped) as UTF-8 encoded text/html, or responds 404 if the file
        cannot be read.
        """
        # Computed before the try block so the error path can always
        # reference it (previously `path` could be unbound in `except`).
        path = self.path[1:].split('?')[0]
        try:
            # Context manager guarantees the file handle is closed even if
            # one of the send_* calls fails (the original leaked it then).
            with open(os.path.join(HTML_ROOT, path), 'r',
                      encoding='latin-1') as html:
                content = html.read()
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(content.encode('utf-8'))
        except IOError:
            self.send_error(404, 'File Not Found: %s' % path)

    def log_message(self, format, *args):
        """Override default to avoid trashing stderr"""
        pass
class SimpleWebServer(object):
    """A very basic web server."""

    def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT):
        """Bind an HTTPServer to the first free port at or above ``port``.

        The bound host/port are stored on the instance; serving starts only
        when start() is called.
        """
        self.stop_serving = False
        # (The original had no-op ``host = host`` / ``port = port``
        # assignments here; they were dead code and have been removed.)
        while True:
            try:
                self.server = HTTPServer(
                    (host, port), HtmlOnlyHandler)
                self.host = host
                self.port = port
                break
            except socket.error:
                # Lazy %-style logging args, and fixed wording of the
                # previously garbled message ("trying to next one").
                LOGGER.debug("port %d is in use, trying the next one", port)
                port += 1

        self.thread = threading.Thread(target=self._run_web_server)

    def _run_web_server(self):
        """Runs the server loop."""
        LOGGER.debug("web server started")
        while not self.stop_serving:
            self.server.handle_request()
        self.server.server_close()

    def start(self):
        """Starts the server."""
        self.thread.start()

    def stop(self):
        """Stops the server."""
        self.stop_serving = True
        try:
            # This is to force stop the server loop: handle_request() blocks
            # until a request arrives, so issue one dummy request.
            urllib_request.URLopener().open("http://%s:%d" % (self.host, self.port))
        except IOError:
            pass
        LOGGER.info("Shutting down the webserver")
        self.thread.join()

    def where_is(self, path):
        """Return the full URL under which *path* is served."""
        return "http://%s:%d/%s" % (self.host, self.port, path)
| bsd-3-clause |
dsiddharth/access-keys | keystone/tests/test_cert_setup.py | 2 | 3191 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from keystone.common import openssl
from keystone import exception
from keystone import tests
from keystone.tests import default_fixtures
from keystone import token
# Scratch directory under the test tree that holds the SSL/PKI material
# generated during a test run (removed again by CertSetupTestCase cleanup).
SSLDIR = tests.dirs.tests('ssl')
CONF = tests.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
# Certificates and private keys use the conventional certs/private split.
CERTDIR = os.path.join(SSLDIR, 'certs')
KEYDIR = os.path.join(SSLDIR, 'private')
class CertSetupTestCase(tests.TestCase):
    """Exercises generation of PKI (token signing) and SSL certificates."""

    def setUp(self):
        super(CertSetupTestCase, self).setUp()
        ca_cert = os.path.join(CERTDIR, 'ca.pem')
        ca_key = os.path.join(CERTDIR, 'cakey.pem')
        # Point both the token-signing and SSL config groups at the
        # scratch SSL directory created for this test run.
        self.opt_in_group(
            'signing',
            certfile=os.path.join(CERTDIR, 'signing_cert.pem'),
            ca_certs=ca_cert,
            ca_key=ca_key,
            keyfile=os.path.join(KEYDIR, 'signing_key.pem'))
        self.opt_in_group(
            'ssl',
            ca_certs=ca_cert,
            ca_key=ca_key,
            certfile=os.path.join(CERTDIR, 'keystone.pem'),
            keyfile=os.path.join(KEYDIR, 'keystonekey.pem'))
        self.load_backends()
        self.load_fixtures(default_fixtures)
        self.controller = token.controllers.Auth()

        def _remove_ssldir():
            try:
                shutil.rmtree(SSLDIR)
            except OSError:
                pass

        self.addCleanup(_remove_ssldir)

    def test_can_handle_missing_certs(self):
        self.opt_in_group('signing', certfile='invalid')
        user = {
            'id': 'fake1',
            'name': 'fake1',
            'password': 'fake1',
            'domain_id': DEFAULT_DOMAIN_ID,
        }
        body_dict = {
            'passwordCredentials': {
                'userId': user['id'],
                'password': user['password'],
            },
        }
        self.identity_api.create_user(user['id'], user)
        # Authentication must fail cleanly (UnexpectedError) rather than
        # crash when the signing certificate path is bogus.
        self.assertRaises(exception.UnexpectedError,
                          self.controller.authenticate,
                          {}, body_dict)

    def test_create_pki_certs(self):
        openssl.ConfigurePKI(None, None).run()
        for generated in (CONF.signing.certfile,
                          CONF.signing.ca_certs,
                          CONF.signing.keyfile):
            self.assertTrue(os.path.exists(generated))

    def test_create_ssl_certs(self):
        openssl.ConfigureSSL(None, None).run()
        for generated in (CONF.ssl.ca_certs,
                          CONF.ssl.certfile,
                          CONF.ssl.keyfile):
            self.assertTrue(os.path.exists(generated))
| apache-2.0 |
vinoth3v/In | In/core/context.py | 1 | 13710 | import greenlet
import wsgiref, wsgiref.headers
import datetime
import logging
#import In.nabar
import In.core.response
import In.core.action as action
from In.core.asset import Asset
from In.core.cookie import Cookie
from In.html.menu import PageMenuTab
class ContextEnd(RuntimeError):
    '''Context has been explicitly ended.

    Raised by Context.request_end(); caught in Context.run(), which then
    invokes the __context_end__ hook.
    '''
class ContextRedirect(RuntimeError):
    '''Raised to signal that the current request must be redirected.

    Carries the redirect target path, an optional HTTP status and whether
    AJAX requests should be redirected as well; Context.run() catches it
    and dispatches the __context_redirect__ hook.
    '''
    def __init__(self, path, status = None, ajax_redirect = True, **args):
        super().__init__(**args)
        self.path = path
        self.status = status
        self.ajax_redirect = ajax_redirect
class ContextBadRequest(RuntimeError):
    '''Context request is bad.

    Raised by Context.bad_request(); caught in Context.run(), which then
    invokes the __context_bad_request__ hook.
    '''
class ContextNotFound(RuntimeError):
    '''Exception: Page not found.

    Raised by Context.not_found(); caught in Context.run(), which then
    invokes the __context_not_found__ hook.
    '''
class ContextAccessDenied(RuntimeError):
    '''Exception: Nabar has no access.

    Raised by Context.access_denied() after the response has been replaced
    with an AccessDeniedResponse.
    '''
class ContextInitFailedException(BaseException):
    '''Context failed to init.

    Note: deliberately derives from BaseException, so a plain
    ``except Exception`` will NOT catch it.
    '''
class ContextPool(greenlet.greenlet):
    '''Main Greenlet.

    Holds the set of live Context greenlets (keyed by id()) and switches
    execution to one of them when run.
    '''
    def __init__(self, pool = None):
        # just simple list of contexts
        # application will switch to any context randomly
        # keyed by id() of context
        if pool is None:
            self.pool = {}
        else:
            self.pool = pool
        super().__init__(self.run)

    def run(self):
        '''Pick one pooled context: free it if dead, otherwise switch to it.'''
        #while True:
        try:
            if not self.pool:
                #asyncio.sleep(5)
                #continue
                return
            values = self.pool.values()
            # Takes an arbitrary context from the pool (dict ordering).
            context = next(iter(values))
            if context is None or context.dead:
                # NOTE(review): if context is None this calls free(None),
                # which will hit the except branches inside free() - confirm
                # that None can actually appear in the pool.
                self.free(context)
                #sleep(.2)
                # no break,
                #continue
                return
            # run another context
            context.switch()
            #result = context.switch()
            #if result is not None:
            #self.free(context)
            #return result
        except Exception:
            IN.logger.debug()

    def put(self, context):
        '''Add context to pool'''
        self.pool[id(context)] = context

    def free(self, context):
        '''Delete this context from pool and del it.

        Best effort: each teardown step (db release, pool removal, resource
        release, greenlet kill) is attempted independently so one failure
        does not prevent the others.
        '''
        context_id = id(context)
        try:
            # add db connection as free
            IN.db.free(context)
        except Exception:
            IN.logger.debug()
        try:
            del self.pool[context_id]
        except Exception:
            IN.logger.debug()
        context.__free__()
        try:
            context.kill() # force greenlet kill
        except Exception:
            pass
        try:
            del context
        except Exception:
            pass
        #IN.__context__ = None

    #def switch(self):
    #'''run another event'''
    #if self.dead:
    ## create new ContextPool
    #IN.APP.context_pool = ContextPool(self.pool)
    #IN.APP.context_pool.switch()
    #else:
    #super().switch()
class Context(greenlet.greenlet): # , asyncio.Task
    '''Main request/response logic class.

    One Context represents a single WSGI request/response cycle, executed
    as a greenlet so the application can interleave concurrent requests.
    '''

    ## __disabled_hooks__ - to disable the hook invoke on some hooks temporaryly
    ## IN.Registry will use this - for context based state.
    #__disabled_hooks__ = []
    ## hooks will be added here if those are not allowed for recursive calls
    #__not_recursive_hooks__ = []
    ## contains all the hook names which are in action. We have to check
    ## for ignore the recursive hook invoke calls.
    #__hooks_in_action__ = []

    def __init__(self, app, environ, start_response, **args):
        '''__init__ the Context.

        app: the application object (currently unused; IN.APP is used below).
        environ: the WSGI environ mapping for this request.
        start_response: the WSGI start_response callable.
        args: extra keyword arguments, kept on the context as ``self.args``.
        '''
        super().__init__()
        # application Object
        #self.APP = app
        # use this for now request_time
        self.now = datetime.datetime.now()
        self.environ = environ
        self.wsgi_callback = start_response
        # available when doing request process
        self.active_action = None
        # assets of css and js. it is still here even response type is changed from Page to Form.
        # response may make use of it if needed
        self.asset = Asset()
        self.page_menu_tab = PageMenuTab()
        self.page_menu_sub_tab = PageMenuTab()
        self.page_menu_sub_tab_2 = PageMenuTab()
        # themer will use it
        self.page_title = ''
        self.display_title = True
        self.headers = wsgiref.headers.Headers([])
        self.cookie = Cookie()
        # additional args for this context
        self.args = args
        #moved to IN
        # theme for the current context. each context output may be rendered by different theme engine.
        #self.themer = None
        themer = IN.themer
        APP = IN.APP
        default_theme_name = APP.config.default_theme_name
        context_theme_name = APP.decide_theme(self)
        # current theme for this request
        # NOTE(review): this compares the two *default* theme names but then
        # loads context_theme_name; it looks like the comparison was meant to
        # involve context_theme_name - confirm.
        if default_theme_name != themer.default_theme_name:
            self.current_theme = themer.load_theme(context_theme_name)
        else:
            self.current_theme = themer.default_theme
        # simple dict cache for the current context
        self.static_cache = {}
        # TODO: session for the current nabar
        self.session = {}
        # database connection class
        self.db_connections = []
        # __disabled_hooks__ - to disable the hook invoke on some hooks temporarly
        # IN.Registry will use this - for context based state.
        self.__disabled_hooks__ = []
        # hooks will be added here if those are not allowed for recursive calls
        self.__not_recursive_hooks__ = []
        # TODO
        #self.__In_static__ = {} # context static value container
        # init the request
        self.request = In.core.request.Request(self)
        #IN.hook_invoke('__context_request_process__', self, self.request)
        # current logged in nabar
        path_parts = self.request.path_parts
        nabar = None
        # ignore nabar for static files path
        if len(path_parts) == 0 or path_parts[0] != IN.APP.config.pfpp:
            try:
                nabar_id = IN.nabar.auth_cookie(self)
                if nabar_id:
                    nabar = IN.entitier.load('Nabar', nabar_id)
            except Exception as e:
                IN.logger.debug()
        # TODO: delete the cookie if nabar is None
        if nabar is None: # use the default
            nabar = In.nabar.anonymous()
        self.nabar = nabar
        try: # context may not available
            self.language = nabar.language
        except AttributeError:
            self.language = APP.config.language
        # init the response
        #IN.hook_invoke('__context_response_init__', self, self.request)
        # use default page from default theme
        # SPEED # TODO: do we want this even for Form submit / File request?
        res_args = {}  # NOTE(review): unused - kept from an older code path.
        page_class = APP.decide_page_class(self)
        if type(page_class) is str:
            page = Object.new(page_class)
        else:
            page = page_class()
        #res_args['output'] = page_class()
        # default to PageResponse
        self.response = In.core.response.PageResponse(output = page)
        IN.hook_invoke('__context_init__', self)

    @property
    def application_uri(self):
        '''Base URI of the application, derived from the WSGI environ.'''
        # NOTE(review): wsgiref.util is not explicitly imported at the top of
        # this module (only wsgiref and wsgiref.headers) - confirm it is
        # available at runtime.
        return wsgiref.util.application_uri(self.environ)

    @property
    def request_uri(self):
        '''Full URI of the current request, derived from the WSGI environ.'''
        return wsgiref.util.request_uri(self.environ)

    def request_end(self):
        '''End request processing immediately by raising ContextEnd.'''
        raise ContextEnd('ContextEnd')

    def redirect(self, path, status = None, ajax_redirect = True):
        '''Replace the response with a RedirectResponse and unwind via
        ContextRedirect. Defaults to HTTP 303 See Other.'''
        # set redirect processor
        if status is None:
            status = In.http.Status.SEE_OTHER
        self.response = In.core.response.RedirectResponse(path = path, status = status, ajax_redirect = ajax_redirect)
        raise ContextRedirect(path, status, ajax_redirect = ajax_redirect)

    def bad_request(self, output = None, status = None):
        '''Replace the response with a BadResponse (default HTTP 400) and
        unwind via ContextBadRequest.'''
        if status is None:
            status = In.http.Status.BAD_REQUEST
        self.response = In.core.response.BadResponse(output = output, status = status)
        raise ContextBadRequest() # raise it

    def not_found(self, message = None, title = None):
        '''Replace the response with a NotFoundResponse, run the app's
        page-not-found actions, then unwind via ContextNotFound.'''
        # NOTE(review): "current_reponse" is a misspelling of
        # "current_response" (kept as-is; it is a local name only).
        current_reponse = self.response
        output = current_reponse.output
        args = {'output' : output}
        self.response = In.core.response.NotFoundResponse(**args)
        # set the new path, so blocks can be added by this
        self.request.path = '__page_not_found__'
        self.page_title = title or s('Page not found')
        #if not message:
        #message = IN.APP.config.message_page_not_found
        if message:
            self.response.output.add('Text', {'value' : message})
        atn = IN.APP.__page_not_found__(self)
        if atn:
            try:
                self.run_actions(atn)
            except Exception as e:
                IN.logger.debug()
        raise ContextNotFound()

    def send_headers(self):
        '''Calls the WSGI specific start_response method to send headers to client.
        '''
        response_headers = self.headers.items()
        # set the cookies
        for c in self.cookie.values():
            response_headers.append(('Set-Cookie', c.OutputString(None)))
        ## set the output so application __call__ will return it.
        self.environ['In_output'] = self.response.output
        #self.In_output.put([str(self.response.status), response_headers, self.response.output])
        self.wsgi_callback(str(self.response.status), response_headers)

    def run(self):
        '''Greenlet entry point: run matched actions and emit the response.

        Control-flow exceptions raised during action processing are mapped
        to their corresponding __context_*__ hooks here; anything else is
        logged and processing continues to the response stage.
        '''
        try:
            self.run_actions()
        except ContextEnd as end:
            IN.hook_invoke('__context_end__', self.environ, self.wsgi_callback, end)
        except ContextRedirect as red:
            IN.hook_invoke('__context_redirect__', self.environ, self.wsgi_callback, red)
        except ContextBadRequest as bad:
            IN.hook_invoke('__context_bad_request__', self.environ, self.wsgi_callback, bad)
        except ContextNotFound as nf:
            IN.hook_invoke('__context_not_found__', self.environ, self.wsgi_callback, nf)
        except: # internal server error?
            IN.logger.debug()
        #IN.APP.wait()
        self.process_response()
        IN.hook_invoke('__context_request_process_done__', self)
        return self.environ['In_output']
        #try:
        #except:
        ## TODO:
        #IN.logger.debug()
        #return [''.encode('utf-8')]

    def process_response(self):
        '''Run response pre/post hooks, let the response build its output
        and headers, then send the headers to the client.'''
        # if no response set
        if self.response is None:
            return self.bad_request()
        #if not type(self.response) is response.ResponseBase:
        #return self.bad_request()
        # process the response
        IN.hook_invoke('__context_response_preprocess__', self, self.response)
        # set output, headers
        self.response.process(self)
        IN.hook_invoke('__context_response_process__', self, self.response)
        # start the response
        self.send_headers()

    def set_active_action(self, action):
        '''Remember the action currently being executed and derive the
        page title from it.'''
        self.active_action = action
        # set title
        self.set_page_title_from_action(action)

    def set_page_title_from_action(self, action):
        '''Set self.page_title from an ActionObject's title (string or
        callable); otherwise recurse into the action's last sub-action.'''
        # set page title
        if isinstance(action, In.core.action.ActionObject):
            if type(action.title) is str:
                self.page_title = action.title
            else: # call the function
                # NOTE(review): ``a`` is undefined here - a callable title
                # will raise NameError; presumably ``action`` (or ``self``)
                # was intended. Confirm before fixing.
                self.page_title = action.title(a)
        elif action.actions:
            action = action.actions[-1]
            self.set_page_title_from_action(action)

    def run_actions(self, actions = None):
        '''check for the suitable action for the request
        '''
        if actions is None:
            actions = self.actions()
        # NOTE(review): this checks In.action.ActionObject while
        # set_page_title_from_action checks In.core.action.ActionObject -
        # confirm both names resolve to the same class.
        if type(actions) is In.action.ActionObject:
            self.set_active_action(actions)
            actions.__call__(self)
        else:
            for action in actions:
                if action:
                    # set it, so modules can change pass_next to continue to next or break it
                    self.set_active_action(action)
                    action.__call__(self)
                    if not action.pass_next: # pass to next action
                        break

    #def __call__(self):
    #return self.run()

    def actions(self):
        '''get action based on request.

        Generator: yields early actions (e.g. form submits), then
        path-matched actions; falls back to not_found() for an unmatched
        path and to the application index page when there is no path.
        '''
        '''Handle direct actions such as form submit'''
        for atn in IN.hook_invoke_yield('__context_early_action__', self):
            if atn:
                yield atn
                if not atn.pass_next:
                    return
        '''Find action by path'''
        path = self.request.path
        if path:
            # TODO: REMOVE UNWANTED
            p = path.replace('/', '_')
            for atn in IN.hook_invoke('_'.join(('direct_path_action', p)), self):
                if atn:
                    yield atn
                    if not atn.pass_next:
                        return
            for atn in action.path_action():
                if atn:
                    yield atn
                    if not atn.pass_next:
                        return
            # path not found
            self.not_found()
            return
        # no path # use index page
        yield IN.APP.__index_page__(self)
        return

    def ensure_page_response(self):
        '''make sure that the response is type if PageResponse'''
        if not isinstance(self.response, In.core.response.ObjectResponse): # Object like
            self.response = In.core.response.PageResponse()

    def access(self, key, account = None, deny = False):
        '''shortcut method to IN.access
        return true if account has access key
        if context.access('view_content'):
            do something
        account : nabar object. context.nabar by default
        deny: redirect to access denied page if True
        TODO: cache?
        '''
        if account is None:
            account = IN.context.nabar
        result = IN.nabar.access(key, account)
        if deny and not result:
            self.access_denied()
        return result

    def access_denied(self, message = None, title = None):
        '''Replace the response with an AccessDeniedResponse (adding title
        and message when the output is an IN Object) and unwind via
        ContextAccessDenied.'''
        current_reponse = self.response
        output = current_reponse.output
        args = {'output' : output}
        self.response = In.core.response.AccessDeniedResponse(**args)
        if isinstance(output, Object):
            # set this only if output is IN Object like
            if title is None:
                title = s('Access Denied!')
            self.response.output.title = title
            if message is None:
                message = IN.APP.config.message_access_denied
            self.response.output.add('Text', {'value' : message})
        raise ContextAccessDenied()

    #def switch(self):
    #'''overrides greenlet switch to set the context.'''
    #IN.__context__ = self
    #super().switch() # greenlet.greenlet

    def __free__(self):
        '''freeup resources'''
        del self.request
        del self.response
        del self.environ
        del self.nabar
        del self.static_cache
        del self.asset
        del self.args
        del self.page_menu_tab
        del self.page_menu_sub_tab
        del self.page_menu_sub_tab_2
        del self.headers
        del self.cookie
        del self.now
| apache-2.0 |
pratyakshs/pgmpy | pgmpy/models/NoisyOrModel.py | 3 | 5531 | #!/usr/bin/env python3
from itertools import chain
import numpy as np
import networkx as nx
class NoisyOrModel(nx.DiGraph):
    """
    Base class for Noisy-Or models.

    This is an implementation of generalized Noisy-Or models and
    is not limited to Boolean variables and also any arbitrary
    function can be used instead of the boolean OR function.

    Reference: http://xenon.stanford.edu/~srinivas/research/6-UAI93-Srinivas-Generalization-of-Noisy-Or.pdf
    """

    def __init__(self, variables, cardinality, inhibitor_probability):
        # TODO: Accept values of each state so that it could be
        # put into F to compute the final state values of the output
        """
        Init method for NoisyOrModel.

        Parameters
        ----------
        variables: list, tuple, dict (array like)
            array containing names of the variables.

        cardinality: list, tuple, dict (array like)
            array containing integers representing the cardinality
            of the variables.

        inhibitor_probability: list, tuple, dict (array_like)
            array containing the inhibitor probabilities of each variable.

        Examples
        --------
        >>> from pgmpy.models import NoisyOrModel
        >>> model = NoisyOrModel(['x1', 'x2', 'x3'], [2, 3, 2], [[0.6, 0.4],
        ...                                                      [0.2, 0.4, 0.7],
        ...                                                      [0.1, 0.4]])
        """
        self.variables = np.array([])
        # Bug fix: ``dtype=np.int`` relied on the ``np.int`` alias, which was
        # only ever the builtin ``int`` and has been removed in NumPy >= 1.24;
        # use the builtin directly (identical behavior on all versions).
        self.cardinality = np.array([], dtype=int)
        self.inhibitor_probability = []
        self.add_variables(variables, cardinality, inhibitor_probability)

    def add_variables(self, variables, cardinality, inhibitor_probability):
        """
        Adds variables to the NoisyOrModel.

        Parameters
        ----------
        variables: list, tuple, dict (array like)
            array containing names of the variables that are to be added.

        cardinality: list, tuple, dict (array like)
            array containing integers representing the cardinality
            of the variables.

        inhibitor_probability: list, tuple, dict (array_like)
            array containing the inhibitor probabilities corresponding to each variable.

        Raises
        ------
        ValueError
            If the three arrays are inconsistent in length, if a variable's
            probability array does not match its cardinality, or if any
            probability lies outside [0, 1].

        Examples
        --------
        >>> from pgmpy.models import NoisyOrModel
        >>> model = NoisyOrModel(['x1', 'x2', 'x3'], [2, 3, 2], [[0.6, 0.4],
        ...                                                      [0.2, 0.4, 0.7],
        ...                                                      [0.1, 0. 4]])
        >>> model.add_variables(['x4'], [3], [0.1, 0.4, 0.2])
        """
        # For a single variable, allow a flat probability list by wrapping it.
        if len(variables) == 1:
            if not isinstance(inhibitor_probability[0], (list, tuple)):
                inhibitor_probability = [inhibitor_probability]

        if len(variables) != len(cardinality):
            raise ValueError("Size of variables and cardinality should be same")
        elif any(cardinal != len(prob_array) for prob_array, cardinal in zip(inhibitor_probability, cardinality)) or \
                len(cardinality) != len(inhibitor_probability):
            raise ValueError("Size of variables and inhibitor_probability should be same")
        elif not all(0 <= item <= 1 for item in chain.from_iterable(inhibitor_probability)):
            raise ValueError("Probability values should be between 0 and 1(both inclusive).")
        else:
            self.variables = np.concatenate((self.variables, variables))
            self.cardinality = np.concatenate((self.cardinality, cardinality))
            self.inhibitor_probability.extend(inhibitor_probability)

    def del_variables(self, variables):
        """
        Deletes variables from the NoisyOrModel.

        Parameters
        ----------
        variables: list, tuple, dict (array like)
            list of variables to be deleted.

        Examples
        --------
        >>> from pgmpy.models import NoisyOrModel
        >>> model = NoisyOrModel(['x1', 'x2', 'x3'], [2, 3, 2], [[0.6, 0.4],
        ...                                                      [0.2, 0.4, 0.7],
        ...                                                      [0.1, 0. 4]])
        >>> model.del_variables(['x1'])
        """
        variables = [variables] if isinstance(variables, str) else set(variables)
        # Drop the matching entries from all three parallel containers.
        indices = [index for index, variable in enumerate(self.variables) if variable in variables]
        self.variables = np.delete(self.variables, indices, 0)
        self.cardinality = np.delete(self.cardinality, indices, 0)
        self.inhibitor_probability = [prob_array for index, prob_array in enumerate(self.inhibitor_probability)
                                      if index not in indices]

#
# def out_prob(self, func):
#     """
#     Compute the conditional probability of output variable
#     given all other variables [P(X|U)] where X is the output
#     variable and U is the set of input variables.
#
#     Parameters
#     ----------
#     func: function
#         The deterministic function which maps input to the
#         output.
#
#     Returns
#     -------
#     List of tuples. Each tuple is of the form (state, probability).
#     """
#     states = []
#     from itertools import product
#     for u in product([(values(var)) for var in self.variables]):
#         for state in product([(values(var) for var in self.variables)]):
| mit |
Stratoscale/rackattack-physical | rackattack/physical/provider_tests/test_priority.py | 1 | 1269 | import pytest
import logging
@pytest.fixture()
def chosen(priority):
_chosen = priority.allocated().values()[0].id()
logging.info("CHOSEN: {chosen}".format(chosen=_chosen))
return _chosen
def test_no_tags(chosen):
# some servers with tags, single server without tags -> expect the one with no tags to be picked
assert chosen == 'rack01-server01'
def test_unique(chosen):
# 2 candidates, 1 with very unique tag, 1 with very common
assert chosen == 'rack01-server01'
def test_not_uniform(chosen):
# get many candidates with obviously shifted tags distribution
assert chosen != 'rack01-server01'
def test_large_numbers_unique(chosen):
assert chosen == 'rack01-server01'
@pytest.mark.repeat(100)
def test_large_numbers(chosen):
assert chosen != 'rack01-server01'
assert 'server01' not in chosen
def test_multiple_servers(priority):
assert ['rack01-server01', 'rack01-server02'] == sorted([v.id() for v in priority.allocated().values()])
def test_multiple_servers_negative(priority):
assert priority is None
def test_tags_overload(chosen):
assert chosen == 'rack01-server01'
# def test_uniform(chosen):
# assert chosen == 'rack01-server01'
# def test_no_intersections(priority):
# pass
| apache-2.0 |
esatel/ADCPy | doc/source/conf.py | 1 | 8929 | # -*- coding: utf-8 -*-
#
# ADCpy documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 07 11:54:34 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'matplotlib.sphinxext.mathmpl',
'matplotlib.sphinxext.only_directives',
'matplotlib.sphinxext.plot_directive',
'matplotlib.sphinxext.ipython_directive',
'sphinx.ext.intersphinx',
'sphinx.ext.autodoc',
'sphinx.ext.doctest','numpydoc',
'sphinx.ext.autosummary']
#'numpydoc']
#'ipython_console_highlighting',
#'inheritance_diagram',
#'numpydoc']
autodoc_member_order = 'alphabetical'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ADCPy'
copyright = u'2014, California Department of Water Resources'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'dwrsmall.gif'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
# This prevents the weird 2-index result if you use numpydoc
html_domain_indices = ['py-modindex']
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ADCPydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ADCPy.tex', u'ADCPy Documentation',
u'Benjamin Saenz, David Ralston, Rusty Holleman,\nEd Gross, Eli Ateljevich', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = ['py-modindex']
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'adcpy', u'ADCpy Documentation',
[u'Benjamin Saenz, David Ralston, Rusty Holleman, Ed Gross, Eli Ateljevich'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ADCpy', u'ADCpy Documentation',
u'Benjamin Saenz, David Ralston, Rusty Holleman, Ed Gross, Eli Ateljevich', 'ADCPy', 'Tools for ADCP analysis and visualization.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
MayQ/shadowsocks | shadowsocks/crypto/sodium.py | 1032 | 3778 | #!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
from ctypes import c_char_p, c_int, c_ulonglong, byref, \
create_string_buffer, c_void_p
from shadowsocks.crypto import util
__all__ = ['ciphers']
libsodium = None
loaded = False
buf_size = 2048
# for salsa20 and chacha20
BLOCK_SIZE = 64
def load_libsodium():
global loaded, libsodium, buf
libsodium = util.find_library('sodium', 'crypto_stream_salsa20_xor_ic',
'libsodium')
if libsodium is None:
raise Exception('libsodium not found')
libsodium.crypto_stream_salsa20_xor_ic.restype = c_int
libsodium.crypto_stream_salsa20_xor_ic.argtypes = (c_void_p, c_char_p,
c_ulonglong,
c_char_p, c_ulonglong,
c_char_p)
libsodium.crypto_stream_chacha20_xor_ic.restype = c_int
libsodium.crypto_stream_chacha20_xor_ic.argtypes = (c_void_p, c_char_p,
c_ulonglong,
c_char_p, c_ulonglong,
c_char_p)
buf = create_string_buffer(buf_size)
loaded = True
class SodiumCrypto(object):
def __init__(self, cipher_name, key, iv, op):
if not loaded:
load_libsodium()
self.key = key
self.iv = iv
self.key_ptr = c_char_p(key)
self.iv_ptr = c_char_p(iv)
if cipher_name == 'salsa20':
self.cipher = libsodium.crypto_stream_salsa20_xor_ic
elif cipher_name == 'chacha20':
self.cipher = libsodium.crypto_stream_chacha20_xor_ic
else:
raise Exception('Unknown cipher')
# byte counter, not block counter
self.counter = 0
def update(self, data):
global buf_size, buf
l = len(data)
# we can only prepend some padding to make the encryption align to
# blocks
padding = self.counter % BLOCK_SIZE
if buf_size < padding + l:
buf_size = (padding + l) * 2
buf = create_string_buffer(buf_size)
if padding:
data = (b'\0' * padding) + data
self.cipher(byref(buf), c_char_p(data), padding + l,
self.iv_ptr, int(self.counter / BLOCK_SIZE), self.key_ptr)
self.counter += l
# buf is copied to a str object when we access buf.raw
# strip off the padding
return buf.raw[padding:padding + l]
ciphers = {
'salsa20': (32, 8, SodiumCrypto),
'chacha20': (32, 8, SodiumCrypto),
}
def test_salsa20():
cipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 1)
decipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
def test_chacha20():
cipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 1)
decipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
if __name__ == '__main__':
test_chacha20()
test_salsa20()
| apache-2.0 |
jonathonwalz/ansible | test/units/module_utils/facts/test_facts.py | 80 | 22585 | # This file is part of Ansible
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import os
import pytest
# for testing
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock, patch
from ansible.module_utils import facts
from ansible.module_utils.facts import hardware
from ansible.module_utils.facts import network
from ansible.module_utils.facts import virtual
class BaseTestFactsPlatform(unittest.TestCase):
platform_id = 'Generic'
fact_class = hardware.base.Hardware
collector_class = None
"""Verify that the automagic in Hardware.__new__ selects the right subclass."""
@patch('platform.system')
def test_new(self, mock_platform):
if not self.fact_class:
pytest.skip('This platform (%s) does not have a fact_class.' % self.platform_id)
mock_platform.return_value = self.platform_id
inst = self.fact_class(module=Mock(), load_on_init=False)
self.assertIsInstance(inst, self.fact_class)
self.assertEqual(inst.platform, self.platform_id)
def test_subclass(self):
if not self.fact_class:
pytest.skip('This platform (%s) does not have a fact_class.' % self.platform_id)
# 'Generic' will try to map to platform.system() that we are not mocking here
if self.platform_id == 'Generic':
return
inst = self.fact_class(module=Mock(), load_on_init=False)
self.assertIsInstance(inst, self.fact_class)
self.assertEqual(inst.platform, self.platform_id)
def test_collector(self):
if not self.collector_class:
pytest.skip('This test class needs to be updated to specify collector_class')
inst = self.collector_class()
self.assertIsInstance(inst, self.collector_class)
self.assertEqual(inst._platform, self.platform_id)
class TestLinuxFactsPlatform(BaseTestFactsPlatform):
platform_id = 'Linux'
fact_class = hardware.linux.LinuxHardware
collector_class = hardware.linux.LinuxHardwareCollector
class TestHurdFactsPlatform(BaseTestFactsPlatform):
platform_id = 'GNU'
fact_class = hardware.hurd.HurdHardware
collector_class = hardware.hurd.HurdHardwareCollector
class TestSunOSHardware(BaseTestFactsPlatform):
platform_id = 'SunOS'
fact_class = hardware.sunos.SunOSHardware
collector_class = hardware.sunos.SunOSHardwareCollector
class TestOpenBSDHardware(BaseTestFactsPlatform):
platform_id = 'OpenBSD'
fact_class = hardware.openbsd.OpenBSDHardware
collector_class = hardware.openbsd.OpenBSDHardwareCollector
class TestFreeBSDHardware(BaseTestFactsPlatform):
platform_id = 'FreeBSD'
fact_class = hardware.freebsd.FreeBSDHardware
collector_class = hardware.freebsd.FreeBSDHardwareCollector
class TestDragonFlyHardware(BaseTestFactsPlatform):
platform_id = 'DragonFly'
fact_class = None
collector_class = hardware.dragonfly.DragonFlyHardwareCollector
class TestNetBSDHardware(BaseTestFactsPlatform):
platform_id = 'NetBSD'
fact_class = hardware.netbsd.NetBSDHardware
collector_class = hardware.netbsd.NetBSDHardwareCollector
class TestAIXHardware(BaseTestFactsPlatform):
platform_id = 'AIX'
fact_class = hardware.aix.AIXHardware
collector_class = hardware.aix.AIXHardwareCollector
class TestHPUXHardware(BaseTestFactsPlatform):
platform_id = 'HP-UX'
fact_class = hardware.hpux.HPUXHardware
collector_class = hardware.hpux.HPUXHardwareCollector
class TestDarwinHardware(BaseTestFactsPlatform):
platform_id = 'Darwin'
fact_class = hardware.darwin.DarwinHardware
collector_class = hardware.darwin.DarwinHardwareCollector
class TestGenericNetwork(BaseTestFactsPlatform):
platform_id = 'Generic'
fact_class = network.base.Network
class TestHurdPfinetNetwork(BaseTestFactsPlatform):
platform_id = 'GNU'
fact_class = network.hurd.HurdPfinetNetwork
collector_class = network.hurd.HurdNetworkCollector
class TestLinuxNetwork(BaseTestFactsPlatform):
platform_id = 'Linux'
fact_class = network.linux.LinuxNetwork
collector_class = network.linux.LinuxNetworkCollector
class TestGenericBsdIfconfigNetwork(BaseTestFactsPlatform):
platform_id = 'Generic_BSD_Ifconfig'
fact_class = network.generic_bsd.GenericBsdIfconfigNetwork
collector_class = None
class TestHPUXNetwork(BaseTestFactsPlatform):
platform_id = 'HP-UX'
fact_class = network.hpux.HPUXNetwork
collector_class = network.hpux.HPUXNetworkCollector
class TestDarwinNetwork(BaseTestFactsPlatform):
platform_id = 'Darwin'
fact_class = network.darwin.DarwinNetwork
collector_class = network.darwin.DarwinNetworkCollector
class TestFreeBSDNetwork(BaseTestFactsPlatform):
platform_id = 'FreeBSD'
fact_class = network.freebsd.FreeBSDNetwork
collector_class = network.freebsd.FreeBSDNetworkCollector
class TestDragonFlyNetwork(BaseTestFactsPlatform):
platform_id = 'DragonFly'
fact_class = network.dragonfly.DragonFlyNetwork
collector_class = network.dragonfly.DragonFlyNetworkCollector
class TestAIXNetwork(BaseTestFactsPlatform):
platform_id = 'AIX'
fact_class = network.aix.AIXNetwork
collector_class = network.aix.AIXNetworkCollector
class TestNetBSDNetwork(BaseTestFactsPlatform):
platform_id = 'NetBSD'
fact_class = network.netbsd.NetBSDNetwork
collector_class = network.netbsd.NetBSDNetworkCollector
class TestOpenBSDNetwork(BaseTestFactsPlatform):
platform_id = 'OpenBSD'
fact_class = network.openbsd.OpenBSDNetwork
collector_class = network.openbsd.OpenBSDNetworkCollector
class TestSunOSNetwork(BaseTestFactsPlatform):
platform_id = 'SunOS'
fact_class = network.sunos.SunOSNetwork
collector_class = network.sunos.SunOSNetworkCollector
class TestLinuxVirtual(BaseTestFactsPlatform):
platform_id = 'Linux'
fact_class = virtual.linux.LinuxVirtual
collector_class = virtual.linux.LinuxVirtualCollector
class TestFreeBSDVirtual(BaseTestFactsPlatform):
platform_id = 'FreeBSD'
fact_class = virtual.freebsd.FreeBSDVirtual
collector_class = virtual.freebsd.FreeBSDVirtualCollector
class TestNetBSDVirtual(BaseTestFactsPlatform):
platform_id = 'NetBSD'
fact_class = virtual.netbsd.NetBSDVirtual
collector_class = virtual.netbsd.NetBSDVirtualCollector
class TestOpenBSDVirtual(BaseTestFactsPlatform):
platform_id = 'OpenBSD'
fact_class = virtual.openbsd.OpenBSDVirtual
collector_class = virtual.openbsd.OpenBSDVirtualCollector
class TestHPUXVirtual(BaseTestFactsPlatform):
platform_id = 'HP-UX'
fact_class = virtual.hpux.HPUXVirtual
collector_class = virtual.hpux.HPUXVirtualCollector
class TestSunOSVirtual(BaseTestFactsPlatform):
platform_id = 'SunOS'
fact_class = virtual.sunos.SunOSVirtual
collector_class = virtual.sunos.SunOSVirtualCollector
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
MTAB = """
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
"""
MTAB_ENTRIES = [
[
'sysfs',
'/sys',
'sysfs',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
[
'devtmpfs',
'/dev',
'devtmpfs',
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
'0',
'0'
],
[
'securityfs',
'/sys/kernel/security',
'securityfs',
'rw,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
[
'devpts',
'/dev/pts',
'devpts',
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
'0',
'0'
],
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
[
'tmpfs',
'/sys/fs/cgroup',
'tmpfs',
'ro,seclabel,nosuid,nodev,noexec,mode=755',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/systemd',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
'0',
'0'
],
[
'pstore',
'/sys/fs/pstore',
'pstore',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/devices',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,devices',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/freezer',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,freezer',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/memory',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,memory',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/pids',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,pids',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/blkio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,blkio',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpuset',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpuset',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpu,cpuacct',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/hugetlb',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,hugetlb',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/perf_event',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,perf_event',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/net_cls,net_prio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
'0',
'0'
],
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-root',
'/',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
[
'systemd-1',
'/proc/sys/fs/binfmt_misc',
'autofs',
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
'0',
'0'
],
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
[
'hugetlbfs',
'/dev/hugepages',
'hugetlbfs',
'rw,seclabel,relatime',
'0',
'0'
],
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
[
'/dev/loop0',
'/var/lib/machines',
'btrfs',
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
'0',
'0'
],
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# A 'none' fstype
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# lets assume this is a bindmount
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-home',
'/home',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
[
'tmpfs',
'/run/user/1000',
'tmpfs',
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
'0',
'0'
],
[
'gvfsd-fuse',
'/run/user/1000/gvfs',
'fuse.gvfsd-fuse',
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
'0',
'0'
],
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
BIND_MOUNTS = ['/not/a/real/bind_mount']
with open(os.path.join(os.path.dirname(__file__), 'fixtures/findmount_output.txt')) as f:
FINDMNT_OUTPUT = f.read()
class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
# FIXME: mock.patch instead
def setUp(self):
# The @timeout tracebacks if there isn't a GATHER_TIMEOUT is None (the default until get_all_facts sets it via global)
facts.GATHER_TIMEOUT = 10
def tearDown(self):
facts.GATHER_TIMEOUT = None
# The Hardware subclasses freakout if instaniated directly, so
# mock platform.system and inst Hardware() so we get a LinuxHardware()
# we can test.
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._mtab_entries', return_value=MTAB_ENTRIES)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._find_bind_mounts', return_value=BIND_MOUNTS)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._lsblk_uuid', return_value=LSBLK_UUIDS)
def test_get_mount_facts(self,
mock_lsblk_uuid,
mock_find_bind_mounts,
mock_mtab_entries):
module = Mock()
# Returns a LinuxHardware-ish
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
# Nothing returned, just self.facts modified as a side effect
mount_facts = lh.get_mount_facts()
self.assertIsInstance(mount_facts, dict)
self.assertIn('mounts', mount_facts)
self.assertIsInstance(mount_facts['mounts'], list)
self.assertIsInstance(mount_facts['mounts'][0], dict)
@patch('ansible.module_utils.facts.hardware.linux.get_file_content', return_value=MTAB)
def test_get_mtab_entries(self, mock_get_file_content):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
mtab_entries = lh._mtab_entries()
self.assertIsInstance(mtab_entries, list)
self.assertIsInstance(mtab_entries[0], list)
self.assertEqual(len(mtab_entries), 38)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(0, FINDMNT_OUTPUT, ''))
def test_find_bind_mounts(self, mock_run_findmnt):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
# If bind_mounts becomes another seq type, feel free to change
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 1)
self.assertIn('/not/a/real/bind_mount', bind_mounts)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(37, '', ''))
def test_find_bind_mounts_non_zero(self, mock_run_findmnt):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 0)
def test_find_bind_mounts_no_findmnts(self):
module = Mock()
module.get_bin_path = Mock(return_value=None)
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 0)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
def test_lsblk_uuid(self, mock_run_lsblk):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertIn(b'/dev/loop9', lsblk_uuids)
self.assertIn(b'/dev/sda1', lsblk_uuids)
self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertEqual(len(lsblk_uuids), 0)
def test_lsblk_uuid_no_lsblk(self):
module = Mock()
module.get_bin_path = Mock(return_value=None)
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertEqual(len(lsblk_uuids), 0)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
module = Mock()
lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertIn(b'/dev/loop0', lsblk_uuids)
self.assertIn(b'/dev/sda1', lsblk_uuids)
self.assertEqual(lsblk_uuids[b'/dev/mapper/an-example-mapper with a space in the name'], b'84639acb-013f-4d2f-9392-526a572b4373')
self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
| gpl-3.0 |
tupolev/plugin.video.mitele | lib/youtube_dl/extractor/spankwire.py | 20 | 4782 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_urlparse,
)
from ..utils import (
sanitized_Request,
str_to_int,
unified_strdate,
)
from ..aes import aes_decrypt_text
class SpankwireIE(InfoExtractor):
    """youtube-dl extractor for spankwire.com video pages (age-restricted site)."""
    _VALID_URL = r'https?://(?:www\.)?(?P<url>spankwire\.com/[^/]*/video(?P<id>[0-9]+)/?)'
    _TESTS = [{
        # download URL pattern: */<height>P_<tbr>K_<video_id>.mp4
        'url': 'http://www.spankwire.com/Buckcherry-s-X-Rated-Music-Video-Crazy-Bitch/video103545/',
        'md5': '8bbfde12b101204b39e4b9fe7eb67095',
        'info_dict': {
            'id': '103545',
            'ext': 'mp4',
            'title': 'Buckcherry`s X Rated Music Video Crazy Bitch',
            'description': 'Crazy Bitch X rated music video.',
            'uploader': 'oreusz',
            'uploader_id': '124697',
            'upload_date': '20070507',
            'age_limit': 18,
        }
    }, {
        # download URL pattern: */mp4_<format_id>_<video_id>.mp4
        'url': 'http://www.spankwire.com/Titcums-Compiloation-I/video1921551/',
        'md5': '09b3c20833308b736ae8902db2f8d7e6',
        'info_dict': {
            'id': '1921551',
            'ext': 'mp4',
            'title': 'Titcums Compiloation I',
            'description': 'cum on tits',
            'uploader': 'dannyh78999',
            'uploader_id': '3056053',
            'upload_date': '20150822',
            'age_limit': 18,
        },
    }]
    def _real_extract(self, url):
        """Scrape the video page and return a youtube-dl info dict.

        Metadata is pulled from the HTML with regexes; download URLs come from
        ``playerData.cdnPath<height>`` JS variables and may be AES-encrypted
        using the (de-plussed) video title as the password.
        """
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        # The age gate is bypassed by presenting the age_verified cookie.
        req = sanitized_Request('http://www.' + mobj.group('url'))
        req.add_header('Cookie', 'age_verified=1')
        webpage = self._download_webpage(req, video_id)
        title = self._html_search_regex(
            r'<h1>([^<]+)', webpage, 'title')
        # All remaining metadata lookups are fatal=False: pages missing them
        # still yield a usable info dict with None fields.
        description = self._html_search_regex(
            r'(?s)<div\s+id="descriptionContent">(.+?)</div>',
            webpage, 'description', fatal=False)
        thumbnail = self._html_search_regex(
            r'playerData\.screenShot\s*=\s*["\']([^"\']+)["\']',
            webpage, 'thumbnail', fatal=False)
        uploader = self._html_search_regex(
            r'by:\s*<a [^>]*>(.+?)</a>',
            webpage, 'uploader', fatal=False)
        uploader_id = self._html_search_regex(
            r'by:\s*<a href="/(?:user/viewProfile|Profile\.aspx)\?.*?UserId=(\d+).*?"',
            webpage, 'uploader id', fatal=False)
        upload_date = unified_strdate(self._html_search_regex(
            r'</a> on (.+?) at \d+:\d+',
            webpage, 'upload date', fatal=False))
        view_count = str_to_int(self._html_search_regex(
            r'<div id="viewsCounter"><span>([\d,\.]+)</span> views</div>',
            webpage, 'view count', fatal=False))
        comment_count = str_to_int(self._html_search_regex(
            r'<span\s+id="spCommentCount"[^>]*>([\d,\.]+)</span>',
            webpage, 'comment count', fatal=False))
        # Each playerData.cdnPath<height> JS assignment carries one download URL.
        videos = re.findall(
            r'playerData\.cdnPath([0-9]{3,})\s*=\s*(?:encodeURIComponent\()?["\']([^"\']+)["\']', webpage)
        heights = [int(video[0]) for video in videos]
        video_urls = list(map(compat_urllib_parse_unquote, [video[1] for video in videos]))
        # NOTE(review): str.find() is given a regex-escaped pattern ('\.'), so it
        # searches for a literal backslash-dot and may never match — re.search
        # was probably intended. Confirm against the live page before changing.
        if webpage.find('flashvars\.encrypted = "true"') != -1:
            password = self._search_regex(
                r'flashvars\.video_title = "([^"]+)',
                webpage, 'password').replace('+', ' ')
            # URLs are AES-decrypted with the title ('+' turned into spaces) as key.
            video_urls = list(map(
                lambda s: aes_decrypt_text(s, password, 32).decode('utf-8'),
                video_urls))
        formats = []
        for height, video_url in zip(heights, video_urls):
            path = compat_urllib_parse_urlparse(video_url).path
            # Prefer height/bitrate parsed from the URL path when it matches the
            # */<height>P_<tbr>K_* download pattern; fall back to the JS height.
            m = re.search(r'/(?P<height>\d+)[pP]_(?P<tbr>\d+)[kK]', path)
            if m:
                tbr = int(m.group('tbr'))
                height = int(m.group('height'))
            else:
                tbr = None
            formats.append({
                'url': video_url,
                'format_id': '%dp' % height,
                'height': height,
                'tbr': tbr,
            })
        self._sort_formats(formats)
        age_limit = self._rta_search(webpage)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'upload_date': upload_date,
            'view_count': view_count,
            'comment_count': comment_count,
            'formats': formats,
            'age_limit': age_limit,
        }
| gpl-3.0 |
dennybaa/st2 | st2actions/tests/unit/test_actionchain.py | 2 | 39995 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2actions.runners import actionchainrunner as acr
from st2actions.container.service import RunnerContainerService
from st2common.exceptions import actionrunner as runnerexceptions
from st2common.constants.action import LIVEACTION_STATUS_RUNNING
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED
from st2common.constants.action import LIVEACTION_STATUS_CANCELED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
from st2common.constants.action import LIVEACTION_STATUS_FAILED
from st2common.models.api.notification import NotificationsHelper
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.keyvalue import KeyValuePairDB
from st2common.models.system.common import ResourceReference
from st2common.persistence.keyvalue import KeyValuePair
from st2common.persistence.runner import RunnerType
from st2common.services import action as action_service
from st2common.util import action_db as action_db_util
from st2common.exceptions.action import ParameterRenderingFailedException
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
class DummyActionExecution(object):
    """Minimal stand-in for an ActionExecution/LiveAction record.

    Only the attributes the action chain runner inspects are provided.
    """
    def __init__(self, status=LIVEACTION_STATUS_SUCCEEDED, result=''):
        self.result = result
        self.status = status
        # Tests never rely on a persisted id, so it stays unset.
        self.id = None
FIXTURES_PACK = 'generic'
# Model fixtures (actions and runner types) loaded once for the whole module.
TEST_MODELS = {
    'actions': ['a1.yaml', 'a2.yaml', 'action_4_action_context_param.yaml'],
    'runners': ['testrunner1.yaml']
}
MODELS = FixturesLoader().load_models(fixtures_pack=FIXTURES_PACK,
                                      fixtures_dict=TEST_MODELS)
ACTION_1 = MODELS['actions']['a1.yaml']
ACTION_2 = MODELS['actions']['a2.yaml']
ACTION_3 = MODELS['actions']['action_4_action_context_param.yaml']
RUNNER = MODELS['runners']['testrunner1.yaml']
# One shared loader for resolving fixture paths (previously a fresh
# FixturesLoader instance was created for every single path below).
_FIXTURES_LOADER = FixturesLoader()


def _chain_fixture_path(file_name):
    """Return the absolute path of an actionchain fixture in the generic pack."""
    return _FIXTURES_LOADER.get_fixture_file_path_abs(
        FIXTURES_PACK, 'actionchains', file_name)


CHAIN_1_PATH = _chain_fixture_path('chain1.yaml')
CHAIN_2_PATH = _chain_fixture_path('chain2.yaml')
CHAIN_ACTION_CALL_NO_PARAMS_PATH = _chain_fixture_path('chain_action_call_no_params.yaml')
CHAIN_NO_DEFAULT = _chain_fixture_path('no_default_chain.yaml')
CHAIN_NO_DEFAULT_2 = _chain_fixture_path('no_default_chain_2.yaml')
CHAIN_BAD_DEFAULT = _chain_fixture_path('bad_default_chain.yaml')
CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME = _chain_fixture_path(
    'chain_broken_on_success_path_static_task_name.yaml')
CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME = _chain_fixture_path(
    'chain_broken_on_failure_path_static_task_name.yaml')
CHAIN_FIRST_TASK_RENDER_FAIL_PATH = _chain_fixture_path(
    'chain_first_task_parameter_render_fail.yaml')
CHAIN_SECOND_TASK_RENDER_FAIL_PATH = _chain_fixture_path(
    'chain_second_task_parameter_render_fail.yaml')
CHAIN_LIST_TEMP_PATH = _chain_fixture_path('chain_list_template.yaml')
CHAIN_DICT_TEMP_PATH = _chain_fixture_path('chain_dict_template.yaml')
CHAIN_DEP_INPUT = _chain_fixture_path('chain_dependent_input.yaml')
CHAIN_DEP_RESULTS_INPUT = _chain_fixture_path('chain_dep_result_input.yaml')
MALFORMED_CHAIN_PATH = _chain_fixture_path('malformedchain.yaml')
CHAIN_TYPED_PARAMS = _chain_fixture_path('chain_typed_params.yaml')
CHAIN_SYSTEM_PARAMS = _chain_fixture_path('chain_typed_system_params.yaml')
CHAIN_WITH_ACTIONPARAM_VARS = _chain_fixture_path('chain_with_actionparam_vars.yaml')
CHAIN_WITH_SYSTEM_VARS = _chain_fixture_path('chain_with_system_vars.yaml')
CHAIN_WITH_PUBLISH = _chain_fixture_path('chain_with_publish.yaml')
CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE = _chain_fixture_path(
    'chain_publish_params_rendering_failure.yaml')
CHAIN_WITH_INVALID_ACTION = _chain_fixture_path('chain_with_invalid_action.yaml')
CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE = _chain_fixture_path(
    'chain_action_params_and_parameters.yaml')
CHAIN_ACTION_PARAMS_ATTRIBUTE = _chain_fixture_path('chain_action_params_attribute.yaml')
CHAIN_ACTION_PARAMETERS_ATTRIBUTE = _chain_fixture_path('chain_action_parameters_attribute.yaml')
CHAIN_ACTION_INVALID_PARAMETER_TYPE = _chain_fixture_path(
    'chain_invalid_parameter_type_passed_to_action.yaml')
# Notification structure used to verify notify settings flow through a chain run.
CHAIN_NOTIFY_API = {'notify': {'on-complete': {'message': 'foo happened.'}}}
CHAIN_NOTIFY_DB = NotificationsHelper.to_model(CHAIN_NOTIFY_API)
@mock.patch.object(action_db_util, 'get_runnertype_by_name',
mock.MagicMock(return_value=RUNNER))
class TestActionChainRunner(DbTestCase):
def test_runner_creation(self):
runner = acr.get_runner()
self.assertTrue(runner)
self.assertTrue(runner.runner_id)
def test_malformed_chain(self):
try:
chain_runner = acr.get_runner()
chain_runner.entry_point = MALFORMED_CHAIN_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
self.assertTrue(False, 'Expected pre_run to fail.')
except runnerexceptions.ActionRunnerPreRunError:
self.assertTrue(True)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
    @mock.patch.object(action_db_util, 'get_action_by_ref',
                       mock.MagicMock(return_value=ACTION_1))
    @mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
    def test_chain_runner_chain_second_task_times_out(self, request):
        """When a mid-chain task times out, the whole chain must end TIMED_OUT."""
        # Second task in the chain times out so the action chain status should be timeout
        chain_runner = acr.get_runner()
        chain_runner.entry_point = CHAIN_2_PATH
        chain_runner.action = ACTION_1
        original_run_action = chain_runner._run_action
        # Wrap the real _run_action and force TIMED_OUT only for the second task
        # (wolfpack.a2), leaving the rest of the chain flow untouched.
        def mock_run_action(*args, **kwargs):
            original_live_action = args[0]
            liveaction = original_run_action(*args, **kwargs)
            if original_live_action.action == 'wolfpack.a2':
                # Mock a timeout for second task
                liveaction.status = LIVEACTION_STATUS_TIMED_OUT
            return liveaction
        chain_runner._run_action = mock_run_action
        action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
                                                           pack=ACTION_1.pack)
        chain_runner.liveaction = LiveActionDB(action=action_ref)
        chain_runner.container_service = RunnerContainerService()
        chain_runner.pre_run()
        status, _, _ = chain_runner.run({})
        self.assertEqual(status, LIVEACTION_STATUS_TIMED_OUT)
        self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
        # based on the chain the callcount is known to be 3. Not great but works.
        self.assertEqual(request.call_count, 3)
    @mock.patch.object(action_db_util, 'get_action_by_ref',
                       mock.MagicMock(return_value=ACTION_1))
    @mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
    def test_chain_runner_task_is_canceled_while_running(self, request):
        """A CANCELED task must end the chain promptly instead of looping forever."""
        # Second task in the action is CANCELED, make sure runner doesn't get stuck in an infinite
        # loop
        chain_runner = acr.get_runner()
        chain_runner.entry_point = CHAIN_2_PATH
        chain_runner.action = ACTION_1
        original_run_action = chain_runner._run_action
        # Swap the request mock's return value per task BEFORE delegating, so the
        # second task (wolfpack.a2) is reported CANCELED and all others SUCCEEDED.
        def mock_run_action(*args, **kwargs):
            original_live_action = args[0]
            if original_live_action.action == 'wolfpack.a2':
                status = LIVEACTION_STATUS_CANCELED
            else:
                status = LIVEACTION_STATUS_SUCCEEDED
            request.return_value = (DummyActionExecution(status=status), None)
            liveaction = original_run_action(*args, **kwargs)
            return liveaction
        chain_runner._run_action = mock_run_action
        action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
                                                           pack=ACTION_1.pack)
        chain_runner.liveaction = LiveActionDB(action=action_ref)
        chain_runner.container_service = RunnerContainerService()
        chain_runner.pre_run()
        status, _, _ = chain_runner.run({})
        self.assertEqual(status, LIVEACTION_STATUS_CANCELED)
        self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
        # Chain count should be 2 since the last task doesn't get called since the second one was
        # canceled
        self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_task_action_call_with_no_params(self, request):
# Make sure that the runner doesn't explode if task definition contains
# no "params" section
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_CALL_NO_PARAMS_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default_multiple_options(self, request):
# subtle difference is that when there are multiple possible default nodes
# the order per chain definition may not be preseved. This is really a
# poorly formatted chain but we still the best attempt to work.
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT_2
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 2.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_bad_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BAD_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = 'Unable to find node with name "bad_default" referenced in "default".'
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch('eventlet.sleep', mock.MagicMock())
@mock.patch.object(action_db_util, 'get_liveaction_by_id', mock.MagicMock(
return_value=DummyActionExecution()))
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_RUNNING), None))
def test_chain_runner_success_path_with_wait(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_FAILED), None))
def test_chain_runner_failure_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_success_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c5" referenced in "on-success" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_failure_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c6" referenced in "on-failure" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', side_effect=RuntimeError('Test Failure.'))
def test_chain_runner_action_exception(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, results, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
error_count = 0
for task_result in results['tasks']:
if task_result['result'].get('error', None):
error_count += 1
self.assertEqual(error_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_str_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "1"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_list_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_LIST_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "[2, 3, 4]"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_dict_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DICT_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {"p1": {"p1.3": "[3, 4]", "p1.2": "2", "p1.1": "1"}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'p2': u'1', u'p3': u'1', u'p1': u'1'}]
# Each of the call_args must be one of
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_results_param(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_RESULTS_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'out': u"{'c2': {'o1': '1'}, 'c1': {'o1': '1'}}"}]
# Each of the call_args must be one of
self.assertEqual(request.call_count, 3)
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(RunnerType, 'get_by_name',
mock.MagicMock(return_value=RUNNER))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_missing_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertEqual(request.call_count, 0, 'No call expected.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_single_task(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# No tasks ran because rendering of parameters for the first task failed
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(result['tasks'], [])
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c1". Parameter rendering failed' in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_multiple_tasks(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SECOND_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# Verify that only first task has ran
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(len(result['tasks']), 1)
self.assertEqual(result['tasks'][0]['name'], 'c1')
expected_error = ('Failed rendering value for action parameter "p1" in '
'task "c2" (template string={{s1}}):')
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c2". Parameter rendering failed' in result['error'])
self.assertTrue(expected_error in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_params(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_TYPED_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 'two', 's3': 3.14})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'booltype': True,
'inttype': 1,
'numbertype': 3.14,
'strtype': 'two',
'arrtype': ['1', 'two'],
'objtype': {'s2': 'two',
'k1': '1'}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_system_params(self, request):
kvps = []
try:
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='1')))
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a.b.c', value='two')))
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SYSTEM_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two'}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
finally:
for kvp in kvps:
KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_system_params(self, request):
    """Chain "vars" may reference datastore values and are usable when
    rendering the parameters of a task's action."""
    kvps = []
    try:
        # Seed the datastore key referenced from the chain's vars section.
        kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='two')))
        chain_runner = acr.get_runner()
        chain_runner.entry_point = CHAIN_WITH_SYSTEM_VARS
        chain_runner.action = ACTION_2
        chain_runner.container_service = RunnerContainerService()
        chain_runner.pre_run()
        chain_runner.run({})
        self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
        expected_value = {'inttype': 1,
                          'strtype': 'two',
                          'booltype': True}
        mock_args, _ = request.call_args
        self.assertEqual(mock_args[0].parameters, expected_value)
    finally:
        # Always remove the datastore entry created above.
        for kvp in kvps:
            KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_action_params(self, request):
    """Chain "vars" can be seeded from the action's input parameters."""
    runner = acr.get_runner()
    runner.entry_point = CHAIN_WITH_ACTIONPARAM_VARS
    runner.action = ACTION_2
    runner.container_service = RunnerContainerService()
    runner.pre_run()
    runner.run({'input_a': 'two'})
    self.assertNotEqual(runner.chain_holder.actionchain, None)

    expected = {
        'inttype': 1,
        'strtype': 'two',
        'booltype': True,
    }
    passed_args, _ = request.call_args
    self.assertEqual(passed_args[0].parameters, expected)
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request',
                   return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_chain_runner_publish(self, request):
    """Published variables are rendered and, with display_published set,
    exposed in the chain run result."""
    chain_runner = acr.get_runner()
    chain_runner.entry_point = CHAIN_WITH_PUBLISH
    chain_runner.action = ACTION_2
    chain_runner.container_service = RunnerContainerService()
    chain_runner.runner_parameters = {'display_published': True}
    chain_runner.pre_run()

    action_parameters = {'action_param_1': 'test value 1'}
    _, result, _ = chain_runner.run(action_parameters=action_parameters)

    # We also assert that the action parameters are available in the
    # "publish" scope
    self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
    expected_value = {'inttype': 1,
                      'strtype': 'published',
                      'booltype': True,
                      'published_action_param': action_parameters['action_param_1']}
    mock_args, _ = request.call_args
    self.assertEqual(mock_args[0].parameters, expected_value)
    # Assert that the variables are correctly published
    self.assertEqual(result['published'],
                     {'published_action_param': u'test value 1', 'o1': u'published'})
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_publish_param_rendering_failure(self, request):
    """A failure while rendering a "publish" value aborts the whole chain.

    Parameter rendering should result in a top level error which aborts
    the whole chain.
    """
    chain_runner = acr.get_runner()
    chain_runner.entry_point = CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE
    chain_runner.action = ACTION_1
    chain_runner.container_service = RunnerContainerService()
    chain_runner.pre_run()

    try:
        chain_runner.run({})
    except ParameterRenderingFailedException as e:
        # TODO: Should we treat this as task error? Right now it bubbles all
        # the way up and it's not really consistent with action param
        # rendering failure
        expected_error = ('Failed rendering value for publish parameter "p1" in '
                          'task "c2" (template string={{ not_defined }}):')
        # Removed a dead `pass` that followed this assertion.
        self.assertTrue(expected_error in str(e))
    else:
        self.fail('Exception was not thrown')
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_task_passes_invalid_parameter_type_to_action(self, mock_request):
    """A value that cannot be cast to the declared type raises ValueError."""
    runner = acr.get_runner()
    runner.entry_point = CHAIN_ACTION_INVALID_PARAMETER_TYPE
    runner.action = ACTION_2
    runner.container_service = RunnerContainerService()
    runner.pre_run()

    expected_msg = ('Failed to cast value "stringnotanarray" for parameter '
                    '"arrtype" of type "array"')
    self.assertRaisesRegexp(ValueError, expected_msg,
                            runner.run, action_parameters={})
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=None))
@mock.patch.object(action_service, 'request',
                   return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_action_chain_runner_referenced_action_doesnt_exist(self, mock_request):
    """Referencing a non-existent action fails the chain with a clear error."""
    # Action referenced by a task doesn't exist, should result in a top level error
    chain_runner = acr.get_runner()
    chain_runner.entry_point = CHAIN_WITH_INVALID_ACTION
    chain_runner.action = ACTION_2
    chain_runner.container_service = RunnerContainerService()
    chain_runner.pre_run()

    action_parameters = {}
    status, output, _ = chain_runner.run(action_parameters=action_parameters)

    expected_error = ('Failed to run task "c1". Action with reference "wolfpack.a2" '
                      'doesn\'t exist.')
    self.assertEqual(status, LIVEACTION_STATUS_FAILED)
    self.assertTrue(expected_error in output['error'])
    self.assertTrue('Traceback' in output['traceback'], output['traceback'])
def test_exception_is_thrown_if_both_params_and_parameters_attributes_are_provided(self):
    """Declaring both "params" and "parameters" on a task is rejected at pre_run."""
    runner = acr.get_runner()
    runner.entry_point = CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE
    runner.action = ACTION_2
    runner.container_service = RunnerContainerService()

    expected_msg = ('Either "params" or "parameters" attribute needs to be provided, but '
                    'not both')
    self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError, expected_msg,
                            runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
                   mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_params_and_parameters_attributes_both_work(self, _):
    """Both the "params" and the "parameters" task attributes resolve values."""
    # "params" attribute used
    chain_runner = acr.get_runner()
    chain_runner.entry_point = CHAIN_ACTION_PARAMS_ATTRIBUTE
    chain_runner.action = ACTION_2
    chain_runner.container_service = RunnerContainerService()
    chain_runner.pre_run()

    # Keep a reference to the real implementation so the wrappers below
    # can delegate to it after asserting on the resolved parameters.
    original_build_liveaction_object = chain_runner._build_liveaction_object

    def mock_build_liveaction_object(action_node, resolved_params, parent_context):
        # Verify parameters are correctly passed to the action
        self.assertEqual(resolved_params, {'pparams': 'v1'})
        original_build_liveaction_object(action_node=action_node,
                                         resolved_params=resolved_params,
                                         parent_context=parent_context)

    chain_runner._build_liveaction_object = mock_build_liveaction_object

    action_parameters = {}
    status, output, _ = chain_runner.run(action_parameters=action_parameters)
    self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)

    # "parameters" attribute used
    chain_runner = acr.get_runner()
    chain_runner.entry_point = CHAIN_ACTION_PARAMETERS_ATTRIBUTE
    chain_runner.action = ACTION_2
    chain_runner.container_service = RunnerContainerService()
    chain_runner.pre_run()

    # NOTE(review): this second wrapper still delegates to the FIRST
    # runner's bound _build_liveaction_object — appears intentional since
    # only the parameter assertion matters here; confirm.
    def mock_build_liveaction_object(action_node, resolved_params, parent_context):
        # Verify parameters are correctly passed to the action
        self.assertEqual(resolved_params, {'pparameters': 'v1'})
        original_build_liveaction_object(action_node=action_node,
                                         resolved_params=resolved_params,
                                         parent_context=parent_context)

    chain_runner._build_liveaction_object = mock_build_liveaction_object

    action_parameters = {}
    status, output, _ = chain_runner.run(action_parameters=action_parameters)
    self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
@classmethod
def tearDownClass(cls):
    # Remove the fixture models loaded for this test suite from the db.
    FixturesLoader().delete_models_from_db(MODELS)
| apache-2.0 |
josecolella/PLD | bin/osx/treasurehunters.app/Contents/Resources/lib/python3.4/numpy/linalg/tests/test_regression.py | 7 | 2479 | """ Test functions for linalg module
"""
from __future__ import division, absolute_import, print_function
from numpy.testing import *
import numpy as np
from numpy import linalg, arange, float64, array, dot, transpose
rlevel = 1


class TestRegression(TestCase):
    """Regression tests for previously reported numpy.linalg tickets."""

    def test_eig_build(self, level = rlevel):
        """Ticket #652: eigenvalues of a 13x13 integer-valued matrix."""
        expected = np.array([1.03221168e+02 +0.j,
                             -1.91843603e+01 +0.j,
                             -6.04004526e-01+15.84422474j,
                             -6.04004526e-01-15.84422474j,
                             -1.13692929e+01 +0.j,
                             -6.57612485e-01+10.41755503j,
                             -6.57612485e-01-10.41755503j,
                             1.82126812e+01 +0.j,
                             1.06011014e+01 +0.j,
                             7.80732773e+00 +0.j,
                             -7.65390898e-01 +0.j,
                             1.51971555e-15 +0.j,
                             -1.51308713e-15 +0.j])
        mat = np.arange(13 * 13, dtype=np.float64).reshape(13, 13) % 17
        actual, _ = np.linalg.eig(mat)
        # Eigenvalue order is unspecified; compare the sorted spectra.
        assert_array_almost_equal(np.sort(actual), np.sort(expected))

    def test_eigh_build(self, level = rlevel):
        """Ticket #662: eigh on a small symmetric covariance matrix."""
        expected_vals = [68.60568999, 89.57756725, 106.67185574]
        cov = np.array([[77.70273908, 3.51489954, 15.64602427],
                        [3.51489954, 88.97013878, -1.07431931],
                        [15.64602427, -1.07431931, 98.18223512]])
        vals, _ = np.linalg.eigh(cov)
        assert_array_almost_equal(vals, expected_vals)

    def test_svd_build(self, level = rlevel):
        """Ticket #627: the left null-space from svd annihilates the matrix."""
        mat = np.array([[0., 1.], [1., 1.], [2., 1.], [3., 1.]])
        rows, cols = mat.shape
        u, s, vh = np.linalg.svd(mat)
        residual = np.dot(u[:, cols:].T, mat)
        assert_array_almost_equal(residual, np.zeros((2, 2)))

    def test_norm_vector_badarg(self):
        """Regression for #786: Frobenius norm for vectors raises
        TypeError."""
        self.assertRaises(ValueError, np.linalg.norm, np.array([1., 2., 3.]), 'fro')

    def test_lapack_endian(self):
        # For bug #1482: cholesky must agree across byte orders.
        big_endian = np.array([[5.7998084, -2.1825367],
                               [-2.1825367, 9.85910595]], dtype='>f8')
        little_endian = np.array(big_endian, dtype='<f8')
        assert_array_equal(np.linalg.cholesky(big_endian),
                           np.linalg.cholesky(little_endian))

    def test_large_svd_32bit(self):
        # See gh-4442, 64bit would require very large/slow matrices.
        np.linalg.svd(np.eye(1000, 66))
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    run_module_suite()
| mit |
knowledgepoint-devs/askbot-devel | askbot/conf/question_lists.py | 13 | 2487 | """
Settings responsible for display of questions lists
"""
from askbot.conf.settings_wrapper import settings
from askbot.conf.super_groups import DATA_AND_FORMATTING
from askbot.deps import livesettings
from django.utils.translation import ugettext_lazy as _
# Settings group rendered on the "Listings of questions" admin page.
QUESTION_LISTS = livesettings.ConfigurationGroup(
    'QUESTION_LISTS',
    _('Listings of questions'),
    super_group=DATA_AND_FORMATTING
)

# Toggles for the three question-scope selectors ("All", "Unanswered",
# "Followed").  The help text states at least one must stay enabled;
# re-enabling is enforced by enable_default_selector_if_disabled below.
settings.register(
    livesettings.BooleanValue(
        QUESTION_LISTS,
        'ALL_SCOPE_ENABLED',
        default=True,
        description=_('Enable "All Questions" selector'),
        help_text=_('At least one of these selectors must be enabled')
    )
)

settings.register(
    livesettings.BooleanValue(
        QUESTION_LISTS,
        'UNANSWERED_SCOPE_ENABLED',
        default=True,
        description=_('Enable "Unanswered Questions" selector'),
        help_text=_('At least one of these selectors must be enabled')
    )
)

settings.register(
    livesettings.BooleanValue(
        QUESTION_LISTS,
        'FOLLOWED_SCOPE_ENABLED',
        default=True,
        description=_('Enable "Followed Questions" selector'),
        help_text=_('At least one of these selectors must be enabled')
    )
)
def enable_default_selector_if_disabled(old_value, new_value):
    """Livesettings update callback: when a scope is chosen as the default
    selector, make sure that selector itself is switched on."""
    switch_name = new_value.upper() + '_SCOPE_ENABLED'
    if getattr(settings, switch_name) is False:
        settings.update(switch_name, True)
    return new_value
# Selector choices offered to logged-in users.
SCOPE_CHOICES_AUTHENTICATED = (
    ('all', _('All Questions')),
    ('unanswered', _('Unanswered Questions')),
    ('followed', _('Followed Questions'))
)

settings.register(
    livesettings.StringValue(
        QUESTION_LISTS,
        'DEFAULT_SCOPE_AUTHENTICATED',
        choices=SCOPE_CHOICES_AUTHENTICATED,
        default='all',
        description=_('Default questions selector for the authenticated users'),
        # Selecting a default implicitly re-enables that selector.
        update_callback=enable_default_selector_if_disabled
    )
)

SCOPE_CHOICES_ANONYMOUS = (#anonymous users can't see followed questions
    ('all', _('All Questions')),
    ('unanswered', _('Unanswered Questions')),
)

settings.register(
    livesettings.StringValue(
        QUESTION_LISTS,
        'DEFAULT_SCOPE_ANONYMOUS',
        choices=SCOPE_CHOICES_ANONYMOUS,
        default='all',
        description=_('Default questions selector for the anonymous users'),
        # Selecting a default implicitly re-enables that selector.
        update_callback=enable_default_selector_if_disabled
    )
)
| gpl-3.0 |
OpenUpgrade/OpenUpgrade | openerp/tools/convert.py | 1 | 41298 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import cStringIO
import csv
import logging
import os.path
import re
import sys
# for eval context:
import time
import openerp
import openerp.release
import openerp.workflow
from yaml_import import convert_yaml_import
import assertion_report
_logger = logging.getLogger(__name__)
try:
    import pytz
except:
    # pytz is optional: fall back to a stub that exposes the single
    # attribute (all_timezones) used in the XML eval context.
    _logger.warning('could not find pytz library, please install it')
    class pytzclass(object):
        all_timezones=[]
    pytz=pytzclass()

from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from lxml import etree, builder

import misc
from config import config
from translate import _

# List of etree._Element subclasses that we choose to ignore when parsing XML.
from misc import SKIPPED_ELEMENT_TYPES

from misc import pickle, unquote
from openerp import SUPERUSER_ID

from safe_eval import safe_eval as s_eval
# nocopy=True: evaluate against the caller's dict without copying it.
safe_eval = lambda expr, ctx={}: s_eval(expr, ctx, nocopy=True)

from openerp.openupgrade import openupgrade_log
class ParseError(Exception):
    """Raised when an XML/CSV data file cannot be processed.

    Carries the offending text, file name and line number so the error
    message can point the user at the exact record.
    """

    def __init__(self, msg, text, filename, lineno):
        # Call Exception.__init__ so e.args is populated (the original
        # skipped this, leaving args empty and breaking pickling/repr).
        super(ParseError, self).__init__(msg, text, filename, lineno)
        self.msg = msg
        self.text = text
        self.filename = filename
        self.lineno = lineno

    def __str__(self):
        return '"%s" while parsing %s:%s, near\n%s' \
            % (self.msg, self.filename, self.lineno, self.text)
def _ref(self, cr):
return lambda x: self.id_get(cr, x)
def _obj(pool, cr, uid, model_str, context=None):
model = pool[model_str]
return lambda x: model.browse(cr, uid, x, context=context)
def _get_idref(self, cr, uid, model_str, context, idref):
    """Build the evaluation context for ``eval``/``search`` attributes:
    the known external-id map plus date/time helpers, the server version,
    a ``ref`` resolver and (when a model is given) an ``obj`` browser."""
    eval_ctx = dict(
        idref,
        time=time,
        DateTime=datetime,
        datetime=datetime,
        timedelta=timedelta,
        relativedelta=relativedelta,
        version=openerp.release.major_version,
        ref=_ref(self, cr),
        pytz=pytz,
    )
    if len(model_str):
        eval_ctx['obj'] = _obj(self.pool, cr, uid, model_str, context=context)
    return eval_ctx
def _fix_multiple_roots(node):
    """
    Surround the children of the ``node`` element of an XML field with a
    single root "data" element, to prevent having a document with multiple
    roots once parsed separately.

    XML nodes should have one root only, but we'd like to support
    direct multiple roots in our partial documents (like inherited view architectures).
    As a convention we'll surround multiple root with a container "data" element, to be
    ignored later when parsing.
    """
    real_nodes = [x for x in node if not isinstance(x, SKIPPED_ELEMENT_TYPES)]
    if len(real_nodes) > 1:
        data_node = etree.Element("data")
        # NOTE(review): append() reparents each child away from ``node``
        # while we iterate over it.  This relies on lxml's child iterator
        # pre-fetching the next sibling (it would skip elements with
        # xml.etree.ElementTree) — confirm before switching XML libraries.
        for child in node:
            data_node.append(child)
        node.append(data_node)
def _eval_xml(self, node, pool, cr, uid, idref, context=None):
    """Evaluate a <field>/<value>, <function> or <test> element to a
    Python value.

    For <field>/<value> the result depends on the element attributes:
    ``search`` (run a domain and return matching ids), ``eval`` (safe_eval
    the expression) or the ``type`` attribute (xml/html/char/base64/int/
    float/file/list/tuple conversions of the node content).
    """
    if context is None:
        context = {}
    if node.tag in ('field','value'):
        t = node.get('type','char')
        f_model = node.get('model', '').encode('utf-8')
        if node.get('search'):
            f_search = node.get("search",'').encode('utf-8')
            f_use = node.get("use",'id').encode('utf-8')
            f_name = node.get("name",'').encode('utf-8')
            idref2 = {}
            if f_search:
                idref2 = _get_idref(self, cr, uid, f_model, context, idref)
            q = safe_eval(f_search, idref2)
            ids = pool[f_model].search(cr, uid, q)
            if f_use != 'id':
                # Substitute the requested field for the raw ids.
                ids = map(lambda x: x[f_use], pool[f_model].read(cr, uid, ids, [f_use]))
            _cols = pool[f_model]._columns
            if (f_name in _cols) and _cols[f_name]._type=='many2many':
                return ids
            # Scalar target: keep only the first match (many2one reads
            # come back as (id, name) tuples, keep the id).
            f_val = False
            if len(ids):
                f_val = ids[0]
                if isinstance(f_val, tuple):
                    f_val = f_val[0]
            return f_val
        a_eval = node.get('eval','')
        if a_eval:
            idref2 = _get_idref(self, cr, uid, f_model, context, idref)
            try:
                return safe_eval(a_eval, idref2)
            except Exception:
                logging.getLogger('openerp.tools.convert.init').error(
                    'Could not eval(%s) for %s in %s', a_eval, node.get('name'), context)
                raise
        def _process(s, idref):
            # Substitute "%(xml_id)s/d" placeholders with database ids,
            # resolving and caching unknown external ids on the fly.
            matches = re.finditer('[^%]%\((.*?)\)[ds]', s)
            done = []
            for m in matches:
                found = m.group()[1:]
                if found in done:
                    continue
                done.append(found)
                id = m.groups()[0]
                if not id in idref:
                    idref[id] = self.id_get(cr, id)
                s = s.replace(found, str(idref[id]))

            s = s.replace('%%', '%') # Quite wierd but it's for (somewhat) backward compatibility sake
            return s
        if t == 'xml':
            _fix_multiple_roots(node)
            return '<?xml version="1.0"?>\n'\
                +_process("".join([etree.tostring(n, encoding='utf-8')
                                   for n in node]), idref)
        if t == 'html':
            return _process("".join([etree.tostring(n, encoding='utf-8')
                                     for n in node]), idref)

        data = node.text
        if node.get('file'):
            with openerp.tools.file_open(node.get('file'), 'rb') as f:
                data = f.read()

        if t == 'file':
            from ..modules import module
            path = data.strip()
            if not module.get_module_resource(self.module, path):
                raise IOError("No such file or directory: '%s' in %s" % (
                    path, self.module))
            return '%s,%s' % (self.module, path)

        if t == 'char':
            return data

        if t == 'base64':
            return data.encode('base64')

        if t == 'int':
            d = data.strip()
            if d == 'None':
                return None
            return int(d)

        if t == 'float':
            return float(data.strip())

        if t in ('list','tuple'):
            # Recurse into child <value> elements.
            res=[]
            for n in node.iterchildren(tag='value'):
                res.append(_eval_xml(self,n,pool,cr,uid,idref))
            if t=='tuple':
                return tuple(res)
            return res
    elif node.tag == "function":
        args = []
        a_eval = node.get('eval','')
        # FIXME: should probably be exclusive
        if a_eval:
            idref['ref'] = lambda x: self.id_get(cr, x)
            args = safe_eval(a_eval, idref)
        for n in node:
            return_val = _eval_xml(self,n, pool, cr, uid, idref, context)
            if return_val is not None:
                args.append(return_val)
        model = pool[node.get('model', '')]
        method = node.get('name')
        # Call the model method with the collected positional arguments.
        res = getattr(model, method)(cr, uid, *args)
        return res
    elif node.tag == "test":
        return node.text
# Matches a "/" that is NOT preceded by a backslash, i.e. a real menu
# path separator as opposed to an escaped literal slash.
escape_re = re.compile(r'(?<!\\)/')

def escape(x):
    """Unescape "\\/" sequences back into plain "/" characters."""
    return re.sub(r'\\/', '/', x)
class xml_import(object):
@staticmethod
def nodeattr2bool(node, attr, default=False):
if not node.get(attr):
return default
val = node.get(attr).strip()
if not val:
return default
return val.lower() not in ('0', 'false', 'off')
def isnoupdate(self, data_node=None):
return self.noupdate or (len(data_node) and self.nodeattr2bool(data_node, 'noupdate', False))
def get_context(self, data_node, node, eval_dict):
    """Merge the ``context`` attributes of the surrounding <data> node and
    of ``node`` itself into a single value (usually a dict).

    Strings that fail to evaluate server-side are kept verbatim: they may
    reference variables that only exist client-side.
    """
    data_node_context = (len(data_node) and data_node.get('context','').encode('utf8'))
    node_context = node.get("context",'').encode('utf8')
    context = {}
    # The node's own context is applied last so it overrides <data>'s.
    for ctx in (data_node_context, node_context):
        if ctx:
            try:
                ctx_res = safe_eval(ctx, eval_dict)
                if isinstance(context, dict):
                    context.update(ctx_res)
                else:
                    context = ctx_res
            except (ValueError, NameError):
                # Some contexts contain references that are only valid at runtime at
                # client-side, so in that case we keep the original context string
                # as it is. We also log it, just in case.
                context = ctx
                _logger.debug('Context value (%s) for element with id "%s" or its data node does not parse '\
                              'at server-side, keeping original string, in case it\'s meant for client side only',
                              ctx, node.get('id','n/a'), exc_info=True)
    return context
def get_uid(self, cr, uid, data_node, node):
node_uid = node.get('uid','') or (len(data_node) and data_node.get('uid',''))
if node_uid:
return self.id_get(cr, node_uid)
return uid
def _test_xml_id(self, xml_id):
    """Validate an external id: at most one dot, any referenced module
    must be installed, and length may not exceed 64 characters (logged
    only).  Also records the id in the OpenUpgrade analysis log."""
    id = xml_id
    if '.' in xml_id:
        module, id = xml_id.split('.', 1)
        assert '.' not in id, """The ID reference "%s" must contain
maximum one dot. They are used to refer to other modules ID, in the
form: module.record_id""" % (xml_id,)
        if module != self.module:
            modcnt = self.pool['ir.module.module'].search_count(self.cr, self.uid, ['&', ('name', '=', module), ('state', 'in', ['installed'])])
            assert modcnt == 1, """The ID "%s" refers to an uninstalled module""" % (xml_id,)

    if len(id) > 64:
        _logger.error('id: %s is to long (max: 64)', id)
    # OpenUpgrade: log every external id encountered during import.
    openupgrade_log.log_xml_id(self.cr, self.module, xml_id)
def _tag_delete(self, cr, rec, data_node=None, mode=None):
    """Handle a <delete> element: unlink the records matched by a search
    domain and/or a single external id.  A failing search or a missing
    id is logged and skipped rather than aborting the import."""
    d_model = rec.get("model")
    d_search = rec.get("search",'').encode('utf-8')
    d_id = rec.get("id")
    ids = []

    if d_search:
        idref = _get_idref(self, cr, self.uid, d_model, context={}, idref={})
        try:
            ids = self.pool[d_model].search(cr, self.uid, safe_eval(d_search, idref))
        except ValueError:
            _logger.warning('Skipping deletion for failed search `%r`', d_search, exc_info=True)
            pass

    if d_id:
        try:
            ids.append(self.id_get(cr, d_id))
        except ValueError:
            # d_id cannot be found. doesn't matter in this case
            _logger.warning('Skipping deletion for missing XML ID `%r`', d_id, exc_info=True)
            pass

    if ids:
        self.pool[d_model].unlink(cr, self.uid, ids)
def _remove_ir_values(self, cr, name, value, model):
ir_values_obj = self.pool['ir.values']
ir_value_ids = ir_values_obj.search(cr, self.uid, [('name','=',name),('value','=',value),('model','=',model)])
if ir_value_ids:
ir_values_obj.unlink(cr, self.uid, ir_value_ids)
return True
def _tag_report(self, cr, rec, data_node=None, mode=None):
    """Handle a <report> element: create/update an ir.actions.report.xml
    record and optionally register it in the model's print menu."""
    res = {}
    # name, model and report_name are mandatory.
    for dest,f in (('name','string'),('model','model'),('report_name','name')):
        res[dest] = rec.get(f,'').encode('utf8')
        assert res[dest], "Attribute %s of report is empty !" % (f,)
    # Optional attributes copied verbatim onto the record.
    for field,dest in (('rml','report_rml'),('file','report_rml'),('xml','report_xml'),('xsl','report_xsl'),
                       ('attachment','attachment'),('attachment_use','attachment_use'), ('usage','usage'),
                       ('report_type', 'report_type'), ('parser', 'parser')):
        if rec.get(field):
            res[dest] = rec.get(field).encode('utf8')
    if rec.get('auto'):
        res['auto'] = safe_eval(rec.get('auto','False'))
    if rec.get('sxw'):
        sxw_content = misc.file_open(rec.get('sxw')).read()
        res['report_sxw_content'] = sxw_content
    if rec.get('header'):
        res['header'] = safe_eval(rec.get('header','False'))

    res['multi'] = rec.get('multi') and safe_eval(rec.get('multi','False'))

    xml_id = rec.get('id','').encode('utf8')
    self._test_xml_id(xml_id)

    if rec.get('groups'):
        g_names = rec.get('groups','').split(',')
        groups_value = []
        for group in g_names:
            # Leading '-' unlinks the group, otherwise it is linked.
            if group.startswith('-'):
                group_id = self.id_get(cr, group[1:])
                groups_value.append((3, group_id))
            else:
                group_id = self.id_get(cr, group)
                groups_value.append((4, group_id))
        res['groups_id'] = groups_value

    id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
    self.idref[xml_id] = int(id)

    if not rec.get('menu') or safe_eval(rec.get('menu','False')):
        keyword = str(rec.get('keyword', 'client_print_multi'))
        value = 'ir.actions.report.xml,'+str(id)
        replace = rec.get('replace', True)
        self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, res['name'], [res['model']], value, replace=replace, isobject=True, xml_id=xml_id)
    elif self.mode=='update' and safe_eval(rec.get('menu','False'))==False:
        # Special check for report having attribute menu=False on update
        value = 'ir.actions.report.xml,'+str(id)
        self._remove_ir_values(cr, res['name'], value, res['model'])
    return id
def _tag_function(self, cr, rec, data_node=None, mode=None):
    """Handle a <function> element: call a model method with evaluated
    arguments.  Skipped entirely in noupdate mode unless installing."""
    if self.isnoupdate(data_node) and self.mode != 'init':
        return
    context = self.get_context(data_node, rec, {'ref': _ref(self, cr)})
    uid = self.get_uid(cr, self.uid, data_node, rec)
    _eval_xml(self,rec, self.pool, cr, uid, self.idref, context=context)
    return
def _tag_url(self, cr, rec, data_node=None, mode=None):
    """Handle a <url> element: create/update an ir.actions.act_url record."""
    url = rec.get("url",'').encode('utf8')
    target = rec.get("target",'').encode('utf8')
    name = rec.get("name",'').encode('utf8')
    xml_id = rec.get('id','').encode('utf8')
    self._test_xml_id(xml_id)

    res = {'name': name, 'url': url, 'target':target}

    id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.act_url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
    self.idref[xml_id] = int(id)
def _tag_act_window(self, cr, rec, data_node=None, mode=None):
    """Handle an <act_window> element: create/update an
    ir.actions.act_window record, optionally group-restricted and bound
    to a source model's contextual actions."""
    name = rec.get('name','').encode('utf-8')
    xml_id = rec.get('id','').encode('utf8')
    self._test_xml_id(xml_id)
    type = rec.get('type','').encode('utf-8') or 'ir.actions.act_window'
    view_id = False
    if rec.get('view_id'):
        view_id = self.id_get(cr, rec.get('view_id','').encode('utf-8'))
    domain = rec.get('domain','').encode('utf-8') or '[]'
    res_model = rec.get('res_model','').encode('utf-8')
    src_model = rec.get('src_model','').encode('utf-8')
    view_type = rec.get('view_type','').encode('utf-8') or 'form'
    view_mode = rec.get('view_mode','').encode('utf-8') or 'tree,form'
    usage = rec.get('usage','').encode('utf-8')
    limit = rec.get('limit','').encode('utf-8')
    auto_refresh = rec.get('auto_refresh','').encode('utf-8')
    uid = self.uid

    # Act_window's 'domain' and 'context' contain mostly literals
    # but they can also refer to the variables provided below
    # in eval_context, so we need to eval() them before storing.
    # Among the context variables, 'active_id' refers to
    # the currently selected items in a list view, and only
    # takes meaning at runtime on the client side. For this
    # reason it must remain a bare variable in domain and context,
    # even after eval() at server-side. We use the special 'unquote'
    # class to achieve this effect: a string which has itself, unquoted,
    # as representation.
    active_id = unquote("active_id")
    active_ids = unquote("active_ids")
    active_model = unquote("active_model")

    def ref(str_id):
        return self.id_get(cr, str_id)

    # Include all locals() in eval_context, for backwards compatibility
    eval_context = {
        'name': name,
        'xml_id': xml_id,
        'type': type,
        'view_id': view_id,
        'domain': domain,
        'res_model': res_model,
        'src_model': src_model,
        'view_type': view_type,
        'view_mode': view_mode,
        'usage': usage,
        'limit': limit,
        'auto_refresh': auto_refresh,
        'uid' : uid,
        'active_id': active_id,
        'active_ids': active_ids,
        'active_model': active_model,
        'ref' : ref,
    }
    context = self.get_context(data_node, rec, eval_context)

    try:
        domain = safe_eval(domain, eval_context)
    except (ValueError, NameError):
        # Some domains contain references that are only valid at runtime at
        # client-side, so in that case we keep the original domain string
        # as it is. We also log it, just in case.
        _logger.debug('Domain value (%s) for element with id "%s" does not parse '\
                      'at server-side, keeping original string, in case it\'s meant for client side only',
                      domain, xml_id or 'n/a', exc_info=True)
    res = {
        'name': name,
        'type': type,
        'view_id': view_id,
        'domain': domain,
        'context': context,
        'res_model': res_model,
        'src_model': src_model,
        'view_type': view_type,
        'view_mode': view_mode,
        'usage': usage,
        'limit': limit,
        'auto_refresh': auto_refresh,
    }

    if rec.get('groups'):
        g_names = rec.get('groups','').split(',')
        groups_value = []
        for group in g_names:
            # Leading '-' unlinks the group, otherwise it is linked.
            if group.startswith('-'):
                group_id = self.id_get(cr, group[1:])
                groups_value.append((3, group_id))
            else:
                group_id = self.id_get(cr, group)
                groups_value.append((4, group_id))
        res['groups_id'] = groups_value

    if rec.get('target'):
        res['target'] = rec.get('target','')
    if rec.get('multi'):
        res['multi'] = safe_eval(rec.get('multi', 'False'))
    id = self.pool['ir.model.data']._update(cr, self.uid, 'ir.actions.act_window', self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
    self.idref[xml_id] = int(id)

    if src_model:
        #keyword = 'client_action_relate'
        keyword = rec.get('key2','').encode('utf-8') or 'client_action_relate'
        value = 'ir.actions.act_window,'+str(id)
        replace = rec.get('replace','') or True
        self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, xml_id, [src_model], value, replace=replace, isobject=True, xml_id=xml_id)
    # TODO add remove ir.model.data
def _tag_ir_set(self, cr, rec, data_node=None, mode=None):
    """Handle an <ir_set> element.  Only processed on 'init' (module
    installation); the child <field> values are evaluated and stored via
    ir.model.data.ir_set()."""
    if self.mode != 'init':
        return
    res = {}
    for field in rec.findall('./field'):
        f_name = field.get("name",'').encode('utf-8')
        f_val = _eval_xml(self,field,self.pool, cr, self.uid, self.idref)
        res[f_name] = f_val
    self.pool['ir.model.data'].ir_set(cr, self.uid, res['key'], res['key2'], res['name'], res['models'], res['value'], replace=res.get('replace',True), isobject=res.get('isobject', False), meta=res.get('meta',None))
def _tag_workflow(self, cr, rec, data_node=None, mode=None):
    """Handle a <workflow> element: send the workflow signal named by
    ``action`` to the record referenced by ``ref`` (or computed by a
    single child node).  Skipped in noupdate mode unless installing."""
    if self.isnoupdate(data_node) and self.mode != 'init':
        return
    model = rec.get('model').encode('ascii')
    w_ref = rec.get('ref')
    if w_ref:
        id = self.id_get(cr, w_ref)
    else:
        # Without a ref, the target id must come from exactly one child
        # node (e.g. a <value> evaluated against the idref map).
        number_children = len(rec)
        assert number_children > 0,\
            'You must define a child node if you dont give a ref'
        assert number_children == 1,\
            'Only one child node is accepted (%d given)' % number_children
        id = _eval_xml(self, rec[0], self.pool, cr, self.uid, self.idref)

    uid = self.get_uid(cr, self.uid, data_node, rec)
    openerp.workflow.trg_validate(
        uid, model, id, rec.get('action').encode('ascii'), cr)
#
# Support two types of notation:
#   name="Inventory Control/Sending Goods"
# or
#   action="action_id"
#   parent="parent_id"
#
def _tag_menuitem(self, cr, rec, data_node=None, mode=None):
    """Handle a <menuitem> element: create/update an ir.ui.menu record.

    The parent is resolved either from a slash-separated ``name`` path
    (missing intermediate menus are created on the fly) or from an
    explicit ``parent`` external id.  If an ``action`` is given it also
    becomes the menu's open action.
    """
    rec_id = rec.get("id",'').encode('ascii')
    self._test_xml_id(rec_id)
    # Split the name on unescaped slashes into menu path components.
    m_l = map(escape, escape_re.split(rec.get("name",'').encode('utf8')))

    values = {'parent_id': False}
    if rec.get('parent', False) is False and len(m_l) > 1:
        # No parent attribute specified and the menu name has several menu components,
        # try to determine the ID of the parent according to menu path
        pid = False
        res = None
        values['name'] = m_l[-1]
        m_l = m_l[:-1] # last part is our name, not a parent
        for idx, menu_elem in enumerate(m_l):
            if pid:
                cr.execute('select id from ir_ui_menu where parent_id=%s and name=%s', (pid, menu_elem))
            else:
                cr.execute('select id from ir_ui_menu where parent_id is null and name=%s', (menu_elem,))
            res = cr.fetchone()
            if res:
                pid = res[0]
            else:
                # the menuitem does't exist but we are in branch (not a leaf)
                _logger.warning('Warning no ID for submenu %s of menu %s !', menu_elem, str(m_l))
                pid = self.pool['ir.ui.menu'].create(cr, self.uid, {'parent_id' : pid, 'name' : menu_elem})
        values['parent_id'] = pid
    else:
        # The parent attribute was specified, if non-empty determine its ID, otherwise
        # explicitly make a top-level menu
        if rec.get('parent'):
            menu_parent_id = self.id_get(cr, rec.get('parent',''))
        else:
            # we get here with <menuitem parent="">, explicit clear of parent, or
            # if no parent attribute at all but menu name is not a menu path
            menu_parent_id = False
        values = {'parent_id': menu_parent_id}
        if rec.get('name'):
            values['name'] = rec.get('name')
        try:
            res = [ self.id_get(cr, rec.get('id','')) ]
        except:
            res = None

    if rec.get('action'):
        a_action = rec.get('action','').encode('utf8')

        # determine the type of action
        action_type, action_id = self.model_id_get(cr, a_action)
        action_type = action_type.split('.')[-1] # keep only type part

        if not values.get('name') and action_type in ('act_window', 'wizard', 'url', 'client', 'server'):
            # Borrow the action's own name for the menu entry.
            a_table = 'ir_act_%s' % action_type.replace('act_', '')
            cr.execute('select name from "%s" where id=%%s' % a_table, (int(action_id),))
            resw = cr.fetchone()
            if resw:
                values['name'] = resw[0]

    if not values.get('name'):
        # ensure menu has a name
        values['name'] = rec_id or '?'

    if rec.get('sequence'):
        values['sequence'] = int(rec.get('sequence'))

    if rec.get('groups'):
        g_names = rec.get('groups','').split(',')
        groups_value = []
        for group in g_names:
            # Leading '-' unlinks the group, otherwise it is linked.
            if group.startswith('-'):
                group_id = self.id_get(cr, group[1:])
                groups_value.append((3, group_id))
            else:
                group_id = self.id_get(cr, group)
                groups_value.append((4, group_id))
        values['groups_id'] = groups_value

    pid = self.pool['ir.model.data']._update(cr, self.uid, 'ir.ui.menu', self.module, values, rec_id, noupdate=self.isnoupdate(data_node), mode=self.mode, res_id=res and res[0] or False)

    if rec_id and pid:
        self.idref[rec_id] = int(pid)

    if rec.get('action') and pid:
        # Bind the action so opening the menu triggers it.
        action = "ir.actions.%s,%d" % (action_type, action_id)
        self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))], action, True, True, xml_id=rec_id)
    return 'ir.ui.menu', pid
def _assert_equals(self, f1, f2, prec=4):
return not round(f1 - f2, prec)
def _tag_assert(self, cr, rec, data_node=None, mode=None):
    """Process an ``<assert>`` element: evaluate its child ``<test>``
    expressions against the browsed record(s) and record success/failure
    in the assertion report.

    Records are selected either by @id (single record) or by a @search
    domain (possibly with a @count check on the result size).
    """
    # noupdate data is not re-checked outside of module installation
    if self.isnoupdate(data_node) and self.mode != 'init':
        return

    rec_model = rec.get("model",'').encode('ascii')
    model = self.pool[rec_model]
    rec_id = rec.get("id",'').encode('ascii')
    self._test_xml_id(rec_id)
    rec_src = rec.get("search",'').encode('utf8')
    rec_src_count = rec.get("count")

    # label used in failure log messages
    rec_string = rec.get("string",'').encode('utf8') or 'unknown'

    ids = None
    eval_dict = {'ref': _ref(self, cr)}
    context = self.get_context(data_node, rec, eval_dict)
    uid = self.get_uid(cr, self.uid, data_node, rec)
    if rec_id:
        ids = [self.id_get(cr, rec_id)]
    elif rec_src:
        # @search holds a literal domain, evaluated with ref() available
        q = safe_eval(rec_src, eval_dict)
        ids = self.pool[rec_model].search(cr, uid, q, context=context)
        if rec_src_count:
            # optional assertion on the number of records found
            count = int(rec_src_count)
            if len(ids) != count:
                self.assertion_report.record_failure()
                msg = 'assertion "%s" failed!\n' \
                      ' Incorrect search count:\n' \
                      ' expected count: %d\n' \
                      ' obtained count: %d\n' \
                      % (rec_string, count, len(ids))
                _logger.error(msg)
                return

    assert ids is not None,\
        'You must give either an id or a search criteria'
    ref = _ref(self, cr)
    for id in ids:
        brrec = model.browse(cr, uid, id, context)
        # dict subclass that first resolves names as fields of the
        # browsed record, so test expressions can use field names directly
        class d(dict):
            def __getitem__(self2, key):
                if key in brrec:
                    return brrec[key]
                return dict.__getitem__(self2, key)
        globals_dict = d()
        globals_dict['floatEqual'] = self._assert_equals
        globals_dict['ref'] = ref
        globals_dict['_ref'] = ref
        for test in rec.findall('./test'):
            f_expr = test.get("expr",'').encode('utf-8')
            # a <test> with no expected body asserts truthiness
            expected_value = _eval_xml(self, test, self.pool, cr, uid, self.idref, context=context) or True
            expression_value = safe_eval(f_expr, globals_dict)
            if expression_value != expected_value: # assertion failed
                self.assertion_report.record_failure()
                msg = 'assertion "%s" failed!\n' \
                      ' xmltag: %s\n' \
                      ' expected value: %r\n' \
                      ' obtained value: %r\n' \
                      % (rec_string, etree.tostring(test), expected_value, expression_value)
                _logger.error(msg)
                return
        else: # all tests were successful for this assertion tag (no break)
            self.assertion_report.record_success()
def _tag_record(self, cr, rec, data_node=None, mode=None):
    """Process a ``<record>`` element: evaluate its ``<field>`` children
    and create/update the corresponding database record through
    ir.model.data._update.

    Returns (model_name, database_id), or None when the record is
    skipped (noupdate mode, or forcecreate disabled).
    """
    rec_model = rec.get("model").encode('ascii')
    model = self.pool[rec_model]
    rec_id = rec.get("id",'').encode('ascii')
    rec_context = rec.get("context", None)
    if rec_context:
        rec_context = safe_eval(rec_context)
    self._test_xml_id(rec_id)
    # in update mode, the record won't be updated if the data node explicitely
    # opt-out using @noupdate="1". A second check will be performed in
    # ir.model.data#_update() using the record's ir.model.data `noupdate` field.
    if self.isnoupdate(data_node) and self.mode != 'init':
        # check if the xml record has no id, skip
        if not rec_id:
            return None

        # split a fully-qualified xml id into (module, local id)
        if '.' in rec_id:
            module,rec_id2 = rec_id.split('.')
        else:
            module = self.module
            rec_id2 = rec_id
        id = self.pool['ir.model.data']._update_dummy(cr, self.uid, rec_model, module, rec_id2)
        if id:
            # if the resource already exists, don't update it but store
            # its database id (can be useful)
            self.idref[rec_id] = int(id)
            return None
        elif not self.nodeattr2bool(rec, 'forcecreate', True):
            # if it doesn't exist and we shouldn't create it, skip it
            return None
        # else create it normally

    res = {}
    for field in rec.findall('./field'):
        #TODO: most of this code is duplicated above (in _eval_xml)...
        f_name = field.get("name").encode('utf-8')
        f_ref = field.get("ref",'').encode('utf-8')
        f_search = field.get("search",'').encode('utf-8')
        f_model = field.get("model",'').encode('utf-8')
        # default the target model from the field's comodel when omitted
        if not f_model and f_name in model._fields:
            f_model = model._fields[f_name].comodel_name
        f_use = field.get("use",'').encode('utf-8') or 'id'
        f_val = False

        if f_search:
            # @search: resolve the value from a domain on the target model
            q = safe_eval(f_search, self.idref)
            assert f_model, 'Define an attribute model="..." in your .XML file !'
            f_obj = self.pool[f_model]
            # browse the objects searched
            s = f_obj.browse(cr, self.uid, f_obj.search(cr, self.uid, q))
            # column definitions of the "local" object
            _fields = self.pool[rec_model]._fields
            # if the current field is many2many
            if (f_name in _fields) and _fields[f_name].type == 'many2many':
                f_val = [(6, 0, map(lambda x: x[f_use], s))]
            elif len(s):
                # otherwise (we are probably in a many2one field),
                # take the first element of the search
                f_val = s[0][f_use]
        elif f_ref:
            # @ref: resolve another xml id; reference fields get the
            # "model,id" string form instead of the bare id
            if f_name in model._fields and model._fields[f_name].type == 'reference':
                val = self.model_id_get(cr, f_ref)
                f_val = val[0] + ',' + str(val[1])
            else:
                f_val = self.id_get(cr, f_ref)
        else:
            # inline value: evaluate the element body
            f_val = _eval_xml(self,field, self.pool, cr, self.uid, self.idref)
            if f_name in model._fields:
                if model._fields[f_name].type == 'integer':
                    f_val = int(f_val)
        res[f_name] = f_val

    id = self.pool['ir.model.data']._update(cr, self.uid, rec_model, self.module, res, rec_id or False, not self.isnoupdate(data_node), noupdate=self.isnoupdate(data_node), mode=self.mode, context=rec_context )
    if rec_id:
        self.idref[rec_id] = int(id)
    # commit after each record when partial import is enabled, so an
    # interrupted install can resume
    if config.get('import_partial'):
        cr.commit()
    return rec_model, id
def _tag_template(self, cr, el, data_node=None, mode=None):
    """Process a ``<template>`` element by rewriting it into an
    equivalent ir.ui.view ``<record>`` (type "qweb") and delegating to
    :meth:`_tag_record`.
    """
    # This helper transforms a <template> element into a <record> and forwards it
    tpl_id = el.get('id', el.get('t-name', '')).encode('ascii')
    full_tpl_id = tpl_id
    if '.' not in full_tpl_id:
        full_tpl_id = '%s.%s' % (self.module, tpl_id)
    # set the full template name for qweb <module>.<id>
    if not el.get('inherit_id'):
        el.set('t-name', full_tpl_id)
        el.tag = 't'
    else:
        el.tag = 'data'
    el.attrib.pop('id', None)

    record_attrs = {
        'id': tpl_id,
        'model': 'ir.ui.view',
    }
    # these attributes are forwarded unchanged to the generated <record>
    for att in ['forcecreate', 'context']:
        if att in el.keys():
            record_attrs[att] = el.attrib.pop(att)

    Field = builder.E.field
    name = el.get('name', tpl_id)

    record = etree.Element('record', attrib=record_attrs)
    record.append(Field(name, name='name'))
    record.append(Field("qweb", name='type'))
    record.append(Field(el.get('priority', "16"), name='priority'))
    if 'inherit_id' in el.attrib:
        record.append(Field(name='inherit_id', ref=el.get('inherit_id')))
    if el.get('active') in ("True", "False"):
        # only force the active flag on first creation in update mode,
        # so a user's manual (de)activation survives module updates
        view_id = self.id_get(cr, tpl_id, raise_if_not_found=False)
        if mode != "update" or not view_id:
            record.append(Field(name='active', eval=el.get('active')))
    if el.get('customize_show') in ("True", "False"):
        record.append(Field(name='customize_show', eval=el.get('customize_show')))
    groups = el.attrib.pop('groups', None)
    if groups:
        grp_lst = map(lambda x: "ref('%s')" % x, groups.split(','))
        record.append(Field(name="groups_id", eval="[(6, 0, ["+', '.join(grp_lst)+"])]"))
    if el.attrib.pop('page', None) == 'True':
        record.append(Field(name="page", eval="True"))
    if el.get('primary') == 'True':
        # Pseudo clone mode, we'll set the t-name to the full canonical xmlid
        el.append(
            builder.E.xpath(
                builder.E.attribute(full_tpl_id, name='t-name'),
                expr=".",
                position="attributes",
            )
        )
        record.append(Field('primary', name='mode'))
    # inject complete <template> element (after changing node name) into
    # the ``arch`` field
    record.append(Field(el, name="arch", type="xml"))

    return self._tag_record(cr, record, data_node)
def id_get(self, cr, id_str, raise_if_not_found=True):
    """Resolve an xml id to its database id, consulting the local
    ``idref`` cache first and falling back to ir.model.data."""
    try:
        # fast path: id already resolved earlier in this import run
        return self.idref[id_str]
    except KeyError:
        pass
    resolved = self.model_id_get(cr, id_str, raise_if_not_found)
    # model_id_get returns a (model, res_id) pair; keep the res_id part
    if resolved and len(resolved) > 1:
        return resolved[1]
    return resolved
def model_id_get(self, cr, id_str, raise_if_not_found=True):
    """Resolve an xml id to a ``(model, res_id)`` pair via ir.model.data.

    An unqualified id is namespaced with the module currently being
    imported before the lookup.
    """
    qualified = id_str if '.' in id_str else '%s.%s' % (self.module, id_str)
    data_model = self.pool['ir.model.data']
    return data_model.xmlid_to_res_model_res_id(
        cr, self.uid, qualified,
        raise_if_not_found=raise_if_not_found)
def parse(self, de, mode=None):
    """Walk a parsed XML data document and dispatch every known child of
    each ``<data>`` node to its tag handler (see ``self._tags``).

    Raises ParseError (wrapping the original traceback) when a handler
    fails; the cursor is rolled back first so the transaction stays
    usable. Always returns True on success.
    """
    if de.tag != 'openerp':
        raise Exception("Mismatch xml format: root tag must be `openerp`.")

    for n in de.findall('./data'):
        for rec in n:
            # unknown tags (comments, unsupported elements) are ignored
            if rec.tag in self._tags:
                try:
                    self._tags[rec.tag](self.cr, rec, n, mode=mode)
                except Exception, e:
                    self.cr.rollback()
                    # re-raise as ParseError with file/line info, keeping
                    # the original traceback (Python 2 three-arg raise)
                    exc_info = sys.exc_info()
                    raise ParseError, (misc.ustr(e), etree.tostring(rec).rstrip(), rec.getroottree().docinfo.URL, rec.sourceline), exc_info[2]
    return True
def __init__(self, cr, module, idref, mode, report=None, noupdate=False):
    """Bind an XML data importer to cursor `cr` for `module`.

    `idref` is the shared xmlid -> db id cache, `mode` is 'init' or
    'update', and `report` collects <assert> results (a fresh
    assertion_report is created when none is given).
    """
    self.cr = cr
    self.module = module
    self.idref = idref
    self.mode = mode
    self.noupdate = noupdate
    self.pool = openerp.registry(cr.dbname)
    # data files are always processed as the superuser
    self.uid = 1
    self.assertion_report = report if report is not None else assertion_report.assertion_report()
    # dispatch table: XML tag name -> handler method (used by parse())
    self._tags = {
        'record': self._tag_record,
        'delete': self._tag_delete,
        'function': self._tag_function,
        'menuitem': self._tag_menuitem,
        'template': self._tag_template,
        'workflow': self._tag_workflow,
        'report': self._tag_report,

        'ir_set': self._tag_ir_set,
        'act_window': self._tag_act_window,
        'url': self._tag_url,
        'assert': self._tag_assert,
    }
def convert_file(cr, module, filename, idref, mode='update', noupdate=False, kind=None, report=None, pathname=None):
    """Load a module data file, dispatching on its extension
    (.csv / .sql / .yml / .xml; .js is accepted but ignored).

    `pathname` defaults to ``<module>/<filename>``; the file is always
    closed, even when the importer raises.
    """
    if pathname is None:
        pathname = os.path.join(module, filename)
    fp = misc.file_open(pathname)
    ext = os.path.splitext(filename)[1].lower()
    # lazy handlers: only the matching one touches fp
    handlers = {
        '.csv': lambda: convert_csv_import(cr, module, pathname, fp.read(), idref, mode, noupdate),
        '.sql': lambda: convert_sql_import(cr, fp),
        '.yml': lambda: convert_yaml_import(cr, module, fp, kind, idref, mode, noupdate, report),
        '.xml': lambda: convert_xml_import(cr, module, fp, idref, mode, noupdate, report),
        '.js': lambda: None,  # .js files are valid but ignored here.
    }
    try:
        handler = handlers.get(ext)
        if handler is not None:
            handler()
        else:
            _logger.warning("Can't load unknown file type %s.", filename)
    finally:
        fp.close()
def convert_sql_import(cr, fp):
    """Execute every ``;``-separated statement read from file-like `fp`
    on cursor `cr`, after collapsing internal whitespace; empty
    statements are skipped."""
    for statement in fp.read().split(';'):
        normalized = ' '.join(statement.split())
        if normalized:
            cr.execute(normalized)
def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
    noupdate=False):
    '''Import a CSV data file (quote: ", delimiter: ",", encoding: utf-8).

    The target model is derived from the file name (everything before the
    first "-" with the extension and folder path removed). Supports
    resumable installs via the `import_partial` config option, which
    tracks per-file progress in a pickle file.
    '''
    if not idref:
        idref={}
    # "res.partner-demo.csv" -> "res.partner"
    model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
    #remove folder path from model
    head, model = os.path.split(model)

    input = cStringIO.StringIO(csvcontent) #FIXME
    reader = csv.reader(input, quotechar='"', delimiter=',')
    # first CSV row holds the field names
    fields = reader.next()
    fname_partial = ""
    if config.get('import_partial'):
        fname_partial = module + '/'+ fname
        if not os.path.isfile(config.get('import_partial')):
            # first run: create an empty progress file
            pickle.dump({}, file(config.get('import_partial'),'w+'))
        else:
            data = pickle.load(file(config.get('import_partial')))
            if fname_partial in data:
                if not data[fname_partial]:
                    # file already fully imported on a previous run
                    return
                else:
                    # skip the rows already processed before the interruption
                    for i in range(data[fname_partial]):
                        reader.next()

    # outside init mode an 'id' column is mandatory to match existing rows
    if not (mode == 'init' or 'id' in fields):
        _logger.error("Import specification does not contain 'id' and we are in init mode, Cannot continue.")
        return

    uid = 1
    datas = []
    for line in reader:
        # skip blank lines / lines with only empty cells
        if not (line and any(line)):
            continue
        try:
            datas.append(map(misc.ustr, line))
        except:
            _logger.error("Cannot import the line: %s", line)

    registry = openerp.registry(cr.dbname)
    result, rows, warning_msg, dummy = registry[model].import_data(cr, uid, fields, datas,mode, module, noupdate, filename=fname_partial)
    if result < 0:
        # Report failed import and abort module install
        raise Exception(_('Module loading %s failed: file %s could not be processed:\n %s') % (module, fname, warning_msg))
    if config.get('import_partial'):
        # mark the file as completely imported (0 = done)
        data = pickle.load(file(config.get('import_partial')))
        data[fname_partial] = 0
        pickle.dump(data, file(config.get('import_partial'),'wb'))
        cr.commit()
#
# xml import/export
#
def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate=False, report=None):
    """Validate an XML data file against the import_xml.rng schema, then
    feed it through an ``xml_import`` instance. Returns True.

    Raises on schema violation (after logging the RelaxNG error detail).
    """
    doc = etree.parse(xmlfile)
    schema_path = os.path.join(config['root_path'], 'import_xml.rng')
    relaxng = etree.RelaxNG(etree.parse(schema_path))
    try:
        relaxng.assert_(doc)
    except Exception:
        _logger.error('The XML file does not fit the required schema !')
        _logger.error(misc.ustr(relaxng.error_log.last_error))
        raise

    if idref is None:
        idref = {}
    importer = xml_import(cr, module, idref, mode, report=report, noupdate=noupdate)
    importer.parse(doc.getroot(), mode=mode)
    return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
cmbclh/vnpy1.7 | docker/dockerTrader/gateway/sgitGateway/sgitDataType.py | 27 | 257893 | # encoding: UTF-8
# Constant tables for the Sgit (CTP-style) futures trading API, generated
# from SgitFtdcUserApiDataType.h (2015-08-10):
#   defineDict  maps THOST_FTDC_* enum constant names to their one-character
#               wire values;
#   typedefDict maps TThostFtdc*Type field-type names to the primitive used
#               to represent them on the Python side ("string"/"char"/"int").
defineDict = {}
typedefDict = {}

# --- identifier / name string types ---
# exchange trader code
typedefDict["TThostFtdcTraderIDType"] = "string"
# investor code
typedefDict["TThostFtdcInvestorIDType"] = "string"
# broker (futures company) code
typedefDict["TThostFtdcBrokerIDType"] = "string"
# broker abbreviation
typedefDict["TThostFtdcBrokerAbbrType"] = "string"
# broker full name
typedefDict["TThostFtdcBrokerNameType"] = "string"
# instrument code as used on the exchange
typedefDict["TThostFtdcExchangeInstIDType"] = "string"
# order reference
typedefDict["TThostFtdcOrderRefType"] = "string"
# exchange member (participant) code
typedefDict["TThostFtdcParticipantIDType"] = "string"
# user code
typedefDict["TThostFtdcUserIDType"] = "string"
# password
typedefDict["TThostFtdcPasswordType"] = "string"
# trading code (client id at the exchange)
typedefDict["TThostFtdcClientIDType"] = "string"
# instrument code
typedefDict["TThostFtdcInstrumentIDType"] = "string"
# market code
typedefDict["TThostFtdcMarketIDType"] = "string"
# product name
typedefDict["TThostFtdcProductNameType"] = "string"
# exchange code
typedefDict["TThostFtdcExchangeIDType"] = "string"
# exchange full name
typedefDict["TThostFtdcExchangeNameType"] = "string"
# exchange abbreviation
typedefDict["TThostFtdcExchangeAbbrType"] = "string"
# exchange flag
typedefDict["TThostFtdcExchangeFlagType"] = "string"
# MAC address
typedefDict["TThostFtdcMacAddressType"] = "string"
# system identifier
typedefDict["TThostFtdcSystemIDType"] = "string"

# --- TThostFtdcExchangePropertyType: exchange property ---
# normal
defineDict["THOST_FTDC_EXP_Normal"] = '0'
# generate orders from trades
defineDict["THOST_FTDC_EXP_GenOrderByTrade"] = '1'
typedefDict["TThostFtdcExchangePropertyType"] = "char"
# --- simple value types ---
# date
typedefDict["TThostFtdcDateType"] = "string"
# time
typedefDict["TThostFtdcTimeType"] = "string"
# long time
typedefDict["TThostFtdcLongTimeType"] = "string"
# instrument name
typedefDict["TThostFtdcInstrumentNameType"] = "string"
# settlement group code
typedefDict["TThostFtdcSettlementGroupIDType"] = "string"
# exchange-assigned order system id
typedefDict["TThostFtdcOrderSysIDType"] = "string"
# trade (fill) id
typedefDict["TThostFtdcTradeIDType"] = "string"
# DB command type
typedefDict["TThostFtdcCommandTypeType"] = "string"
# IP address
typedefDict["TThostFtdcIPAddressType"] = "string"
# IP port
typedefDict["TThostFtdcIPPortType"] = "int"
# product info
typedefDict["TThostFtdcProductInfoType"] = "string"
# protocol info
typedefDict["TThostFtdcProtocolInfoType"] = "string"
# business unit
typedefDict["TThostFtdcBusinessUnitType"] = "string"
# deposit/withdrawal sequence number
typedefDict["TThostFtdcDepositSeqNoType"] = "string"
# identity document number
typedefDict["TThostFtdcIdentifiedCardNoType"] = "string"

# --- TThostFtdcIdCardTypeType: identity document type ---
# organization code
defineDict["THOST_FTDC_ICT_EID"] = '0'
# Chinese citizen ID card
defineDict["THOST_FTDC_ICT_IDCard"] = '1'
# military officer card
defineDict["THOST_FTDC_ICT_OfficerIDCard"] = '2'
# police officer card
defineDict["THOST_FTDC_ICT_PoliceIDCard"] = '3'
# soldier card
defineDict["THOST_FTDC_ICT_SoldierIDCard"] = '4'
# household register
defineDict["THOST_FTDC_ICT_HouseholdRegister"] = '5'
# passport
defineDict["THOST_FTDC_ICT_Passport"] = '6'
# Taiwan compatriot ID card
defineDict["THOST_FTDC_ICT_TaiwanCompatriotIDCard"] = '7'
# home-return permit
defineDict["THOST_FTDC_ICT_HomeComingCard"] = '8'
# business licence number
defineDict["THOST_FTDC_ICT_LicenseNo"] = '9'
# tax registration number / local tax id
defineDict["THOST_FTDC_ICT_TaxNo"] = 'A'
# HK/Macau resident mainland travel permit
defineDict["THOST_FTDC_ICT_HMMainlandTravelPermit"] = 'B'
# Taiwan resident mainland travel permit
defineDict["THOST_FTDC_ICT_TwMainlandTravelPermit"] = 'C'
# driving licence
defineDict["THOST_FTDC_ICT_DrivingLicense"] = 'D'
# local social insurance id
defineDict["THOST_FTDC_ICT_SocialID"] = 'F'
# local ID card
defineDict["THOST_FTDC_ICT_LocalID"] = 'G'
# business registration certificate
defineDict["THOST_FTDC_ICT_BusinessRegistration"] = 'H'
# HK/Macau permanent resident ID card
defineDict["THOST_FTDC_ICT_HKMCIDCard"] = 'I'
# PBOC account-opening permit
defineDict["THOST_FTDC_ICT_AccountsPermits"] = 'J'
# other document
defineDict["THOST_FTDC_ICT_OtherCard"] = 'x'
typedefDict["TThostFtdcIdCardTypeType"] = "char"
# --- more string types ---
# local order id
typedefDict["TThostFtdcOrderLocalIDType"] = "string"
# user name
typedefDict["TThostFtdcUserNameType"] = "string"
# party name
typedefDict["TThostFtdcPartyNameType"] = "string"
# error message
typedefDict["TThostFtdcErrorMsgType"] = "string"
# field name
typedefDict["TThostFtdcFieldNameType"] = "string"
# field content
typedefDict["TThostFtdcFieldContentType"] = "string"
# system name
typedefDict["TThostFtdcSystemNameType"] = "string"
# message body
typedefDict["TThostFtdcContentType"] = "string"

# --- TThostFtdcInvestorRangeType: investor range ---
# all investors
defineDict["THOST_FTDC_IR_All"] = '1'
# investor group
defineDict["THOST_FTDC_IR_Group"] = '2'
# single investor
defineDict["THOST_FTDC_IR_Single"] = '3'
typedefDict["TThostFtdcInvestorRangeType"] = "char"

# --- TThostFtdcDepartmentRangeType: department range ---
# all
defineDict["THOST_FTDC_DR_All"] = '1'
# organization unit
defineDict["THOST_FTDC_DR_Group"] = '2'
# single investor
defineDict["THOST_FTDC_DR_Single"] = '3'
typedefDict["TThostFtdcDepartmentRangeType"] = "char"

# --- TThostFtdcDataSyncStatusType: data synchronization status ---
# not synchronized
defineDict["THOST_FTDC_DS_Asynchronous"] = '1'
# synchronizing
defineDict["THOST_FTDC_DS_Synchronizing"] = '2'
# synchronized
defineDict["THOST_FTDC_DS_Synchronized"] = '3'
typedefDict["TThostFtdcDataSyncStatusType"] = "char"

# --- TThostFtdcBrokerDataSyncStatusType: broker data sync status ---
# synchronized
defineDict["THOST_FTDC_BDS_Synchronized"] = '1'
# synchronizing
defineDict["THOST_FTDC_BDS_Synchronizing"] = '2'
typedefDict["TThostFtdcBrokerDataSyncStatusType"] = "char"

# --- TThostFtdcExchangeConnectStatusType: exchange connection status ---
# no connection at all
defineDict["THOST_FTDC_ECS_NoConnection"] = '1'
# instrument query request sent
defineDict["THOST_FTDC_ECS_QryInstrumentSent"] = '2'
# information received
defineDict["THOST_FTDC_ECS_GotInformation"] = '9'
typedefDict["TThostFtdcExchangeConnectStatusType"] = "char"

# --- TThostFtdcTraderConnectStatusType: exchange trader connection status ---
# not connected
defineDict["THOST_FTDC_TCS_NotConnected"] = '1'
# connected
defineDict["THOST_FTDC_TCS_Connected"] = '2'
# instrument query request sent
defineDict["THOST_FTDC_TCS_QryInstrumentSent"] = '3'
# subscribed to the private flow
defineDict["THOST_FTDC_TCS_SubPrivateFlow"] = '4'
typedefDict["TThostFtdcTraderConnectStatusType"] = "char"

# --- TThostFtdcFunctionCodeType: function code ---
# data asynchronization
defineDict["THOST_FTDC_FC_DataAsync"] = '1'
# force user logout
defineDict["THOST_FTDC_FC_ForceUserLogout"] = '2'
# change admin user password
defineDict["THOST_FTDC_FC_UserPasswordUpdate"] = '3'
# change broker password
defineDict["THOST_FTDC_FC_BrokerPasswordUpdate"] = '4'
# change investor password
defineDict["THOST_FTDC_FC_InvestorPasswordUpdate"] = '5'
# order insert
defineDict["THOST_FTDC_FC_OrderInsert"] = '6'
# order action (cancel/modify)
defineDict["THOST_FTDC_FC_OrderAction"] = '7'
# synchronize system data
defineDict["THOST_FTDC_FC_SyncSystemData"] = '8'
# synchronize broker data
defineDict["THOST_FTDC_FC_SyncBrokerData"] = '9'
# batch-synchronize broker data
defineDict["THOST_FTDC_FC_BachSyncBrokerData"] = 'A'
# super query
defineDict["THOST_FTDC_FC_SuperQuery"] = 'B'
# parked order insert
defineDict["THOST_FTDC_FC_ParkedOrderInsert"] = 'C'
# parked order action
defineDict["THOST_FTDC_FC_ParkedOrderAction"] = 'D'
# synchronize one-time-password token
defineDict["THOST_FTDC_FC_SyncOTP"] = 'E'
# delete unknown order
defineDict["THOST_FTDC_FC_DeleteOrder"] = 'F'
typedefDict["TThostFtdcFunctionCodeType"] = "char"
# --- TThostFtdcBrokerFunctionCodeType: broker function code ---
# force user logout
defineDict["THOST_FTDC_BFC_ForceUserLogout"] = '1'
# change user password
defineDict["THOST_FTDC_BFC_UserPasswordUpdate"] = '2'
# synchronize broker data
defineDict["THOST_FTDC_BFC_SyncBrokerData"] = '3'
# batch-synchronize broker data
defineDict["THOST_FTDC_BFC_BachSyncBrokerData"] = '4'
# order insert
defineDict["THOST_FTDC_BFC_OrderInsert"] = '5'
# order action (cancel/modify)
defineDict["THOST_FTDC_BFC_OrderAction"] = '6'
# query everything
defineDict["THOST_FTDC_BFC_AllQuery"] = '7'
# system functions: login / logout / password change etc.
defineDict["THOST_FTDC_BFC_log"] = 'a'
# basic queries: reference data such as instruments and exchanges
defineDict["THOST_FTDC_BFC_BaseQry"] = 'b'
# trading queries: trades, orders
defineDict["THOST_FTDC_BFC_TradeQry"] = 'c'
# trading functions: order insert, order cancel
defineDict["THOST_FTDC_BFC_Trade"] = 'd'
# bank-futures funds transfer
defineDict["THOST_FTDC_BFC_Virement"] = 'e'
# risk monitoring
defineDict["THOST_FTDC_BFC_Risk"] = 'f'
# session query/management: list sessions, kick users etc.
defineDict["THOST_FTDC_BFC_Session"] = 'g'
# risk notice control
defineDict["THOST_FTDC_BFC_RiskNoticeCtl"] = 'h'
# risk notice sending
defineDict["THOST_FTDC_BFC_RiskNotice"] = 'i'
# view broker deposit permission
defineDict["THOST_FTDC_BFC_BrokerDeposit"] = 'j'
# fund query
defineDict["THOST_FTDC_BFC_QueryFund"] = 'k'
# order query
defineDict["THOST_FTDC_BFC_QueryOrder"] = 'l'
# trade query
defineDict["THOST_FTDC_BFC_QueryTrade"] = 'm'
# position query
defineDict["THOST_FTDC_BFC_QueryPosition"] = 'n'
# market data query
defineDict["THOST_FTDC_BFC_QueryMarketData"] = 'o'
# user event query
defineDict["THOST_FTDC_BFC_QueryUserEvent"] = 'p'
# risk notification query
defineDict["THOST_FTDC_BFC_QueryRiskNotify"] = 'q'
# deposit/withdrawal query
defineDict["THOST_FTDC_BFC_QueryFundChange"] = 'r'
# investor information query
defineDict["THOST_FTDC_BFC_QueryInvestor"] = 's'
# trading code query
defineDict["THOST_FTDC_BFC_QueryTradingCode"] = 't'
# forced liquidation
defineDict["THOST_FTDC_BFC_ForceClose"] = 'u'
# stress test
defineDict["THOST_FTDC_BFC_PressTest"] = 'v'
# equity back-calculation
defineDict["THOST_FTDC_BFC_RemainCalc"] = 'w'
# net position margin indicator
defineDict["THOST_FTDC_BFC_NetPositionInd"] = 'x'
# risk budget / prediction
defineDict["THOST_FTDC_BFC_RiskPredict"] = 'y'
# data export
defineDict["THOST_FTDC_BFC_DataExport"] = 'z'
# risk-control target setup
defineDict["THOST_FTDC_BFC_RiskTargetSetup"] = 'A'
# market data alert
defineDict["THOST_FTDC_BFC_MarketDataWarn"] = 'B'
# business notice query
defineDict["THOST_FTDC_BFC_QryBizNotice"] = 'C'
# business notice template configuration
defineDict["THOST_FTDC_BFC_CfgBizNotice"] = 'D'
# synchronize one-time-password token
defineDict["THOST_FTDC_BFC_SyncOTP"] = 'E'
# send business notice
defineDict["THOST_FTDC_BFC_SendBizNotice"] = 'F'
# risk level standard configuration
defineDict["THOST_FTDC_BFC_CfgRiskLevelStd"] = 'G'
# trading terminal emergency command
defineDict["THOST_FTDC_BFC_TbCommand"] = 'H'
# delete unknown order
defineDict["THOST_FTDC_BFC_DeleteOrder"] = 'J'
# parked order insert
defineDict["THOST_FTDC_BFC_ParkedOrderInsert"] = 'K'
# parked order action
defineDict["THOST_FTDC_BFC_ParkedOrderAction"] = 'L'
typedefDict["TThostFtdcBrokerFunctionCodeType"] = "char"
# --- TThostFtdcOrderActionStatusType: order action status ---
# submitted
defineDict["THOST_FTDC_OAS_Submitted"] = 'a'
# accepted
defineDict["THOST_FTDC_OAS_Accepted"] = 'b'
# rejected
defineDict["THOST_FTDC_OAS_Rejected"] = 'c'
typedefDict["TThostFtdcOrderActionStatusType"] = "char"

# --- TThostFtdcOrderStatusType: order status ---
# fully traded
defineDict["THOST_FTDC_OST_AllTraded"] = '0'
# partly traded, remainder still queueing
defineDict["THOST_FTDC_OST_PartTradedQueueing"] = '1'
# partly traded, remainder no longer queueing
defineDict["THOST_FTDC_OST_PartTradedNotQueueing"] = '2'
# no trade yet, still queueing
defineDict["THOST_FTDC_OST_NoTradeQueueing"] = '3'
# no trade, not queueing
defineDict["THOST_FTDC_OST_NoTradeNotQueueing"] = '4'
# cancelled
defineDict["THOST_FTDC_OST_Canceled"] = '5'
# unknown
defineDict["THOST_FTDC_OST_Unknown"] = 'a'
# (conditional order) not yet triggered
defineDict["THOST_FTDC_OST_NotTouched"] = 'b'
# (conditional order) triggered
defineDict["THOST_FTDC_OST_Touched"] = 'c'
typedefDict["TThostFtdcOrderStatusType"] = "char"

# --- TThostFtdcOrderSubmitStatusType: order submit status ---
# insert submitted
defineDict["THOST_FTDC_OSS_InsertSubmitted"] = '0'
# cancel submitted
defineDict["THOST_FTDC_OSS_CancelSubmitted"] = '1'
# modification submitted
defineDict["THOST_FTDC_OSS_ModifySubmitted"] = '2'
# accepted
defineDict["THOST_FTDC_OSS_Accepted"] = '3'
# insert rejected
defineDict["THOST_FTDC_OSS_InsertRejected"] = '4'
# cancel rejected
defineDict["THOST_FTDC_OSS_CancelRejected"] = '5'
# modification rejected
defineDict["THOST_FTDC_OSS_ModifyRejected"] = '6'
typedefDict["TThostFtdcOrderSubmitStatusType"] = "char"

# --- TThostFtdcPositionDateType: position date ---
# today's position
defineDict["THOST_FTDC_PSD_Today"] = '1'
# historical position
defineDict["THOST_FTDC_PSD_History"] = '2'
typedefDict["TThostFtdcPositionDateType"] = "char"

# --- TThostFtdcPositionDateTypeType: position date handling ---
# use historical positions
defineDict["THOST_FTDC_PDT_UseHistory"] = '1'
# do not use historical positions
defineDict["THOST_FTDC_PDT_NoUseHistory"] = '2'
typedefDict["TThostFtdcPositionDateTypeType"] = "char"

# --- TThostFtdcTradingRoleType: trading role ---
# brokerage (agency)
defineDict["THOST_FTDC_ER_Broker"] = '1'
# proprietary trading
defineDict["THOST_FTDC_ER_Host"] = '2'
# market maker
defineDict["THOST_FTDC_ER_Maker"] = '3'
typedefDict["TThostFtdcTradingRoleType"] = "char"

# --- TThostFtdcProductClassType: product class ---
# futures
defineDict["THOST_FTDC_PC_Futures"] = '1'
# options on futures
defineDict["THOST_FTDC_PC_Options"] = '2'
# combination
defineDict["THOST_FTDC_PC_Combination"] = '3'
# spot
defineDict["THOST_FTDC_PC_Spot"] = '4'
# exchange for physical (EFP)
defineDict["THOST_FTDC_PC_EFP"] = '5'
# spot option
defineDict["THOST_FTDC_PC_SpotOption"] = '6'
# deferred
defineDict["THOST_FTDC_PC_Defer"] = '7'
typedefDict["TThostFtdcProductClassType"] = "char"

# --- TThostFtdcInstLifePhaseType: instrument life-cycle phase ---
# not yet listed
defineDict["THOST_FTDC_IP_NotStart"] = '0'
# listed / trading
defineDict["THOST_FTDC_IP_Started"] = '1'
# suspended
defineDict["THOST_FTDC_IP_Pause"] = '2'
# expired
defineDict["THOST_FTDC_IP_Expired"] = '3'
typedefDict["TThostFtdcInstLifePhaseType"] = "char"

# --- TThostFtdcDirectionType: buy/sell direction ---
# buy
defineDict["THOST_FTDC_D_Buy"] = '0'
# sell
defineDict["THOST_FTDC_D_Sell"] = '1'
typedefDict["TThostFtdcDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPositionTypeType是一个持仓类型类型
#//////////////////////////////////////////////////////////////////////
#净持仓
defineDict["THOST_FTDC_PT_Net"] = '1'
#综合持仓
defineDict["THOST_FTDC_PT_Gross"] = '2'
typedefDict["TThostFtdcPositionTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPosiDirectionType是一个持仓多空方向类型
#//////////////////////////////////////////////////////////////////////
#净
defineDict["THOST_FTDC_PD_Net"] = '1'
#多头
defineDict["THOST_FTDC_PD_Long"] = '2'
#空头
defineDict["THOST_FTDC_PD_Short"] = '3'
typedefDict["TThostFtdcPosiDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSysSettlementStatusType是一个系统结算状态类型
#//////////////////////////////////////////////////////////////////////
#不活跃
defineDict["THOST_FTDC_SS_NonActive"] = '1'
#启动
defineDict["THOST_FTDC_SS_Startup"] = '2'
#操作
defineDict["THOST_FTDC_SS_Operating"] = '3'
#结算
defineDict["THOST_FTDC_SS_Settlement"] = '4'
#结算完成
defineDict["THOST_FTDC_SS_SettlementFinished"] = '5'
typedefDict["TThostFtdcSysSettlementStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcRatioAttrType是一个费率属性类型
#//////////////////////////////////////////////////////////////////////
#交易费率
defineDict["THOST_FTDC_RA_Trade"] = '0'
#结算费率
defineDict["THOST_FTDC_RA_Settlement"] = '1'
typedefDict["TThostFtdcRatioAttrType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcHedgeFlagType是一个投机套保标志类型
#//////////////////////////////////////////////////////////////////////
#投机
defineDict["THOST_FTDC_HF_Speculation"] = '1'
#套利
defineDict["THOST_FTDC_HF_Arbitrage"] = '2'
#套保
defineDict["THOST_FTDC_HF_Hedge"] = '3'
defineDict["THOST_FTDC_HF_DEFER"] = '4'
defineDict["THOST_FTDC_HF_MID"] = '5'
typedefDict["TThostFtdcHedgeFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBillHedgeFlagType是一个投机套保标志类型
#//////////////////////////////////////////////////////////////////////
#投机
defineDict["THOST_FTDC_BHF_Speculation"] = '1'
#套利
defineDict["THOST_FTDC_BHF_Arbitrage"] = '2'
#套保
defineDict["THOST_FTDC_BHF_Hedge"] = '3'
typedefDict["TThostFtdcBillHedgeFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcClientIDTypeType是一个交易编码类型类型
#//////////////////////////////////////////////////////////////////////
#投机
defineDict["THOST_FTDC_CIDT_Speculation"] = '1'
#套利
defineDict["THOST_FTDC_CIDT_Arbitrage"] = '2'
#套保
defineDict["THOST_FTDC_CIDT_Hedge"] = '3'
typedefDict["TThostFtdcClientIDTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrderPriceTypeType是一个报单价格条件类型
#//////////////////////////////////////////////////////////////////////
#任意价
defineDict["THOST_FTDC_OPT_AnyPrice"] = '1'
#限价
defineDict["THOST_FTDC_OPT_LimitPrice"] = '2'
#最优价
defineDict["THOST_FTDC_OPT_BestPrice"] = '3'
#最新价
defineDict["THOST_FTDC_OPT_LastPrice"] = '4'
#最新价浮动上浮1个ticks
defineDict["THOST_FTDC_OPT_LastPricePlusOneTicks"] = '5'
#最新价浮动上浮2个ticks
defineDict["THOST_FTDC_OPT_LastPricePlusTwoTicks"] = '6'
#最新价浮动上浮3个ticks
defineDict["THOST_FTDC_OPT_LastPricePlusThreeTicks"] = '7'
#卖一价
defineDict["THOST_FTDC_OPT_AskPrice1"] = '8'
#卖一价浮动上浮1个ticks
defineDict["THOST_FTDC_OPT_AskPrice1PlusOneTicks"] = '9'
#卖一价浮动上浮2个ticks
defineDict["THOST_FTDC_OPT_AskPrice1PlusTwoTicks"] = 'A'
#卖一价浮动上浮3个ticks
defineDict["THOST_FTDC_OPT_AskPrice1PlusThreeTicks"] = 'B'
#买一价
defineDict["THOST_FTDC_OPT_BidPrice1"] = 'C'
#买一价浮动上浮1个ticks
defineDict["THOST_FTDC_OPT_BidPrice1PlusOneTicks"] = 'D'
#买一价浮动上浮2个ticks
defineDict["THOST_FTDC_OPT_BidPrice1PlusTwoTicks"] = 'E'
#买一价浮动上浮3个ticks
defineDict["THOST_FTDC_OPT_BidPrice1PlusThreeTicks"] = 'F'
#五档价
defineDict["THOST_FTDC_OPT_FiveLevelPrice"] = 'G'
typedefDict["TThostFtdcOrderPriceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOffsetFlagType是一个开平标志类型
#//////////////////////////////////////////////////////////////////////
#开仓
defineDict["THOST_FTDC_OF_Open"] = '0'
#平仓
defineDict["THOST_FTDC_OF_Close"] = '1'
#强平
defineDict["THOST_FTDC_OF_ForceClose"] = '2'
#平今
defineDict["THOST_FTDC_OF_CloseToday"] = '3'
#平昨
defineDict["THOST_FTDC_OF_CloseYesterday"] = '4'
#强减
defineDict["THOST_FTDC_OF_ForceOff"] = '5'
#本地强平
defineDict["THOST_FTDC_OF_LocalForceClose"] = '6'
typedefDict["TThostFtdcOffsetFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcForceCloseReasonType是一个强平原因类型
#//////////////////////////////////////////////////////////////////////
#非强平
defineDict["THOST_FTDC_FCC_NotForceClose"] = '0'
#资金不足
defineDict["THOST_FTDC_FCC_LackDeposit"] = '1'
#客户超仓
defineDict["THOST_FTDC_FCC_ClientOverPositionLimit"] = '2'
#会员超仓
defineDict["THOST_FTDC_FCC_MemberOverPositionLimit"] = '3'
#持仓非整数倍
defineDict["THOST_FTDC_FCC_NotMultiple"] = '4'
#违规
defineDict["THOST_FTDC_FCC_Violation"] = '5'
#其它
defineDict["THOST_FTDC_FCC_Other"] = '6'
#自然人临近交割
defineDict["THOST_FTDC_FCC_PersonDeliv"] = '7'
typedefDict["TThostFtdcForceCloseReasonType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrderTypeType是一个报单类型类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_ORDT_Normal"] = '0'
#报价衍生
defineDict["THOST_FTDC_ORDT_DeriveFromQuote"] = '1'
#组合衍生
defineDict["THOST_FTDC_ORDT_DeriveFromCombination"] = '2'
#组合报单
defineDict["THOST_FTDC_ORDT_Combination"] = '3'
#条件单
defineDict["THOST_FTDC_ORDT_ConditionalOrder"] = '4'
#互换单
defineDict["THOST_FTDC_ORDT_Swap"] = '5'
typedefDict["TThostFtdcOrderTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTimeConditionType是一个有效期类型类型
#//////////////////////////////////////////////////////////////////////
#立即完成,否则撤销
defineDict["THOST_FTDC_TC_IOC"] = '1'
#本节有效
defineDict["THOST_FTDC_TC_GFS"] = '2'
#当日有效
defineDict["THOST_FTDC_TC_GFD"] = '3'
#指定日期前有效
defineDict["THOST_FTDC_TC_GTD"] = '4'
#撤销前有效
defineDict["THOST_FTDC_TC_GTC"] = '5'
#集合竞价有效
defineDict["THOST_FTDC_TC_GFA"] = '6'
typedefDict["TThostFtdcTimeConditionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVolumeConditionType是一个成交量类型类型
#//////////////////////////////////////////////////////////////////////
#任何数量
defineDict["THOST_FTDC_VC_AV"] = '1'
#最小数量
defineDict["THOST_FTDC_VC_MV"] = '2'
#全部数量
defineDict["THOST_FTDC_VC_CV"] = '3'
typedefDict["TThostFtdcVolumeConditionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcContingentConditionType是一个触发条件类型
#//////////////////////////////////////////////////////////////////////
#立即
defineDict["THOST_FTDC_CC_Immediately"] = '1'
#止损
defineDict["THOST_FTDC_CC_Touch"] = '2'
#止赢
defineDict["THOST_FTDC_CC_TouchProfit"] = '3'
#预埋单
defineDict["THOST_FTDC_CC_ParkedOrder"] = '4'
#最新价大于条件价
defineDict["THOST_FTDC_CC_LastPriceGreaterThanStopPrice"] = '5'
#最新价大于等于条件价
defineDict["THOST_FTDC_CC_LastPriceGreaterEqualStopPrice"] = '6'
#最新价小于条件价
defineDict["THOST_FTDC_CC_LastPriceLesserThanStopPrice"] = '7'
#最新价小于等于条件价
defineDict["THOST_FTDC_CC_LastPriceLesserEqualStopPrice"] = '8'
#卖一价大于条件价
defineDict["THOST_FTDC_CC_AskPriceGreaterThanStopPrice"] = '9'
#卖一价大于等于条件价
defineDict["THOST_FTDC_CC_AskPriceGreaterEqualStopPrice"] = 'A'
#卖一价小于条件价
defineDict["THOST_FTDC_CC_AskPriceLesserThanStopPrice"] = 'B'
#卖一价小于等于条件价
defineDict["THOST_FTDC_CC_AskPriceLesserEqualStopPrice"] = 'C'
#买一价大于条件价
defineDict["THOST_FTDC_CC_BidPriceGreaterThanStopPrice"] = 'D'
#买一价大于等于条件价
defineDict["THOST_FTDC_CC_BidPriceGreaterEqualStopPrice"] = 'E'
#买一价小于条件价
defineDict["THOST_FTDC_CC_BidPriceLesserThanStopPrice"] = 'F'
#买一价小于等于条件价
defineDict["THOST_FTDC_CC_BidPriceLesserEqualStopPrice"] = 'H'
typedefDict["TThostFtdcContingentConditionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcActionFlagType是一个操作标志类型
#//////////////////////////////////////////////////////////////////////
#删除
defineDict["THOST_FTDC_AF_Delete"] = '0'
#修改
defineDict["THOST_FTDC_AF_Modify"] = '3'
typedefDict["TThostFtdcActionFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradingRightType是一个交易权限类型
#//////////////////////////////////////////////////////////////////////
#可以交易
defineDict["THOST_FTDC_TR_Allow"] = '0'
#只能平仓
defineDict["THOST_FTDC_TR_CloseOnly"] = '1'
#不能交易
defineDict["THOST_FTDC_TR_Forbidden"] = '2'
typedefDict["TThostFtdcTradingRightType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrderSourceType是一个报单来源类型
#//////////////////////////////////////////////////////////////////////
#来自参与者
defineDict["THOST_FTDC_OSRC_Participant"] = '0'
#来自管理员
defineDict["THOST_FTDC_OSRC_Administrator"] = '1'
typedefDict["TThostFtdcOrderSourceType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradeTypeType是一个成交类型类型
#//////////////////////////////////////////////////////////////////////
#组合持仓拆分为单一持仓,初始化不应包含该类型的持仓
defineDict["THOST_FTDC_TRDT_SplitCombination"] = '#'
#普通成交
defineDict["THOST_FTDC_TRDT_Common"] = '0'
#期权执行
defineDict["THOST_FTDC_TRDT_OptionsExecution"] = '1'
#OTC成交
defineDict["THOST_FTDC_TRDT_OTC"] = '2'
#期转现衍生成交
defineDict["THOST_FTDC_TRDT_EFPDerived"] = '3'
#组合衍生成交
defineDict["THOST_FTDC_TRDT_CombinationDerived"] = '4'
typedefDict["TThostFtdcTradeTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPriceSourceType是一个成交价来源类型
#//////////////////////////////////////////////////////////////////////
#前成交价
defineDict["THOST_FTDC_PSRC_LastPrice"] = '0'
#买委托价
defineDict["THOST_FTDC_PSRC_Buy"] = '1'
#卖委托价
defineDict["THOST_FTDC_PSRC_Sell"] = '2'
typedefDict["TThostFtdcPriceSourceType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInstrumentStatusType是一个合约交易状态类型
#//////////////////////////////////////////////////////////////////////
#开盘前
defineDict["THOST_FTDC_IS_BeforeTrading"] = '0'
#非交易
defineDict["THOST_FTDC_IS_NoTrading"] = '1'
#连续交易
defineDict["THOST_FTDC_IS_Continous"] = '2'
#集合竞价报单
defineDict["THOST_FTDC_IS_AuctionOrdering"] = '3'
#集合竞价价格平衡
defineDict["THOST_FTDC_IS_AuctionBalance"] = '4'
#集合竞价撮合
defineDict["THOST_FTDC_IS_AuctionMatch"] = '5'
#收盘
defineDict["THOST_FTDC_IS_Closed"] = '6'
typedefDict["TThostFtdcInstrumentStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInstStatusEnterReasonType是一个品种进入交易状态原因类型
#//////////////////////////////////////////////////////////////////////
#自动切换
defineDict["THOST_FTDC_IER_Automatic"] = '1'
#手动切换
defineDict["THOST_FTDC_IER_Manual"] = '2'
#熔断
defineDict["THOST_FTDC_IER_Fuse"] = '3'
typedefDict["TThostFtdcInstStatusEnterReasonType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrderActionRefType是一个报单操作引用类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOrderActionRefType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcInstallCountType是一个安装数量类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInstallCountType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcInstallIDType是一个安装编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInstallIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcErrorIDType是一个错误代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcErrorIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettlementIDType是一个结算编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSettlementIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcVolumeType是一个数量类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcVolumeType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcFrontIDType是一个前置编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFrontIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcSessionIDType是一个会话编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSessionIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcSequenceNoType是一个序号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSequenceNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcCommandNoType是一个DB命令序号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommandNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcMillisecType是一个时间(毫秒)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMillisecType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcVolumeMultipleType是一个合约数量乘数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcVolumeMultipleType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradingSegmentSNType是一个交易阶段编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradingSegmentSNType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcRequestIDType是一个请求编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRequestIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcYearType是一个年份类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcYearType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcMonthType是一个月份类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMonthType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcBoolType是一个布尔型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBoolType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcPriceType是一个价格类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPriceType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombOffsetFlagType是一个组合开平标志类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombOffsetFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombHedgeFlagType是一个组合投机套保标志类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombHedgeFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcRatioType是一个比率类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRatioType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcMoneyType是一个资金类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMoneyType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcLargeVolumeType是一个大额数量类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLargeVolumeType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcSequenceSeriesType是一个序列系列号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSequenceSeriesType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcCommPhaseNoType是一个通讯时段编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommPhaseNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcSequenceLabelType是一个序列编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSequenceLabelType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUnderlyingMultipleType是一个基础商品乘数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUnderlyingMultipleType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcPriorityType是一个优先级类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPriorityType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcContractCodeType是一个合同编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcContractCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCityType是一个市类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCityType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcIsStockType是一个是否股民类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcIsStockType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcChannelType是一个渠道类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcChannelType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAddressType是一个通讯地址类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAddressType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcZipCodeType是一个邮政编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcZipCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcTelephoneType是一个联系电话类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTelephoneType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFaxType是一个传真类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFaxType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcMobileType是一个手机类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMobileType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcEMailType是一个电子邮件类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEMailType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcMemoType是一个备注类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCompanyCodeType是一个企业代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCompanyCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcWebsiteType是一个网站地址类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcWebsiteType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcTaxNoType是一个税务登记号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTaxNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBatchStatusType是一个处理状态类型
#//////////////////////////////////////////////////////////////////////
#未上传
defineDict["THOST_FTDC_BS_NoUpload"] = '1'
#已上传
defineDict["THOST_FTDC_BS_Uploaded"] = '2'
#审核失败
defineDict["THOST_FTDC_BS_Failed"] = '3'
typedefDict["TThostFtdcBatchStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPropertyIDType是一个属性代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPropertyIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPropertyNameType是一个属性名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPropertyNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcLicenseNoType是一个营业执照号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLicenseNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAgentIDType是一个经纪人代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAgentIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAgentNameType是一个经纪人名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAgentNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAgentGroupIDType是一个经纪人组代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAgentGroupIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAgentGroupNameType是一个经纪人组名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAgentGroupNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcReturnStyleType是一个按品种返还方式类型
#//////////////////////////////////////////////////////////////////////
#按所有品种
defineDict["THOST_FTDC_RS_All"] = '1'
#按品种
defineDict["THOST_FTDC_RS_ByProduct"] = '2'
typedefDict["TThostFtdcReturnStyleType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcReturnPatternType是一个返还模式类型
#//////////////////////////////////////////////////////////////////////
#按成交手数
defineDict["THOST_FTDC_RP_ByVolume"] = '1'
#按留存手续费
defineDict["THOST_FTDC_RP_ByFeeOnHand"] = '2'
typedefDict["TThostFtdcReturnPatternType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcReturnLevelType是一个返还级别类型
#//////////////////////////////////////////////////////////////////////
#级别1
defineDict["THOST_FTDC_RL_Level1"] = '1'
#级别2
defineDict["THOST_FTDC_RL_Level2"] = '2'
#级别3
defineDict["THOST_FTDC_RL_Level3"] = '3'
#级别4
defineDict["THOST_FTDC_RL_Level4"] = '4'
#级别5
defineDict["THOST_FTDC_RL_Level5"] = '5'
#级别6
defineDict["THOST_FTDC_RL_Level6"] = '6'
#级别7
defineDict["THOST_FTDC_RL_Level7"] = '7'
#级别8
defineDict["THOST_FTDC_RL_Level8"] = '8'
#级别9
defineDict["THOST_FTDC_RL_Level9"] = '9'
typedefDict["TThostFtdcReturnLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcReturnStandardType是一个返还标准类型
#//////////////////////////////////////////////////////////////////////
#分阶段返还
defineDict["THOST_FTDC_RSD_ByPeriod"] = '1'
#按某一标准
defineDict["THOST_FTDC_RSD_ByStandard"] = '2'
typedefDict["TThostFtdcReturnStandardType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcMortgageTypeType是一个质押类型类型
#//////////////////////////////////////////////////////////////////////
#质出
defineDict["THOST_FTDC_MT_Out"] = '0'
#质入
defineDict["THOST_FTDC_MT_In"] = '1'
typedefDict["TThostFtdcMortgageTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvestorSettlementParamIDType是一个投资者结算参数代码类型
#//////////////////////////////////////////////////////////////////////
#基础保证金
defineDict["THOST_FTDC_ISPI_BaseMargin"] = '1'
#最低权益标准
defineDict["THOST_FTDC_ISPI_LowestInterest"] = '2'
#质押比例
defineDict["THOST_FTDC_ISPI_MortgageRatio"] = '4'
#保证金算法
defineDict["THOST_FTDC_ISPI_MarginWay"] = '5'
#ctp :结算单结存是否包含质押
#sgit:结算单(盯市)权益等于结存
defineDict["THOST_FTDC_ISPI_BillDeposit"] = '9'
typedefDict["TThostFtdcInvestorSettlementParamIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcExchangeSettlementParamIDType是一个交易所结算参数代码类型
#//////////////////////////////////////////////////////////////////////
#质押比例
defineDict["THOST_FTDC_ESPI_MortgageRatio"] = '1'
#分项资金导入项
defineDict["THOST_FTDC_ESPI_OtherFundItem"] = '2'
#分项资金入交易所出入金
defineDict["THOST_FTDC_ESPI_OtherFundImport"] = '3'
#上期所交割手续费收取方式
defineDict["THOST_FTDC_ESPI_SHFEDelivFee"] = '4'
#大商所交割手续费收取方式
defineDict["THOST_FTDC_ESPI_DCEDelivFee"] = '5'
#中金所开户最低可用金额
defineDict["THOST_FTDC_ESPI_CFFEXMinPrepa"] = '6'
#郑商所结算方式
defineDict["THOST_FTDC_ESPI_CZCESettlementType"] = '7'
#交易所交割手续费收取方式
defineDict["THOST_FTDC_ESPI_ExchDelivFeeMode"] = '9'
#投资者交割手续费收取方式
defineDict["THOST_FTDC_ESPI_DelivFeeMode"] = '0'
#郑商所组合持仓保证金收取方式
defineDict["THOST_FTDC_ESPI_CZCEComMarginType"] = 'A'
#大商所套利保证金是否优惠
defineDict["THOST_FTDC_ESPI_DceComMarginType"] = 'B'
#虚值期权保证金优惠比率
defineDict["THOST_FTDC_ESPI_OptOutDisCountRate"] = 'a'
#最低保障系数
defineDict["THOST_FTDC_ESPI_OptMiniGuarantee"] = 'b'
typedefDict["TThostFtdcExchangeSettlementParamIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSystemParamIDType是一个系统参数代码类型
#//////////////////////////////////////////////////////////////////////
#投资者代码最小长度
defineDict["THOST_FTDC_SPI_InvestorIDMinLength"] = '1'
#投资者帐号代码最小长度
defineDict["THOST_FTDC_SPI_AccountIDMinLength"] = '2'
#投资者开户默认登录权限
defineDict["THOST_FTDC_SPI_UserRightLogon"] = '3'
#投资者交易结算单成交汇总方式
defineDict["THOST_FTDC_SPI_SettlementBillTrade"] = '4'
#统一开户更新交易编码方式
defineDict["THOST_FTDC_SPI_TradingCode"] = '5'
#结算是否判断存在未复核的出入金和分项资金
defineDict["THOST_FTDC_SPI_CheckFund"] = '6'
#是否启用手续费模板数据权限
defineDict["THOST_FTDC_SPI_CommModelRight"] = '7'
#是否启用保证金率模板数据权限
defineDict["THOST_FTDC_SPI_MarginModelRight"] = '9'
#是否规范用户才能激活
defineDict["THOST_FTDC_SPI_IsStandardActive"] = '8'
#上传的交易所结算文件路径
defineDict["THOST_FTDC_SPI_UploadSettlementFile"] = 'U'
#上报保证金监控中心文件路径
defineDict["THOST_FTDC_SPI_DownloadCSRCFile"] = 'D'
#生成的结算单文件路径
defineDict["THOST_FTDC_SPI_SettlementBillFile"] = 'S'
#证监会文件标识
defineDict["THOST_FTDC_SPI_CSRCOthersFile"] = 'C'
#投资者照片路径
defineDict["THOST_FTDC_SPI_InvestorPhoto"] = 'P'
#全结经纪公司上传文件路径
defineDict["THOST_FTDC_SPI_CSRCData"] = 'R'
#开户密码录入方式
defineDict["THOST_FTDC_SPI_InvestorPwdModel"] = 'I'
#投资者中金所结算文件下载路径
defineDict["THOST_FTDC_SPI_CFFEXInvestorSettleFile"] = 'F'
#投资者代码编码方式
defineDict["THOST_FTDC_SPI_InvestorIDType"] = 'a'
#休眠户最高权益
defineDict["THOST_FTDC_SPI_FreezeMaxReMain"] = 'r'
#手续费相关操作实时上场开关
defineDict["THOST_FTDC_SPI_IsSync"] = 'A'
#解除开仓权限限制
defineDict["THOST_FTDC_SPI_RelieveOpenLimit"] = 'O'
#是否规范用户才能休眠
defineDict["THOST_FTDC_SPI_IsStandardFreeze"] = 'X'
#郑商所是否开放所有品种套保交易
defineDict["THOST_FTDC_SPI_CZCENormalProductHedge"] = 'B'
typedefDict["TThostFtdcSystemParamIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradeParamIDType是一个交易系统参数代码类型
#//////////////////////////////////////////////////////////////////////
#系统加密算法
defineDict["THOST_FTDC_TPID_EncryptionStandard"] = 'E'
#系统风险算法
defineDict["THOST_FTDC_TPID_RiskMode"] = 'R'
#系统风险算法是否全局 0-否 1-是
defineDict["THOST_FTDC_TPID_RiskModeGlobal"] = 'G'
#密码加密算法
defineDict["THOST_FTDC_TPID_modeEncode"] = 'P'
#价格小数位数参数
defineDict["THOST_FTDC_TPID_tickMode"] = 'T'
#用户最大会话数
defineDict["THOST_FTDC_TPID_SingleUserSessionMaxNum"] = 'S'
#最大连续登录失败数
defineDict["THOST_FTDC_TPID_LoginFailMaxNum"] = 'L'
#是否强制认证
defineDict["THOST_FTDC_TPID_IsAuthForce"] = 'A'
#是否冻结证券持仓
defineDict["THOST_FTDC_TPID_IsPosiFreeze"] = 'F'
#是否限仓
defineDict["THOST_FTDC_TPID_IsPosiLimit"] = 'M'
#郑商所询价时间间隔
defineDict["THOST_FTDC_TPID_ForQuoteTimeInterval"] = 'Q'
typedefDict["TThostFtdcTradeParamIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSettlementParamValueType is a settlement parameter value type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSettlementParamValueType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCounterIDType is a counter ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCounterIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcInvestorGroupNameType is an investor group name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvestorGroupNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBrandCodeType is a brand code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBrandCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcWarehouseType is a warehouse type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcWarehouseType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcProductDateType is a production date type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProductDateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcGradeType is a grade type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcGradeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcClassifyType is a category type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClassifyType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcPositionType is a storage-location type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPositionType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcYieldlyType is a place-of-origin type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcYieldlyType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcWeightType is a certified-weight type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcWeightType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSubEntryFundNoType is a sub-entry fund serial number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSubEntryFundNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileIDType is a file identifier type
#//////////////////////////////////////////////////////////////////////
# settlement fund data
defineDict["THOST_FTDC_FI_SettlementFund"] = 'F'
# trade data
defineDict["THOST_FTDC_FI_Trade"] = 'T'
# investor position data
defineDict["THOST_FTDC_FI_InvestorPosition"] = 'P'
# investor sub-entry fund data
defineDict["THOST_FTDC_FI_SubEntryFund"] = 'O'
# CTP : combination position data
# SGIT: CZCE combination position data
defineDict["THOST_FTDC_FI_CZCECombinationPos"] = 'C'
# data reported to the CSRC margin monitoring center
defineDict["THOST_FTDC_FI_CSRCData"] = 'R'
# CZCE position-closing settlement data
defineDict["THOST_FTDC_FI_CZCEClose"] = 'L'
# CZCE non-closing settlement data
defineDict["THOST_FTDC_FI_CZCENoClose"] = 'N'
# position detail data
defineDict["THOST_FTDC_FI_PositionDtl"] = 'D'
# option exercise file
defineDict["THOST_FTDC_FI_OptionStrike"] = 'S'
# settlement price comparison file
defineDict["THOST_FTDC_FI_SettlementPriceComparison"] = 'M'
# SHFE non-trade position change detail
defineDict["THOST_FTDC_FI_NonTradePosChange"] = 'B'
typedefDict["TThostFtdcFileIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileNameType is a file name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFileNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileTypeType is a file upload type
#//////////////////////////////////////////////////////////////////////
# settlement
defineDict["THOST_FTDC_FUT_Settlement"] = '0'
# verification
defineDict["THOST_FTDC_FUT_Check"] = '1'
typedefDict["TThostFtdcFileTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileFormatType is a file format type
#//////////////////////////////////////////////////////////////////////
# text file (.txt)
defineDict["THOST_FTDC_FFT_Txt"] = '0'
# compressed file (.zip)
defineDict["THOST_FTDC_FFT_Zip"] = '1'
# DBF file (.dbf)
defineDict["THOST_FTDC_FFT_DBF"] = '2'
typedefDict["TThostFtdcFileFormatType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileUploadStatusType is a file status type
#//////////////////////////////////////////////////////////////////////
# upload succeeded
defineDict["THOST_FTDC_FUS_SucceedUpload"] = '1'
# upload failed
defineDict["THOST_FTDC_FUS_FailedUpload"] = '2'
# import succeeded
defineDict["THOST_FTDC_FUS_SucceedLoad"] = '3'
# import partially succeeded
defineDict["THOST_FTDC_FUS_PartSucceedLoad"] = '4'
# import failed
defineDict["THOST_FTDC_FUS_FailedLoad"] = '5'
typedefDict["TThostFtdcFileUploadStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcTransferDirectionType is a position-transfer direction type
#//////////////////////////////////////////////////////////////////////
# transfer out
defineDict["THOST_FTDC_TD_Out"] = '0'
# transfer in
defineDict["THOST_FTDC_TD_In"] = '1'
typedefDict["TThostFtdcTransferDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcUploadModeType is an upload file mode type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUploadModeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcAccountIDType is an investor account ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAccountIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankFlagType is a unified bank identifier type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankAccountType is a bank account type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOpenNameType is a bank-account holder name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOpenNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOpenBankType is a bank-account opening bank type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOpenBankType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankNameType is a bank name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcPublishPathType is a publish path type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPublishPathType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOperatorIDType is an operator ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOperatorIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcMonthCountType is a month count type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMonthCountType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcAdvanceMonthArrayType is an advance-month array type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAdvanceMonthArrayType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcDateExprType is a date expression type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDateExprType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcInstrumentIDExprType is an instrument ID expression type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInstrumentIDExprType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcInstrumentNameExprType is an instrument name expression type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInstrumentNameExprType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSpecialCreateRuleType is a special creation rule type
#//////////////////////////////////////////////////////////////////////
# no special creation rule
defineDict["THOST_FTDC_SC_NoSpecialRule"] = '0'
# exclude Spring Festival
defineDict["THOST_FTDC_SC_NoSpringFestival"] = '1'
typedefDict["TThostFtdcSpecialCreateRuleType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBasisPriceTypeType is a listing basis price type
#//////////////////////////////////////////////////////////////////////
# settlement price of the previous contract
defineDict["THOST_FTDC_IPT_LastSettlement"] = '1'
# closing price of the previous contract
# NOTE(review): "LaseClose" spelling appears intentional here (presumably
# inherited from the upstream C header) — do not rename the key, callers
# look it up by this exact string.
defineDict["THOST_FTDC_IPT_LaseClose"] = '2'
typedefDict["TThostFtdcBasisPriceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcProductLifePhaseType is a product life-cycle phase type
#//////////////////////////////////////////////////////////////////////
# active
defineDict["THOST_FTDC_PLP_Active"] = '1'
# inactive
defineDict["THOST_FTDC_PLP_NonActive"] = '2'
# canceled
defineDict["THOST_FTDC_PLP_Canceled"] = '3'
typedefDict["TThostFtdcProductLifePhaseType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcDeliveryModeType is a delivery mode type
#//////////////////////////////////////////////////////////////////////
# cash delivery
defineDict["THOST_FTDC_DM_CashDeliv"] = '1'
# physical (commodity) delivery
defineDict["THOST_FTDC_DM_CommodityDeliv"] = '2'
typedefDict["TThostFtdcDeliveryModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcLogLevelType is a log level type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLogLevelType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcProcessNameType is a stored procedure name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProcessNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOperationMemoType is an operation memo type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOperationMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundIOTypeType is a fund deposit/withdrawal type
#//////////////////////////////////////////////////////////////////////
# deposit/withdrawal
defineDict["THOST_FTDC_FIOT_FundIO"] = '1'
# bank-futures transfer
defineDict["THOST_FTDC_FIOT_Transfer"] = '2'
# bank-futures currency exchange
defineDict["THOST_FTDC_FIOT_SwapCurrency"] = '3'
typedefDict["TThostFtdcFundIOTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundTypeType is a fund type
#//////////////////////////////////////////////////////////////////////
# bank deposit
defineDict["THOST_FTDC_FT_Deposite"] = '1'
# itemized fund
defineDict["THOST_FTDC_FT_ItemFund"] = '2'
# company adjustment
defineDict["THOST_FTDC_FT_Company"] = '3'
# internal fund transfer
defineDict["THOST_FTDC_FT_InnerTransfer"] = '4'
typedefDict["TThostFtdcFundTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundDirectionType is a fund deposit/withdrawal direction type
#//////////////////////////////////////////////////////////////////////
# deposit (in)
defineDict["THOST_FTDC_FD_In"] = '1'
# withdrawal (out)
defineDict["THOST_FTDC_FD_Out"] = '2'
typedefDict["TThostFtdcFundDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundStatusType is a fund status type
#//////////////////////////////////////////////////////////////////////
# recorded
defineDict["THOST_FTDC_FS_Record"] = '1'
# checked (reviewed)
defineDict["THOST_FTDC_FS_Check"] = '2'
# charged back (reversed)
defineDict["THOST_FTDC_FS_Charge"] = '3'
typedefDict["TThostFtdcFundStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBillNoType is a bill number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBillNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBillNameType is a bill name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBillNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcPublishStatusType is a publish status type
#//////////////////////////////////////////////////////////////////////
# not published
defineDict["THOST_FTDC_PS_None"] = '1'
# publishing
defineDict["THOST_FTDC_PS_Publishing"] = '2'
# published
defineDict["THOST_FTDC_PS_Published"] = '3'
typedefDict["TThostFtdcPublishStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcEnumValueIDType is an enum value ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEnumValueIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcEnumValueTypeType is an enum value type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEnumValueTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcEnumValueLabelType is an enum value label type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEnumValueLabelType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcEnumValueResultType is an enum value result type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEnumValueResultType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSystemStatusType is a system status type
#//////////////////////////////////////////////////////////////////////
# inactive
defineDict["THOST_FTDC_ES_NonActive"] = '1'
# starting up
defineDict["THOST_FTDC_ES_Startup"] = '2'
# trading-session initialization started
defineDict["THOST_FTDC_ES_Initialize"] = '3'
# trading-session initialization finished
defineDict["THOST_FTDC_ES_Initialized"] = '4'
# market close started
defineDict["THOST_FTDC_ES_Close"] = '5'
# market close finished
defineDict["THOST_FTDC_ES_Closed"] = '6'
# settlement
defineDict["THOST_FTDC_ES_Settlement"] = '7'
typedefDict["TThostFtdcSystemStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSettlementStatusType is a settlement status type
#//////////////////////////////////////////////////////////////////////
# initial
defineDict["THOST_FTDC_STS_Initialize"] = '0'
# settling
defineDict["THOST_FTDC_STS_Settlementing"] = '1'
# settled
defineDict["THOST_FTDC_STS_Settlemented"] = '2'
# settlement finished
defineDict["THOST_FTDC_STS_Finished"] = '3'
typedefDict["TThostFtdcSettlementStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRangeIntTypeType is a bounded-value kind type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRangeIntTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRangeIntFromType is a bounded-value lower-limit type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRangeIntFromType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRangeIntToType is a bounded-value upper-limit type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRangeIntToType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFunctionIDType is a function code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFunctionIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFunctionValueCodeType is a function value code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFunctionValueCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFunctionNameType is a function name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFunctionNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRoleIDType is a role ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRoleIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRoleNameType is a role name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRoleNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcDescriptionType is a description type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDescriptionType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCombineIDType is a combination ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombineIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCombineTypeType is a combination type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombineTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcInvestorTypeType is an investor type
#//////////////////////////////////////////////////////////////////////
# natural person
defineDict["THOST_FTDC_CT_Person"] = '0'
# legal entity (company)
defineDict["THOST_FTDC_CT_Company"] = '1'
# investment fund
defineDict["THOST_FTDC_CT_Fund"] = '2'
# special institution
defineDict["THOST_FTDC_CT_SpecialOrgan"] = '3'
# asset management account
defineDict["THOST_FTDC_CT_Asset"] = '4'
typedefDict["TThostFtdcInvestorTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBrokerTypeType is a broker company type
#//////////////////////////////////////////////////////////////////////
# trading member
defineDict["THOST_FTDC_BT_Trade"] = '0'
# trading-and-settlement member
defineDict["THOST_FTDC_BT_TradeSettle"] = '1'
typedefDict["TThostFtdcBrokerTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRiskLevelType is a risk level type
#//////////////////////////////////////////////////////////////////////
# low-risk client
defineDict["THOST_FTDC_FAS_Low"] = '1'
# normal client
defineDict["THOST_FTDC_FAS_Normal"] = '2'
# watch-list client
defineDict["THOST_FTDC_FAS_Focus"] = '3'
# risk client
defineDict["THOST_FTDC_FAS_Risk"] = '4'
typedefDict["TThostFtdcRiskLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFeeAcceptStyleType is a commission collection style type
#//////////////////////////////////////////////////////////////////////
# collected per trade
defineDict["THOST_FTDC_FAS_ByTrade"] = '1'
# collected per delivery
defineDict["THOST_FTDC_FAS_ByDeliv"] = '2'
# not collected
defineDict["THOST_FTDC_FAS_None"] = '3'
# collected as a specified fixed fee
defineDict["THOST_FTDC_FAS_FixFee"] = '4'
typedefDict["TThostFtdcFeeAcceptStyleType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcPasswordTypeType is a password type
#//////////////////////////////////////////////////////////////////////
# trading password
defineDict["THOST_FTDC_PWDT_Trade"] = '1'
# fund (account) password
defineDict["THOST_FTDC_PWDT_Account"] = '2'
typedefDict["TThostFtdcPasswordTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcAlgorithmType is a profit/loss algorithm type
#//////////////////////////////////////////////////////////////////////
# count both floating profit and floating loss
defineDict["THOST_FTDC_AG_All"] = '1'
# ignore floating profit, count floating loss
defineDict["THOST_FTDC_AG_OnlyLost"] = '2'
# count floating profit, ignore floating loss
defineDict["THOST_FTDC_AG_OnlyGain"] = '3'
# count neither floating profit nor floating loss
defineDict["THOST_FTDC_AG_None"] = '4'
typedefDict["TThostFtdcAlgorithmType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcIncludeCloseProfitType: whether close profit is included
#//////////////////////////////////////////////////////////////////////
# include close profit
defineDict["THOST_FTDC_ICP_Include"] = '0'
# exclude close profit
# NOTE(review): values jump from '0' to '2' — matches the upstream
# constant definitions; do not "fix" to '1'.
defineDict["THOST_FTDC_ICP_NotInclude"] = '2'
typedefDict["TThostFtdcIncludeCloseProfitType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcAllWithoutTradeType: whether the withdrawable ratio limit applies
#//////////////////////////////////////////////////////////////////////
# no position and no trade: not subject to the withdrawable ratio limit
defineDict["THOST_FTDC_AWT_Enable"] = '0'
# subject to the withdrawable ratio limit
defineDict["THOST_FTDC_AWT_Disable"] = '2'
# no position: not subject to the withdrawable ratio limit
defineDict["THOST_FTDC_AWT_NoHoldEnable"] = '3'
typedefDict["TThostFtdcAllWithoutTradeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCommentType is a profit/loss algorithm description type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommentType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcVersionType is a version number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcVersionType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeCodeType is a transaction code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeDateType is a transaction date type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeDateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeTimeType is a transaction time type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeTimeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeSerialType is an initiator serial number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeSerialType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeSerialNoType is an initiator serial number type (numeric)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeSerialNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcFutureIDType is a futures company ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankIDType is a bank ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankBrchIDType is a bank branch-center ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankBrchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankBranchIDType is a branch-center ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankBranchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOperNoType is a bank teller ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOperNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcDeviceIDType is a channel identifier type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDeviceIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRecordNumType is a record count type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRecordNumType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFutureAccountType is a futures fund account type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFuturePwdFlagType is a fund password verification flag type
#//////////////////////////////////////////////////////////////////////
# do not verify
defineDict["THOST_FTDC_FPWD_UnCheck"] = '0'
# verify
defineDict["THOST_FTDC_FPWD_Check"] = '1'
typedefDict["TThostFtdcFuturePwdFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcTransferTypeType is a bank-futures transfer type
#//////////////////////////////////////////////////////////////////////
# bank to futures
defineDict["THOST_FTDC_TT_BankToFuture"] = '0'
# futures to bank
defineDict["THOST_FTDC_TT_FutureToBank"] = '1'
typedefDict["TThostFtdcTransferTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFutureAccPwdType is a futures fund password type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureAccPwdType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCurrencyCodeType is a currency code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencyCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRetCodeType is a response code type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRetCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRetInfoType is a response message type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRetInfoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeAmtType is a bank total balance type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeAmtType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUseAmtType is a bank available balance type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUseAmtType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFetchAmtType is a bank withdrawable balance type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFetchAmtType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTransferValidFlagType is a transfer validity flag type
#//////////////////////////////////////////////////////////////////////
# invalid or failed
defineDict["THOST_FTDC_TVF_Invalid"] = '0'
# valid
defineDict["THOST_FTDC_TVF_Valid"] = '1'
# reversal
defineDict["THOST_FTDC_TVF_Reverse"] = '2'
typedefDict["TThostFtdcTransferValidFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCertCodeType is a certificate number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCertCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcReasonType is a reason type
#//////////////////////////////////////////////////////////////////////
# erroneous order
defineDict["THOST_FTDC_RN_CD"] = '0'
# funds in transit
defineDict["THOST_FTDC_RN_ZT"] = '1'
# other
defineDict["THOST_FTDC_RN_QT"] = '2'
typedefDict["TThostFtdcReasonType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundProjectIDType is a fund project ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFundProjectIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSexType is a gender type
#//////////////////////////////////////////////////////////////////////
# unknown
defineDict["THOST_FTDC_SEX_None"] = '0'
# male
defineDict["THOST_FTDC_SEX_Man"] = '1'
# female
defineDict["THOST_FTDC_SEX_Woman"] = '2'
typedefDict["TThostFtdcSexType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcProfessionType is a profession type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProfessionType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcNationalType is a nationality type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcNationalType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcProvinceType is a province type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProvinceType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRegionType is a district type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRegionType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCountryType is a country type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCountryType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcLicenseNOType is a business license number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLicenseNOType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCompanyTypeType is a company nature type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCompanyTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBusinessScopeType is a business scope type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBusinessScopeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCapitalCurrencyType is a registered-capital currency type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCapitalCurrencyType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserTypeType is a user type
#//////////////////////////////////////////////////////////////////////
# investor
defineDict["THOST_FTDC_UT_Investor"] = '0'
# operator
defineDict["THOST_FTDC_UT_Operator"] = '1'
# administrator (super user)
defineDict["THOST_FTDC_UT_SuperUser"] = '2'
typedefDict["TThostFtdcUserTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRateTypeType is a rate type
#//////////////////////////////////////////////////////////////////////
# margin rate
defineDict["THOST_FTDC_RATETYPE_MarginRate"] = '2'
typedefDict["TThostFtdcRateTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcNoteTypeType is a notice type
#//////////////////////////////////////////////////////////////////////
# trade settlement statement
defineDict["THOST_FTDC_NOTETYPE_TradeSettleBill"] = '1'
# monthly trade settlement report
defineDict["THOST_FTDC_NOTETYPE_TradeSettleMonth"] = '2'
# margin call notice
defineDict["THOST_FTDC_NOTETYPE_CallMarginNotes"] = '3'
# forced liquidation notice
defineDict["THOST_FTDC_NOTETYPE_ForceCloseNotes"] = '4'
# trade confirmation notice
defineDict["THOST_FTDC_NOTETYPE_TradeNotes"] = '5'
# delivery notice
defineDict["THOST_FTDC_NOTETYPE_DelivNotes"] = '6'
typedefDict["TThostFtdcNoteTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSettlementStyleType is a settlement statement style type
#//////////////////////////////////////////////////////////////////////
# daily mark-to-market
defineDict["THOST_FTDC_SBS_Day"] = '1'
# trade-by-trade offset
defineDict["THOST_FTDC_SBS_Volume"] = '2'
typedefDict["TThostFtdcSettlementStyleType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBrokerDNSType is a domain name type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBrokerDNSType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSentenceType is a statement (sentence) type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSentenceType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSettlementBillTypeType is a settlement statement kind type
#//////////////////////////////////////////////////////////////////////
# daily report
defineDict["THOST_FTDC_ST_Day"] = '0'
# monthly report
defineDict["THOST_FTDC_ST_Month"] = '1'
typedefDict["TThostFtdcSettlementBillTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserRightTypeType is a client permission type
#//////////////////////////////////////////////////////////////////////
# logon
defineDict["THOST_FTDC_URT_Logon"] = '1'
# bank-futures transfer
defineDict["THOST_FTDC_URT_Transfer"] = '2'
# settlement statement by e-mail
defineDict["THOST_FTDC_URT_EMail"] = '3'
# settlement statement by fax
defineDict["THOST_FTDC_URT_Fax"] = '4'
# conditional order
defineDict["THOST_FTDC_URT_ConditionOrder"] = '5'
typedefDict["TThostFtdcUserRightTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcMarginPriceTypeType is a margin price type
#//////////////////////////////////////////////////////////////////////
# previous settlement price
defineDict["THOST_FTDC_MPT_PreSettlementPrice"] = '1'
# latest price
# NOTE(review): the original comment says "latest price" while the
# constant is named SettlementPrice — naming kept as in upstream.
defineDict["THOST_FTDC_MPT_SettlementPrice"] = '2'
# average trade price
defineDict["THOST_FTDC_MPT_AveragePrice"] = '3'
# open price
defineDict["THOST_FTDC_MPT_OpenPrice"] = '4'
typedefDict["TThostFtdcMarginPriceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBillGenStatusType is a settlement statement generation status type
#//////////////////////////////////////////////////////////////////////
# not generated
defineDict["THOST_FTDC_BGS_None"] = '0'
# generating
defineDict["THOST_FTDC_BGS_NoGenerated"] = '1'
# generated
defineDict["THOST_FTDC_BGS_Generated"] = '2'
typedefDict["TThostFtdcBillGenStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcAlgoTypeType is an algorithm type
#//////////////////////////////////////////////////////////////////////
# position handling algorithm
defineDict["THOST_FTDC_AT_HandlePositionAlgo"] = '1'
# margin rate lookup algorithm
defineDict["THOST_FTDC_AT_FindMarginRateAlgo"] = '2'
typedefDict["TThostFtdcAlgoTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcHandlePositionAlgoIDType is a position handling algorithm ID type
#//////////////////////////////////////////////////////////////////////
# basic
defineDict["THOST_FTDC_HPA_Base"] = '1'
# Dalian Commodity Exchange (DCE)
defineDict["THOST_FTDC_HPA_DCE"] = '2'
# Zhengzhou Commodity Exchange (CZCE)
defineDict["THOST_FTDC_HPA_CZCE"] = '3'
typedefDict["TThostFtdcHandlePositionAlgoIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFindMarginRateAlgoIDType is a margin rate lookup algorithm ID type
#//////////////////////////////////////////////////////////////////////
# basic
defineDict["THOST_FTDC_FMRA_Base"] = '1'
# Dalian Commodity Exchange (DCE)
defineDict["THOST_FTDC_FMRA_DCE"] = '2'
# Zhengzhou Commodity Exchange (CZCE)
defineDict["THOST_FTDC_FMRA_CZCE"] = '3'
typedefDict["TThostFtdcFindMarginRateAlgoIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcHandleTradingAccountAlgoIDType is a fund handling algorithm ID type
#//////////////////////////////////////////////////////////////////////
# basic
defineDict["THOST_FTDC_HTAA_Base"] = '1'
# Dalian Commodity Exchange (DCE)
defineDict["THOST_FTDC_HTAA_DCE"] = '2'
# Zhengzhou Commodity Exchange (CZCE)
defineDict["THOST_FTDC_HTAA_CZCE"] = '3'
typedefDict["TThostFtdcHandleTradingAccountAlgoIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPersonTypeType是一个联系人类型类型
#//////////////////////////////////////////////////////////////////////
#指定下单人
defineDict["THOST_FTDC_PST_Order"] = '1'
#开户授权人
defineDict["THOST_FTDC_PST_Open"] = '2'
#资金调拨人
defineDict["THOST_FTDC_PST_Fund"] = '3'
#结算单确认人
defineDict["THOST_FTDC_PST_Settlement"] = '4'
#法人
defineDict["THOST_FTDC_PST_Company"] = '5'
#法人代表
defineDict["THOST_FTDC_PST_Corporation"] = '6'
#投资者联系人
defineDict["THOST_FTDC_PST_LinkMan"] = '7'
#分户管理资产负责人
defineDict["THOST_FTDC_PST_Ledger"] = '8'
#托(保)管人
defineDict["THOST_FTDC_PST_Trustee"] = '9'
#托(保)管机构法人代表
defineDict["THOST_FTDC_PST_TrusteeCorporation"] = 'A'
#托(保)管机构开户授权人
defineDict["THOST_FTDC_PST_TrusteeOpen"] = 'B'
#托(保)管机构联系人
defineDict["THOST_FTDC_PST_TrusteeContact"] = 'C'
#境外自然人参考证件
defineDict["THOST_FTDC_PST_ForeignerRefer"] = 'D'
#法人代表参考证件
defineDict["THOST_FTDC_PST_CorporationRefer"] = 'E'
typedefDict["TThostFtdcPersonTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcQueryInvestorRangeType是一个查询范围类型
#//////////////////////////////////////////////////////////////////////
#所有
defineDict["THOST_FTDC_QIR_All"] = '1'
#查询分类
defineDict["THOST_FTDC_QIR_Group"] = '2'
#单一投资者
defineDict["THOST_FTDC_QIR_Single"] = '3'
typedefDict["TThostFtdcQueryInvestorRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvestorRiskStatusType是一个投资者风险状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_IRS_Normal"] = '1'
#警告
defineDict["THOST_FTDC_IRS_Warn"] = '2'
#追保
defineDict["THOST_FTDC_IRS_Call"] = '3'
#强平
defineDict["THOST_FTDC_IRS_Force"] = '4'
#异常
defineDict["THOST_FTDC_IRS_Exception"] = '5'
typedefDict["TThostFtdcInvestorRiskStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcLegIDType是一个单腿编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLegIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcLegMultipleType是一个单腿乘数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLegMultipleType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcImplyLevelType是一个派生层数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcImplyLevelType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcClearAccountType是一个结算账户类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClearAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganNOType是一个结算账户类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOrganNOType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcClearbarchIDType是一个结算账户联行号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClearbarchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUserEventTypeType是一个用户事件类型类型
#//////////////////////////////////////////////////////////////////////
#登录
defineDict["THOST_FTDC_UET_Login"] = '1'
#登出
defineDict["THOST_FTDC_UET_Logout"] = '2'
#交易成功
defineDict["THOST_FTDC_UET_Trading"] = '3'
#交易失败
defineDict["THOST_FTDC_UET_TradingError"] = '4'
#修改密码
defineDict["THOST_FTDC_UET_UpdatePassword"] = '5'
#客户端认证
defineDict["THOST_FTDC_UET_Authenticate"] = '6'
#其他
defineDict["THOST_FTDC_UET_Other"] = '9'
typedefDict["TThostFtdcUserEventTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcUserEventInfoType是一个用户事件信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUserEventInfoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCloseStyleType是一个平仓方式类型
#//////////////////////////////////////////////////////////////////////
#先开先平
defineDict["THOST_FTDC_ICS_Close"] = '0'
#先平今再平昨
defineDict["THOST_FTDC_ICS_CloseToday"] = '1'
typedefDict["TThostFtdcCloseStyleType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStatModeType是一个统计方式类型
#//////////////////////////////////////////////////////////////////////
#----
defineDict["THOST_FTDC_SM_Non"] = '0'
#按合约统计
defineDict["THOST_FTDC_SM_Instrument"] = '1'
#按产品统计
defineDict["THOST_FTDC_SM_Product"] = '2'
#按投资者统计
defineDict["THOST_FTDC_SM_Investor"] = '3'
typedefDict["TThostFtdcStatModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcParkedOrderStatusType是一个预埋单状态类型
#//////////////////////////////////////////////////////////////////////
#未发送
defineDict["THOST_FTDC_PAOS_NotSend"] = '1'
#已发送
defineDict["THOST_FTDC_PAOS_Send"] = '2'
#已删除
defineDict["THOST_FTDC_PAOS_Deleted"] = '3'
typedefDict["TThostFtdcParkedOrderStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcParkedOrderIDType是一个预埋报单编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcParkedOrderIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcParkedOrderActionIDType是一个预埋撤单编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcParkedOrderActionIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirDealStatusType是一个处理状态类型
#//////////////////////////////////////////////////////////////////////
#正在处理
defineDict["THOST_FTDC_VDS_Dealing"] = '1'
#处理成功
defineDict["THOST_FTDC_VDS_DeaclSucceed"] = '2'
typedefDict["TThostFtdcVirDealStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrgSystemIDType是一个原有系统代码类型
#//////////////////////////////////////////////////////////////////////
#综合交易平台
defineDict["THOST_FTDC_ORGS_Standard"] = '0'
#易盛系统
defineDict["THOST_FTDC_ORGS_ESunny"] = '1'
#金仕达V6系统
defineDict["THOST_FTDC_ORGS_KingStarV6"] = '2'
typedefDict["TThostFtdcOrgSystemIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirTradeStatusType是一个交易状态类型
#//////////////////////////////////////////////////////////////////////
#正常处理中
defineDict["THOST_FTDC_VTS_NaturalDeal"] = '0'
#成功结束
defineDict["THOST_FTDC_VTS_SucceedEnd"] = '1'
#失败结束
defineDict["THOST_FTDC_VTS_FailedEND"] = '2'
#异常中
defineDict["THOST_FTDC_VTS_Exception"] = '3'
#已人工异常处理
defineDict["THOST_FTDC_VTS_ManualDeal"] = '4'
#通讯异常 ,请人工处理
defineDict["THOST_FTDC_VTS_MesException"] = '5'
#系统出错,请人工处理
defineDict["THOST_FTDC_VTS_SysException"] = '6'
typedefDict["TThostFtdcVirTradeStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirBankAccTypeType是一个银行帐户类型类型
#//////////////////////////////////////////////////////////////////////
#存折
defineDict["THOST_FTDC_VBAT_BankBook"] = '1'
#储蓄卡
defineDict["THOST_FTDC_VBAT_BankCard"] = '2'
#信用卡
defineDict["THOST_FTDC_VBAT_CreditCard"] = '3'
typedefDict["TThostFtdcVirBankAccTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirementStatusType是一个银行帐户类型类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_VMS_Natural"] = '0'
#销户
defineDict["THOST_FTDC_VMS_Canceled"] = '9'
typedefDict["TThostFtdcVirementStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirementAvailAbilityType是一个有效标志类型
#//////////////////////////////////////////////////////////////////////
#未确认
defineDict["THOST_FTDC_VAA_NoAvailAbility"] = '0'
#有效
defineDict["THOST_FTDC_VAA_AvailAbility"] = '1'
#冲正
defineDict["THOST_FTDC_VAA_Repeal"] = '2'
typedefDict["TThostFtdcVirementAvailAbilityType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcVirementTradeCodeType是一个交易代码类型
#//////////////////////////////////////////////////////////////////////
#银行发起银行资金转期货
defineDict["THOST_FTDC_VTC_BankBankToFuture"] = '102001'
#银行发起期货资金转银行
defineDict["THOST_FTDC_VTC_BankFutureToBank"] = '102002'
#期货发起银行资金转期货
defineDict["THOST_FTDC_VTC_FutureBankToFuture"] = '202001'
#期货发起期货资金转银行
defineDict["THOST_FTDC_VTC_FutureFutureToBank"] = '202002'
typedefDict["TThostFtdcVirementTradeCodeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPhotoTypeNameType是一个影像类型名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPhotoTypeNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPhotoTypeIDType是一个影像类型代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPhotoTypeIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPhotoNameType是一个影像名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPhotoNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcTopicIDType是一个主题代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTopicIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcReportTypeIDType是一个交易报告类型标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcReportTypeIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCharacterIDType是一个交易特征代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCharacterIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLParamIDType是一个参数代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLParamIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLInvestorTypeType是一个投资者类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLInvestorTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLIdCardTypeType是一个证件类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLIdCardTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLTradeDirectType是一个资金进出方向类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLTradeDirectType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLTradeModelType是一个资金进出方式类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLTradeModelType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLParamIDType是一个参数代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLParamIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLOpParamValueType是一个业务参数代码值类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLOpParamValueType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLCustomerCardTypeType是一个客户身份证件/证明文件类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLCustomerCardTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLInstitutionNameType是一个金融机构网点名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLInstitutionNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLDistrictIDType是一个金融机构网点所在地区行政区划代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLDistrictIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLRelationShipType是一个金融机构网点与大额交易的关系类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLRelationShipType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLInstitutionTypeType是一个金融机构网点代码类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLInstitutionTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLInstitutionIDType是一个金融机构网点代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLInstitutionIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLAccountTypeType是一个账户类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLAccountTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLTradingTypeType是一个交易方式类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLTradingTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLTransactClassType是一个涉外收支交易分类与代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLTransactClassType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLCapitalIOType是一个资金收付标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLCapitalIOType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLSiteType是一个交易地点类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLSiteType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLCapitalPurposeType是一个资金用途类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLCapitalPurposeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLReportTypeType是一个报文类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLReportTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLSerialNoType是一个编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLSerialNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLStatusType是一个状态类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLStatusType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLGenStatusType是一个Aml生成方式类型
#//////////////////////////////////////////////////////////////////////
#程序生成
defineDict["THOST_FTDC_GEN_Program"] = '0'
#人工生成
defineDict["THOST_FTDC_GEN_HandWork"] = '1'
typedefDict["TThostFtdcAMLGenStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLSeqCodeType是一个业务标识号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLSeqCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLFileNameType是一个AML文件名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLFileNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLMoneyType是一个反洗钱资金类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLMoneyType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLFileAmountType是一个反洗钱资金类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLFileAmountType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcCFMMCKeyType是一个密钥类型(保证金监管)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCFMMCKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCFMMCTokenType是一个令牌类型(保证金监管)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCFMMCTokenType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCFMMCKeyKindType是一个动态密钥类别(保证金监管)类型
#//////////////////////////////////////////////////////////////////////
#主动请求更新
defineDict["THOST_FTDC_CFMMCKK_REQUEST"] = 'R'
#CFMMC自动更新
defineDict["THOST_FTDC_CFMMCKK_AUTO"] = 'A'
#CFMMC手动更新
defineDict["THOST_FTDC_CFMMCKK_MANUAL"] = 'M'
typedefDict["TThostFtdcCFMMCKeyKindType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLReportNameType是一个报文名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAMLReportNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcIndividualNameType是一个个人姓名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcIndividualNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencyIDType是一个币种代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencyIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCustNumberType是一个客户编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCustNumberType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganCodeType是一个机构编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOrganCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganNameType是一个机构名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOrganNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSuperOrganCodeType是一个上级机构编码,即期货公司总部、银行总行类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSuperOrganCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSubBranchIDType是一个分支机构类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSubBranchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSubBranchNameType是一个分支机构名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSubBranchNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBranchNetCodeType是一个机构网点号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBranchNetCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBranchNetNameType是一个机构网点名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBranchNetNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganFlagType是一个机构标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOrganFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankCodingForFutureType是一个银行对期货公司的编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankCodingForFutureType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankReturnCodeType是一个银行对返回码的定义类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankReturnCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPlateReturnCodeType是一个银期转帐平台对返回码的定义类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPlateReturnCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankSubBranchIDType是一个银行分支机构编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankSubBranchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureBranchIDType是一个期货分支机构编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureBranchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcReturnCodeType是一个返回代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcReturnCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOperatorCodeType是一个操作员类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOperatorCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcClearDepIDType是一个机构结算帐户机构号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClearDepIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcClearBrchIDType是一个机构结算帐户联行号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClearBrchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcClearNameType是一个机构结算帐户名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClearNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankAccountNameType是一个银行帐户名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankAccountNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvDepIDType是一个机构投资人账号机构号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvDepIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvBrchIDType是一个机构投资人联行号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvBrchIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcMessageFormatVersionType是一个信息格式版本类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMessageFormatVersionType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDigestType是一个摘要类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDigestType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAuthenticDataType是一个认证数据类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAuthenticDataType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPasswordKeyType是一个密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPasswordKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureAccountNameType是一个期货帐户名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureAccountNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcMobilePhoneType是一个手机类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcMobilePhoneType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureMainKeyType是一个期货公司主密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureMainKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureWorkKeyType是一个期货公司工作密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureWorkKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureTransKeyType是一个期货公司传输密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureTransKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankMainKeyType是一个银行主密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankMainKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankWorkKeyType是一个银行工作密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankWorkKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankTransKeyType是一个银行传输密钥类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankTransKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankServerDescriptionType是一个银行服务器描述信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankServerDescriptionType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAddInfoType是一个附加信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAddInfoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDescrInfoForReturnCodeType是一个返回码描述类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDescrInfoForReturnCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCountryCodeType是一个国家代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCountryCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSerialType是一个流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSerialType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcPlateSerialType是一个平台流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPlateSerialType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankSerialType是一个银行流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankSerialType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCorrectSerialType是一个被冲正交易流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCorrectSerialType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureSerialType是一个期货公司流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureSerialType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcApplicationIDType是一个应用标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcApplicationIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankProxyIDType是一个银行代理标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankProxyIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTCoreIDType是一个银期转帐核心系统标识类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBTCoreIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcServerPortType是一个服务端口号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcServerPortType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcRepealedTimesType是一个已经冲正次数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRepealedTimesType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcRepealTimeIntervalType是一个冲正时间间隔类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRepealTimeIntervalType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTotalTimesType是一个每日累计转帐次数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTotalTimesType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTRequestIDType是一个请求ID类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBTRequestIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTIDType是一个交易ID类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradeAmountType是一个交易金额(元)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeAmountType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcCustFeeType是一个应收客户费用(元)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCustFeeType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureFeeType是一个应收期货公司费用(元)类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFutureFeeType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcSingleMaxAmtType是一个单笔最高限额类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSingleMaxAmtType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcSingleMinAmtType是一个单笔最低限额类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSingleMinAmtType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcTotalAmtType是一个每日累计转帐额度类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTotalAmtType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcCertificationTypeType是一个证件类型类型
#//////////////////////////////////////////////////////////////////////
#身份证
defineDict["THOST_FTDC_CFT_IDCard"] = '0'
#护照
defineDict["THOST_FTDC_CFT_Passport"] = '1'
#军官证
defineDict["THOST_FTDC_CFT_OfficerIDCard"] = '2'
#士兵证
defineDict["THOST_FTDC_CFT_SoldierIDCard"] = '3'
#回乡证
defineDict["THOST_FTDC_CFT_HomeComingCard"] = '4'
#户口簿
defineDict["THOST_FTDC_CFT_HouseholdRegister"] = '5'
#营业执照号
defineDict["THOST_FTDC_CFT_LicenseNo"] = '6'
#组织机构代码证
defineDict["THOST_FTDC_CFT_InstitutionCodeCard"] = '7'
#临时营业执照号
defineDict["THOST_FTDC_CFT_TempLicenseNo"] = '8'
#民办非企业登记证书
defineDict["THOST_FTDC_CFT_NoEnterpriseLicenseNo"] = '9'
#其他证件
defineDict["THOST_FTDC_CFT_OtherCard"] = 'x'
#主管部门批文
defineDict["THOST_FTDC_CFT_SuperDepAgree"] = 'a'
typedefDict["TThostFtdcCertificationTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFileBusinessCodeType是一个文件业务功能类型
#//////////////////////////////////////////////////////////////////////
#其他
defineDict["THOST_FTDC_FBC_Others"] = '0'
#转账交易明细对账
defineDict["THOST_FTDC_FBC_TransferDetails"] = '1'
#客户账户状态对账
defineDict["THOST_FTDC_FBC_CustAccStatus"] = '2'
#账户类交易明细对账
defineDict["THOST_FTDC_FBC_AccountTradeDetails"] = '3'
#期货账户信息变更明细对账
defineDict["THOST_FTDC_FBC_FutureAccountChangeInfoDetails"] = '4'
#客户资金台账余额明细对账
defineDict["THOST_FTDC_FBC_CustMoneyDetail"] = '5'
#客户销户结息明细对账
defineDict["THOST_FTDC_FBC_CustCancelAccountInfo"] = '6'
#客户资金余额对账结果
defineDict["THOST_FTDC_FBC_CustMoneyResult"] = '7'
#其它对账异常结果文件
defineDict["THOST_FTDC_FBC_OthersExceptionResult"] = '8'
#客户结息净额明细
defineDict["THOST_FTDC_FBC_CustInterestNetMoneyDetails"] = '9'
#客户资金交收明细
defineDict["THOST_FTDC_FBC_CustMoneySendAndReceiveDetails"] = 'a'
#法人存管银行资金交收汇总
defineDict["THOST_FTDC_FBC_CorporationMoneyTotal"] = 'b'
#主体间资金交收汇总
defineDict["THOST_FTDC_FBC_MainbodyMoneyTotal"] = 'c'
#总分平衡监管数据
defineDict["THOST_FTDC_FBC_MainPartMonitorData"] = 'd'
#存管银行备付金余额
defineDict["THOST_FTDC_FBC_PreparationMoney"] = 'e'
#协办存管银行资金监管数据
defineDict["THOST_FTDC_FBC_BankMoneyMonitorData"] = 'f'
typedefDict["TThostFtdcFileBusinessCodeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCashExchangeCodeType是一个汇钞标志类型
#//////////////////////////////////////////////////////////////////////
#汇
defineDict["THOST_FTDC_CEC_Exchange"] = '1'
#钞
defineDict["THOST_FTDC_CEC_Cash"] = '2'
typedefDict["TThostFtdcCashExchangeCodeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcYesNoIndicatorType是一个是或否标识类型
#//////////////////////////////////////////////////////////////////////
#是
defineDict["THOST_FTDC_YNI_Yes"] = '0'
#否
defineDict["THOST_FTDC_YNI_No"] = '1'
typedefDict["TThostFtdcYesNoIndicatorType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBanlanceTypeType是一个余额类型类型
#//////////////////////////////////////////////////////////////////////
#当前余额
defineDict["THOST_FTDC_BLT_CurrentMoney"] = '0'
#可用余额
defineDict["THOST_FTDC_BLT_UsableMoney"] = '1'
#可取余额
defineDict["THOST_FTDC_BLT_FetchableMoney"] = '2'
#冻结余额
defineDict["THOST_FTDC_BLT_FreezeMoney"] = '3'
typedefDict["TThostFtdcBanlanceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcGenderType是一个性别类型
#//////////////////////////////////////////////////////////////////////
#未知状态
defineDict["THOST_FTDC_GD_Unknown"] = '0'
#男
defineDict["THOST_FTDC_GD_Male"] = '1'
#女
defineDict["THOST_FTDC_GD_Female"] = '2'
typedefDict["TThostFtdcGenderType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFeePayFlagType是一个费用支付标志类型
#//////////////////////////////////////////////////////////////////////
#由受益方支付费用
defineDict["THOST_FTDC_FPF_BEN"] = '0'
#由发送方支付费用
defineDict["THOST_FTDC_FPF_OUR"] = '1'
#由发送方支付发起的费用,受益方支付接受的费用
defineDict["THOST_FTDC_FPF_SHA"] = '2'
typedefDict["TThostFtdcFeePayFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPassWordKeyTypeType是一个密钥类型类型
#//////////////////////////////////////////////////////////////////////
#交换密钥
defineDict["THOST_FTDC_PWKT_ExchangeKey"] = '0'
#密码密钥
defineDict["THOST_FTDC_PWKT_PassWordKey"] = '1'
#MAC密钥
defineDict["THOST_FTDC_PWKT_MACKey"] = '2'
#报文密钥
defineDict["THOST_FTDC_PWKT_MessageKey"] = '3'
typedefDict["TThostFtdcPassWordKeyTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTPassWordTypeType是一个密码类型类型
#//////////////////////////////////////////////////////////////////////
#查询
defineDict["THOST_FTDC_PWT_Query"] = '0'
#取款
defineDict["THOST_FTDC_PWT_Fetch"] = '1'
#转帐
defineDict["THOST_FTDC_PWT_Transfer"] = '2'
#交易
defineDict["THOST_FTDC_PWT_Trade"] = '3'
typedefDict["TThostFtdcFBTPassWordTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTEncryModeType是一个加密方式类型
#//////////////////////////////////////////////////////////////////////
#不加密
defineDict["THOST_FTDC_EM_NoEncry"] = '0'
#DES
defineDict["THOST_FTDC_EM_DES"] = '1'
#3DES
defineDict["THOST_FTDC_EM_3DES"] = '2'
typedefDict["TThostFtdcFBTEncryModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankRepealFlagType是一个银行冲正标志类型
#//////////////////////////////////////////////////////////////////////
#银行无需自动冲正
defineDict["THOST_FTDC_BRF_BankNotNeedRepeal"] = '0'
#银行待自动冲正
defineDict["THOST_FTDC_BRF_BankWaitingRepeal"] = '1'
#银行已自动冲正
defineDict["THOST_FTDC_BRF_BankBeenRepealed"] = '2'
typedefDict["TThostFtdcBankRepealFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBrokerRepealFlagType是一个期商冲正标志类型
#//////////////////////////////////////////////////////////////////////
#期商无需自动冲正
defineDict["THOST_FTDC_BRORF_BrokerNotNeedRepeal"] = '0'
#期商待自动冲正
defineDict["THOST_FTDC_BRORF_BrokerWaitingRepeal"] = '1'
#期商已自动冲正
defineDict["THOST_FTDC_BRORF_BrokerBeenRepealed"] = '2'
typedefDict["TThostFtdcBrokerRepealFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInstitutionTypeType是一个机构类别类型
#//////////////////////////////////////////////////////////////////////
#银行
defineDict["THOST_FTDC_TS_Bank"] = '0'
#期商
defineDict["THOST_FTDC_TS_Future"] = '1'
#券商
defineDict["THOST_FTDC_TS_Store"] = '2'
typedefDict["TThostFtdcInstitutionTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcLastFragmentType是一个最后分片标志类型
#//////////////////////////////////////////////////////////////////////
#是最后分片
defineDict["THOST_FTDC_LF_Yes"] = '0'
#不是最后分片
defineDict["THOST_FTDC_LF_No"] = '1'
typedefDict["TThostFtdcLastFragmentType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankAccStatusType是一个银行账户状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_BAS_Normal"] = '0'
#冻结
defineDict["THOST_FTDC_BAS_Freeze"] = '1'
#挂失
defineDict["THOST_FTDC_BAS_ReportLoss"] = '2'
typedefDict["TThostFtdcBankAccStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcMoneyAccountStatusType是一个资金账户状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_MAS_Normal"] = '0'
#销户
defineDict["THOST_FTDC_MAS_Cancel"] = '1'
typedefDict["TThostFtdcMoneyAccountStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcManageStatusType是一个存管状态类型
#//////////////////////////////////////////////////////////////////////
#指定存管
defineDict["THOST_FTDC_MSS_Point"] = '0'
#预指定
defineDict["THOST_FTDC_MSS_PrePoint"] = '1'
#撤销指定
defineDict["THOST_FTDC_MSS_CancelPoint"] = '2'
typedefDict["TThostFtdcManageStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSystemTypeType是一个应用系统类型类型
#//////////////////////////////////////////////////////////////////////
#银期转帐
defineDict["THOST_FTDC_SYT_FutureBankTransfer"] = '0'
#银证转帐
defineDict["THOST_FTDC_SYT_StockBankTransfer"] = '1'
#第三方存管
defineDict["THOST_FTDC_SYT_TheThirdPartStore"] = '2'
typedefDict["TThostFtdcSystemTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTxnEndFlagType是一个银期转帐划转结果标志类型
#//////////////////////////////////////////////////////////////////////
#正常处理中
defineDict["THOST_FTDC_TEF_NormalProcessing"] = '0'
#成功结束
defineDict["THOST_FTDC_TEF_Success"] = '1'
#失败结束
defineDict["THOST_FTDC_TEF_Failed"] = '2'
#异常中
defineDict["THOST_FTDC_TEF_Abnormal"] = '3'
#已人工异常处理
defineDict["THOST_FTDC_TEF_ManualProcessedForException"] = '4'
#通讯异常 ,请人工处理
defineDict["THOST_FTDC_TEF_CommuFailedNeedManualProcess"] = '5'
#系统出错,请人工处理
defineDict["THOST_FTDC_TEF_SysErrorNeedManualProcess"] = '6'
typedefDict["TThostFtdcTxnEndFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcProcessStatusType是一个银期转帐服务处理状态类型
#//////////////////////////////////////////////////////////////////////
#未处理
defineDict["THOST_FTDC_PSS_NotProcess"] = '0'
#开始处理
defineDict["THOST_FTDC_PSS_StartProcess"] = '1'
#处理完成
defineDict["THOST_FTDC_PSS_Finished"] = '2'
typedefDict["TThostFtdcProcessStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCustTypeType是一个客户类型类型
#//////////////////////////////////////////////////////////////////////
#自然人
defineDict["THOST_FTDC_CUSTT_Person"] = '0'
#机构户
defineDict["THOST_FTDC_CUSTT_Institution"] = '1'
typedefDict["TThostFtdcCustTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTTransferDirectionType是一个银期转帐方向类型
#//////////////////////////////////////////////////////////////////////
#入金,银行转期货
defineDict["THOST_FTDC_FBTTD_FromBankToFuture"] = '1'
#出金,期货转银行
defineDict["THOST_FTDC_FBTTD_FromFutureToBank"] = '2'
typedefDict["TThostFtdcFBTTransferDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOpenOrDestroyType是一个开销户类别类型
#//////////////////////////////////////////////////////////////////////
#开户
defineDict["THOST_FTDC_OOD_Open"] = '1'
#销户
defineDict["THOST_FTDC_OOD_Destroy"] = '0'
typedefDict["TThostFtdcOpenOrDestroyType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAvailabilityFlagType是一个有效标志类型
#//////////////////////////////////////////////////////////////////////
#未确认
defineDict["THOST_FTDC_AVAF_Invalid"] = '0'
#有效
defineDict["THOST_FTDC_AVAF_Valid"] = '1'
#冲正
defineDict["THOST_FTDC_AVAF_Repeal"] = '2'
typedefDict["TThostFtdcAvailabilityFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganTypeType是一个机构类型类型
#//////////////////////////////////////////////////////////////////////
#银行代理
defineDict["THOST_FTDC_OT_Bank"] = '1'
#交易前置
defineDict["THOST_FTDC_OT_Future"] = '2'
#银期转帐平台管理
defineDict["THOST_FTDC_OT_PlateForm"] = '9'
typedefDict["TThostFtdcOrganTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganLevelType是一个机构级别类型
#//////////////////////////////////////////////////////////////////////
#银行总行或期商总部
defineDict["THOST_FTDC_OL_HeadQuarters"] = '1'
#银行分中心或期货公司营业部
defineDict["THOST_FTDC_OL_Branch"] = '2'
typedefDict["TThostFtdcOrganLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcProtocalIDType是一个协议类型类型
#//////////////////////////////////////////////////////////////////////
#期商协议
defineDict["THOST_FTDC_PID_FutureProtocal"] = '0'
#工行协议
defineDict["THOST_FTDC_PID_ICBCProtocal"] = '1'
#农行协议
defineDict["THOST_FTDC_PID_ABCProtocal"] = '2'
#中国银行协议
defineDict["THOST_FTDC_PID_CBCProtocal"] = '3'
#建行协议
defineDict["THOST_FTDC_PID_CCBProtocal"] = '4'
#交行协议
defineDict["THOST_FTDC_PID_BOCOMProtocal"] = '5'
#银期转帐平台协议
defineDict["THOST_FTDC_PID_FBTPlateFormProtocal"] = 'X'
typedefDict["TThostFtdcProtocalIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcConnectModeType是一个套接字连接方式类型
#//////////////////////////////////////////////////////////////////////
#短连接
defineDict["THOST_FTDC_CM_ShortConnect"] = '0'
#长连接
defineDict["THOST_FTDC_CM_LongConnect"] = '1'
typedefDict["TThostFtdcConnectModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSyncModeType是一个套接字通信方式类型
#//////////////////////////////////////////////////////////////////////
#异步
defineDict["THOST_FTDC_SRM_ASync"] = '0'
#同步
defineDict["THOST_FTDC_SRM_Sync"] = '1'
typedefDict["TThostFtdcSyncModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankAccTypeType是一个银行帐号类型类型
#//////////////////////////////////////////////////////////////////////
#银行存折
defineDict["THOST_FTDC_BAT_BankBook"] = '1'
#储蓄卡
defineDict["THOST_FTDC_BAT_SavingCard"] = '2'
#信用卡
defineDict["THOST_FTDC_BAT_CreditCard"] = '3'
typedefDict["TThostFtdcBankAccTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFutureAccTypeType是一个期货公司帐号类型类型
#//////////////////////////////////////////////////////////////////////
#银行存折
defineDict["THOST_FTDC_FAT_BankBook"] = '1'
#储蓄卡
defineDict["THOST_FTDC_FAT_SavingCard"] = '2'
#信用卡
defineDict["THOST_FTDC_FAT_CreditCard"] = '3'
typedefDict["TThostFtdcFutureAccTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOrganStatusType是一个接入机构状态类型
#//////////////////////////////////////////////////////////////////////
#启用
defineDict["THOST_FTDC_OS_Ready"] = '0'
#签到
defineDict["THOST_FTDC_OS_CheckIn"] = '1'
#签退
defineDict["THOST_FTDC_OS_CheckOut"] = '2'
#对帐文件到达
defineDict["THOST_FTDC_OS_CheckFileArrived"] = '3'
#对帐
defineDict["THOST_FTDC_OS_CheckDetail"] = '4'
#日终清理
defineDict["THOST_FTDC_OS_DayEndClean"] = '5'
#注销
defineDict["THOST_FTDC_OS_Invalid"] = '9'
typedefDict["TThostFtdcOrganStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCCBFeeModeType是一个建行收费模式类型
#//////////////////////////////////////////////////////////////////////
#按金额扣收
defineDict["THOST_FTDC_CCBFM_ByAmount"] = '1'
#按月扣收
defineDict["THOST_FTDC_CCBFM_ByMonth"] = '2'
typedefDict["TThostFtdcCCBFeeModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCommApiTypeType是一个通讯API类型类型
#//////////////////////////////////////////////////////////////////////
#客户端
defineDict["THOST_FTDC_CAPIT_Client"] = '1'
#服务端
defineDict["THOST_FTDC_CAPIT_Server"] = '2'
#交易系统的UserApi
defineDict["THOST_FTDC_CAPIT_UserApi"] = '3'
typedefDict["TThostFtdcCommApiTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcServiceIDType是一个服务编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcServiceIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcServiceLineNoType是一个服务线路编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcServiceLineNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcServiceNameType是一个服务名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcServiceNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcLinkStatusType是一个连接状态类型
#//////////////////////////////////////////////////////////////////////
#已经连接
defineDict["THOST_FTDC_LS_Connected"] = '1'
#没有连接
defineDict["THOST_FTDC_LS_Disconnected"] = '2'
typedefDict["TThostFtdcLinkStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCommApiPointerType是一个通讯API指针类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommApiPointerType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcPwdFlagType是一个密码核对标志类型
#//////////////////////////////////////////////////////////////////////
#不核对
defineDict["THOST_FTDC_BPWDF_NoCheck"] = '0'
#明文核对
defineDict["THOST_FTDC_BPWDF_BlankCheck"] = '1'
#密文核对
defineDict["THOST_FTDC_BPWDF_EncryptCheck"] = '2'
typedefDict["TThostFtdcPwdFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSecuAccTypeType是一个期货帐号类型类型
#//////////////////////////////////////////////////////////////////////
#资金帐号
defineDict["THOST_FTDC_SAT_AccountID"] = '1'
#资金卡号
defineDict["THOST_FTDC_SAT_CardID"] = '2'
#上海股东帐号
defineDict["THOST_FTDC_SAT_SHStockholderID"] = '3'
#深圳股东帐号
defineDict["THOST_FTDC_SAT_SZStockholderID"] = '4'
typedefDict["TThostFtdcSecuAccTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTransferStatusType是一个转账交易状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_TRFS_Normal"] = '0'
#被冲正
defineDict["THOST_FTDC_TRFS_Repealed"] = '1'
typedefDict["TThostFtdcTransferStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSponsorTypeType是一个发起方类型
#//////////////////////////////////////////////////////////////////////
#期商
defineDict["THOST_FTDC_SPTYPE_Broker"] = '0'
#银行
defineDict["THOST_FTDC_SPTYPE_Bank"] = '1'
typedefDict["TThostFtdcSponsorTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcReqRspTypeType是一个请求响应类别类型
#//////////////////////////////////////////////////////////////////////
#请求
defineDict["THOST_FTDC_REQRSP_Request"] = '0'
#响应
defineDict["THOST_FTDC_REQRSP_Response"] = '1'
typedefDict["TThostFtdcReqRspTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTUserEventTypeType是一个银期转帐用户事件类型类型
#//////////////////////////////////////////////////////////////////////
#签到
defineDict["THOST_FTDC_FBTUET_SignIn"] = '0'
#银行转期货
defineDict["THOST_FTDC_FBTUET_FromBankToFuture"] = '1'
#期货转银行
defineDict["THOST_FTDC_FBTUET_FromFutureToBank"] = '2'
#开户
defineDict["THOST_FTDC_FBTUET_OpenAccount"] = '3'
#销户
defineDict["THOST_FTDC_FBTUET_CancelAccount"] = '4'
#变更银行账户
defineDict["THOST_FTDC_FBTUET_ChangeAccount"] = '5'
#冲正银行转期货
defineDict["THOST_FTDC_FBTUET_RepealFromBankToFuture"] = '6'
#冲正期货转银行
defineDict["THOST_FTDC_FBTUET_RepealFromFutureToBank"] = '7'
#查询银行账户
defineDict["THOST_FTDC_FBTUET_QueryBankAccount"] = '8'
#查询期货账户
defineDict["THOST_FTDC_FBTUET_QueryFutureAccount"] = '9'
#签退
defineDict["THOST_FTDC_FBTUET_SignOut"] = 'A'
#密钥同步
defineDict["THOST_FTDC_FBTUET_SyncKey"] = 'B'
#其他
defineDict["THOST_FTDC_FBTUET_Other"] = 'Z'
typedefDict["TThostFtdcFBTUserEventTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankIDByBankType是一个银行自己的编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankIDByBankType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankOperNoType是一个银行操作员号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankOperNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankCustNoType是一个银行客户号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankCustNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDBOPSeqNoType是一个递增的序列号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDBOPSeqNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTableNameType是一个FBT表名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTableNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPKNameType是一个FBT表操作主键名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPKNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPKValueType是一个FBT表操作主键值类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPKValueType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDBOperationType是一个记录操作类型类型
#//////////////////////////////////////////////////////////////////////
#插入
defineDict["THOST_FTDC_DBOP_Insert"] = '0'
#更新
defineDict["THOST_FTDC_DBOP_Update"] = '1'
#删除
defineDict["THOST_FTDC_DBOP_Delete"] = '2'
typedefDict["TThostFtdcDBOperationType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSyncFlagType是一个同步标记类型
#//////////////////////////////////////////////////////////////////////
#已同步
defineDict["THOST_FTDC_SYNF_Yes"] = '0'
#未同步
defineDict["THOST_FTDC_SYNF_No"] = '1'
typedefDict["TThostFtdcSyncFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTargetIDType是一个同步目标编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTargetIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSyncTypeType是一个同步类型类型
#//////////////////////////////////////////////////////////////////////
#一次同步
defineDict["THOST_FTDC_SYNT_OneOffSync"] = '0'
#定时同步
defineDict["THOST_FTDC_SYNT_TimerSync"] = '1'
#定时完全同步
defineDict["THOST_FTDC_SYNT_TimerFullSync"] = '2'
typedefDict["TThostFtdcSyncTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBETimeType是一个各种换汇时间类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBETimeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBankNoType是一个换汇银行行号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBankNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBECertNoType是一个换汇凭证号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBECertNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcExDirectionType是一个换汇方向类型
#//////////////////////////////////////////////////////////////////////
#结汇
defineDict["THOST_FTDC_FBEDIR_Settlement"] = '0'
#售汇
defineDict["THOST_FTDC_FBEDIR_Sale"] = '1'
typedefDict["TThostFtdcExDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBankAccountType是一个换汇银行账户类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBankAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBankAccountNameType是一个换汇银行账户名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBankAccountNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEAmtType是一个各种换汇金额类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEAmtType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBusinessTypeType是一个换汇业务类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBusinessTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEPostScriptType是一个换汇附言类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEPostScriptType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBERemarkType是一个换汇备注类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBERemarkType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcExRateType是一个换汇汇率类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcExRateType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEResultFlagType是一个换汇成功标志类型
#//////////////////////////////////////////////////////////////////////
#成功
defineDict["THOST_FTDC_FBERES_Success"] = '0'
#账户余额不足
defineDict["THOST_FTDC_FBERES_InsufficientBalance"] = '1'
#交易结果未知
defineDict["THOST_FTDC_FBERES_UnknownTrading"] = '8'
#失败
defineDict["THOST_FTDC_FBERES_Fail"] = 'x'
typedefDict["TThostFtdcFBEResultFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBERtnMsgType是一个换汇返回信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBERtnMsgType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEExtendMsgType是一个换汇扩展信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEExtendMsgType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBusinessSerialType是一个换汇记账流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBusinessSerialType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBESystemSerialType是一个换汇流水号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBESystemSerialType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBETotalExCntType是一个换汇交易总笔数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBETotalExCntType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEExchStatusType是一个换汇交易状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_FBEES_Normal"] = '0'
#交易重发
defineDict["THOST_FTDC_FBEES_ReExchange"] = '1'
typedefDict["TThostFtdcFBEExchStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEFileFlagType是一个换汇文件标志类型
#//////////////////////////////////////////////////////////////////////
#数据包
defineDict["THOST_FTDC_FBEFG_DataPackage"] = '0'
#文件
defineDict["THOST_FTDC_FBEFG_File"] = '1'
typedefDict["TThostFtdcFBEFileFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEAlreadyTradeType是一个换汇已交易标志类型
#//////////////////////////////////////////////////////////////////////
#未交易
defineDict["THOST_FTDC_FBEAT_NotTrade"] = '0'
#已交易
defineDict["THOST_FTDC_FBEAT_Trade"] = '1'
typedefDict["TThostFtdcFBEAlreadyTradeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEOpenBankType是一个换汇账户开户行类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEOpenBankType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEUserEventTypeType是一个银期换汇用户事件类型类型
#//////////////////////////////////////////////////////////////////////
#签到
defineDict["THOST_FTDC_FBEUET_SignIn"] = '0'
#换汇
defineDict["THOST_FTDC_FBEUET_Exchange"] = '1'
#换汇重发
defineDict["THOST_FTDC_FBEUET_ReExchange"] = '2'
#银行账户查询
defineDict["THOST_FTDC_FBEUET_QueryBankAccount"] = '3'
#换汇明细查询
defineDict["THOST_FTDC_FBEUET_QueryExchDetial"] = '4'
#换汇汇总查询
defineDict["THOST_FTDC_FBEUET_QueryExchSummary"] = '5'
#换汇汇率查询
defineDict["THOST_FTDC_FBEUET_QueryExchRate"] = '6'
#对账文件通知
defineDict["THOST_FTDC_FBEUET_CheckBankAccount"] = '7'
#签退
defineDict["THOST_FTDC_FBEUET_SignOut"] = '8'
#其他
defineDict["THOST_FTDC_FBEUET_Other"] = 'Z'
typedefDict["TThostFtdcFBEUserEventTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEFileNameType是一个换汇相关文件名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEFileNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEBatchSerialType是一个换汇批次号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBEBatchSerialType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBEReqFlagType是一个换汇发送标志类型
#//////////////////////////////////////////////////////////////////////
#未处理
defineDict["THOST_FTDC_FBERF_UnProcessed"] = '0'
#等待发送
defineDict["THOST_FTDC_FBERF_WaitSend"] = '1'
#发送成功
defineDict["THOST_FTDC_FBERF_SendSuccess"] = '2'
#发送失败
defineDict["THOST_FTDC_FBERF_SendFailed"] = '3'
#等待重发
defineDict["THOST_FTDC_FBERF_WaitReSend"] = '4'
typedefDict["TThostFtdcFBEReqFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcNotifyClassType是一个风险通知类型类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_NC_NOERROR"] = '0'
#警示
defineDict["THOST_FTDC_NC_Warn"] = '1'
#追保
defineDict["THOST_FTDC_NC_Call"] = '2'
#强平
defineDict["THOST_FTDC_NC_Force"] = '3'
#穿仓
defineDict["THOST_FTDC_NC_CHUANCANG"] = '4'
#异常
defineDict["THOST_FTDC_NC_Exception"] = '5'
typedefDict["TThostFtdcNotifyClassType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcRiskNofityInfoType是一个客户风险通知消息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRiskNofityInfoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcForceCloseSceneIdType是一个强平场景编号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcForceCloseSceneIdType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcForceCloseTypeType是一个强平单类型类型
#//////////////////////////////////////////////////////////////////////
# Manual forced liquidation
defineDict["THOST_FTDC_FCT_Manual"] = '0'
# Assisted forced liquidation of a single investor
defineDict["THOST_FTDC_FCT_Single"] = '1'
# Assisted forced liquidation of a batch of investors
defineDict["THOST_FTDC_FCT_Group"] = '2'
typedefDict["TThostFtdcForceCloseTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcInstrumentIDsType: multiple product IDs joined with '+', e.g. "cu+zn"
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInstrumentIDsType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRiskNotifyMethodType: risk notification channel
#//////////////////////////////////////////////////////////////////////
# System notification
defineDict["THOST_FTDC_RNM_System"] = '0'
# SMS notification
defineDict["THOST_FTDC_RNM_SMS"] = '1'
# E-mail notification
defineDict["THOST_FTDC_RNM_EMail"] = '2'
# Manual notification
defineDict["THOST_FTDC_RNM_Manual"] = '3'
typedefDict["TThostFtdcRiskNotifyMethodType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRiskNotifyStatusType: risk notification status
#//////////////////////////////////////////////////////////////////////
# Not generated
defineDict["THOST_FTDC_RNS_NotGen"] = '0'
# Generated but not yet sent
defineDict["THOST_FTDC_RNS_Generated"] = '1'
# Send failed
defineDict["THOST_FTDC_RNS_SendError"] = '2'
# Sent but not yet received
defineDict["THOST_FTDC_RNS_SendOk"] = '3'
# Received but not yet confirmed
defineDict["THOST_FTDC_RNS_Received"] = '4'
# Confirmed
defineDict["THOST_FTDC_RNS_Confirmed"] = '5'
typedefDict["TThostFtdcRiskNotifyStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRiskUserEventType: risk-control user operation event
#//////////////////////////////////////////////////////////////////////
# Export data
defineDict["THOST_FTDC_RUE_ExportData"] = '0'
typedefDict["TThostFtdcRiskUserEventType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcParamIDType: parameter code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcParamIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcParamNameType: parameter name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcParamNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcParamValueType: parameter value
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcParamValueType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcConditionalOrderSortTypeType: conditional-order sort criterion
#//////////////////////////////////////////////////////////////////////
# By last price, ascending
defineDict["THOST_FTDC_COST_LastPriceAsc"] = '0'
# By last price, descending
defineDict["THOST_FTDC_COST_LastPriceDesc"] = '1'
# By ask price, ascending
defineDict["THOST_FTDC_COST_AskPriceAsc"] = '2'
# By ask price, descending
defineDict["THOST_FTDC_COST_AskPriceDesc"] = '3'
# By bid price, ascending
defineDict["THOST_FTDC_COST_BidPriceAsc"] = '4'
# By bid price, descending
defineDict["THOST_FTDC_COST_BidPriceDesc"] = '5'
typedefDict["TThostFtdcConditionalOrderSortTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSendTypeType: submission (reporting) status
#//////////////////////////////////////////////////////////////////////
# Not sent
defineDict["THOST_FTDC_UOAST_NoSend"] = '0'
# Sent
defineDict["THOST_FTDC_UOAST_Sended"] = '1'
# Generated
defineDict["THOST_FTDC_UOAST_Generated"] = '2'
# Submission failed
defineDict["THOST_FTDC_UOAST_SendFail"] = '3'
# Received successfully
defineDict["THOST_FTDC_UOAST_Success"] = '4'
# Receive failed
defineDict["THOST_FTDC_UOAST_Fail"] = '5'
# Submission cancelled
defineDict["THOST_FTDC_UOAST_Cancel"] = '6'
typedefDict["TThostFtdcSendTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcClientIDStatusType: trading-code status
#//////////////////////////////////////////////////////////////////////
# Not applied for
defineDict["THOST_FTDC_UOACS_NoApply"] = '1'
# Application submitted
defineDict["THOST_FTDC_UOACS_Submited"] = '2'
# Application sent
defineDict["THOST_FTDC_UOACS_Sended"] = '3'
# Completed
defineDict["THOST_FTDC_UOACS_Success"] = '4'
# Rejected
defineDict["THOST_FTDC_UOACS_Refuse"] = '5'
# Trading code cancelled
defineDict["THOST_FTDC_UOACS_Cancel"] = '6'
typedefDict["TThostFtdcClientIDStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcIndustryIDType: industry code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcIndustryIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcQuestionIDType: specific-information question number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcQuestionIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcQuestionContentType: specific-information question text
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcQuestionContentType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOptionIDType: answer-option number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOptionIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOptionContentType: answer-option text
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOptionContentType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcQuestionTypeType: question type
#//////////////////////////////////////////////////////////////////////
# Single choice
defineDict["THOST_FTDC_QT_Radio"] = '1'
# Multiple choice
defineDict["THOST_FTDC_QT_Option"] = '2'
# Fill in the blank
defineDict["THOST_FTDC_QT_Blank"] = '3'
typedefDict["TThostFtdcQuestionTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcProcessIDType: business flow number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProcessIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSeqNoType: sequence number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSeqNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcUOAProcessStatusType: process status
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOAProcessStatusType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcProcessTypeType: process function type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcProcessTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBusinessTypeType: business message type
#//////////////////////////////////////////////////////////////////////
# Request
defineDict["THOST_FTDC_BT_Request"] = '1'
# Response
defineDict["THOST_FTDC_BT_Response"] = '2'
# Notice
defineDict["THOST_FTDC_BT_Notice"] = '3'
typedefDict["TThostFtdcBusinessTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCfmmcReturnCodeType: monitoring-center (CFMMC) return code
#//////////////////////////////////////////////////////////////////////
# Success
defineDict["THOST_FTDC_CRC_Success"] = '0'
# This client already has a process in progress
defineDict["THOST_FTDC_CRC_Working"] = '1'
# Client-information check failed at the monitoring center
defineDict["THOST_FTDC_CRC_InfoFail"] = '2'
# Real-name (ID card) check failed at the monitoring center
defineDict["THOST_FTDC_CRC_IDCardFail"] = '3'
# Other error
defineDict["THOST_FTDC_CRC_OtherFail"] = '4'
typedefDict["TThostFtdcCfmmcReturnCodeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcExReturnCodeType: exchange return code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcExReturnCodeType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcClientTypeType: client type
#//////////////////////////////////////////////////////////////////////
# All
defineDict["THOST_FTDC_CfMMCCT_All"] = '0'
# Individual
defineDict["THOST_FTDC_CfMMCCT_Person"] = '1'
# Institution
defineDict["THOST_FTDC_CfMMCCT_Company"] = '2'
# Other
defineDict["THOST_FTDC_CfMMCCT_Other"] = '3'
# Special legal entity
defineDict["THOST_FTDC_CfMMCCT_SpecialOrgan"] = '4'
# Asset-management account
defineDict["THOST_FTDC_CfMMCCT_Asset"] = '5'
typedefDict["TThostFtdcClientTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcExchangeIDTypeType: exchange identifier
#//////////////////////////////////////////////////////////////////////
# Shanghai Futures Exchange
defineDict["THOST_FTDC_EIDT_SHFE"] = 'S'
# Zhengzhou Commodity Exchange
defineDict["THOST_FTDC_EIDT_CZCE"] = 'Z'
# Dalian Commodity Exchange
defineDict["THOST_FTDC_EIDT_DCE"] = 'D'
# China Financial Futures Exchange
defineDict["THOST_FTDC_EIDT_CFFEX"] = 'J'
# Shanghai International Energy Exchange Corporation
defineDict["THOST_FTDC_EIDT_INE"] = 'N'
# Gold
defineDict["THOST_FTDC_EIDT_GOLD"] = 'G'
# PAT
defineDict["THOST_FTDC_EIDT_PAT"] = 'P'
# LTS (no description in the upstream header)
defineDict["THOST_FTDC_EIDT_LTS"] = 'L'
typedefDict["TThostFtdcExchangeIDTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcExClientIDTypeType: trading-code purpose type
#//////////////////////////////////////////////////////////////////////
# Hedging
defineDict["THOST_FTDC_ECIDT_Hedge"] = '1'
# Arbitrage
defineDict["THOST_FTDC_ECIDT_Arbitrage"] = '2'
# Speculation
defineDict["THOST_FTDC_ECIDT_Speculation"] = '3'
typedefDict["TThostFtdcExClientIDTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcClientClassifyType: client classification code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClientClassifyType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUOAOrganTypeType: organization nature
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOAOrganTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUOACountryCodeType: country code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOACountryCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcAreaCodeType: telephone area code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAreaCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcFuturesIDType: code assigned to the client by the monitoring center
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFuturesIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCffmcDateType: date
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCffmcDateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCffmcTimeType: time
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCffmcTimeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcNocIDType: organization code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcNocIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUpdateFlagType: update status
#//////////////////////////////////////////////////////////////////////
# Not updated
defineDict["THOST_FTDC_UF_NoUpdate"] = '0'
# All information updated successfully
defineDict["THOST_FTDC_UF_Success"] = '1'
# Failed to update all information
defineDict["THOST_FTDC_UF_Fail"] = '2'
# Trading code updated successfully
defineDict["THOST_FTDC_UF_TCSuccess"] = '3'
# Failed to update trading code
defineDict["THOST_FTDC_UF_TCFail"] = '4'
# Discarded
defineDict["THOST_FTDC_UF_Cancel"] = '5'
typedefDict["TThostFtdcUpdateFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcApplyOperateIDType: application action
#//////////////////////////////////////////////////////////////////////
# Open an investor account
defineDict["THOST_FTDC_AOID_OpenInvestor"] = '1'
# Modify identity (ID card) information
defineDict["THOST_FTDC_AOID_ModifyIDCard"] = '2'
# Modify general (non-ID) information
defineDict["THOST_FTDC_AOID_ModifyNoIDCard"] = '3'
# Apply for a trading code
defineDict["THOST_FTDC_AOID_ApplyTradingCode"] = '4'
# Cancel a trading code
defineDict["THOST_FTDC_AOID_CancelTradingCode"] = '5'
# Close the investor account
defineDict["THOST_FTDC_AOID_CancelInvestor"] = '6'
# Put the account into dormancy
defineDict["THOST_FTDC_AOID_FreezeAccount"] = '8'
# Reactivate a dormant account
defineDict["THOST_FTDC_AOID_ActiveFreezeAccount"] = '9'
typedefDict["TThostFtdcApplyOperateIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcApplyStatusIDType: application status
#//////////////////////////////////////////////////////////////////////
# Incomplete
defineDict["THOST_FTDC_ASID_NoComplete"] = '1'
# Submitted
defineDict["THOST_FTDC_ASID_Submited"] = '2'
# Reviewed
defineDict["THOST_FTDC_ASID_Checked"] = '3'
# Rejected
defineDict["THOST_FTDC_ASID_Refused"] = '4'
# Deleted
defineDict["THOST_FTDC_ASID_Deleted"] = '5'
typedefDict["TThostFtdcApplyStatusIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSendMethodType: sending method
#//////////////////////////////////////////////////////////////////////
# File send
# NOTE(review): the descriptions appear swapped relative to the identifier
# names (ByAPI labelled "file", ByFile labelled "electronic"); this mirrors
# the upstream CTP header comments — confirm against the vendor docs.
defineDict["THOST_FTDC_UOASM_ByAPI"] = '1'
# Electronic send
defineDict["THOST_FTDC_UOASM_ByFile"] = '2'
typedefDict["TThostFtdcSendMethodType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcEventTypeType: business operation type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcEventTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcEventModeType: operation mode
#//////////////////////////////////////////////////////////////////////
# Add
defineDict["THOST_FTDC_EvM_ADD"] = '1'
# Update
defineDict["THOST_FTDC_EvM_UPDATE"] = '2'
# Delete
defineDict["THOST_FTDC_EvM_DELETE"] = '3'
# Review
defineDict["THOST_FTDC_EvM_CHECK"] = '4'
# Copy
defineDict["THOST_FTDC_EvM_COPY"] = '5'
# Cancel
defineDict["THOST_FTDC_EvM_CANCEL"] = '6'
# Reverse (write-off)
defineDict["THOST_FTDC_EvM_Reverse"] = '7'
typedefDict["TThostFtdcEventModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcUOAAutoSendType: unified-account-opening auto-send mode
#//////////////////////////////////////////////////////////////////////
# Auto send and auto receive
defineDict["THOST_FTDC_UOAA_ASR"] = '1'
# Auto send, no auto receive
defineDict["THOST_FTDC_UOAA_ASNR"] = '2'
# No auto send, auto receive
defineDict["THOST_FTDC_UOAA_NSAR"] = '3'
# Neither auto send nor auto receive
defineDict["THOST_FTDC_UOAA_NSR"] = '4'
typedefDict["TThostFtdcUOAAutoSendType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcQueryDepthType: query depth
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcQueryDepthType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcDataCenterIDType: data-center code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDataCenterIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcFlowIDType: flow (process) ID
#//////////////////////////////////////////////////////////////////////
# Investor-to-investor-group assignment
defineDict["THOST_FTDC_EvM_InvestorGroupFlow"] = '1'
# Investor commission-rate setting
defineDict["THOST_FTDC_EvM_InvestorRate"] = '2'
# Investor commission-rate template mapping
defineDict["THOST_FTDC_EvM_InvestorCommRateModel"] = '3'
typedefDict["TThostFtdcFlowIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCheckLevelType: review level
#//////////////////////////////////////////////////////////////////////
# Level-zero review
defineDict["THOST_FTDC_CL_Zero"] = '0'
# Level-one review
defineDict["THOST_FTDC_CL_One"] = '1'
# Level-two review
defineDict["THOST_FTDC_CL_Two"] = '2'
typedefDict["TThostFtdcCheckLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCheckNoType: operation count
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCheckNoType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcCheckStatusType: review status
#//////////////////////////////////////////////////////////////////////
# Not reviewed
defineDict["THOST_FTDC_CHS_Init"] = '0'
# Under review
defineDict["THOST_FTDC_CHS_Checking"] = '1'
# Reviewed
defineDict["THOST_FTDC_CHS_Checked"] = '2'
# Rejected
defineDict["THOST_FTDC_CHS_Refuse"] = '3'
# Voided
defineDict["THOST_FTDC_CHS_Cancel"] = '4'
typedefDict["TThostFtdcCheckStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcUsedStatusType: activation status
#//////////////////////////////////////////////////////////////////////
# Not in effect
defineDict["THOST_FTDC_CHU_Unused"] = '0'
# In effect
defineDict["THOST_FTDC_CHU_Used"] = '1'
# Activation failed
defineDict["THOST_FTDC_CHU_Fail"] = '2'
typedefDict["TThostFtdcUsedStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRateTemplateNameType: rate-template name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRateTemplateNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcPropertyStringType: investor-property field used for queries
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPropertyStringType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcBankAcountOriginType: bank-account origin
#//////////////////////////////////////////////////////////////////////
# Manually entered
defineDict["THOST_FTDC_BAO_ByAccProperty"] = '0'
# From bank-futures transfer
defineDict["THOST_FTDC_BAO_ByFBTransfer"] = '1'
typedefDict["TThostFtdcBankAcountOriginType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcMonthBillTradeSumType: monthly-statement trade aggregation mode
#//////////////////////////////////////////////////////////////////////
# Same day, same instrument
defineDict["THOST_FTDC_MBTS_ByInstrument"] = '0'
# Same day, same instrument, same price
defineDict["THOST_FTDC_MBTS_ByDayInsPrc"] = '1'
# Same instrument
defineDict["THOST_FTDC_MBTS_ByDayIns"] = '2'
typedefDict["TThostFtdcMonthBillTradeSumType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFBTTradeCodeEnumType: bank-futures transfer transaction codes
#//////////////////////////////////////////////////////////////////////
# Bank-initiated transfer from bank to broker
defineDict["THOST_FTDC_FTC_BankLaunchBankToBroker"] = '102001'
# Broker-initiated transfer from bank to broker
defineDict["THOST_FTDC_FTC_BrokerLaunchBankToBroker"] = '202001'
# Bank-initiated transfer from broker to bank
defineDict["THOST_FTDC_FTC_BankLaunchBrokerToBank"] = '102002'
# Broker-initiated transfer from broker to bank
defineDict["THOST_FTDC_FTC_BrokerLaunchBrokerToBank"] = '202002'
typedefDict["TThostFtdcFBTTradeCodeEnumType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRateTemplateIDType: rate-template code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRateTemplateIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcRiskRateType: risk ratio
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRiskRateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcTimestampType: timestamp
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTimestampType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcInvestorIDRuleNameType: investor-ID range rule name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvestorIDRuleNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcInvestorIDRuleExprType: investor-ID range rule expression
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvestorIDRuleExprType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcLastDriftType: last OTP drift value
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLastDriftType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcLastSuccessType: last OTP success value
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLastSuccessType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcAuthKeyType: token secret key
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAuthKeyType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSerialNumberType: serial number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSerialNumberType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOTPTypeType: one-time-password (OTP) token type
#//////////////////////////////////////////////////////////////////////
# No OTP token
defineDict["THOST_FTDC_OTP_NONE"] = '0'
# Time-based token (TOTP)
defineDict["THOST_FTDC_OTP_TOTP"] = '1'
typedefDict["TThostFtdcOTPTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcOTPVendorsIDType: OTP vendor code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOTPVendorsIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOTPVendorsNameType: OTP vendor name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOTPVendorsNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcOTPStatusType: OTP token status
#//////////////////////////////////////////////////////////////////////
# Unused
defineDict["THOST_FTDC_OTPS_Unused"] = '0'
# In use
defineDict["THOST_FTDC_OTPS_Used"] = '1'
# Decommissioned
defineDict["THOST_FTDC_OTPS_Disuse"] = '2'
typedefDict["TThostFtdcOTPStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcBrokerUserTypeType: brokerage user type
#//////////////////////////////////////////////////////////////////////
# Investor
defineDict["THOST_FTDC_BUT_Investor"] = '1'
# Operator
defineDict["THOST_FTDC_BUT_BrokerUser"] = '2'
typedefDict["TThostFtdcBrokerUserTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFutureTypeType: futures category
#//////////////////////////////////////////////////////////////////////
# Commodity futures
defineDict["THOST_FTDC_FUTT_Commodity"] = '1'
# Financial futures
defineDict["THOST_FTDC_FUTT_Financial"] = '2'
typedefDict["TThostFtdcFutureTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFundEventTypeType: fund-management operation type
#//////////////////////////////////////////////////////////////////////
# Transfer limit
defineDict["THOST_FTDC_FET_Restriction"] = '0'
# Same-day transfer limit
defineDict["THOST_FTDC_FET_TodayRestriction"] = '1'
# Broker fund flow
defineDict["THOST_FTDC_FET_Transfer"] = '2'
# Fund freeze
defineDict["THOST_FTDC_FET_Credit"] = '3'
# Investor withdrawable-fund ratio
defineDict["THOST_FTDC_FET_InvestorWithdrawAlm"] = '4'
# Transfer limit for a single bank account
defineDict["THOST_FTDC_FET_BankRestriction"] = '5'
# Bank-futures registered account
defineDict["THOST_FTDC_FET_Accountregister"] = '6'
# Exchange fund deposit/withdrawal
defineDict["THOST_FTDC_FET_ExchangeFundIO"] = '7'
# Investor fund deposit/withdrawal
defineDict["THOST_FTDC_FET_InvestorFundIO"] = '8'
typedefDict["TThostFtdcFundEventTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcAccountSourceTypeType: fund-account origin
#//////////////////////////////////////////////////////////////////////
# Bank-futures synchronization
defineDict["THOST_FTDC_AST_FBTransfer"] = '0'
# Manual entry
defineDict["THOST_FTDC_AST_ManualEntry"] = '1'
typedefDict["TThostFtdcAccountSourceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcCodeSourceTypeType: trading-code origin
#//////////////////////////////////////////////////////////////////////
# Unified account opening (standardized)
defineDict["THOST_FTDC_CST_UnifyAccount"] = '0'
# Manual entry (not standardized)
defineDict["THOST_FTDC_CST_ManualEntry"] = '1'
typedefDict["TThostFtdcCodeSourceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserRangeType: operator scope
#//////////////////////////////////////////////////////////////////////
# All operators
defineDict["THOST_FTDC_UR_All"] = '0'
# A single operator
defineDict["THOST_FTDC_UR_Single"] = '1'
typedefDict["TThostFtdcUserRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcTimeSpanType: time span
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTimeSpanType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcImportSequenceIDType: OTP-token import batch number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcImportSequenceIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcByGroupType: trade-statistics grouping by client
#//////////////////////////////////////////////////////////////////////
# Statistics per investor
defineDict["THOST_FTDC_BG_Investor"] = '2'
# Statistics per group
defineDict["THOST_FTDC_BG_Group"] = '1'
typedefDict["TThostFtdcByGroupType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeSumStatModeType: trade-statistics scope mode
#//////////////////////////////////////////////////////////////////////
# Statistics per instrument
defineDict["THOST_FTDC_TSSM_Instrument"] = '1'
# Statistics per product
defineDict["THOST_FTDC_TSSM_Product"] = '2'
# Statistics per exchange
defineDict["THOST_FTDC_TSSM_Exchange"] = '3'
typedefDict["TThostFtdcTradeSumStatModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcComTypeType: combination trade type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcComTypeType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserProductIDType: product identifier
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUserProductIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserProductNameType: product name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUserProductNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcUserProductMemoType: product description
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUserProductMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCCancelFlagType: add-or-change flag (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCCancelFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCDateType: date (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCDateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCInvestorNameType: client name (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCInvestorNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCOpenInvestorNameType: client name at account opening (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCOpenInvestorNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCInvestorIDType: client code (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCInvestorIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCIdentifiedCardNoType: ID document number (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCIdentifiedCardNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCClientIDType: trading code (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCClientIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCBankFlagType: bank identifier (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCBankFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCBankAccountType: bank account (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCBankAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCOpenNameType: account-opener name (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCOpenNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCMemoType: remarks (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCTimeType: time (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCTimeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCTradeIDType: trade flow number (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCTradeIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCExchangeInstIDType: instrument code (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCExchangeInstIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCMortgageNameType: collateral (pledged asset) name (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCMortgageNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCReasonType: reason (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCReasonType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcIsSettlementType: whether the member is a non-settlement member
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcIsSettlementType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCMoneyType: money amount (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCMoneyType"] = "float"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCPriceType: price (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCPriceType"] = "float"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCOptionsTypeType: option type (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCOptionsTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCStrikePriceType: strike price (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCStrikePriceType"] = "float"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCTargetProductIDType: underlying product (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCTargetProductIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCSRCTargetInstrIDType: underlying instrument (CSRC reporting)
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCTargetInstrIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCommModelNameType: commission-rate template name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommModelNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcCommModelMemoType: commission-rate template remarks
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCommModelMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcExprSetModeType: date-expression setup mode
#//////////////////////////////////////////////////////////////////////
# Relative to existing rules
defineDict["THOST_FTDC_ESM_Relative"] = '1'
# Typical (preset) setup
defineDict["THOST_FTDC_ESM_Typical"] = '2'
typedefDict["TThostFtdcExprSetModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcRateInvestorRangeType: investor scope for rates
#//////////////////////////////////////////////////////////////////////
# Company standard
defineDict["THOST_FTDC_RIR_All"] = '1'
# Template
defineDict["THOST_FTDC_RIR_Model"] = '2'
# A single investor
defineDict["THOST_FTDC_RIR_Single"] = '3'
typedefDict["TThostFtdcRateInvestorRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcAgentBrokerIDType: agent brokerage company code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAgentBrokerIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcDRIdentityIDType: trading-center code
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDRIdentityIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcDRIdentityNameType: trading-center name
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDRIdentityNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcDBLinkIDType: DBLink identifier
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDBLinkIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSyncDataStatusType: primary/secondary system data-sync status
#//////////////////////////////////////////////////////////////////////
# Not synchronized
defineDict["THOST_FTDC_SDS_Initialize"] = '0'
# Synchronizing
defineDict["THOST_FTDC_SDS_Settlementing"] = '1'
# Synchronized
defineDict["THOST_FTDC_SDS_Settlemented"] = '2'
typedefDict["TThostFtdcSyncDataStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcTradeSourceType: trade-record source
#//////////////////////////////////////////////////////////////////////
# From a normal exchange return
defineDict["THOST_FTDC_TSRC_NORMAL"] = '0'
# From a query
defineDict["THOST_FTDC_TSRC_QUERY"] = '1'
typedefDict["TThostFtdcTradeSourceType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFlexStatModeType: product/instrument statistics mode
#//////////////////////////////////////////////////////////////////////
# Statistics per product
defineDict["THOST_FTDC_FSM_Product"] = '1'
# Statistics per exchange
defineDict["THOST_FTDC_FSM_Exchange"] = '2'
# Statistics over everything
defineDict["THOST_FTDC_FSM_All"] = '3'
typedefDict["TThostFtdcFlexStatModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcByInvestorRangeType: investor-scope statistics mode
#//////////////////////////////////////////////////////////////////////
# Statistics by property
defineDict["THOST_FTDC_BIR_Property"] = '1'
# Statistics over all investors
defineDict["THOST_FTDC_BIR_All"] = '2'
typedefDict["TThostFtdcByInvestorRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcSRiskRateType: risk ratio
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSRiskRateType"] = "string"
#//////////////////////////////////////////////////////////////////////
# TFtdcSequenceNo12Type: sequence number
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSequenceNo12Type"] = "int"
#//////////////////////////////////////////////////////////////////////
# TFtdcPropertyInvestorRangeType: investor scope for properties
#//////////////////////////////////////////////////////////////////////
# All investors
defineDict["THOST_FTDC_PIR_All"] = '1'
# By investor property
defineDict["THOST_FTDC_PIR_Property"] = '2'
# A single investor
defineDict["THOST_FTDC_PIR_Single"] = '3'
typedefDict["TThostFtdcPropertyInvestorRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
# TFtdcFileStatusType: file status
#//////////////////////////////////////////////////////////////////////
# Not generated
defineDict["THOST_FTDC_FIS_NoCreate"] = '0'
# Generated
defineDict["THOST_FTDC_FIS_Created"] = '1'
# Generation failed
defineDict["THOST_FTDC_FIS_Failed"] = '2'
typedefDict["TThostFtdcFileStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFileGenStyleType是一个文件生成方式类型
#//////////////////////////////////////////////////////////////////////
#下发
defineDict["THOST_FTDC_FGS_FileTransmit"] = '0'
#生成
defineDict["THOST_FTDC_FGS_FileGen"] = '1'
typedefDict["TThostFtdcFileGenStyleType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSysOperModeType是一个系统日志操作方法类型
#//////////////////////////////////////////////////////////////////////
#增加
defineDict["THOST_FTDC_SoM_Add"] = '1'
#修改
defineDict["THOST_FTDC_SoM_Update"] = '2'
#删除
defineDict["THOST_FTDC_SoM_Delete"] = '3'
#复制
defineDict["THOST_FTDC_SoM_Copy"] = '4'
#激活
defineDict["THOST_FTDC_SoM_AcTive"] = '5'
#注销
defineDict["THOST_FTDC_SoM_CanCel"] = '6'
#重置
defineDict["THOST_FTDC_SoM_ReSet"] = '7'
typedefDict["TThostFtdcSysOperModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSysOperTypeType是一个系统日志操作类型类型
#//////////////////////////////////////////////////////////////////////
#修改操作员密码
defineDict["THOST_FTDC_SoT_UpdatePassword"] = '0'
#操作员组织架构关系
defineDict["THOST_FTDC_SoT_UserDepartment"] = '1'
#角色管理
defineDict["THOST_FTDC_SoT_RoleManager"] = '2'
#角色功能设置
defineDict["THOST_FTDC_SoT_RoleFunction"] = '3'
#基础参数设置
defineDict["THOST_FTDC_SoT_BaseParam"] = '4'
#设置操作员
defineDict["THOST_FTDC_SoT_SetUserID"] = '5'
#用户角色设置
defineDict["THOST_FTDC_SoT_SetUserRole"] = '6'
#用户IP限制
defineDict["THOST_FTDC_SoT_UserIpRestriction"] = '7'
#组织架构管理
defineDict["THOST_FTDC_SoT_DepartmentManager"] = '8'
#组织架构向查询分类复制
defineDict["THOST_FTDC_SoT_DepartmentCopy"] = '9'
#交易编码管理
defineDict["THOST_FTDC_SoT_Tradingcode"] = 'A'
#投资者状态维护
defineDict["THOST_FTDC_SoT_InvestorStatus"] = 'B'
#投资者权限管理
defineDict["THOST_FTDC_SoT_InvestorAuthority"] = 'C'
#属性设置
defineDict["THOST_FTDC_SoT_PropertySet"] = 'D'
#重置投资者密码
defineDict["THOST_FTDC_SoT_ReSetInvestorPasswd"] = 'E'
#投资者个性信息维护
defineDict["THOST_FTDC_SoT_InvestorPersonalityInfo"] = 'F'
typedefDict["TThostFtdcSysOperTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCDataQueyTypeType是一个上报数据查询类型类型
#//////////////////////////////////////////////////////////////////////
#查询当前交易日报送的数据
defineDict["THOST_FTDC_CSRCQ_Current"] = '0'
#查询历史报送的代理经纪公司的数据
defineDict["THOST_FTDC_CSRCQ_History"] = '1'
typedefDict["TThostFtdcCSRCDataQueyTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFreezeStatusType是一个休眠状态类型
#//////////////////////////////////////////////////////////////////////
#活跃
defineDict["THOST_FTDC_FRS_Normal"] = '1'
#休眠
defineDict["THOST_FTDC_FRS_Freeze"] = '0'
typedefDict["TThostFtdcFreezeStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStandardStatusType是一个规范状态类型
#//////////////////////////////////////////////////////////////////////
#已规范
defineDict["THOST_FTDC_STST_Standard"] = '0'
#未规范
defineDict["THOST_FTDC_STST_NonStandard"] = '1'
typedefDict["TThostFtdcStandardStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCFreezeStatusType是一个休眠状态类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCFreezeStatusType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcRightParamTypeType是一个配置类型类型
#//////////////////////////////////////////////////////////////////////
#休眠户
defineDict["THOST_FTDC_RPT_Freeze"] = '1'
#激活休眠户
defineDict["THOST_FTDC_RPT_FreezeActive"] = '2'
#开仓权限限制
defineDict["THOST_FTDC_RPT_OpenLimit"] = '3'
#解除开仓权限限制
defineDict["THOST_FTDC_RPT_RelieveOpenLimit"] = '4'
typedefDict["TThostFtdcRightParamTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcRightTemplateIDType是一个模板代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRightTemplateIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcRightTemplateNameType是一个模板名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRightTemplateNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDataStatusType是一个反洗钱审核表数据状态类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_AMLDS_Normal"] = '0'
#已删除
defineDict["THOST_FTDC_AMLDS_Deleted"] = '1'
typedefDict["TThostFtdcDataStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAMLCheckStatusType是一个审核状态类型
#//////////////////////////////////////////////////////////////////////
#未复核
defineDict["THOST_FTDC_AMLCHS_Init"] = '0'
#复核中
defineDict["THOST_FTDC_AMLCHS_Checking"] = '1'
#已复核
defineDict["THOST_FTDC_AMLCHS_Checked"] = '2'
#拒绝上报
defineDict["THOST_FTDC_AMLCHS_RefuseReport"] = '3'
typedefDict["TThostFtdcAMLCheckStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAmlDateTypeType是一个日期类型类型
#//////////////////////////////////////////////////////////////////////
#检查日期
defineDict["THOST_FTDC_AMLDT_DrawDay"] = '0'
#发生日期
defineDict["THOST_FTDC_AMLDT_TouchDay"] = '1'
typedefDict["TThostFtdcAmlDateTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAmlCheckLevelType是一个审核级别类型
#//////////////////////////////////////////////////////////////////////
#零级审核
defineDict["THOST_FTDC_AMLCL_CheckLevel0"] = '0'
#一级审核
defineDict["THOST_FTDC_AMLCL_CheckLevel1"] = '1'
#二级审核
defineDict["THOST_FTDC_AMLCL_CheckLevel2"] = '2'
#三级审核
defineDict["THOST_FTDC_AMLCL_CheckLevel3"] = '3'
typedefDict["TThostFtdcAmlCheckLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAmlCheckFlowType是一个反洗钱数据抽取审核流程类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAmlCheckFlowType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDataTypeType是一个数据类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDataTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcExportFileTypeType是一个导出文件类型类型
#//////////////////////////////////////////////////////////////////////
#CSV
defineDict["THOST_FTDC_EFT_CSV"] = '0'
#Excel
defineDict["THOST_FTDC_EFT_EXCEL"] = '1'
#DBF
defineDict["THOST_FTDC_EFT_DBF"] = '2'
typedefDict["TThostFtdcExportFileTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettleManagerTypeType是一个结算配置类型类型
#//////////////////////////////////////////////////////////////////////
#结算前准备
defineDict["THOST_FTDC_SMT_Before"] = '1'
#结算
defineDict["THOST_FTDC_SMT_Settlement"] = '2'
#结算后核对
defineDict["THOST_FTDC_SMT_After"] = '3'
#结算后处理
defineDict["THOST_FTDC_SMT_Settlemented"] = '4'
typedefDict["TThostFtdcSettleManagerTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettleManagerIDType是一个结算配置代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSettleManagerIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettleManagerNameType是一个结算配置名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSettleManagerNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettleManagerLevelType是一个结算配置等级类型
#//////////////////////////////////////////////////////////////////////
#必要
defineDict["THOST_FTDC_SML_Must"] = '1'
#警告
defineDict["THOST_FTDC_SML_Alarm"] = '2'
#提示
defineDict["THOST_FTDC_SML_Prompt"] = '3'
#不检查
defineDict["THOST_FTDC_SML_Ignore"] = '4'
typedefDict["TThostFtdcSettleManagerLevelType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettleManagerGroupType是一个模块分组类型
#//////////////////////////////////////////////////////////////////////
#交易所核对
defineDict["THOST_FTDC_SMG_Exhcange"] = '1'
#内部核对
defineDict["THOST_FTDC_SMG_ASP"] = '2'
#上报数据核对
defineDict["THOST_FTDC_SMG_CSRC"] = '3'
typedefDict["TThostFtdcSettleManagerGroupType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCheckResultMemoType是一个核对结果说明类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCheckResultMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFunctionUrlType是一个功能链接类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFunctionUrlType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAuthInfoType是一个客户端认证信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAuthInfoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAuthCodeType是一个客户端认证码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAuthCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcLimitUseTypeType是一个保值额度使用类型类型
#//////////////////////////////////////////////////////////////////////
#可重复使用
defineDict["THOST_FTDC_LUT_Repeatable"] = '1'
#不可重复使用
defineDict["THOST_FTDC_LUT_Unrepeatable"] = '2'
typedefDict["TThostFtdcLimitUseTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcDataResourceType是一个数据来源类型
#//////////////////////////////////////////////////////////////////////
#本系统
defineDict["THOST_FTDC_DAR_Settle"] = '1'
#交易所
defineDict["THOST_FTDC_DAR_Exchange"] = '2'
#报送数据
defineDict["THOST_FTDC_DAR_CSRC"] = '3'
typedefDict["TThostFtdcDataResourceType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcMarginTypeType是一个保证金类型类型
#//////////////////////////////////////////////////////////////////////
#交易所保证金率
defineDict["THOST_FTDC_MGT_ExchMarginRate"] = '0'
#投资者保证金率
defineDict["THOST_FTDC_MGT_InstrMarginRate"] = '1'
#投资者交易保证金率
defineDict["THOST_FTDC_MGT_InstrMarginRateTrade"] = '2'
typedefDict["TThostFtdcMarginTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcActiveTypeType是一个生效类型类型
#//////////////////////////////////////////////////////////////////////
#仅当日生效
defineDict["THOST_FTDC_ACT_Intraday"] = '1'
#长期生效
defineDict["THOST_FTDC_ACT_Long"] = '2'
typedefDict["TThostFtdcActiveTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcMarginRateTypeType是一个冲突保证金率类型类型
#//////////////////////////////////////////////////////////////////////
#交易所保证金率
defineDict["THOST_FTDC_MRT_Exchange"] = '1'
#投资者保证金率
defineDict["THOST_FTDC_MRT_Investor"] = '2'
#投资者交易保证金率
defineDict["THOST_FTDC_MRT_InvestorTrade"] = '3'
typedefDict["TThostFtdcMarginRateTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBackUpStatusType是一个备份数据状态类型
#//////////////////////////////////////////////////////////////////////
#未生成备份数据
defineDict["THOST_FTDC_BUS_UnBak"] = '0'
#备份数据生成中
defineDict["THOST_FTDC_BUS_BakUp"] = '1'
#已生成备份数据
defineDict["THOST_FTDC_BUS_BakUped"] = '2'
#备份数据失败
defineDict["THOST_FTDC_BUS_BakFail"] = '3'
typedefDict["TThostFtdcBackUpStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcInitSettlementType是一个结算初始化状态类型
#//////////////////////////////////////////////////////////////////////
#结算初始化未开始
defineDict["THOST_FTDC_SIS_UnInitialize"] = '0'
#结算初始化中
defineDict["THOST_FTDC_SIS_Initialize"] = '1'
#结算初始化完成
defineDict["THOST_FTDC_SIS_Initialized"] = '2'
typedefDict["TThostFtdcInitSettlementType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcReportStatusType是一个报表数据生成状态类型
#//////////////////////////////////////////////////////////////////////
#未生成报表数据
defineDict["THOST_FTDC_SRS_NoCreate"] = '0'
#报表数据生成中
defineDict["THOST_FTDC_SRS_Create"] = '1'
#已生成报表数据
defineDict["THOST_FTDC_SRS_Created"] = '2'
#生成报表数据失败
defineDict["THOST_FTDC_SRS_CreateFail"] = '3'
typedefDict["TThostFtdcReportStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSaveStatusType是一个数据归档状态类型
#//////////////////////////////////////////////////////////////////////
#归档未完成
defineDict["THOST_FTDC_SSS_UnSaveData"] = '0'
#归档完成
defineDict["THOST_FTDC_SSS_SaveDatad"] = '1'
typedefDict["TThostFtdcSaveStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSettArchiveStatusType是一个结算确认数据归档状态类型
#//////////////////////////////////////////////////////////////////////
#未归档数据
defineDict["THOST_FTDC_SAS_UnArchived"] = '0'
#数据归档中
defineDict["THOST_FTDC_SAS_Archiving"] = '1'
#已归档数据
defineDict["THOST_FTDC_SAS_Archived"] = '2'
#归档数据失败
defineDict["THOST_FTDC_SAS_ArchiveFail"] = '3'
typedefDict["TThostFtdcSettArchiveStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCTPTypeType是一个CTP交易系统类型类型
#//////////////////////////////////////////////////////////////////////
#未知类型
defineDict["THOST_FTDC_CTPT_Unkown"] = '0'
#主中心
defineDict["THOST_FTDC_CTPT_MainCenter"] = '1'
#备中心
defineDict["THOST_FTDC_CTPT_BackUp"] = '2'
typedefDict["TThostFtdcCTPTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcToolIDType是一个工具代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcToolIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcToolNameType是一个工具名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcToolNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCloseDealTypeType是一个平仓处理类型类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_CDT_Normal"] = '0'
#投机平仓优先
defineDict["THOST_FTDC_CDT_SpecFirst"] = '1'
typedefDict["TThostFtdcCloseDealTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcMortgageFundUseRangeType是一个货币质押资金可用范围类型
#//////////////////////////////////////////////////////////////////////
#不能使用
defineDict["THOST_FTDC_MFUR_None"] = '0'
#用于保证金
defineDict["THOST_FTDC_MFUR_Margin"] = '1'
#用于手续费、盈亏、保证金
defineDict["THOST_FTDC_MFUR_All"] = '2'
typedefDict["TThostFtdcMortgageFundUseRangeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencyUnitType是一个币种单位数量类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencyUnitType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcExchangeRateType是一个汇率类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcExchangeRateType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcSpecProductTypeType是一个特殊产品类型类型
#//////////////////////////////////////////////////////////////////////
#郑商所套保产品
defineDict["THOST_FTDC_SPT_CzceHedge"] = '1'
#货币质押产品
defineDict["THOST_FTDC_SPT_IneForeignCurrency"] = '2'
#大连短线开平仓产品
defineDict["THOST_FTDC_SPT_DceOpenClose"] = '3'
typedefDict["TThostFtdcSpecProductTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundMortgageTypeType是一个货币质押类型类型
#//////////////////////////////////////////////////////////////////////
#质押
defineDict["THOST_FTDC_FMT_Mortgage"] = '1'
#解质
defineDict["THOST_FTDC_FMT_Redemption"] = '2'
typedefDict["TThostFtdcFundMortgageTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAccountSettlementParamIDType是一个投资者账户结算参数代码类型
#//////////////////////////////////////////////////////////////////////
#基础保证金
defineDict["THOST_FTDC_ASPI_BaseMargin"] = '1'
#最低权益标准
defineDict["THOST_FTDC_ASPI_LowestInterest"] = '2'
typedefDict["TThostFtdcAccountSettlementParamIDType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencyNameType是一个币种名称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencyNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencySignType是一个币种符号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencySignType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundMortDirectionType是一个货币质押方向类型
#//////////////////////////////////////////////////////////////////////
#货币质入
defineDict["THOST_FTDC_FMD_In"] = '1'
#货币质出
defineDict["THOST_FTDC_FMD_Out"] = '2'
typedefDict["TThostFtdcFundMortDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBusinessClassType是一个换汇类别类型
#//////////////////////////////////////////////////////////////////////
#盈利
defineDict["THOST_FTDC_BT_Profit"] = '0'
#亏损
defineDict["THOST_FTDC_BT_Loss"] = '1'
#其他
defineDict["THOST_FTDC_BT_Other"] = 'Z'
typedefDict["TThostFtdcBusinessClassType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSwapSourceTypeType是一个换汇数据来源类型
#//////////////////////////////////////////////////////////////////////
#手工
defineDict["THOST_FTDC_SST_Manual"] = '0'
#自动生成
defineDict["THOST_FTDC_SST_Automatic"] = '1'
typedefDict["TThostFtdcSwapSourceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrExDirectionType是一个换汇类型类型
#//////////////////////////////////////////////////////////////////////
#结汇
defineDict["THOST_FTDC_CED_Settlement"] = '0'
#售汇
defineDict["THOST_FTDC_CED_Sale"] = '1'
typedefDict["TThostFtdcCurrExDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencySwapStatusType是一个申请状态类型
#//////////////////////////////////////////////////////////////////////
#已录入
defineDict["THOST_FTDC_CSS_Entry"] = '1'
#已审核
defineDict["THOST_FTDC_CSS_Approve"] = '2'
#已拒绝
defineDict["THOST_FTDC_CSS_Refuse"] = '3'
#已撤销
defineDict["THOST_FTDC_CSS_Revoke"] = '4'
#已发送
defineDict["THOST_FTDC_CSS_Send"] = '5'
#换汇成功
defineDict["THOST_FTDC_CSS_Success"] = '6'
#换汇失败
defineDict["THOST_FTDC_CSS_Failure"] = '7'
typedefDict["TThostFtdcCurrencySwapStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrExchCertNoType是一个凭证号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrExchCertNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBatchSerialNoType是一个批次号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBatchSerialNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcReqFlagType是一个换汇发送标志类型
#//////////////////////////////////////////////////////////////////////
#未发送
defineDict["THOST_FTDC_REQF_NoSend"] = '0'
#发送成功
defineDict["THOST_FTDC_REQF_SendSuccess"] = '1'
#发送失败
defineDict["THOST_FTDC_REQF_SendFailed"] = '2'
#等待重发
defineDict["THOST_FTDC_REQF_WaitReSend"] = '3'
typedefDict["TThostFtdcReqFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcResFlagType是一个换汇返回成功标志类型
#//////////////////////////////////////////////////////////////////////
#成功
defineDict["THOST_FTDC_RESF_Success"] = '0'
#账户余额不足
defineDict["THOST_FTDC_RESF_InsuffiCient"] = '1'
#交易结果未知
defineDict["THOST_FTDC_RESF_UnKnown"] = '8'
typedefDict["TThostFtdcResFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPageControlType是一个换汇页面控制类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPageControlType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcRecordCountType是一个记录数类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcRecordCountType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcCurrencySwapMemoType是一个换汇需确认信息类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCurrencySwapMemoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcExStatusType是一个修改状态类型
#//////////////////////////////////////////////////////////////////////
#修改前
defineDict["THOST_FTDC_EXS_Before"] = '0'
#修改后
defineDict["THOST_FTDC_EXS_After"] = '1'
typedefDict["TThostFtdcExStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcClientRegionType是一个开户客户地域类型
#//////////////////////////////////////////////////////////////////////
#国内客户
defineDict["THOST_FTDC_CR_Domestic"] = '1'
#港澳台客户
defineDict["THOST_FTDC_CR_GMT"] = '2'
#国外客户
defineDict["THOST_FTDC_CR_Foreign"] = '3'
typedefDict["TThostFtdcClientRegionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcWorkPlaceType是一个工作单位类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcWorkPlaceType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBusinessPeriodType是一个经营期限类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBusinessPeriodType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcWebSiteType是一个网址类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcWebSiteType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOAIdCardTypeType是一个统一开户证件类型类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOAIdCardTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcClientModeType是一个开户模式类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcClientModeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvestorFullNameType是一个投资者全称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvestorFullNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOABrokerIDType是一个境外中介机构ID类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOABrokerIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOAZipCodeType是一个邮政编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOAZipCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOAEMailType是一个电子邮箱类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOAEMailType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOldCityType是一个城市类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcOldCityType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCorporateIdentifiedCardNoType是一个法人代表证件号码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCorporateIdentifiedCardNoType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcHasBoardType是一个是否有董事会类型
#//////////////////////////////////////////////////////////////////////
#没有
defineDict["THOST_FTDC_HB_No"] = '0'
#有
defineDict["THOST_FTDC_HB_Yes"] = '1'
typedefDict["TThostFtdcHasBoardType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStartModeType是一个启动模式类型
#//////////////////////////////////////////////////////////////////////
#正常
defineDict["THOST_FTDC_SM_Normal"] = '1'
#应急
defineDict["THOST_FTDC_SM_Emerge"] = '2'
#恢复
defineDict["THOST_FTDC_SM_Restore"] = '3'
typedefDict["TThostFtdcStartModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcTemplateTypeType是一个模型类型类型
#//////////////////////////////////////////////////////////////////////
#全量
defineDict["THOST_FTDC_TPT_Full"] = '1'
#增量
defineDict["THOST_FTDC_TPT_Increment"] = '2'
#备份
defineDict["THOST_FTDC_TPT_BackUp"] = '3'
typedefDict["TThostFtdcTemplateTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcLoginModeType是一个登录模式类型
#//////////////////////////////////////////////////////////////////////
#交易
defineDict["THOST_FTDC_LM_Trade"] = '0'
#转账
defineDict["THOST_FTDC_LM_Transfer"] = '1'
typedefDict["TThostFtdcLoginModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcPromptTypeType是一个日历提示类型类型
#//////////////////////////////////////////////////////////////////////
#合约上下市
defineDict["THOST_FTDC_CPT_Instrument"] = '1'
#保证金分段生效
defineDict["THOST_FTDC_CPT_Margin"] = '2'
typedefDict["TThostFtdcPromptTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcLedgerManageIDType是一个分户管理资产编码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLedgerManageIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcInvestVarietyType是一个投资品种类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInvestVarietyType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcBankAccountTypeType是一个账户类别类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBankAccountTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcLedgerManageBankType是一个开户银行类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcLedgerManageBankType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCffexDepartmentNameType是一个开户营业部类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCffexDepartmentNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCffexDepartmentCodeType是一个营业部代码类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCffexDepartmentCodeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcHasTrusteeType是一个是否有托管人类型
#//////////////////////////////////////////////////////////////////////
#有
defineDict["THOST_FTDC_HT_Yes"] = '1'
#没有
defineDict["THOST_FTDC_HT_No"] = '0'
typedefDict["TThostFtdcHasTrusteeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCMemo1Type是一个说明类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCMemo1Type"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAssetmgrCFullNameType是一个代理资产管理业务的期货公司全称类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAssetmgrCFullNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAssetmgrApprovalNOType是一个资产管理业务批文号类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAssetmgrApprovalNOType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAssetmgrMgrNameType是一个资产管理业务负责人姓名类型
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAssetmgrMgrNameType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAmTypeType is an institution type
#//////////////////////////////////////////////////////////////////////
#Bank
defineDict["THOST_FTDC_AMT_Bank"] = '1'
#Securities company
defineDict["THOST_FTDC_AMT_Securities"] = '2'
#Fund management company
defineDict["THOST_FTDC_AMT_Fund"] = '3'
#Insurance company
defineDict["THOST_FTDC_AMT_Insurance"] = '4'
#Trust company
defineDict["THOST_FTDC_AMT_Trust"] = '5'
#Other
defineDict["THOST_FTDC_AMT_Other"] = '9'
typedefDict["TThostFtdcAmTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCAmTypeType is a CSRC institution type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCAmTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCFundIOTypeType is a fund deposit/withdrawal type
#//////////////////////////////////////////////////////////////////////
#Fund deposit/withdrawal
defineDict["THOST_FTDC_CFIOT_FundIO"] = '0'
#Bank-futures FX exchange
defineDict["THOST_FTDC_CFIOT_SwapCurrency"] = '1'
typedefDict["TThostFtdcCSRCFundIOTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCusAccountTypeType is a settlement account type
#//////////////////////////////////////////////////////////////////////
#Futures settlement account
defineDict["THOST_FTDC_CAT_Futures"] = '1'
#Asset-management settlement account under pure-futures asset management business
defineDict["THOST_FTDC_CAT_AssetmgrFuture"] = '2'
#Futures asset-management custody account under comprehensive asset management business
defineDict["THOST_FTDC_CAT_AssetmgrTrustee"] = '3'
#Fund transfer account under comprehensive asset management business
defineDict["THOST_FTDC_CAT_AssetmgrTransfer"] = '4'
typedefDict["TThostFtdcCusAccountTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCNationalType is a nationality type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCNationalType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCSRCSecAgentIDType is a secondary agent ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCSRCSecAgentIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcLanguageTypeType is a notification language type
#//////////////////////////////////////////////////////////////////////
#Chinese
defineDict["THOST_FTDC_LT_Chinese"] = '1'
#English
defineDict["THOST_FTDC_LT_English"] = '2'
typedefDict["TThostFtdcLanguageTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAmAccountType is an investment account type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcAmAccountType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcAssetmgrClientTypeType is an asset-management client type
#//////////////////////////////////////////////////////////////////////
#Individual asset-management client
defineDict["THOST_FTDC_AMCT_Person"] = '1'
#Institutional asset-management client
defineDict["THOST_FTDC_AMCT_Organ"] = '2'
#Special institutional asset-management client
defineDict["THOST_FTDC_AMCT_SpecialOrgan"] = '4'
typedefDict["TThostFtdcAssetmgrClientTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcAssetmgrTypeType is an investment type
#//////////////////////////////////////////////////////////////////////
#Futures
defineDict["THOST_FTDC_ASST_Futures"] = '3'
#Comprehensive
defineDict["THOST_FTDC_ASST_SpecialOrgan"] = '4'
typedefDict["TThostFtdcAssetmgrTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOMType is a unit-of-measure type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcUOMType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSHFEInstLifePhaseType is a SHFE instrument life-cycle phase type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSHFEInstLifePhaseType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcSHFEProductClassType is a product class type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSHFEProductClassType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcPriceDecimalType is a price-decimal-places type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcPriceDecimalType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcInTheMoneyFlagType is an at-the-money option flag type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcInTheMoneyFlagType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCheckInstrTypeType is an instrument comparison type
#//////////////////////////////////////////////////////////////////////
#Instrument does not exist at the exchange
defineDict["THOST_FTDC_CIT_HasExch"] = '0'
#Instrument does not exist in this system
defineDict["THOST_FTDC_CIT_HasATP"] = '1'
#Instrument comparison mismatch
defineDict["THOST_FTDC_CIT_HasDiff"] = '2'
typedefDict["TThostFtdcCheckInstrTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcDeliveryTypeType is a delivery type
#//////////////////////////////////////////////////////////////////////
#Manual delivery
defineDict["THOST_FTDC_DT_HandDeliv"] = '1'
#Delivery at expiry
defineDict["THOST_FTDC_DT_PersonDeliv"] = '2'
typedefDict["TThostFtdcDeliveryTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBigMoneyType is a large-amount money type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcBigMoneyType"] = "float"
#//////////////////////////////////////////////////////////////////////
#TFtdcMaxMarginSideAlgorithmType is a single-sided large-margin algorithm type
#//////////////////////////////////////////////////////////////////////
#Do not use the single-sided large-margin algorithm
defineDict["THOST_FTDC_MMSA_NO"] = '0'
#Use the single-sided large-margin algorithm
defineDict["THOST_FTDC_MMSA_YES"] = '1'
typedefDict["TThostFtdcMaxMarginSideAlgorithmType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcDAClientTypeType is an asset-management client type
#//////////////////////////////////////////////////////////////////////
#Natural person
defineDict["THOST_FTDC_CACT_Person"] = '0'
#Legal entity
defineDict["THOST_FTDC_CACT_Company"] = '1'
#Other
defineDict["THOST_FTDC_CACT_Other"] = '2'
typedefDict["TThostFtdcDAClientTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombinInstrIDType is an arbitrage (combination) instrument ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombinInstrIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombinSettlePriceType is a per-leg settlement price type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcCombinSettlePriceType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcDCEPriorityType is a priority type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcDCEPriorityType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcTradeGroupIDType is a trade group number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcTradeGroupIDType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcIsCheckPrepaType is a "check available funds on account opening" flag type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcIsCheckPrepaType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcUOAAssetmgrTypeType is an investment type
#//////////////////////////////////////////////////////////////////////
#Futures
defineDict["THOST_FTDC_UOAAT_Futures"] = '1'
#Comprehensive
defineDict["THOST_FTDC_UOAAT_SpecialOrgan"] = '2'
typedefDict["TThostFtdcUOAAssetmgrTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcDirectionEnType is a buy/sell direction type
#//////////////////////////////////////////////////////////////////////
#Buy
defineDict["THOST_FTDC_DEN_Buy"] = '0'
#Sell
defineDict["THOST_FTDC_DEN_Sell"] = '1'
typedefDict["TThostFtdcDirectionEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOffsetFlagEnType is an open/close flag type
#//////////////////////////////////////////////////////////////////////
#Position Opening
defineDict["THOST_FTDC_OFEN_Open"] = '0'
#Position Close
defineDict["THOST_FTDC_OFEN_Close"] = '1'
#Forced Liquidation
defineDict["THOST_FTDC_OFEN_ForceClose"] = '2'
#Close Today
defineDict["THOST_FTDC_OFEN_CloseToday"] = '3'
#Close Prev.
defineDict["THOST_FTDC_OFEN_CloseYesterday"] = '4'
#Forced Reduction
defineDict["THOST_FTDC_OFEN_ForceOff"] = '5'
#Local Forced Liquidation
defineDict["THOST_FTDC_OFEN_LocalForceClose"] = '6'
typedefDict["TThostFtdcOffsetFlagEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcHedgeFlagEnType is a speculation/hedge flag type
#//////////////////////////////////////////////////////////////////////
#Speculation
defineDict["THOST_FTDC_HFEN_Speculation"] = '1'
#Arbitrage
defineDict["THOST_FTDC_HFEN_Arbitrage"] = '2'
#Hedge
defineDict["THOST_FTDC_HFEN_Hedge"] = '3'
typedefDict["TThostFtdcHedgeFlagEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundIOTypeEnType is a fund deposit/withdrawal type
#//////////////////////////////////////////////////////////////////////
#Deposit/Withdrawal
defineDict["THOST_FTDC_FIOTEN_FundIO"] = '1'
#Bank-Futures Transfer
defineDict["THOST_FTDC_FIOTEN_Transfer"] = '2'
#Bank-Futures FX Exchange
defineDict["THOST_FTDC_FIOTEN_SwapCurrency"] = '3'
typedefDict["TThostFtdcFundIOTypeEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundTypeEnType is a fund type
#//////////////////////////////////////////////////////////////////////
#Bank Deposit
defineDict["THOST_FTDC_FTEN_Deposite"] = '1'
#Payment/Fee
defineDict["THOST_FTDC_FTEN_ItemFund"] = '2'
#Brokerage Adj
defineDict["THOST_FTDC_FTEN_Company"] = '3'
#Internal Transfer
defineDict["THOST_FTDC_FTEN_InnerTransfer"] = '4'
typedefDict["TThostFtdcFundTypeEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundDirectionEnType is a fund direction type
#//////////////////////////////////////////////////////////////////////
#Deposit
defineDict["THOST_FTDC_FDEN_In"] = '1'
#Withdrawal
defineDict["THOST_FTDC_FDEN_Out"] = '2'
typedefDict["TThostFtdcFundDirectionEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFundMortDirectionEnType is a currency pledge direction type
#//////////////////////////////////////////////////////////////////////
#Pledge
defineDict["THOST_FTDC_FMDEN_In"] = '1'
#Redemption
defineDict["THOST_FTDC_FMDEN_Out"] = '2'
typedefDict["TThostFtdcFundMortDirectionEnType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSwapBusinessTypeType is an FX swap business category type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcSwapBusinessTypeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcOptionsTypeType is an option type
#//////////////////////////////////////////////////////////////////////
#Call option
defineDict["THOST_FTDC_CP_CallOptions"] = '1'
#Put option
defineDict["THOST_FTDC_CP_PutOptions"] = '2'
typedefDict["TThostFtdcOptionsTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStrikeModeType is an exercise mode type
#//////////////////////////////////////////////////////////////////////
#European
defineDict["THOST_FTDC_STM_Continental"] = '0'
#American
defineDict["THOST_FTDC_STM_American"] = '1'
#Bermudan
defineDict["THOST_FTDC_STM_Bermuda"] = '2'
typedefDict["TThostFtdcStrikeModeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStrikeTypeType is an exercise type
#//////////////////////////////////////////////////////////////////////
#Self hedge
defineDict["THOST_FTDC_STT_Hedge"] = '0'
#Matched execution
defineDict["THOST_FTDC_STT_Match"] = '1'
typedefDict["TThostFtdcStrikeTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcApplyTypeType is a CFFEX option abandon-exercise application type
#//////////////////////////////////////////////////////////////////////
#Quantity not exercised
defineDict["THOST_FTDC_APPT_NotStrikeNum"] = '4'
typedefDict["TThostFtdcApplyTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcGiveUpDataSourceType is an abandon-exercise application data source type
#//////////////////////////////////////////////////////////////////////
#Generated by the system
defineDict["THOST_FTDC_GUDS_Gen"] = '0'
#Added manually
defineDict["THOST_FTDC_GUDS_Hand"] = '1'
typedefDict["TThostFtdcGiveUpDataSourceType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcExecOrderSysIDType is an exercise declaration system ID type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcExecOrderSysIDType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcExecResultType is an execution result type
#//////////////////////////////////////////////////////////////////////
#Not executed
defineDict["THOST_FTDC_OER_NoExec"] = 'n'
#Cancelled
defineDict["THOST_FTDC_OER_Canceled"] = 'c'
#Executed successfully
defineDict["THOST_FTDC_OER_OK"] = '0'
#Insufficient option position
defineDict["THOST_FTDC_OER_NoPosition"] = '1'
#Insufficient funds
defineDict["THOST_FTDC_OER_NoDeposit"] = '2'
#Participant does not exist
defineDict["THOST_FTDC_OER_NoParticipant"] = '3'
#Client does not exist
defineDict["THOST_FTDC_OER_NoClient"] = '4'
#Instrument does not exist
defineDict["THOST_FTDC_OER_NoInstrument"] = '6'
#No exercise permission
defineDict["THOST_FTDC_OER_NoRight"] = '7'
#Invalid volume
defineDict["THOST_FTDC_OER_InvalidVolume"] = '8'
#Not enough historical trades
defineDict["THOST_FTDC_OER_NoEnoughHistoryTrade"] = '9'
#Unknown
defineDict["THOST_FTDC_OER_Unknown"] = 'a'
typedefDict["TThostFtdcExecResultType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcStrikeSequenceType is an exercise sequence number type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcStrikeSequenceType"] = "int"
#//////////////////////////////////////////////////////////////////////
#TFtdcStrikeTimeType is an exercise time type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcStrikeTimeType"] = "string"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombinationTypeType is a combination type
#//////////////////////////////////////////////////////////////////////
#Futures combination
defineDict["THOST_FTDC_COMBT_Future"] = '0'
#Bull vertical spread (BUL)
defineDict["THOST_FTDC_COMBT_BUL"] = '1'
#Bear vertical spread (BER)
defineDict["THOST_FTDC_COMBT_BER"] = '2'
#Straddle
defineDict["THOST_FTDC_COMBT_STD"] = '3'
#Strangle
defineDict["THOST_FTDC_COMBT_STG"] = '4'
#Covered combination
defineDict["THOST_FTDC_COMBT_PRT"] = '5'
#Calendar (time) spread combination
defineDict["THOST_FTDC_COMBT_CLD"] = '6'
typedefDict["TThostFtdcCombinationTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcOptionRoyaltyPriceTypeType is an option premium price type
#//////////////////////////////////////////////////////////////////////
#Previous settlement price
defineDict["THOST_FTDC_ORPT_PreSettlementPrice"] = '1'
#Opening price
defineDict["THOST_FTDC_ORPT_OpenPrice"] = '4'
typedefDict["TThostFtdcOptionRoyaltyPriceTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcBalanceAlgorithmType is a balance (equity) algorithm type
#//////////////////////////////////////////////////////////////////////
#Do not include option market-value P&L
defineDict["THOST_FTDC_BLAG_Default"] = '1'
#Include option market-value loss
defineDict["THOST_FTDC_BLAG_IncludeOptValLost"] = '2'
typedefDict["TThostFtdcBalanceAlgorithmType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcActionTypeType is an action type
#//////////////////////////////////////////////////////////////////////
#Exercise
defineDict["THOST_FTDC_ACTP_Exec"] = '1'
#Abandon
defineDict["THOST_FTDC_ACTP_Abandon"] = '2'
typedefDict["TThostFtdcActionTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcForQuoteStatusType is a quote-request status type
#//////////////////////////////////////////////////////////////////////
#Submitted
defineDict["THOST_FTDC_FQST_Submitted"] = 'a'
#Accepted
defineDict["THOST_FTDC_FQST_Accepted"] = 'b'
#Rejected
defineDict["THOST_FTDC_FQST_Rejected"] = 'c'
typedefDict["TThostFtdcForQuoteStatusType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcValueMethodType is a value method type
#//////////////////////////////////////////////////////////////////////
#By absolute value
defineDict["THOST_FTDC_VM_Absolute"] = '0'
#By ratio
defineDict["THOST_FTDC_VM_Ratio"] = '1'
typedefDict["TThostFtdcValueMethodType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcExecOrderPositionFlagType is a flag: keep the futures position after option exercise
#//////////////////////////////////////////////////////////////////////
#Reserve the position
defineDict["THOST_FTDC_EOPF_Reserve"] = '0'
#Do not reserve the position
defineDict["THOST_FTDC_EOPF_UnReserve"] = '1'
typedefDict["TThostFtdcExecOrderPositionFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcExecOrderCloseFlagType is a flag: auto-close positions generated by option exercise
#//////////////////////////////////////////////////////////////////////
#Close automatically
defineDict["THOST_FTDC_EOCF_AutoClose"] = '0'
#Exempt from automatic closing
defineDict["THOST_FTDC_EOCF_NotToClose"] = '1'
typedefDict["TThostFtdcExecOrderCloseFlagType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcProductTypeType is a product type
#//////////////////////////////////////////////////////////////////////
#Futures
defineDict["THOST_FTDC_PTE_Futures"] = '1'
#Options
defineDict["THOST_FTDC_PTE_Options"] = '2'
typedefDict["TThostFtdcProductTypeType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCZCEUploadFileNameType is a CZCE settlement file name type
#(the Chinese characters below are literal parts of the file-name patterns)
#//////////////////////////////////////////////////////////////////////
#^\d{8}_zz_\d{4}
defineDict["THOST_FTDC_CUFN_CUFN_O"] = 'O'
#^\d{8}成交表 (trade table)
defineDict["THOST_FTDC_CUFN_CUFN_T"] = 'T'
#^\d{8}单腿持仓表new (single-leg position table, new)
defineDict["THOST_FTDC_CUFN_CUFN_P"] = 'P'
#^\d{8}非平仓了结表 (non-close settlement table)
defineDict["THOST_FTDC_CUFN_CUFN_N"] = 'N'
#^\d{8}平仓表 (close table)
defineDict["THOST_FTDC_CUFN_CUFN_L"] = 'L'
#^\d{8}资金表 (funds table)
defineDict["THOST_FTDC_CUFN_CUFN_F"] = 'F'
#^\d{8}组合持仓表 (combination position table)
defineDict["THOST_FTDC_CUFN_CUFN_C"] = 'C'
#^\d{8}保证金参数表 (margin parameter table)
defineDict["THOST_FTDC_CUFN_CUFN_M"] = 'M'
typedefDict["TThostFtdcCZCEUploadFileNameType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcDCEUploadFileNameType is a DCE settlement file name type
#(the Chinese characters below are literal parts of the file-name patterns)
#//////////////////////////////////////////////////////////////////////
#^\d{8}_dl_\d{3}
defineDict["THOST_FTDC_DUFN_DUFN_O"] = 'O'
#^\d{8}_成交表 (trade table)
defineDict["THOST_FTDC_DUFN_DUFN_T"] = 'T'
#^\d{8}_持仓表 (position table)
defineDict["THOST_FTDC_DUFN_DUFN_P"] = 'P'
#^\d{8}_资金结算表 (fund settlement table)
defineDict["THOST_FTDC_DUFN_DUFN_F"] = 'F'
#^\d{8}_优惠组合持仓明细表 (preferential combination position detail table)
defineDict["THOST_FTDC_DUFN_DUFN_C"] = 'C'
#^\d{8}_持仓明细表 (position detail table)
defineDict["THOST_FTDC_DUFN_DUFN_D"] = 'D'
#^\d{8}_保证金参数表 (margin parameter table)
defineDict["THOST_FTDC_DUFN_DUFN_M"] = 'M'
#^\d{8}_期权执行表 (option exercise table)
defineDict["THOST_FTDC_DUFN_DUFN_S"] = 'S'
typedefDict["TThostFtdcDCEUploadFileNameType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcSHFEUploadFileNameType is a SHFE settlement file name type
#//////////////////////////////////////////////////////////////////////
#^\d{4}_\d{8}_\d{8}_DailyFundChg
defineDict["THOST_FTDC_SUFN_SUFN_O"] = 'O'
#^\d{4}_\d{8}_\d{8}_Trade
defineDict["THOST_FTDC_SUFN_SUFN_T"] = 'T'
#^\d{4}_\d{8}_\d{8}_SettlementDetail
defineDict["THOST_FTDC_SUFN_SUFN_P"] = 'P'
#^\d{4}_\d{8}_\d{8}_Capital
defineDict["THOST_FTDC_SUFN_SUFN_F"] = 'F'
typedefDict["TThostFtdcSHFEUploadFileNameType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCFFEXUploadFileNameType is a CFFEX settlement file name type
#//////////////////////////////////////////////////////////////////////
#^\d{4}_SG\d{1}_\d{8}_\d{1}_Trade
defineDict["THOST_FTDC_CFUFN_SUFN_T"] = 'T'
#^\d{4}_SG\d{1}_\d{8}_\d{1}_SettlementDetail
defineDict["THOST_FTDC_CFUFN_SUFN_P"] = 'P'
#^\d{4}_SG\d{1}_\d{8}_\d{1}_Capital
defineDict["THOST_FTDC_CFUFN_SUFN_F"] = 'F'
#^\d{4}_SG\d{1}_\d{8}_\d{1}_OptionExec
defineDict["THOST_FTDC_CFUFN_SUFN_S"] = 'S'
typedefDict["TThostFtdcCFFEXUploadFileNameType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcCombDirectionType is a combination order direction type
#//////////////////////////////////////////////////////////////////////
#Apply to combine
defineDict["THOST_FTDC_CMDR_Comb"] = '0'
#Apply to split
defineDict["THOST_FTDC_CMDR_UnComb"] = '1'
typedefDict["TThostFtdcCombDirectionType"] = "char"
#//////////////////////////////////////////////////////////////////////
#TFtdcFBTBankIDType is a bank identifier type
#//////////////////////////////////////////////////////////////////////
typedefDict["TThostFtdcFBTBankIDType"] = "string"
#
| mit |
Adai0808/wechat-python-sdk | wechat_sdk/lib.py | 20 | 2115 | # -*- coding: utf-8 -*-
from xml.dom import minidom, Node
def disable_urllib3_warning():
    """
    Temporary workaround for urllib3's InsecurePlatformWarning, see
    https://urllib3.readthedocs.org/en/latest/security.html#insecurerequestwarning

    Silently does nothing when requests/urllib3 is unavailable or the call
    fails for any reason.
    """
    try:
        from requests.packages import urllib3
        urllib3.disable_warnings()
    except Exception:
        pass
class XMLStore(object):
    """
    XML storage class; conveniently converts an XML string to a dict.
    """
    def __init__(self, xmlstring):
        # Keep the raw string and the parsed DOM document side by side.
        self._raw = xmlstring
        self._doc = minidom.parseString(xmlstring)
    @property
    def xml2dict(self):
        """
        Convert the XML document (its root element) to a dict.
        """
        # Strip whitespace-only text nodes first so they don't show up as data.
        self._remove_whitespace_nodes(self._doc.childNodes[0])
        return self._element2dict(self._doc.childNodes[0])
    def _element2dict(self, parent):
        """
        Convert a single element node to a dict.

        Child elements that themselves contain elements become lists of
        dicts keyed by tag name; text/CDATA-only children become strings.
        """
        d = {}
        for node in parent.childNodes:
            if not isinstance(node, minidom.Element):
                continue
            if not node.hasChildNodes():
                continue
            if node.childNodes[0].nodeType == minidom.Node.ELEMENT_NODE:
                # Nested element: accumulate repeated tags in a list.
                try:
                    d[node.tagName]
                except KeyError:
                    d[node.tagName] = []
                d[node.tagName].append(self._element2dict(node))
            elif len(node.childNodes) == 1 and node.childNodes[0].nodeType in [minidom.Node.CDATA_SECTION_NODE, minidom.Node.TEXT_NODE]:
                # Leaf node: store its text/CDATA payload directly.
                d[node.tagName] = node.childNodes[0].data
        return d
    def _remove_whitespace_nodes(self, node, unlink=True):
        """
        Recursively remove useless whitespace-only text nodes.
        """
        remove_list = []
        for child in node.childNodes:
            if child.nodeType == Node.TEXT_NODE and not child.data.strip():
                remove_list.append(child)
            elif child.hasChildNodes():
                self._remove_whitespace_nodes(child, unlink)
        # Detach (and optionally unlink) after the iteration so the child
        # list isn't mutated while being traversed.
        for node in remove_list:
            node.parentNode.removeChild(node)
            if unlink:
                node.unlink()
| bsd-2-clause |
th3infinity/libavg | src/test/testcase.py | 2 | 12629 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# libavg - Media Playback Engine.
# Copyright (C) 2003-2014 Ulrich von Zadow
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Current versions can be found at www.libavg.de
#
import unittest
import sys
import os
import math
from libavg import avg, player, logger
def almostEqual(a, b, epsilon):
    """Return True if *a* and *b* are equal to within *epsilon*.

    Accepts scalars or (possibly nested) sequences; sequences are compared
    element-wise. Sequences of different lengths compare unequal (the
    previous implementation raised IndexError when *b* was shorter than *a*).
    """
    try:
        # Sequence case: lengths must match, then compare element-wise.
        if len(a) != len(b):
            return False
        return all(almostEqual(x, y, epsilon) for x, y in zip(a, b))
    except TypeError:
        # Scalar case (len() unsupported): numeric comparison.
        return math.fabs(a-b) < epsilon
def flatten(l):
    """Return a copy of *l* (same container type) with all nested lists and
    tuples expanded depth-first. Empty nested containers disappear."""
    ltype = type(l)

    def _walk(seq):
        # Depth-first traversal yielding only non-list/tuple leaves.
        for item in seq:
            if isinstance(item, (list, tuple)):
                for leaf in _walk(item):
                    yield leaf
            else:
                yield item

    leaves = list(_walk(l))
    return ltype(leaves)
class SuppressOutput(object):
    """Context manager that temporarily discards all stdout/stderr output."""
    class Blackhole(object):
        # Minimal file-like object whose write() drops everything.
        def write(self, *args):
            pass
    def __init__(self):
        # The streams to restore are captured at construction time.
        self.__savedStreams = [sys.stdout, sys.stderr]
    def __enter__(self):
        sink = self.Blackhole()
        sys.stdout = sink
        sys.stderr = sink
    def __exit__(self, *args):
        sys.stdout, sys.stderr = self.__savedStreams
class MouseEmulator(object):
    """Tracks emulated mouse button state and forwards fake mouse events to
    the libavg test helper."""
    def __init__(self):
        # Pressed-state of buttons 1..3.
        self.btnStates = [False, False, False]
    def sendMouseEvent(self, type_, x, y, btn=1):
        helper = player.getTestHelper()
        index = btn - 1
        # Keep the per-button pressed state in sync with the event stream.
        if type_ == avg.Event.CURSOR_DOWN:
            self.btnStates[index] = True
        elif type_ == avg.Event.CURSOR_UP:
            self.btnStates[index] = False
        helper.fakeMouseEvent(type_, self.btnStates[0], self.btnStates[1],
                self.btnStates[2], x, y, btn)
class AVGTestCase(unittest.TestCase):
    """
    Base class for libavg test cases: runs the player through a flat list of
    per-frame actions and compares screenshots against baseline images.
    """
    # Directory that result/diff images of failed comparisons are written to.
    imageResultDirectory = "resultimages"
    # Directory the known-good reference screenshots are read from.
    baselineImageResultDirectory = "baseline"
    def __init__(self, testFuncName):
        unittest.TestCase.__init__(self, testFuncName)
        player.enableGLErrorChecks(True)
        # Memory/deprecation log categories only report at error severity.
        logger.configureCategory("MEMORY", logger.Severity.ERR)
        logger.configureCategory("DEPREC", logger.Severity.ERR)
        self.__testFuncName = testFuncName
        self.__logger = avg.logger
        self.__skipped = False
        self.__warnOnImageDiff = False
        self.__mouseEmulator = None
    def __setupPlayer(self):
        # Single sample per pixel keeps screenshots reproducible.
        player.setMultiSampleSamples(1)
    @staticmethod
    def setImageResultDirectory(name):
        """Override the directory failed-comparison images are written to."""
        AVGTestCase.imageResultDirectory = name
    @staticmethod
    def getImageResultDir():
        """Return the directory failed-comparison images are written to."""
        return AVGTestCase.imageResultDirectory
    @staticmethod
    def cleanResultDir():
        """Empty the result image directory, creating it if it is missing."""
        resultDir = AVGTestCase.getImageResultDir()
        try:
            files = os.listdir(resultDir)
            for resultFile in files:
                os.remove(resultDir+"/"+resultFile)
        except OSError:
            # Directory missing: try to create it; failures (e.g. read-only
            # location) are ignored and surface later when images are saved.
            try:
                os.mkdir(resultDir)
            except OSError:
                pass
    def start(self, warnOnImageDiff, actions):
        """Run the player, executing one entry of *actions* per frame.

        *actions* may contain nested lists/tuples; it is flattened first.
        Blocks until all actions have run and the player has stopped.
        """
        self.__setupPlayer()
        self.__dumpTestFrames = (os.getenv("AVG_DUMP_TEST_FRAMES") is not None)
        self.__delaying = False
        self.__warnOnImageDiff = warnOnImageDiff
        self.assert_(player.isPlaying() == 0)
        self.actions = flatten(actions)
        self.curFrame = 0
        player.subscribe(player.ON_FRAME, self.__nextAction)
        player.setFramerate(10000)
        player.assumePixelsPerMM(1)
        player.play()
        # player.play() blocks; playback must have finished again here.
        self.assert_(player.isPlaying() == 0)
    def delay(self, time):
        """Suspend action execution for *time* ms of player time."""
        def timeout():
            self.__delaying = False
        self.__delaying = True
        player.setTimeout(time, timeout)
    def compareImage(self, fileName):
        """Take a screenshot and compare it to baseline image *fileName*."""
        bmp = player.screenshot()
        self.compareBitmapToFile(bmp, fileName)
    def compareBitmapToFile(self, bmp, fileName):
        """Compare *bmp* with the named baseline image.

        On a noticeable difference the screenshot, the baseline and the diff
        are saved for inspection; on a large difference the test fails (or
        only warns when warnOnImageDiff was set in start()).
        """
        try:
            baselineBmp = avg.Bitmap(AVGTestCase.baselineImageResultDirectory + "/"
                    + fileName + ".png")
        except RuntimeError:
            # No baseline available: save the screenshot as a candidate
            # baseline and re-raise.
            bmp.save(AVGTestCase.getImageResultDir()+"/"+fileName+".png")
            self.__logger.warning("Could not load image "+fileName+".png")
            raise
        diffBmp = bmp.subtract(baselineBmp)
        average = diffBmp.getAvg()
        stdDev = diffBmp.getStdDev()
        if (average > 0.1 or stdDev > 0.5):
            # Noticeable difference: dump all three images for inspection.
            if self._isCurrentDirWriteable():
                bmp.save(AVGTestCase.getImageResultDir() + "/" + fileName + ".png")
                baselineBmp.save(AVGTestCase.getImageResultDir() + "/" + fileName
                        + "_baseline.png")
                diffBmp.save(AVGTestCase.getImageResultDir() + "/" + fileName
                        + "_diff.png")
            # Large difference: fail, or warn if configured to be lenient.
            if (average > 2 or stdDev > 6):
                msg = ("  "+fileName+
                        ": Difference image has avg=%(avg).2f, std dev=%(stddev).2f"%
                        {'avg':average, 'stddev':stdDev})
                if self.__warnOnImageDiff:
                    sys.stderr.write("\n"+msg+"\n")
                else:
                    self.fail(msg)
    def areSimilarBmps(self, bmp1, bmp2, maxAvg, maxStdDev):
        """Return True if the bitmaps differ at most by the given thresholds."""
        diffBmp = bmp1.subtract(bmp2)
        avg = diffBmp.getAvg()
        stdDev = diffBmp.getStdDev()
        return avg <= maxAvg and stdDev <= maxStdDev
    def assertAlmostEqual(self, a, b, epsilon=0.00001):
        """Assert scalar or element-wise near-equality (see almostEqual())."""
        if not(almostEqual(a, b, epsilon)):
            msg = "almostEqual: " + str(a) + " != " + str(b)
            self.fail(msg)
    def loadEmptyScene(self, resolution=(160,120)):
        """Create an empty main canvas and return its root node."""
        player.createMainCanvas(size=resolution)
        root = player.getRootNode()
        root.mediadir = "media"
        return root
    def initDefaultImageScene(self):
        """Create the standard three-image scene used by several tests."""
        root = self.loadEmptyScene()
        avg.ImageNode(id="testtiles", pos=(0,30), size=(65,65), href="rgb24-65x65.png",
                maxtilewidth=16, maxtileheight=32, parent=root)
        avg.ImageNode(id="test", pos=(64,30), href="rgb24-65x65.png", pivot=(0,0),
                angle=0.274, parent=root)
        avg.ImageNode(id="test1", pos=(129,30), href="rgb24-65x65.png", parent=root)
    def fakeClick(self, x, y):
        """Send a left-button down+up pair at (x, y)."""
        helper = player.getTestHelper()
        helper.fakeMouseEvent(avg.Event.CURSOR_DOWN, True, False, False, x, y, 1)
        helper.fakeMouseEvent(avg.Event.CURSOR_UP, False, False, False, x, y, 1)
    def skip(self, message):
        """Mark this test as skipped, remembering the reason."""
        self.__skipReason = str(message)
        sys.stderr.write("skipping: " + str(message) + " ... ")
        self.__skipped = True
    def skipped(self):
        # True after skip() was called.
        return self.__skipped
    def skipReason(self):
        # Reason given to skip(); only meaningful when skipped() is True.
        return self.__skipReason
    def skipIfMinimalShader(self):
        """Skip the test and stop the player if full shaders are unsupported."""
        if not(player.areFullShadersSupported()):
            self.skip("Not supported if ShaderUsage == MINIMAL")
            player.stop()
            return
    def _sendMouseEvent(self, eventType, x, y, btn=1):
        # The emulator is created lazily so button state persists per test.
        if not self.__mouseEmulator:
            self.__mouseEmulator = MouseEmulator()
        self.__mouseEmulator.sendMouseEvent(eventType, x, y, btn)
    def _sendTouchEvent(self, eventID, eventType, x, y):
        helper = player.getTestHelper()
        helper.fakeTouchEvent(eventID, eventType, avg.Event.TOUCH, avg.Point2D(x,y))
    def _sendTouchEvents(self, eventData):
        # eventData: iterable of (eventID, eventType, x, y) tuples.
        helper = player.getTestHelper()
        for (eventID, eventType, x, y) in eventData:
            helper.fakeTouchEvent(eventID, eventType, avg.Event.TOUCH, avg.Point2D(x,y))
    def _sendTangibleEvent(self, eventID, markerid, eventType, x, y):
        helper = player.getTestHelper()
        helper.fakeTangibleEvent(eventID, markerid, eventType, avg.Point2D(x,y),
                avg.Point2D(0,0), 0)
    def _genMouseEventFrames(self, eventType, x, y, expectedEvents):
        # Two per-frame actions: send the event, then verify the messages.
        # NOTE(review): relies on self.messageTester being set by the
        # concrete test case - confirm before use.
        return [
                lambda: self._sendMouseEvent(eventType, x, y),
                lambda: self.messageTester.assertState(expectedEvents),
                ]
    def _genTouchEventFrames(self, eventData, expectedEvents):
        # Touch-event counterpart of _genMouseEventFrames().
        return [
                lambda: self._sendTouchEvents(eventData),
                lambda: self.messageTester.assertState(expectedEvents),
                ]
    def _isCurrentDirWriteable(self):
        return bool(os.access('.', os.W_OK))
    def __nextAction(self):
        # ON_FRAME callback: run the next action unless delay() is active;
        # stop the player once every action has been executed.
        if not(self.__delaying):
            if self.__dumpTestFrames:
                self.__logger.log("Frame "+str(self.curFrame))
            if len(self.actions) == self.curFrame:
                player.stop()
            else:
                action = self.actions[self.curFrame]
                # None entries act as no-op filler frames.
                if action != None:
                    action()
            self.curFrame += 1
def createAVGTestSuite(availableTests, AVGTestCaseClass, testSubset):
    """Build a unittest.TestSuite of AVGTestCaseClass instances.

    If *testSubset* is non-empty, only those tests run; an unknown name is
    reported on stderr and terminates the process with exit status 1.
    Otherwise every name in *availableTests* is used.
    """
    if testSubset:
        testNames = []
        for name in testSubset:
            if name not in availableTests:
                sys.stderr.write("No test named %s\n" % name)
                sys.exit(1)
            testNames.append(name)
    else:
        testNames = availableTests
    suite = unittest.TestSuite()
    suite.addTests(AVGTestCaseClass(name) for name in testNames)
    return suite
class NodeHandlerTester(object):
    """Subscribes to all cursor/tangible/wheel messages of a node and records
    which ones fired, so tests can assert on the received set."""
    def __init__(self, testCase, node):
        self.__testCase = testCase
        self.reset()
        self.__node = node
        self.__subscriberIDs = set()
        self.setHandlers()
        self.__messagesReceived = set()
    def assertState(self, expectedMessages):
        """Assert exactly *expectedMessages* were received, then reset."""
        self.__testCase.assert_(self.isState(expectedMessages))
        self.reset()
    def isState(self, expectedMessages):
        """Return True if exactly *expectedMessages* were received; on a
        mismatch, print expected vs. actual to stderr."""
        expected = set(expectedMessages)
        if expected == self.__messagesReceived:
            return True
        sys.stderr.write("\nState expected: " + str(expected) + "\n")
        sys.stderr.write("Actual state: " + str(self.__messagesReceived) + "\n")
        return False
    def reset(self):
        # Forget everything received so far.
        self.__messagesReceived = set()
    def setHandlers(self):
        """Subscribe to every supported message ID of the node."""
        messageIDs = [
            avg.Node.CURSOR_DOWN, avg.Node.CURSOR_UP, avg.Node.CURSOR_OVER,
            avg.Node.CURSOR_OUT, avg.Node.CURSOR_MOTION,
            avg.Node.TANGIBLE_DOWN, avg.Node.TANGIBLE_UP, avg.Node.TANGIBLE_OVER,
            avg.Node.TANGIBLE_OUT, avg.Node.TANGIBLE_MOTION,
            avg.Node.MOUSE_WHEEL
        ]
        for messageID in messageIDs:
            # Default argument binds the current messageID into the lambda.
            subscriberID = self.__node.subscribe(messageID,
                    lambda event, messageID=messageID:
                            self.setMessageReceived(messageID, event))
            self.__subscriberIDs.add(subscriberID)
    def clearHandlers(self):
        """Unsubscribe every handler registered by setHandlers()."""
        for subscriberID in self.__subscriberIDs:
            self.__node.unsubscribe(subscriberID)
        self.__subscriberIDs = set()
    def setMessageReceived(self, messageID, event):
        self.__messagesReceived.add(messageID)
class MessageTester(object):
    """Subscribes to the given message IDs on a publisher and records which
    of them fired, so tests can assert on the received set."""
    def __init__(self, publisher, messageIDs, testCase=None):
        for messageID in messageIDs:
            # Default argument binds the current messageID into the lambda.
            publisher.subscribe(messageID,
                    lambda messageID=messageID: self.setMessageReceived(messageID))
        self.__messagesReceived = set()
        self.__testCase = testCase
    def assertState(self, expectedMessages):
        """Assert exactly *expectedMessages* were received, then reset."""
        self.__testCase.assert_(self.isState(expectedMessages))
        self.reset()
    def isState(self, expectedMessages):
        """Return True if exactly *expectedMessages* were received; on a
        mismatch, print expected vs. actual to stderr."""
        expected = set(expectedMessages)
        if expected == self.__messagesReceived:
            return True
        sys.stderr.write("\nState expected: " + str(expected) + "\n")
        sys.stderr.write("Actual state: " + str(self.__messagesReceived) + "\n")
        return False
    def setMessageReceived(self, messageID):
        self.__messagesReceived.add(messageID)
    def reset(self):
        # Forget everything received so far.
        self.__messagesReceived = set()
| lgpl-2.1 |
bzennn/blog_flask | python/lib/python2.7/site-packages/pip/utils/ui.py | 490 | 11597 | from __future__ import absolute_import
from __future__ import division
import itertools
import sys
from signal import signal, SIGINT, default_int_handler
import time
import contextlib
import logging
from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import (WritelnMixin,
HIDE_CURSOR, SHOW_CURSOR)
from pip._vendor.progress.spinner import Spinner
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
def _select_progress_class(preferred, fallback):
encoding = getattr(preferred.file, "encoding", None)
# If we don't know what encoding this file is in, then we'll just assume
# that it doesn't support unicode and use the ASCII bar.
if not encoding:
return fallback
# Collect all of the possible characters we want to use with the preferred
# bar.
characters = [
getattr(preferred, "empty_fill", six.text_type()),
getattr(preferred, "fill", six.text_type()),
]
characters += list(getattr(preferred, "phases", []))
# Try to decode the characters we're using for the bar using the encoding
# of the given file, if this works then we'll assume that we can use the
# fancier bar and if not we'll fall back to the plaintext bar.
try:
six.text_type().join(characters).encode(encoding)
except UnicodeEncodeError:
return fallback
else:
return preferred
# The fanciest bar the attached stream's encoding can render; falls back to
# the plain ASCII Bar when unicode output is not safe.
_BaseBar = _select_progress_class(IncrementalBar, Bar)
class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        # Install our handler and remember whatever was there before so
        # finish() can put it back.
        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)
class DownloadProgressMixin(object):
    """Mixin adding download-oriented fields (size, speed, ETA) to a
    progress display, plus an ``iter`` helper that advances the display
    as an iterable is consumed."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message to line up with pip's current log indentation.
        indent = " " * (get_indentation() + 2)
        self.message = indent + self.message

    @property
    def downloaded(self):
        """Human-readable count of units received so far."""
        return format_size(self.index)

    @property
    def download_speed(self):
        """Human-readable transfer rate, or "..." while it is unknown."""
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        """Formatted time remaining, or the empty string when unknown."""
        return "eta %s" % self.eta_td if self.eta else ""

    def iter(self, it, n=1):
        """Yield the items of *it*, advancing the display by *n* per item
        and finishing the display when the iterable is exhausted."""
        for item in it:
            yield item
            self.next(n)
        self.finish()
class WindowsMixin(object):
    """Work around Windows console limitations for progress rendering."""

    def __init__(self, *args, **kwargs):
        # The Windows console cannot interpret the hide/show cursor ANSI
        # sequences, even with colorama, so force hide_cursor off there.
        # This must happen *before* the super() call: the base progress
        # class emits the "hide cursor" code from its own __init__, and a
        # hide with no matching show would leave the cursor invisible.
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # On Windows with colorama available, wrap the output stream so
        # ANSI sequences are translated into console API calls.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress package calls isatty() and flush() on the
            # stream; AnsiToWin32 lacks both, so delegate them to the
            # wrapped file object.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            self.file.flush = lambda: self.file.wrapped.flush()
class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
                          DownloadProgressMixin, _BaseBar):
    # Percent-complete bar used when the total download size is known.
    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Indeterminate spinner used when the total download size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        """Return the next spinner glyph, cycling forever."""
        if not hasattr(self, "_phaser"):
            # Created lazily on first use, one cycle per instance.
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        """Render one frame: "<message> <phase> <suffix>"."""
        rendered_message = self.message % self
        parts = [rendered_message]
        if rendered_message:
            parts.append(" ")
        parts.append(self.next_phase())
        rendered_suffix = self.suffix % self
        if rendered_suffix:
            parts.append(" ")
        parts.append(rendered_suffix)
        self.writeln(''.join(parts))
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################
@contextlib.contextmanager
def hidden_cursor(file):
    """Context manager that hides the terminal cursor on *file* while the
    body runs, restoring it afterwards even if the body raises."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            # Always re-show the cursor, whatever happened in the body.
            file.write(SHOW_CURSOR)
class RateLimiter(object):
    """Simple wall-clock throttle: ``ready()`` reports whether at least the
    configured interval has elapsed since the last ``reset()``."""

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch 0 means "never updated", so the first ready() check passes.
        self._last_update = 0

    def ready(self):
        """Return True when enough time has passed since the last reset."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        """Record "now" as the moment of the most recent update."""
        self._last_update = time.time()
class InteractiveSpinner(object):
    """Spinner for real terminals: redraws a short status in place,
    throttled to roughly 8 frames per second."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the most recently written status, so it can be erased.
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase the previous status: backspace over it, blank it out with
        # spaces, then backspace again so the cursor is back at the start.
        rubout = "\b" * self._width
        self._file.write(rubout + " " * self._width + rubout)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        """Advance the animation by one glyph (rate-limited)."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        """Replace the spinner with *final_status* and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    """Keep-alive "spinner" for dumb terminals and non-tty installs.

    Instead of animating, it emits an occasional log line (every 60 seconds
    by default) so systems like Travis-CI, which treat prolonged silence as
    a frozen task, still see output.
    """

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        """Emit a keep-alive line if enough time has passed."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        """Log the final status exactly once."""
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True
@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner suited to the current stdout, and finish it with a
    status ("done", "canceled" or "error") chosen by how the body exits."""
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
| gpl-3.0 |
luca1897/Pyfacebook | pyfacebook-lite/__init__.py | 1 | 6953 | #
# __init__.py
# Copyright (C) orion 2011 <luca.barbara@live.com>
#
# PyFbGraph is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyFbGraph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import urllib2,urllib
import simplejson
# Base URL for all Facebook Graph API calls.
GRAPH_URL = "https://graph.facebook.com/"
# App-level ("generic") token shared by all PyFacebook instances by default.
generic_access_token = ""
# Token obtained via the OAuth client-credentials flow, cached lazily.
client_access_token = None
class PyFacebook(object):
def __init__(self,id=None,specific_access_token=None):
self.set_specific_access_token(specific_access_token)
self.id=id
def Request(self,get={},post=None,delete=False,**args):
if post:
post = urllib.urlencode(post)
get["access_token"] = self.specific_access_token if self.specific_access_token else get_generic_access_token()
get = urllib.urlencode(get)
url = "%s%s/%s?%s" % (GRAPH_URL, self.id, args["method"], get)
print url
req = urllib2.Request(url)
if delete:
req.get_method = lambda: 'DELETE'
elif "body" in args:
req.add_header('Content-type', 'multipart/form-data; boundary=%s'% ('PyFbGraph'))
req.add_header('Content-length', len(args["body"]))
req.add_data(args["body"])
try:
res = urllib2.urlopen(req, post).read()
except urllib2.HTTPError, e:
res = e.read()
try:
data = simplejson.loads(res)
if type(data) == dict:
if data.get("error"):
raise FbError(data["error"]["message"])
else:
return data
else:
return res
except ValueError:
return res
def set_specific_access_token(self,access_token):
self.specific_access_token = access_token
def get_specific_access_token(self):
return str(self.specific_access_token)
def set_id(self,id):
self.id = id
def init(**args):
    """Module-level setup: obtain and store the generic access token.

    Either pass ``access_token`` directly, or pass ``app_id`` (plus an
    optional ``permission`` list and a browser ``method`` class) to run
    the interactive OAuth dialog.
    """
    if "access_token" in args and args["access_token"]:
        set_generic_access_token(args["access_token"])
    else:
        if "app_id" in args:
            app_id = args["app_id"]
        else:
            raise FbError("failed: no appID found")
        if "permission" in args:
            permission = args["permission"]
        else:
            permission = ""
        if "method" in args:
            method = args["method"]
        else:
            # Default to the Qt/PySide based OAuth browser.
            method = qt_method
        set_generic_access_token(args={"app_id": app_id, "permission": permission, "method": method})
def set_client_access_token(arg):
    """Cache *arg* as the module-wide client-credentials access token."""
    global client_access_token
    client_access_token = arg
def get_client_access_token(client_id=None, client_secret=None):
    """Return the cached client access token, fetching it on first use via
    the OAuth client-credentials flow."""
    global client_access_token
    if not client_access_token:
        url = "%soauth/access_token?client_id=%s&client_secret=%s&grant_type=client_credentials" % (GRAPH_URL, client_id, client_secret)
        request = urllib2.Request(url)
        # NOTE(review): bare 'Request' is not defined at module scope
        # (PyFacebook.Request is a method), so this lazy-fetch path raises
        # NameError as written -- it probably meant urllib2.urlopen(request).
        # Confirm before relying on it.
        set_client_access_token(Request().request(request).split("=")[1])
    return client_access_token
def set_generic_access_token(access_token="", args=None):
    """Store the generic token directly, or obtain one interactively.

    When *access_token* is falsy, *args* must supply ``method`` (a browser
    class such as qt_method), ``app_id`` and ``permission``; the browser is
    run and the resulting token is stored.
    """
    global generic_access_token
    if access_token:
        generic_access_token = access_token
        return
    browser = args["method"]
    generic_access_token = browser(args["app_id"], args["permission"])
def get_generic_access_token():
    """Return the module-wide generic access token as a plain string."""
    token = generic_access_token
    return str(token)
""" METHOD """
class UploadFiles():
    """Mixin providing multipart photo upload via the inherited Request()."""

    def upload_files(self, args):
        """Upload each file described in *args* (a list of dicts with
        ``filename`` and ``param`` keys) and return the API responses.

        The multipart body is assembled by hand with the fixed boundary
        'PyFbGraph' and CRLF line endings, as required by the Graph API.
        """
        import mimetypes
        ret = []
        for a in args:
            body = []
            mimetype = mimetypes.guess_type(a["filename"])[0] or 'application/octet-stream'
            filehandle = open(a["filename"])
            # File description: one form-data part per extra parameter.
            for p in a["param"]:
                body.append('--PyFbGraph')
                body.append('Content-Disposition: form-data; name="%s"' % (p))
                body.append('')
                body.append(a["param"][p])
            # File content part.
            body.append('--PyFbGraph')
            body.append('Content-Disposition: file; name="source"; filename="%s"' % (a["filename"]))
            body.append('Content-Type: %s' % mimetype)
            body.append('')
            body.append(filehandle.read())
            body.append('')
            filehandle.close()
            # Closing boundary, then a trailing CRLF.
            body.append('--PyFbGraph--')
            body.append('')
            ret.append(self.Request(method="photos", body='\r\n'.join(body)))
        return ret
class Connection():
    """Mixin exposing Graph API "connections" (edges) of an object."""

    # Whitelist of connection names; an empty list means "allow anything".
    CONN = []

    def connection(self, connection=[], **args):
        # NOTE(review): the mutable default ([]) is never mutated here, so it
        # is harmless, but `connection in self.CONN or not self.CONN` permits
        # any name whenever CONN is empty -- confirm that is intended.
        if connection in self.CONN or not self.CONN:
            return self.Request(method=connection, get=args)
        else:
            return "Unknown connection: %s " % (connection)
class Object():
    """Mixin exposing an object's own fields via the Graph API."""

    # Whitelist of valid field names for this object type.
    FIELDS = []

    def object(self, **args):
        get = []
        if "fields" in args:
            # NOTE(review): 'diff_list' is not defined anywhere in this
            # module, so requesting specific fields raises NameError here.
            d = diff_list(self.FIELDS, args["fields"])
            if d:
                return "Unknown(s) field(s): %s" % (",".join(d))
            # NOTE(review): 'get' is built here but never passed on -- the
            # request below sends the raw 'args' dict instead; this looks
            # like a bug (probably meant get=get[0] or similar). Confirm
            # before changing.
            get.append({"fields": ",".join(args["fields"])})
        return self.Request(method="", get=args)
""" ACCESS TOKEN """
class common_method():
    """Shared OAuth-dialog logic for the browser-based token methods.

    Subclasses must implement create_browser(), get_url() and destroy().
    Instantiating runs the OAuth dialog; str(instance) yields the token.
    """

    access_token = ""
    REDIRECT_URL = "&redirect_uri=https://www.facebook.com/connect/login_success.html"

    def __init__(self, app_id, permission):
        # BUG FIX: 'scope' was only assigned when a permission list was
        # given, so an empty permission (the default used by init()) crashed
        # with NameError on the OAUTH_URL line below.
        if permission:
            scope = "&scope=" + ",".join(permission)
        else:
            scope = ""
        self.OAUTH_URL = "https://www.facebook.com/dialog/oauth?client_id=" + app_id + self.REDIRECT_URL + scope + "&response_type=token"
        self.create_browser()

    def load_finished(self):
        """Browser callback: harvest the token once the success URL is hit."""
        url = self.get_url()
        if url.find("https://www.facebook.com/connect/login_success.html#access_token=") >= 0:
            # The token sits between the first '=' and '&expires_in'.
            self.access_token = url[url.find("=") + 1:url.find("&expires_in")]
            self.destroy()

    def __str__(self):
        return self.access_token
class webkitgtk_method(common_method):
    """OAuth browser backed by pywebkitgtk (GTK + WebKit)."""

    def create_browser(self):
        try:
            import gtk
            import webkit
        except ImportError:
            # BUG FIX: used to 'raise self.raise_error(...)', an attribute
            # that does not exist on this class; raise the package's
            # exception type directly instead.
            raise FbError("You need pywebkitgtk\nDownload: http://code.google.com/p/pywebkitgtk/")
        # BUG FIX: keep a reference to the locally imported gtk module --
        # 'self.gtk' was used below (and in destroy()) without ever being set.
        self.gtk = gtk
        self.web = webkit.WebView()
        win = gtk.Window(gtk.WINDOW_TOPLEVEL)
        win.add(self.web)
        win.show_all()
        self.web.open(self.OAUTH_URL)
        # NOTE(review): the "load-finished" signal passes (view, frame)
        # arguments but common_method.load_finished() takes none -- confirm
        # the slot signature before relying on this callback.
        self.web.connect("load-finished", self.load_finished)
        self.gtk.main()

    def get_url(self):
        """Current URL of the embedded browser view."""
        return self.web.get_main_frame().get_uri()

    def destroy(self):
        """Tear down the browser view and quit the GTK main loop."""
        self.web.destroy()
        self.gtk.main_quit()
class qt_method(common_method):
    """OAuth browser backed by Qt (PySide QWebView)."""

    def create_browser(self):
        try:
            from PySide.QtCore import QUrl
            from PySide.QtGui import QApplication
            from PySide.QtWebKit import QWebView
        except ImportError:
            # BUG FIX: used to 'raise self.raise_error(...)', an attribute
            # that does not exist on this class; raise the package's
            # exception type directly instead.
            raise FbError("You need python-pyside\nDownload: http://developer.qt.nokia.com/wiki/PySide_Binaries_Linux")
        self.app = QApplication(sys.argv)
        self.web = QWebView()
        self.web.load(QUrl(self.OAUTH_URL))
        # NOTE(review): the signal delivers a bool argument but
        # common_method.load_finished() takes none -- confirm the slot
        # signature before relying on this callback.
        self.web.loadFinished[bool].connect(self.load_finished)
        self.web.show()
        self.app.exec_()

    def get_url(self):
        """Current URL of the embedded browser view."""
        return self.web.url().toString()

    def destroy(self):
        """Close the browser window and exit the Qt event loop."""
        self.web.close()
        self.app.exit()
"""EXCEPTION"""
class FbError(Exception):
    """Exception raised for Graph API errors and configuration problems."""

    def raise_error(self, message):
        # NOTE(review): unusual helper -- it re-runs Exception.__init__ on an
        # existing instance rather than raising anything. Kept as-is for
        # compatibility with existing callers.
        Exception.__init__(self, message)
| gpl-3.0 |
yamt/networking-ofagent-old3 | networking_ofagent/hacking/checks.py | 6 | 4024 | # Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import pep8
# Guidelines for writing new hacking checks
#
# - Use only for Neutron specific tests. OpenStack general tests
# should be submitted to the common 'hacking' module.
# - Pick numbers in the range N3xx. Find the current test with
# the highest allocated number and then pick the next value.
# - Keep the test method code in the source file ordered based
# on the N3xx value.
# - List the new rule in the top level HACKING.rst file
# - Add test cases for each new rule to
# neutron/tests/unit/test_hacking.py
# Maps each logging method name to the translation-hint function its
# message is expected to be wrapped in (e.g. LOG.error(_LE("...")) ).
_all_log_levels = {
    # NOTE(yamamoto): Following nova which uses _() for audit.
    'audit': '_',
    'error': '_LE',
    'info': '_LI',
    'warn': '_LW',
    'warning': '_LW',
    'critical': '_LC',
    'exception': '_LE',
}
# Every known hint name; used to detect the *wrong* hint for a given level.
_all_hints = set(_all_log_levels.values())
def _regex_for_level(level, hint):
    """Build a regex matching a LOG.<level>() call whose message is either a
    bare string literal or wrapped in one of the *wrong* hint functions
    (any hint other than the one expected for this level)."""
    wrong_hints = '|'.join(_all_hints - set([hint]))
    template = r".*LOG\.%(level)s\(\s*((%(wrong_hints)s)\(|'|\")"
    return template % {'level': level, 'wrong_hints': wrong_hints}
# One combined pattern flagging any log call that is missing (or uses the
# wrong) translation hint. NOTE: iteritems() makes this Python 2 only.
log_translation_hint = re.compile(
    '|'.join('(?:%s)' % _regex_for_level(level, hint)
             for level, hint in _all_log_levels.iteritems()))
def validate_log_translations(logical_line, physical_line, filename):
    """N320: log messages above debug level must carry a translation hint."""
    # Translations are not required in the test directory.
    if "neutron/tests" in filename:
        return
    # Honor per-line "# noqa" suppressions.
    if pep8.noqa(physical_line):
        return
    if log_translation_hint.match(logical_line):
        yield (0, "N320: Log messages require translation hints!")
def use_jsonutils(logical_line, filename):
    """N322 family check N321: flag direct json.<fn>() calls -- the
    oslo jsonutils wrappers must be used instead."""
    msg = "N321: jsonutils.%(fun)s must be used instead of json.%(fun)s"

    # Some files in the tree are not meant to be run from inside Neutron
    # itself, so we should not complain about them not using jsonutils
    exempt_patterns = (
        "neutron/plugins/openvswitch/agent/xenapi/etc/xapi.d/plugins/netwrap",
    )
    for pattern in exempt_patterns:
        if pattern in filename:
            return

    if "json." not in logical_line:
        return
    for func in ('dumps(', 'dump(', 'loads(', 'load('):
        pos = logical_line.find('json.%s' % func)
        if pos != -1:
            yield (pos, msg % {'fun': func[:-1]})
def no_translate_debug_logs(logical_line, filename):
    """N319: debug-level log messages must not be translated.

    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.

    * This check assumes that 'LOG' is a logger.
    """
    for hint in _all_hints:
        if logical_line.startswith("LOG.debug(%s(" % hint):
            yield (0, "N319 Don't translate debug level logs")
def check_assert_called_once_with(logical_line, filename):
    """N322: detect calls to nonexistent mock methods such as
    assert_called_once or assertCalledOnceWith (silent no-ops on a Mock)."""
    # Only test code uses mock assertions.
    if 'neutron/tests/' not in filename:
        return
    # The real method is fine.
    if '.assert_called_once_with(' in logical_line:
        return
    normalized = logical_line.lower().replace('_', '')
    if '.assertcalledonce' in normalized:
        yield (0, "N322: Possible use of no-op mock method. "
                  "please use assert_called_once_with.")
def factory(register):
    """Hacking entry point: register every check in this module."""
    for check in (validate_log_translations,
                  use_jsonutils,
                  check_assert_called_once_with,
                  no_translate_debug_logs):
        register(check)
| apache-2.0 |
ojake/django | django/db/backends/base/creation.py | 147 | 10102 | import sys
import time
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.db import router
from django.utils.six import StringIO
from django.utils.six.moves import input
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
    """
    This class encapsulates all backend-specific differences that pertain to
    creation and destruction of the test database.
    """

    def __init__(self, connection):
        # The DatabaseWrapper this creation object manages test DBs for.
        self.connection = connection

    @property
    def _nodb_connection(self):
        """
        Used to be defined here, now moved to DatabaseWrapper.
        """
        return self.connection._nodb_connection

    def create_test_db(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command
        test_database_name = self._get_test_db_name()

        if verbosity >= 1:
            test_db_repr = ''
            action = 'Creating'
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            if keepdb:
                action = "Using existing"
            print("%s test database for alias '%s'%s..." % (
                action, self.connection.alias, test_db_repr))

        # We could skip this call if keepdb is True, but we instead
        # give it the keepdb param. This is to handle the case
        # where the test DB doesn't exist, in which case we need to
        # create it, then just not destroy it. If we instead skip
        # this, we will get an exception.
        self._create_test_db(verbosity, autoclobber, keepdb)

        self.connection.close()
        # Point both the global settings and this connection at the test DB.
        settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
        self.connection.settings_dict["NAME"] = test_database_name

        # We report migrate messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded).
        call_command(
            'migrate',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            run_syncdb=True,
        )

        # We then serialize the current state of the database into a string
        # and store it on the connection. This slightly horrific process is so people
        # who are testing on databases without transactions or who are using
        # a TransactionTestCase still get a clean database on every test run.
        if serialize:
            self.connection._test_serialized_contents = self.serialize_db_to_string()

        call_command('createcachetable', database=self.connection.alias)

        # Ensure a connection for the side effect of initializing the test database.
        self.connection.ensure_connection()

        return test_database_name

    def set_as_test_mirror(self, primary_settings_dict):
        """
        Set this database up to be used in testing as a mirror of a primary database
        whose settings are given
        """
        self.connection.settings_dict['NAME'] = primary_settings_dict['NAME']

    def serialize_db_to_string(self):
        """
        Serializes all data in the database into a JSON string.
        Designed only for test runner usage; will not handle large
        amounts of data.
        """
        # Build list of all apps to serialize
        from django.db.migrations.loader import MigrationLoader
        loader = MigrationLoader(self.connection)
        app_list = []
        for app_config in apps.get_app_configs():
            if (
                app_config.models_module is not None and
                app_config.label in loader.migrated_apps and
                app_config.name not in settings.TEST_NON_SERIALIZED_APPS
            ):
                app_list.append((app_config, None))

        # Make a function to iteratively return every object
        def get_objects():
            for model in serializers.sort_dependencies(app_list):
                if (model._meta.can_migrate(self.connection) and
                        router.allow_migrate_model(self.connection.alias, model)):
                    queryset = model._default_manager.using(self.connection.alias).order_by(model._meta.pk.name)
                    for obj in queryset.iterator():
                        yield obj

        # Serialize to a string
        out = StringIO()
        serializers.serialize("json", get_objects(), indent=None, stream=out)
        return out.getvalue()

    def deserialize_db_from_string(self, data):
        """
        Reloads the database with data from a string generated by
        the serialize_db_to_string method.
        """
        data = StringIO(data)
        for obj in serializers.deserialize("json", data, using=self.connection.alias):
            obj.save()

    def _get_test_db_name(self):
        """
        Internal implementation - returns the name of the test DB that will be
        created. Only useful when called from create_test_db() and
        _create_test_db() and when no external munging is done with the 'NAME'
        or 'TEST_NAME' settings.
        """
        if self.connection.settings_dict['TEST']['NAME']:
            return self.connection.settings_dict['TEST']['NAME']
        return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']

    def _create_test_db(self, verbosity, autoclobber, keepdb=False):
        """
        Internal implementation - creates the test db tables.
        """
        suffix = self.sql_table_creation_suffix()
        test_database_name = self._get_test_db_name()

        qn = self.connection.ops.quote_name

        # Create the test database and connect to it.
        with self._nodb_connection.cursor() as cursor:
            try:
                cursor.execute(
                    "CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
            except Exception as e:
                # if we want to keep the db, then no need to do any of the below,
                # just return and skip it all.
                if keepdb:
                    return test_database_name

                sys.stderr.write(
                    "Got an error creating the test database: %s\n" % e)
                if not autoclobber:
                    confirm = input(
                        "Type 'yes' if you would like to try deleting the test "
                        "database '%s', or 'no' to cancel: " % test_database_name)
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print("Destroying old test database '%s'..."
                                  % self.connection.alias)
                        cursor.execute(
                            "DROP DATABASE %s" % qn(test_database_name))
                        cursor.execute(
                            "CREATE DATABASE %s %s" % (qn(test_database_name),
                                                       suffix))
                    except Exception as e:
                        sys.stderr.write(
                            "Got an error recreating the test database: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)

        return test_database_name

    def destroy_test_db(self, old_database_name, verbosity=1, keepdb=False):
        """
        Destroy a test database, prompting the user for confirmation if the
        database already exists.
        """
        self.connection.close()
        test_database_name = self.connection.settings_dict['NAME']
        if verbosity >= 1:
            test_db_repr = ''
            action = 'Destroying'
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            if keepdb:
                action = 'Preserving'
            print("%s test database for alias '%s'%s..." % (
                action, self.connection.alias, test_db_repr))

        # if we want to preserve the database
        # skip the actual destroying piece.
        if not keepdb:
            self._destroy_test_db(test_database_name, verbosity)

        # Restore the original database name
        settings.DATABASES[self.connection.alias]["NAME"] = old_database_name
        self.connection.settings_dict["NAME"] = old_database_name

    def _destroy_test_db(self, test_database_name, verbosity):
        """
        Internal implementation - remove the test db tables.
        """
        # Remove the test database to clean up after
        # ourselves. Connect to the previous database (not the test database)
        # to do so, because it's not allowed to delete a database while being
        # connected to it.
        with self.connection._nodb_connection.cursor() as cursor:
            # Wait to avoid "database is being accessed by other users" errors.
            time.sleep(1)
            cursor.execute("DROP DATABASE %s"
                           % self.connection.ops.quote_name(test_database_name))

    def sql_table_creation_suffix(self):
        """
        SQL to append to the end of the test table creation statements.
        """
        return ''

    def test_db_signature(self):
        """
        Returns a tuple with elements of self.connection.settings_dict (a
        DATABASES setting value) that uniquely identify a database
        accordingly to the RDBMS particularities.
        """
        settings_dict = self.connection.settings_dict
        return (
            settings_dict['HOST'],
            settings_dict['PORT'],
            settings_dict['ENGINE'],
            settings_dict['NAME']
        )
| bsd-3-clause |
rfguri/vimfiles | bundle/ycm/third_party/ycmd/third_party/python-future/src/future/backports/misc.py | 20 | 31552 | """
Miscellaneous function (re)definitions from the Py3.4+ standard library
for Python 2.6/2.7.
- math.ceil (for Python 2.7)
- collections.OrderedDict (for Python 2.6)
- collections.Counter (for Python 2.6)
- collections.ChainMap (for all versions prior to Python 3.3)
- itertools.count (for Python 2.6, with step parameter)
- subprocess.check_output (for Python 2.6)
- reprlib.recursive_repr (for Python 2.6+)
"""
from __future__ import absolute_import
import subprocess
from math import ceil as oldceil
from collections import Mapping, MutableMapping
from operator import itemgetter as _itemgetter, eq as _eq
import sys
import heapq as _heapq
from _weakref import proxy as _proxy
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
from socket import getaddrinfo, SOCK_STREAM, error, socket
from future.utils import iteritems, itervalues, PY26, PY3
def ceil(x):
    """
    Return the ceiling of x as an int.
    This is the smallest integral value >= x.
    """
    # math.ceil returns a float on Python 2; coerce to int for Py3 parity.
    result = oldceil(x)
    return int(result)
########################################################################
### reprlib.recursive_repr decorator from Py3.4
########################################################################
from itertools import islice
if PY3:
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
else:
try:
from thread import get_ident
except ImportError:
from dummy_thread import get_ident
def recursive_repr(fillvalue='...'):
    """Decorator factory: make a __repr__ return *fillvalue* instead of
    recursing when invoked re-entrantly on the same object in the same
    thread (backport of Py3.4's reprlib.recursive_repr)."""
    def decorating_function(user_function):
        # (object id, thread id) pairs currently being repr'd.
        active = set()

        def wrapper(self):
            key = id(self), get_ident()
            if key in active:
                # Re-entered for this object on this thread: cut the cycle.
                return fillvalue
            active.add(key)
            try:
                result = user_function(self)
            finally:
                active.discard(key)
            return result

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
################################################################################
### OrderedDict
################################################################################
class _Link(object):
    # Doubly-linked-list node used by OrderedDict. __slots__ keeps the
    # per-instance footprint small; '__weakref__' allows the weak proxies
    # OrderedDict uses for its backward links.
    __slots__ = 'prev', 'next', 'key', '__weakref__'
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
# The prev links are weakref proxies (to prevent circular references).
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedDict.
def __init__(*args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if not args:
raise TypeError("descriptor '__init__' of 'OrderedDict' object "
"needs an argument")
self = args[0]
args = args[1:]
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__hardroot = _Link()
self.__root = root = _proxy(self.__hardroot)
root.prev = root.next = root
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value,
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
self.__map[key] = link = Link()
root = self.__root
last = root.prev
link.prev, link.next, link.key = last, root, key
last.next = link
root.prev = proxy(link)
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link = self.__map.pop(key)
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
root = self.__root
curr = root.next
while curr is not root:
yield curr.key
curr = curr.next
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
root = self.__root
curr = root.prev
while curr is not root:
yield curr.key
curr = curr.prev
def clear(self):
'od.clear() -> None. Remove all items from od.'
root = self.__root
root.prev = root.next = root
self.__map.clear()
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root.prev
link_prev = link.prev
link_prev.next = root
root.prev = link_prev
else:
link = root.next
link_next = link.next
root.next = link_next
link_next.prev = root
key = link.key
del self.__map[key]
value = dict.pop(self, key)
return key, value
    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last==False).
        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).
        '''
        # The map lookup raises KeyError for a missing key, as documented.
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        # First splice the node out of its current position...
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            # ...then re-insert it just before the sentinel (new last item).
            # NOTE: 'last' is deliberately rebound from the bool flag to the
            # current tail link here.
            last = root.prev
            link.prev = last
            link.next = root
            last.next = root.prev = link
        else:
            # ...or just after the sentinel (new first item).
            first = root.next
            link.prev = root
            link.next = first
            root.next = first.prev = link
    def __sizeof__(self):
        # Approximate memory footprint: instance dict, the two dicts
        # (internal map plus the inherited dict storage, estimated as
        # twice the map's size), and one link + one proxy per entry
        # including the sentinel root.  Contained keys/values are not
        # counted, per the __sizeof__ convention.
        sizeof = sys.getsizeof
        n = len(self) + 1                       # number of links including root
        size = sizeof(self.__dict__)            # instance dictionary
        size += sizeof(self.__map) * 2          # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n     # link objects
        size += sizeof(self.__root) * n         # proxy objects
        return size
update = __update = MutableMapping.update
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
__ne__ = MutableMapping.__ne__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
@recursive_repr()
def __repr__(self):
'od.__repr__() <==> repr(od)'
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
    def __reduce__(self):
        'Return state information for pickling'
        # Copy the instance dict and strip the implementation attributes
        # (root/map bookkeeping) that a freshly built instance recreates on
        # its own; the items are re-fed through an iterator so insertion
        # order survives a pickle round-trip.
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        return self.__class__, (), inst_dict or None, None, iter(self.items())
    def copy(self):
        'od.copy() -> a shallow copy of od'
        # Rebuilding through the constructor preserves insertion order.
        return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.
        '''
        # _eq (module-level operator.eq alias) compares the two key sequences
        # pairwise for order, while dict.__eq__ checks the key/value contents.
        if isinstance(other, OrderedDict):
            return dict.__eq__(self, other) and all(map(_eq, self, other))
        return dict.__eq__(self, other)
# {{{ http://code.activestate.com/recipes/576611/ (r11)
try:
from operator import itemgetter
from heapq import nlargest
except ImportError:
pass
########################################################################
### Counter
########################################################################
def _count_elements(mapping, iterable):
'Tally elements from the iterable.'
mapping_get = mapping.get
for elem in iterable:
mapping[elem] = mapping_get(elem, 0) + 1
class Counter(dict):
    '''Dict subclass for counting hashable items.  Sometimes called a bag
    or multiset.  Elements are stored as dictionary keys and their counts
    are stored as dictionary values.
    >>> c = Counter('abcdeabcdabcaba')  # count elements from a string
    >>> c.most_common(3)                # three most common elements
    [('a', 5), ('b', 4), ('c', 3)]
    >>> sorted(c)                       # list all unique elements
    ['a', 'b', 'c', 'd', 'e']
    >>> ''.join(sorted(c.elements()))   # list elements with repetitions
    'aaaaabbbbcccdde'
    >>> sum(c.values())                 # total of all counts
    15
    >>> c['a']                          # count of letter 'a'
    5
    >>> for elem in 'shazam':           # update counts from an iterable
    ...     c[elem] += 1                # by adding 1 to each element's count
    >>> c['a']                          # now there are seven 'a'
    7
    >>> del c['b']                      # remove all 'b'
    >>> c['b']                          # now there are zero 'b'
    0
    >>> d = Counter('simsalabim')       # make another counter
    >>> c.update(d)                     # add in the second counter
    >>> c['a']                          # now there are nine 'a'
    9
    >>> c.clear()                       # empty the counter
    >>> c
    Counter()
    Note:  If a count is set to zero or reduced to zero, it will remain
    in the counter until the entry is deleted or the counter is cleared:
    >>> c = Counter('aaabbc')
    >>> c['b'] -= 2                     # reduce the count of 'b' by two
    >>> c.most_common()                 # 'b' is still in, but its count is zero
    [('a', 3), ('c', 1), ('b', 0)]
    '''
    # References:
    #   http://en.wikipedia.org/wiki/Multiset
    #   http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
    #   http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
    #   http://code.activestate.com/recipes/259174/
    #   Knuth, TAOCP Vol. II section 4.6.3
    def __init__(*args, **kwds):
        '''Create a new, empty Counter object.  And if given, count elements
        from an input iterable.  Or, initialize the count from another mapping
        of elements to their counts.
        >>> c = Counter()                           # a new, empty counter
        >>> c = Counter('gallahad')                 # a new counter from an iterable
        >>> c = Counter({'a': 4, 'b': 2})           # a new counter from a mapping
        >>> c = Counter(a=4, b=2)                   # a new counter from keyword args
        '''
        # 'self' is extracted manually from *args (descriptor style) so that
        # an element literally named 'self' can still be passed via **kwds.
        if not args:
            raise TypeError("descriptor '__init__' of 'Counter' object "
                            "needs an argument")
        self = args[0]
        args = args[1:]
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        super(Counter, self).__init__()
        self.update(*args, **kwds)
    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        # Needed so that self[missing_item] does not raise KeyError
        return 0
    def most_common(self, n=None):
        '''List the n most common elements and their counts from the most
        common to the least.  If n is None, then list all element counts.
        >>> Counter('abcdeabcdabcaba').most_common(3)
        [('a', 5), ('b', 4), ('c', 3)]
        '''
        # Emulate Bag.sortedByCount from Smalltalk
        if n is None:
            return sorted(self.items(), key=_itemgetter(1), reverse=True)
        # For a bounded request, a heap selection is O(len * log n).
        return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
    def elements(self):
        '''Iterator over elements repeating each as many times as its count.
        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']
        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements():     # loop over factors
        ...     product *= factor                       # and multiply them
        >>> product
        1836
        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.
        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        return _chain.from_iterable(_starmap(_repeat, self.items()))
    # Override dict methods where necessary
    @classmethod
    def fromkeys(cls, iterable, v=None):
        # There is no equivalent method for counters because setting v=1
        # means that no element can have a count greater than one.
        raise NotImplementedError(
            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')
    def update(*args, **kwds):
        '''Like dict.update() but add counts instead of replacing them.
        Source can be an iterable, a dictionary, or another Counter instance.
        >>> c = Counter('which')
        >>> c.update('witch')           # add elements from another iterable
        >>> d = Counter('watch')
        >>> c.update(d)                 # add elements from another counter
        >>> c['h']                      # four 'h' in which, witch, and watch
        4
        '''
        # The regular dict.update() operation makes no sense here because the
        # replace behavior results in the some of original untouched counts
        # being mixed-in with all of the other counts for a mismash that
        # doesn't have a straight-forward interpretation in most counting
        # contexts.  Instead, we implement straight-addition.  Both the inputs
        # and outputs are allowed to contain zero and negative counts.
        if not args:
            raise TypeError("descriptor 'update' of 'Counter' object "
                            "needs an argument")
        self = args[0]
        args = args[1:]
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        iterable = args[0] if args else None
        if iterable is not None:
            if isinstance(iterable, Mapping):
                if self:
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    super(Counter, self).update(iterable) # fast path when counter is empty
            else:
                _count_elements(self, iterable)
        if kwds:
            self.update(kwds)
    def subtract(*args, **kwds):
        '''Like dict.update() but subtracts counts instead of replacing them.
        Counts can be reduced below zero.  Both the inputs and outputs are
        allowed to contain zero and negative counts.
        Source can be an iterable, a dictionary, or another Counter instance.
        >>> c = Counter('which')
        >>> c.subtract('witch')             # subtract elements from another iterable
        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
        0
        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
        -1
        '''
        # Same descriptor-style 'self' extraction as update()/__init__().
        if not args:
            raise TypeError("descriptor 'subtract' of 'Counter' object "
                            "needs an argument")
        self = args[0]
        args = args[1:]
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        iterable = args[0] if args else None
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, Mapping):
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            self.subtract(kwds)
    def copy(self):
        'Return a shallow copy.'
        return self.__class__(self)
    def __reduce__(self):
        # Pickle via the constructor with a plain-dict snapshot of the counts.
        return self.__class__, (dict(self),)
    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super(Counter, self).__delitem__(elem)
    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        try:
            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
            return '%s({%s})' % (self.__class__.__name__, items)
        except TypeError:
            # handle case where values are not orderable
            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
    # Multiset-style mathematical operations discussed in:
    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
    #       and at http://en.wikipedia.org/wiki/Multiset
    #
    # Outputs guaranteed to only include positive counts.
    #
    # To strip negative and zero counts, add-in an empty counter:
    #       c += Counter()
    def __add__(self, other):
        '''Add counts from two counters.
        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        # Elements only present in 'other' still contribute their counts.
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result
    def __sub__(self, other):
        ''' Subtract count, but keep only results with positive counts.
        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        # A negative count in 'other' adds to the result when negated.
        for elem, count in other.items():
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result
    def __or__(self, other):
        '''Union is the maximum of value in either of the input counters.
        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result
    def __and__(self, other):
        ''' Intersection is the minimum of corresponding counts.
        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        # Only elements of self can appear; a key missing in either side
        # has minimum count 0 and is therefore excluded.
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result
    def __pos__(self):
        'Adds an empty counter, effectively stripping negative and zero counts'
        return self + Counter()
    def __neg__(self):
        '''Subtracts from an empty counter.  Strips positive and zero counts,
        and flips the sign on negative counts.
        '''
        return Counter() - self
    def _keep_positive(self):
        '''Internal method to strip elements with a negative or zero count'''
        # Collect first, delete second: deleting while iterating self.items()
        # would mutate the dict mid-iteration.
        nonpositive = [elem for elem, count in self.items() if not count > 0]
        for elem in nonpositive:
            del self[elem]
        return self
    def __iadd__(self, other):
        '''Inplace add from another counter, keeping only positive counts.
        >>> c = Counter('abbb')
        >>> c += Counter('bcc')
        >>> c
        Counter({'b': 4, 'c': 2, 'a': 1})
        '''
        for elem, count in other.items():
            self[elem] += count
        return self._keep_positive()
    def __isub__(self, other):
        '''Inplace subtract counter, but keep only results with positive counts.
        >>> c = Counter('abbbc')
        >>> c -= Counter('bccd')
        >>> c
        Counter({'b': 2, 'a': 1})
        '''
        for elem, count in other.items():
            self[elem] -= count
        return self._keep_positive()
    def __ior__(self, other):
        '''Inplace union is the maximum of value from either counter.
        >>> c = Counter('abbb')
        >>> c |= Counter('bcc')
        >>> c
        Counter({'b': 3, 'c': 2, 'a': 1})
        '''
        for elem, other_count in other.items():
            count = self[elem]
            if other_count > count:
                self[elem] = other_count
        return self._keep_positive()
    def __iand__(self, other):
        '''Inplace intersection is the minimum of corresponding counts.
        >>> c = Counter('abbb')
        >>> c &= Counter('bcc')
        >>> c
        Counter({'b': 1})
        '''
        for elem, count in self.items():
            other_count = other[elem]
            if other_count < count:
                self[elem] = other_count
        return self._keep_positive()
def check_output(*popenargs, **kwargs):
    """Run a command and return its standard output.

    Backport of ``subprocess.check_output`` for Python 2.6 compatibility
    (see http://stackoverflow.com/questions/4814970/).  Raises
    ``subprocess.CalledProcessError`` when the command exits non-zero and
    ``ValueError`` if the caller tries to supply ``stdout``.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    stdout_data, _unused_err = proc.communicate()
    exit_code = proc.poll()
    if exit_code:
        failed_cmd = kwargs.get("args")
        if failed_cmd is None:
            failed_cmd = popenargs[0]
        raise subprocess.CalledProcessError(exit_code, failed_cmd)
    return stdout_data
def count(start=0, step=1):
    """Yield ``start``, ``start + step``, ``start + 2*step``, ... forever.

    Enhanced backport of ``itertools.count`` for Python 2.6, whose version
    did not accept a ``step`` parameter; equivalent to ``itertools.count``
    in Python 2.7+.
    """
    current = start
    while True:
        yield current
        current += step
########################################################################
### ChainMap (helper for configparser and string.Template)
### From the Py3.4 source code. See also:
### https://github.com/kkxue/Py2ChainMap/blob/master/py2chainmap.py
########################################################################
class ChainMap(MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.
    The underlying mappings are stored in a list.  That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other state.
    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.
    '''
    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.
        '''
        self.maps = list(maps) or [{}]          # always at least one map
    def __missing__(self, key):
        # Hook for subclasses; called by __getitem__ after all maps miss.
        raise KeyError(key)
    def __getitem__(self, key):
        for mapping in self.maps:
            try:
                return mapping[key]             # can't use 'key in mapping' with defaultdict
            except KeyError:
                pass
        return self.__missing__(key)            # support subclasses that define __missing__
    def get(self, key, default=None):
        return self[key] if key in self else default
    def __len__(self):
        # A key shadowed in several maps counts once, hence the set union.
        return len(set().union(*self.maps))     # reuses stored hash values if possible
    def __iter__(self):
        return iter(set().union(*self.maps))
    def __contains__(self, key):
        return any(key in m for m in self.maps)
    def __bool__(self):
        # True if any underlying map is non-empty.
        return any(self.maps)
    # Py2 compatibility:
    __nonzero__ = __bool__
    @recursive_repr()
    def __repr__(self):
        return '{0.__class__.__name__}({1})'.format(
            self, ', '.join(map(repr, self.maps)))
    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))
    def copy(self):
        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
        return self.__class__(self.maps[0].copy(), *self.maps[1:])
    __copy__ = copy
    def new_child(self, m=None):                # like Django's Context.push()
        '''
        New ChainMap with a new map followed by all previous maps. If no
        map is provided, an empty dict is used.
        '''
        if m is None:
            m = {}
        return self.__class__(m, *self.maps)
    @property
    def parents(self):                          # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])
    def __setitem__(self, key, value):
        # Writes only ever touch the first mapping.
        self.maps[0][key] = value
    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
    def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')
    def pop(self, key, *args):
        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()
# Re-use the same sentinel as in the Python stdlib socket module:
from socket import _GLOBAL_DEFAULT_TIMEOUT
# Was: _GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None):
    """Backport of 3-argument create_connection() for Py2.6.

    Connect to *address* (a 2-tuple ``(host, port)``) and return the socket
    object.  Passing the optional *timeout* parameter sets the timeout on the
    socket instance before attempting to connect; if no *timeout* is supplied,
    the global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a ``(host, port)`` tuple
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """
    host, port = address
    last_error = None
    # Try every address family/socktype getaddrinfo offers until one connects.
    for family, socktype, proto, _canonname, sockaddr in \
            getaddrinfo(host, port, 0, SOCK_STREAM):
        sock = None
        try:
            sock = socket(family, socktype, proto)
            if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sockaddr)
            return sock
        except error as exc:
            # Remember the failure and clean up before trying the next result.
            last_error = exc
            if sock is not None:
                sock.close()
    if last_error is not None:
        raise last_error
    raise error("getaddrinfo returns an empty list")
# Back up our definitions above in case they're useful
# (the underscored aliases keep the pure-Python backports importable even
# when the stdlib versions below shadow the public names).
_OrderedDict = OrderedDict
_Counter = Counter
_check_output = check_output
_count = count
_ceil = ceil
__count_elements = _count_elements
_recursive_repr = recursive_repr
_ChainMap = ChainMap
_create_connection = create_connection
# Overwrite the definitions above with the usual ones
# from the standard library:
if sys.version_info >= (2, 7):
    from collections import OrderedDict, Counter
    from subprocess import check_output
    from itertools import count
    from socket import create_connection
if sys.version_info >= (3, 0):
    from math import ceil
    from collections import _count_elements
if sys.version_info >= (3, 3):
    from reprlib import recursive_repr
    from collections import ChainMap
| mit |
erjohnso/ansible | lib/ansible/modules/network/cloudengine/ce_file_copy.py | 23 | 12414 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_file_copy
version_added: "2.4"
short_description: Copy a file to a remote cloudengine device over SCP on HUAWEI CloudEngine switches.
description:
- Copy a file to a remote cloudengine device over SCP on HUAWEI CloudEngine switches.
author:
- Zhou Zhijin (@CloudEngine-Ansible)
notes:
- The feature must be enabled with feature scp-server.
- If the file is already present, no transfer will take place.
options:
local_file:
description:
- Path to local file. Local directory must exist.
The maximum length of local_file is 4096.
required: true
remote_file:
description:
- Remote file path of the copy. Remote directories must exist.
If omitted, the name of the local file will be used.
The maximum length of remote_file is 4096.
required: false
default: null
file_system:
description:
- The remote file system of the device. If omitted,
devices that support a file_system parameter will use
their default values.
File system indicates the storage medium and can be set to as follows,
1) 'flash:' is root directory of the flash memory on the master MPU.
2) 'slave#flash:' is root directory of the flash memory on the slave MPU.
If no slave MPU exists, this drive is unavailable.
3) 'chassis ID/slot number#flash:' is root directory of the flash memory on
a device in a stack. For example, 1/5#flash indicates the flash memory
whose chassis ID is 1 and slot number is 5.
required: false
default: 'flash:'
'''
EXAMPLES = '''
- name: File copy test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Copy a local file to remote device"
ce_file_copy:
local_file: /usr/vrpcfg.cfg
remote_file: /vrpcfg.cfg
file_system: 'flash:'
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
transfer_result:
description: information about transfer result.
returned: always
type: string
sample: 'The local file has been successfully transferred to the device.'
local_file:
description: The path of the local file.
returned: always
type: string
sample: '/usr/work/vrpcfg.zip'
remote_file:
description: The path of the remote file.
returned: always
type: string
sample: '/vrpcfg.zip'
'''
import re
import os
import time
from xml.etree import ElementTree
import paramiko
from ansible.module_utils.basic import get_exception, AnsibleModule
from ansible.module_utils.ce import ce_argument_spec, run_commands, get_nc_config
try:
from scp import SCPClient
HAS_SCP = True
except ImportError:
HAS_SCP = False
CE_NC_GET_FILE_INFO = """
<filter type="subtree">
<vfm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<dirs>
<dir>
<fileName>%s</fileName>
<dirName>%s</dirName>
<DirSize></DirSize>
</dir>
</dirs>
</vfm>
</filter>
"""
CE_NC_GET_SCP_ENABLE = """
<filter type="subtree">
<sshs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<sshServer>
<scpEnable></scpEnable>
</sshServer>
</sshs>
</filter>
"""
def get_cli_exception(exc=None):
    """Build a readable, single-line message from a CLI exception.

    Splits the exception text on CRLF, drops noise lines (blank lines,
    "matched error in response" markers, bare '^' position markers and
    bracketed prompts), normalizes trailing spaces/periods, and joins the
    remaining lines with commas.  Falls back to a generic message when
    nothing usable remains.  When *exc* is None the current exception is
    fetched via get_exception().
    """
    msg = list()
    if not exc:
        exc = get_exception()
    if exc:
        errs = str(exc).split("\r\n")
        for err in errs:
            if not err:
                continue
            if "matched error in response:" in err:
                continue
            if " at '^' position" in err:
                err = err.replace(" at '^' position", "")
            if err.replace(" ", "") == "^":
                continue
            # Skip bracketed device prompts such as "<switch>" or "[~switch]".
            if len(err) > 2 and err[0] in ["<", "["] and err[-1] in [">", "]"]:
                continue
            if err[-1] == ".":
                err = err[:-1]
            if err.replace(" ", "") == "":
                continue
            msg.append(err)
    # Fall back when exc was empty OR every line was filtered out; the
    # original code indexed msg[-1] unconditionally and raised IndexError
    # on an empty list.
    if not msg:
        msg = ["Error: Fail to get cli exception message."]
    # Strip trailing spaces from the final fragment and ensure it ends
    # with a period before joining.
    while msg[-1][-1] == ' ':
        msg[-1] = msg[-1][:-1]
    if msg[-1][-1] != ".":
        msg[-1] += "."
    return ", ".join(msg).capitalize()
class FileCopy(object):
    """Copy a local file to a HUAWEI CloudEngine device over SCP.

    The transfer is skipped when a file with the same remote path and the
    same size already exists on the device.  All failures are reported
    through ``AnsibleModule.fail_json``.
    """

    def __init__(self, argument_spec):
        self.spec = argument_spec
        self.module = None
        self.init_module()

        # file copy parameters
        self.local_file = self.module.params['local_file']
        self.remote_file = self.module.params['remote_file']
        self.file_system = self.module.params['file_system']

        # state
        self.transfer_result = None
        self.changed = False

    def init_module(self):
        """Instantiate the AnsibleModule from the argument spec."""
        self.module = AnsibleModule(
            argument_spec=self.spec, supports_check_mode=True)

    def remote_file_exists(self, dst, file_system='flash:'):
        """Return (exists, size_in_bytes) for *dst* on the remote device.

        The device file manager is queried over NETCONF; a missing reply
        or an unparsable tree yields ``(False, 0)``.
        """
        full_path = file_system + dst
        file_name = os.path.basename(full_path)
        file_path = os.path.dirname(full_path) + '/'
        xml_str = CE_NC_GET_FILE_INFO % (file_name, file_path)
        ret_xml = get_nc_config(self.module, xml_str)
        if "<data/>" in ret_xml:
            return False, 0

        xml_str = ret_xml.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")

        # get file info
        root = ElementTree.fromstring(xml_str)
        topo = root.find("data/vfm/dirs/dir")
        if topo is None:
            return False, 0

        for eles in topo:
            if eles.tag in ["DirSize"]:
                # the device reports sizes with thousands separators
                return True, int(eles.text.replace(',', ''))

        return False, 0

    def local_file_exists(self):
        """Return True when the configured local file exists on disk."""
        return os.path.isfile(self.local_file)

    def enough_space(self):
        """Return True when the target file system has room for the file.

        Parses the free-space figure from the device's ``dir`` output; when
        the output cannot be parsed the check is skipped (the transfer will
        fail later if space really is missing).
        """
        commands = list()
        cmd = 'dir %s' % self.file_system
        commands.append(cmd)
        output = run_commands(self.module, commands)
        if not output:
            return True
        match = re.search(r'\((.*) KB free\)', output[0])
        if not match:
            # Unrecognized 'dir' output format; the original code would
            # crash with AttributeError here.  Assume enough space instead.
            return True
        kbytes_free = match.group(1).replace(',', '')
        file_size = os.path.getsize(self.local_file)
        return int(kbytes_free) * 1024 > file_size

    def transfer_file(self, dest):
        """Transfer the local file to *dest* on the device via SCP.

        On a failed put, waits and re-checks whether the file arrived with
        the correct size before declaring failure (some devices close the
        channel early even on success).
        """
        if not self.local_file_exists():
            self.module.fail_json(
                msg='Could not transfer file. Local file doesn\'t exist.')

        if not self.enough_space():
            self.module.fail_json(
                msg='Could not transfer file. Not enough space on device.')

        hostname = self.module.params['provider']['host']
        username = self.module.params['provider']['username']
        password = self.module.params['provider']['password']
        port = self.module.params['provider']['port']

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=hostname, username=username, password=password, port=port)
        full_remote_path = '{}{}'.format(self.file_system, dest)
        scp = SCPClient(ssh.get_transport())
        try:
            scp.put(self.local_file, full_remote_path)
        except Exception:
            # Best-effort recovery: the put may have succeeded despite the
            # error, so verify size on the device after a short delay.
            # (Was a bare 'except:', which also swallowed KeyboardInterrupt.)
            time.sleep(10)
            file_exists, temp_size = self.remote_file_exists(
                dest, self.file_system)
            file_size = os.path.getsize(self.local_file)
            if file_exists and int(temp_size) == int(file_size):
                pass
            else:
                scp.close()
                self.module.fail_json(msg='Could not transfer file. There was an error '
                                      'during transfer. Please make sure the format of '
                                      'input parameters is right.')
        scp.close()
        return True

    def get_scp_enable(self):
        """Return ``(found, state)`` for the device's SCP-server switch.

        *found* is True when the device reports the setting and *state* is
        the reported text ('Enable'/'Disable'), otherwise ``(False, '')``.
        Always returns a 2-tuple: the original implementation returned a
        bare ``False`` on three paths, which made the caller's tuple
        unpacking (``retcode, cur_state = ...``) raise TypeError.
        """
        xml_str = CE_NC_GET_SCP_ENABLE
        ret_xml = get_nc_config(self.module, xml_str)
        if "<data/>" in ret_xml:
            return False, ''

        xml_str = ret_xml.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")

        # get scp-server state
        root = ElementTree.fromstring(xml_str)
        topo = root.find("data/sshs/sshServer")
        if topo is None:
            return False, ''

        for eles in topo:
            if eles.tag in ["scpEnable"]:
                return True, eles.text

        return False, ''

    def work(self):
        """Execute the copy task: validate, compare, transfer, report."""
        if not HAS_SCP:
            self.module.fail_json(
                msg="'Error: No scp package, please install it.'")

        if self.local_file and len(self.local_file) > 4096:
            self.module.fail_json(
                msg="'Error: The maximum length of local_file is 4096.'")

        if self.remote_file and len(self.remote_file) > 4096:
            self.module.fail_json(
                msg="'Error: The maximum length of remote_file is 4096.'")

        retcode, cur_state = self.get_scp_enable()
        if retcode and cur_state == 'Disable':
            self.module.fail_json(
                msg="'Error: Please ensure SCP server is enabled.'")

        if not os.path.isfile(self.local_file):
            self.module.fail_json(
                msg="Local file {} not found".format(self.local_file))

        dest = self.remote_file or ('/' + os.path.basename(self.local_file))
        remote_exists, file_size = self.remote_file_exists(
            dest, file_system=self.file_system)
        # A same-named remote file with a different size is treated as absent.
        if remote_exists and (os.path.getsize(self.local_file) != file_size):
            remote_exists = False

        if not remote_exists:
            self.changed = True
            file_exists = False
        else:
            file_exists = True
            self.transfer_result = 'The local file already exists on the device.'

        if not file_exists:
            self.transfer_file(dest)
            self.transfer_result = 'The local file has been successfully transferred to the device.'

        if self.remote_file is None:
            self.remote_file = '/' + os.path.basename(self.local_file)

        self.module.exit_json(
            changed=self.changed,
            transfer_result=self.transfer_result,
            local_file=self.local_file,
            remote_file=self.remote_file,
            file_system=self.file_system)
def main():
    """Module entry point: build the argument spec and run the copy task."""
    spec = dict(
        local_file=dict(required=True),
        remote_file=dict(required=False),
        file_system=dict(required=False, default='flash:')
    )
    # Merge in the common CloudEngine connection arguments.
    spec.update(ce_argument_spec)
    FileCopy(spec).work()
if __name__ == '__main__':
main()
| gpl-3.0 |
fevxie/odoo | openerp/addons/base/ir/ir_attachment.py | 183 | 16487 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hashlib
import itertools
import logging
import os
import re
from openerp import tools
from openerp.tools.translate import _
from openerp.exceptions import AccessError
from openerp.osv import fields,osv
from openerp import SUPERUSER_ID
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class ir_attachment(osv.osv):
"""Attachments are used to link binary files or url to any openerp document.
External attachment storage
---------------------------
The 'data' function field (_data_get,data_set) is implemented using
_file_read, _file_write and _file_delete which can be overridden to
implement other storage engines, shuch methods should check for other
location pseudo uri (example: hdfs://hadoppserver)
The default implementation is the file:dirname location that stores files
on the local filesystem using name based on their sha1 hash
"""
_order = 'id desc'
    def _name_get_resname(self, cr, uid, ids, object, method, context):
        # Function-field getter: map attachment id -> display name of the
        # record the attachment is linked to (res_model/res_id), or False
        # when the attachment is not linked to any record.
        data = {}
        for attachment in self.browse(cr, uid, ids, context=context):
            model_object = attachment.res_model
            res_id = attachment.res_id
            if model_object and res_id:
                model_pool = self.pool[model_object]
                res = model_pool.name_get(cr,uid,[res_id],context)
                res_name = res and res[0][1] or None
                if res_name:
                    field = self._columns.get('res_name',False)
                    if field and len(res_name) > field.size:
                        # NOTE(review): truncates to a fixed 30 characters even
                        # though the condition tests field.size -- looks
                        # inconsistent; confirm the intended width before
                        # changing either side.
                        res_name = res_name[:30] + '...'
                data[attachment.id] = res_name or False
            else:
                data[attachment.id] = False
        return data
def _storage(self, cr, uid, context=None):
return self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'ir_attachment.location', 'file')
def _filestore(self, cr, uid, context=None):
return tools.config.filestore(cr.dbname)
def force_storage(self, cr, uid, context=None):
"""Force all attachments to be stored in the currently configured storage"""
if not self.pool['res.users'].has_group(cr, uid, 'base.group_erp_manager'):
raise AccessError(_('Only administrators can execute this action.'))
location = self._storage(cr, uid, context)
domain = {
'db': [('store_fname', '!=', False)],
'file': [('db_datas', '!=', False)],
}[location]
ids = self.search(cr, uid, domain, context=context)
for attach in self.browse(cr, uid, ids, context=context):
attach.write({'datas': attach.datas})
return True
# 'data' field implementation
def _full_path(self, cr, uid, path):
# sanitize ath
path = re.sub('[.]', '', path)
path = path.strip('/\\')
return os.path.join(self._filestore(cr, uid), path)
def _get_path(self, cr, uid, bin_data):
sha = hashlib.sha1(bin_data).hexdigest()
# retro compatibility
fname = sha[:3] + '/' + sha
full_path = self._full_path(cr, uid, fname)
if os.path.isfile(full_path):
return fname, full_path # keep existing path
# scatter files across 256 dirs
# we use '/' in the db (even on windows)
fname = sha[:2] + '/' + sha
full_path = self._full_path(cr, uid, fname)
dirname = os.path.dirname(full_path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
return fname, full_path
def _file_read(self, cr, uid, fname, bin_size=False):
full_path = self._full_path(cr, uid, fname)
r = ''
try:
if bin_size:
r = os.path.getsize(full_path)
else:
r = open(full_path,'rb').read().encode('base64')
except IOError:
_logger.exception("_read_file reading %s", full_path)
return r
def _file_write(self, cr, uid, value):
bin_value = value.decode('base64')
fname, full_path = self._get_path(cr, uid, bin_value)
if not os.path.exists(full_path):
try:
with open(full_path, 'wb') as fp:
fp.write(bin_value)
except IOError:
_logger.exception("_file_write writing %s", full_path)
return fname
def _file_delete(self, cr, uid, fname):
# using SQL to include files hidden through unlink or due to record rules
cr.execute("SELECT COUNT(*) FROM ir_attachment WHERE store_fname = %s", (fname,))
count = cr.fetchone()[0]
full_path = self._full_path(cr, uid, fname)
if not count and os.path.exists(full_path):
try:
os.unlink(full_path)
except OSError:
_logger.exception("_file_delete could not unlink %s", full_path)
except IOError:
# Harmless and needed for race conditions
_logger.exception("_file_delete could not unlink %s", full_path)
def _data_get(self, cr, uid, ids, name, arg, context=None):
if context is None:
context = {}
result = {}
bin_size = context.get('bin_size')
for attach in self.browse(cr, uid, ids, context=context):
if attach.store_fname:
result[attach.id] = self._file_read(cr, uid, attach.store_fname, bin_size)
else:
result[attach.id] = attach.db_datas
return result
def _data_set(self, cr, uid, id, name, value, arg, context=None):
# We dont handle setting data to null
if not value:
return True
if context is None:
context = {}
location = self._storage(cr, uid, context)
file_size = len(value.decode('base64'))
attach = self.browse(cr, uid, id, context=context)
fname_to_delete = attach.store_fname
if location != 'db':
fname = self._file_write(cr, uid, value)
# SUPERUSER_ID as probably don't have write access, trigger during create
super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'store_fname': fname, 'file_size': file_size, 'db_datas': False}, context=context)
else:
super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'db_datas': value, 'file_size': file_size, 'store_fname': False}, context=context)
# After de-referencing the file in the database, check whether we need
# to garbage-collect it on the filesystem
if fname_to_delete:
self._file_delete(cr, uid, fname_to_delete)
return True
_name = 'ir.attachment'
_columns = {
'name': fields.char('Attachment Name', required=True),
'datas_fname': fields.char('File Name'),
'description': fields.text('Description'),
'res_name': fields.function(_name_get_resname, type='char', string='Resource Name', store=True),
'res_model': fields.char('Resource Model', readonly=True, help="The database object this attachment will be attached to"),
'res_id': fields.integer('Resource ID', readonly=True, help="The record id this is attached to"),
'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Owner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', change_default=True),
'type': fields.selection( [ ('url','URL'), ('binary','Binary'), ],
'Type', help="Binary File or URL", required=True, change_default=True),
'url': fields.char('Url', size=1024),
# al: We keep shitty field names for backward compatibility with document
'datas': fields.function(_data_get, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True),
'store_fname': fields.char('Stored Filename'),
'db_datas': fields.binary('Database Data'),
'file_size': fields.integer('File Size'),
}
_defaults = {
'type': 'binary',
'file_size': 0,
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.attachment', context=c),
}
def _auto_init(self, cr, context=None):
super(ir_attachment, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_attachment_res_idx',))
if not cr.fetchone():
cr.execute('CREATE INDEX ir_attachment_res_idx ON ir_attachment (res_model, res_id)')
cr.commit()
def check(self, cr, uid, ids, mode, context=None, values=None):
"""Restricts the access to an ir.attachment, according to referred model
In the 'document' module, it is overriden to relax this hard rule, since
more complex ones apply there.
"""
res_ids = {}
require_employee = False
if ids:
if isinstance(ids, (int, long)):
ids = [ids]
cr.execute('SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids,))
for rmod, rid, create_uid in cr.fetchall():
if not (rmod and rid):
if create_uid != uid:
require_employee = True
continue
res_ids.setdefault(rmod,set()).add(rid)
if values:
if values.get('res_model') and values.get('res_id'):
res_ids.setdefault(values['res_model'],set()).add(values['res_id'])
ima = self.pool.get('ir.model.access')
for model, mids in res_ids.items():
# ignore attachments that are not attached to a resource anymore when checking access rights
# (resource was deleted but attachment was not)
if not self.pool.get(model):
require_employee = True
continue
existing_ids = self.pool[model].exists(cr, uid, mids)
if len(existing_ids) != len(mids):
require_employee = True
ima.check(cr, uid, model, mode)
self.pool[model].check_access_rule(cr, uid, existing_ids, mode, context=context)
if require_employee:
if not uid == SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_user'):
raise except_orm(_('Access Denied'), _("Sorry, you are not allowed to access this document."))
def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
ids = super(ir_attachment, self)._search(cr, uid, args, offset=offset,
limit=limit, order=order,
context=context, count=False,
access_rights_uid=access_rights_uid)
if not ids:
if count:
return 0
return []
# Work with a set, as list.remove() is prohibitive for large lists of documents
# (takes 20+ seconds on a db with 100k docs during search_count()!)
orig_ids = ids
ids = set(ids)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document.
# Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
# and the permissions are checked in super() and below anyway.
cr.execute("""SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids),))
targets = cr.dictfetchall()
model_attachments = {}
for target_dict in targets:
if not target_dict['res_model']:
continue
# model_attachments = { 'model': { 'res_id': [id1,id2] } }
model_attachments.setdefault(target_dict['res_model'],{}).setdefault(target_dict['res_id'] or 0, set()).add(target_dict['id'])
# To avoid multiple queries for each attachment found, checks are
# performed in batch as much as possible.
ima = self.pool.get('ir.model.access')
for model, targets in model_attachments.iteritems():
if model not in self.pool:
continue
if not ima.check(cr, uid, model, 'read', False):
# remove all corresponding attachment ids
for attach_id in itertools.chain(*targets.values()):
ids.remove(attach_id)
continue # skip ir.rule processing, these ones are out already
# filter ids according to what access rules permit
target_ids = targets.keys()
allowed_ids = [0] + self.pool[model].search(cr, uid, [('id', 'in', target_ids)], context=context)
disallowed_ids = set(target_ids).difference(allowed_ids)
for res_id in disallowed_ids:
for attach_id in targets[res_id]:
ids.remove(attach_id)
# sort result according to the original sort ordering
result = [id for id in orig_ids if id in ids]
return len(result) if count else list(result)
def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'):
if isinstance(ids, (int, long)):
ids = [ids]
self.check(cr, uid, ids, 'read', context=context)
return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context=context, load=load)
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
self.check(cr, uid, ids, 'write', context=context, values=vals)
if 'file_size' in vals:
del vals['file_size']
return super(ir_attachment, self).write(cr, uid, ids, vals, context)
def copy(self, cr, uid, id, default=None, context=None):
self.check(cr, uid, [id], 'write', context=context)
return super(ir_attachment, self).copy(cr, uid, id, default, context)
def unlink(self, cr, uid, ids, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
self.check(cr, uid, ids, 'unlink', context=context)
# First delete in the database, *then* in the filesystem if the
# database allowed it. Helps avoid errors when concurrent transactions
# are deleting the same file, and some of the transactions are
# rolled back by PostgreSQL (due to concurrent updates detection).
to_delete = [a.store_fname
for a in self.browse(cr, uid, ids, context=context)
if a.store_fname]
res = super(ir_attachment, self).unlink(cr, uid, ids, context)
for file_path in to_delete:
self._file_delete(cr, uid, file_path)
return res
def create(self, cr, uid, values, context=None):
self.check(cr, uid, [], mode='write', context=context, values=values)
if 'file_size' in values:
del values['file_size']
return super(ir_attachment, self).create(cr, uid, values, context)
def action_get(self, cr, uid, context=None):
return self.pool.get('ir.actions.act_window').for_xml_id(
cr, uid, 'base', 'action_attachment', context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
arborh/tensorflow | tensorflow/python/autograph/utils/tensors_test.py | 23 | 2715 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensors module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.utils import tensors
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.platform import test
class TensorsTest(test.TestCase):
  """Unit tests for the predicates in autograph.utils.tensors."""

  def _simple_tensor_array(self):
    # A static-size TensorArray: is_tensor_array should be True for it.
    return tensor_array_ops.TensorArray(dtypes.int32, size=3)

  def _simple_tensor_list(self):
    # An empty TensorList tensor: is_tensor_list should be True for it.
    return list_ops.empty_tensor_list(
        element_shape=constant_op.constant([1]), element_dtype=dtypes.int32)

  def _simple_list_of_tensors(self):
    # A plain Python list of tensors: neither a TensorArray nor a TensorList.
    return [constant_op.constant(1), constant_op.constant(2)]

  def test_is_tensor_array(self):
    self.assertTrue(tensors.is_tensor_array(self._simple_tensor_array()))
    self.assertFalse(tensors.is_tensor_array(self._simple_tensor_list()))
    self.assertFalse(tensors.is_tensor_array(constant_op.constant(1)))
    self.assertFalse(tensors.is_tensor_array(self._simple_list_of_tensors()))
    self.assertFalse(tensors.is_tensor_array(None))

  def test_is_tensor_list(self):
    self.assertFalse(tensors.is_tensor_list(self._simple_tensor_array()))
    self.assertTrue(tensors.is_tensor_list(self._simple_tensor_list()))
    self.assertFalse(tensors.is_tensor_list(constant_op.constant(1)))
    self.assertFalse(tensors.is_tensor_list(self._simple_list_of_tensors()))
    self.assertFalse(tensors.is_tensor_list(None))

  def test_is_range_tensor(self):
    # Bug fix: this method was named `is_range_tensor` (no `test_` prefix),
    # so the unittest runner never discovered it and its assertions never
    # ran. Renamed so the test actually executes.
    self.assertTrue(tensors.is_range_tensor(math_ops.range(1)))
    self.assertTrue(tensors.is_range_tensor(math_ops.range(1, 2)))
    self.assertTrue(tensors.is_range_tensor(math_ops.range(1, 2, 3)))
    self.assertFalse(tensors.is_range_tensor(None))
    self.assertFalse(tensors.is_range_tensor(constant_op.constant(range(1))))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
Jordan-wicker/Bread | node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()  # populated in main() from "key=value" CLI arguments
ARGUMENTS = None       # full argv, stashed globally for AbsoluteNode()
class CmpTuple(object):
  """Compare function between 2 tuple."""
  def __call__(self, x, y):
    # Order tuples by their first element only (Python 2 cmp-style
    # comparator: returns negative/zero/positive).
    return cmp(x[0], y[0])
class CmpNode(object):
  """Compare function between 2 xml nodes."""
  def __call__(self, x, y):
    def get_string(node):
      # Build a canonical sort key: tag name, node value, the 'Name'
      # attribute (primary attribute key), then all attributes sorted
      # by attribute name.
      node_string = "node"
      node_string += node.nodeName
      if node.nodeValue:
        node_string += node.nodeValue
      if node.attributes:
        # We first sort by name, if present.
        node_string += node.getAttribute("Name")
        all_nodes = []
        for (name, value) in node.attributes.items():
          all_nodes.append((name, value))
        all_nodes.sort(CmpTuple())
        for (name, value) in all_nodes:
          node_string += name
          node_string += value
      return node_string
    return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
  """Print *node* and its children to stdout as re-indented xml."""
  if node.nodeType == Node.TEXT_NODE:
    if node.data.strip():
      print '%s%s' % (' '*indent, node.data.strip())
    return
  if node.childNodes:
    node.normalize()
  # Get the number of attributes
  attr_count = 0
  if node.attributes:
    attr_count = node.attributes.length
  # Print the main tag
  if attr_count == 0:
    print '%s<%s>' % (' '*indent, node.nodeName)
  else:
    # One attribute per line, sorted by attribute name.
    print '%s<%s' % (' '*indent, node.nodeName)
    all_attributes = []
    for (name, value) in node.attributes.items():
      all_attributes.append((name, value))
    all_attributes.sort(CmpTuple())
    for (name, value) in all_attributes:
      print '%s %s="%s"' % (' '*indent, name, value)
    print '%s>' % (' '*indent)
  if node.nodeValue:
    print '%s %s' % (' '*indent, node.nodeValue)
  for sub_node in node.childNodes:
    PrettyPrintNode(sub_node, indent=indent+2)
  print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
  """Return a flat list of *node*'s children, inlining nested filters.

  The special '_excluded_files' filter (and everything under it) is
  dropped entirely.
  """
  if node.attributes and node.getAttribute('Name') == '_excluded_files':
    return []
  flattened = []
  for child in node.childNodes:
    if child.nodeName != 'Filter':
      flattened.append(child)
    else:
      # Inline the filter's contents recursively.
      flattened.extend(FlattenFilter(child))
  return flattened
def FixFilenames(filenames, current_directory):
  """Expand REPLACEMENTS keys in each filename and absolutize paths.

  Relative paths are resolved against current_directory (via os.chdir,
  which is a process-wide side effect); empty entries are dropped.
  """
  new_list = []
  for filename in filenames:
    if filename:
      for key in REPLACEMENTS:
        filename = filename.replace(key, REPLACEMENTS[key])
      # chdir so that os.path.abspath resolves relative to the vcproj dir.
      os.chdir(current_directory)
      filename = filename.strip('"\' ')
      if filename.startswith('$'):
        # Leave MSBuild-style macros (e.g. $(OutDir)...) untouched.
        new_list.append(filename)
      else:
        new_list.append(os.path.abspath(filename))
  return new_list
def AbsoluteNode(node):
  """Makes all the properties we know about in this node absolute."""
  if node.attributes:
    for (name, value) in node.attributes.items():
      if name in ['InheritedPropertySheets', 'RelativePath',
                  'AdditionalIncludeDirectories',
                  'IntermediateDirectory', 'OutputDirectory',
                  'AdditionalLibraryDirectories']:
        # We want to fix up these paths
        path_list = value.split(';')
        # ARGUMENTS[1] is the vcproj path given on the command line.
        new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
        node.setAttribute(name, ';'.join(new_list))
      if not value:
        # Drop attributes that ended up empty.
        node.removeAttribute(name)
def CleanupVcproj(node):
  """Recursively normalize *node*: absolutize paths, strip whitespace,
  dedupe/sort ';'-separated attribute values, flatten filters, sort the
  children, and drop empty Tool and all UserMacro nodes."""
  for sub_node in node.childNodes:
    AbsoluteNode(sub_node)
    CleanupVcproj(sub_node)
  # Normalize the node, and remove all extranous whitespaces.
  for sub_node in node.childNodes:
    if sub_node.nodeType == Node.TEXT_NODE:
      sub_node.data = sub_node.data.replace("\r", "")
      sub_node.data = sub_node.data.replace("\n", "")
      sub_node.data = sub_node.data.rstrip()
  # Fix all the semicolon separated attributes to be sorted, and we also
  # remove the dups.
  if node.attributes:
    for (name, value) in node.attributes.items():
      sorted_list = sorted(value.split(';'))
      unique_list = []
      for i in sorted_list:
        if not unique_list.count(i):
          unique_list.append(i)
      node.setAttribute(name, ';'.join(unique_list))
      if not value:
        node.removeAttribute(name)
  if node.childNodes:
    node.normalize()
  # For each node, take a copy, and remove it from the list.
  node_array = []
  while node.childNodes and node.childNodes[0]:
    # Take a copy of the node and remove it from the list.
    current = node.childNodes[0]
    node.removeChild(current)
    # If the child is a filter, we want to append all its children
    # to this same list.
    if current.nodeName == 'Filter':
      node_array.extend(FlattenFilter(current))
    else:
      node_array.append(current)
  # Sort the list.
  node_array.sort(CmpNode())
  # Insert the nodes in the correct order.
  for new_node in node_array:
    # But don't append empty tool node.
    if new_node.nodeName == 'Tool':
      if new_node.attributes and new_node.attributes.length == 1:
        # This one was empty.
        continue
    if new_node.nodeName == 'UserMacro':
      continue
    node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
  #TODO(nsylvain): Find a better way to navigate the xml.
  """Return the <Configuration> nodes found under <Configurations>."""
  result = []
  for child in vcproj.childNodes:
    if child.nodeName != "Configurations":
      continue
    for candidate in child.childNodes:
      if candidate.nodeName == "Configuration":
        result.append(candidate)
  return result
def GetChildrenVsprops(filename):
  """Return the absolute paths of the property sheets inherited by
  the vsprops file *filename* (empty list if it inherits none)."""
  dom = parse(filename)
  if dom.documentElement.attributes:
    vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
    return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
  return []
def SeekToNode(node1, child2):
  """Find the child of node1 that matches child2's tag and Name attribute.

  Returns None when child2 is a text node, has no Name attribute, or no
  matching child exists.
  """
  # A text node does not have properties, so there is nothing to match on.
  if child2.nodeType == Node.TEXT_NODE:
    return None
  wanted_name = child2.getAttribute("Name")
  if not wanted_name:
    # There is no name. We don't know how to merge.
    return None
  # Scan the candidates; the nodeName check guarantees getAttribute is only
  # called on element nodes.
  for candidate in node1.childNodes:
    if (candidate.nodeName == child2.nodeName and
        candidate.getAttribute("Name") == wanted_name):
      return candidate
  # No match. We give up.
  return None
def MergeAttributes(node1, node2):
  """Merge node2's attributes into node1.

  'Name' is never overwritten; values present in both nodes are joined
  with ';' when they differ; 'InheritedPropertySheets' is removed from
  node1 after merging (it is useless once sheets are inlined).
  """
  if not node2.attributes:
    return
  for (name, incoming) in node2.attributes.items():
    # Don't merge the 'Name' attribute.
    if name == 'Name':
      continue
    existing = node1.getAttribute(name)
    if not existing:
      node1.setAttribute(name, incoming)
    elif existing != incoming:
      node1.setAttribute(name, ';'.join([existing, incoming]))
    if name == 'InheritedPropertySheets':
      node1.removeAttribute(name)
def MergeProperties(node1, node2):
  """Recursively merge node2 into node1: attributes are merged, matching
  (tag, Name) children are merged in place, and unmatched children of
  node2 are deep-copied into node1."""
  MergeAttributes(node1, node2)
  for child2 in node2.childNodes:
    child1 = SeekToNode(node1, child2)
    if child1:
      MergeProperties(child1, child2)
    else:
      node1.appendChild(child2.cloneNode(True))
def main(argv):
  """Main function of this vcproj prettifier."""
  global ARGUMENTS
  ARGUMENTS = argv
  # check if we have exactly 1 parameter.
  if len(argv) < 2:
    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
           '[key2=value2]' % argv[0])
    return 1
  # Parse the keys
  for i in range(2, len(argv)):
    (key, value) = argv[i].split('=')
    REPLACEMENTS[key] = value
  # Open the vcproj and parse the xml.
  dom = parse(argv[1])
  # First thing we need to do is find the Configuration Node and merge them
  # with the vsprops they include.
  for configuration_node in GetConfiguationNodes(dom.documentElement):
    # Get the property sheets associated with this configuration.
    vsprops = configuration_node.getAttribute('InheritedPropertySheets')
    # Fix the filenames to be absolute.
    vsprops_list = FixFilenames(vsprops.strip().split(';'),
                                os.path.dirname(argv[1]))
    # Extend the list of vsprops with all vsprops contained in the current
    # vsprops. (Note: the list is extended while being iterated, so
    # transitively included sheets are picked up too.)
    for current_vsprops in vsprops_list:
      vsprops_list.extend(GetChildrenVsprops(current_vsprops))
    # Now that we have all the vsprops, we need to merge them.
    for current_vsprops in vsprops_list:
      MergeProperties(configuration_node,
                      parse(current_vsprops).documentElement)
  # Now that everything is merged, we need to cleanup the xml.
  CleanupVcproj(dom.documentElement)
  # Finally, we use the prett xml function to print the vcproj back to the
  # user.
  #print dom.toprettyxml(newl="\n")
  PrettyPrintNode(dom.documentElement)
  return 0
# Script entry point: exit with main()'s status code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| mit |
cfelton/myhdl | example/rs232/test_rs232.py | 13 | 4075 | import sys
from random import randrange
import unittest
from unittest import TestCase
from rs232_rx import rs232_rx
from myhdl import Simulation, Signal, intbv, join
from rs232_tx import rs232_tx
from rs232_util import Config, EVEN, ODD, ParityError, Error
class rs232Test(TestCase):
    """ rs232 functional unit test """
    # Test benches are MyHDL generators: each drives the transmitter and
    # receiver in lockstep over a shared wire (rx = tx) and checks the
    # received byte, for all 256 byte values.
    def default(self):
        tx = Signal(intbv(0))
        rx = tx
        actual = intbv(0)
        cfg = Config()
        for i in range(256):
            data = intbv(i)
            yield join(rs232_tx(tx, data, cfg), rs232_rx(rx, actual, cfg))
            self.assertEqual(data, actual)
    def testDefault(self):
        """ Check default case """
        Simulation(self.default()).run(quiet=1)
    def oddParity(self):
        tx = Signal(intbv(0))
        rx = tx
        actual = intbv(0)
        cfg = Config(parity=ODD)
        for i in range(256):
            data = intbv(i)
            yield join(rs232_tx(tx, data, cfg), rs232_rx(rx, actual, cfg))
            self.assertEqual(data, actual)
    def testOddParity(self):
        """ Check odd parity """
        Simulation(self.oddParity()).run(quiet=1)
    def sevenBitsEvenParity(self):
        tx = Signal(intbv(0))
        rx = tx
        actual = intbv(0)
        cfg = Config(parity=EVEN, n_bits=7)
        cfg_rx = Config(parity=EVEN, n_bits=7)
        for i in range(256):
            data = intbv(i)
            yield join(rs232_tx(tx, data, cfg), rs232_rx(rx, actual, cfg_rx))
            self.assertEqual(data, actual)
    def testSevenBitsEvenParity(self):
        """ Check 7 bits with even parity """
        Simulation(self.sevenBitsEvenParity()).run(quiet=1)
    # NOTE(review): this bench method shadows the imported ParityError
    # exception name inside the class namespace; harmless here since the
    # handler below references the module-level name, but worth renaming.
    def ParityError(self):
        tx = Signal(intbv(0))
        rx = tx
        actual = intbv(0)
        # Mismatched parity settings: the receiver must raise ParityError.
        cfg_rx = Config(parity=ODD)
        cfg_tx = Config(parity=EVEN)
        data = intbv(randrange(256))
        yield join(rs232_tx(tx, data, cfg_tx), rs232_rx(rx, actual, cfg_rx))
    def testParityError(self):
        """ Expect a parity error """
        try:
            Simulation(self.ParityError()).run(quiet=1)
        except ParityError:
            pass
        else:
            self.fail("Expected parity error")
class rs232Characterize(TestCase):
    """ rs232 baud rate characterization test """
    # Bench that transmits all 256 byte values at tx_baud_rate into a
    # receiver fixed at the default baud rate, raising Error on the first
    # corrupted byte.
    def bench(self, tx_baud_rate):
        tx = Signal(intbv(0))
        rx = tx
        actual = intbv(0)
        cfg_tx = Config(baud_rate=tx_baud_rate)
        cfg_rx = Config()
        for i in range(256):
            data = intbv(i)
            yield join(rs232_tx(tx, data, cfg_tx), rs232_rx(rx, actual, cfg_rx))
            if not data == actual:
                raise Error
    def testCharacterize(self):
        """ Find min/max tx baud rate tolerance by simulation """
        # Strategy: step the tx baud rate coarsely away from 9600 until the
        # bench fails, then step back finely until it passes again; the
        # passing rate is the tolerance limit. Done upward (max) and
        # downward (min).
        coarseOffset = 100
        fineOffset = 5
        tx_baud_rate = 9600
        try:
            while 1:
                tx_baud_rate += coarseOffset
                Simulation(self.bench(tx_baud_rate)).run(quiet=1)
        except Error:
            pass
        while 1:
            try:
                tx_baud_rate -= fineOffset
                Simulation(self.bench(tx_baud_rate)).run(quiet=1)
            except Error:
                continue
            else:
                print "Max tx baudrate: %s" % tx_baud_rate
                break
        tx_baud_rate = 9600
        try:
            while 1:
                tx_baud_rate -= coarseOffset
                Simulation(self.bench(tx_baud_rate)).run(quiet=1)
        except Error:
            pass
        while 1:
            try:
                tx_baud_rate += fineOffset
                Simulation(self.bench(tx_baud_rate)).run(quiet=1)
            except Error:
                continue
            else:
                print "Min tx baudrate: %s" % tx_baud_rate
                break
# Run the suite with a verbose text runner when executed as a script.
if __name__ == "__main__":
    testRunner = unittest.TextTestRunner(verbosity=2)
    unittest.main(testRunner=testRunner)
| lgpl-2.1 |
thornomad/django-hitcount | tests/test_admin.py | 1 | 7857 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, RequestFactory
from django.core.exceptions import PermissionDenied
try:
from django.core.urlresolvers import reverse
except ImportError:
from django.urls import reverse
from django.contrib.admin.sites import AdminSite
from django.contrib.messages.storage.fallback import FallbackStorage
from django.contrib.auth.models import User
from hitcount.admin import HitAdmin, HitCountAdmin
from hitcount.models import Hit, BlacklistIP, BlacklistUserAgent
from hitcount.utils import get_hitcount_model
from blog.models import Post
HitCount = get_hitcount_model()
class HitCountAdminTest(TestCase):
    """Tests for the HitCountAdmin model admin."""

    def setUp(self):
        self.admin = HitCountAdmin(HitCount, AdminSite())
        self.factory = RequestFactory()

    def test_has_add_permission(self):
        """
        Should return False always.
        """
        request = self.factory
        self.assertFalse(self.admin.has_add_permission(request))
class HitAdminTest(TestCase):
    """Tests for the HitAdmin model admin and its custom actions."""
    def setUp(self):
        # Fixture: one Post with one HitCount and ten Hits, with
        # ips 127.0.0.0 .. 127.0.0.9 and user agents agent_0 .. agent_9.
        self.factory = RequestFactory()
        self.admin = HitAdmin(Hit, AdminSite())
        self.request = self.factory.get(reverse('admin:hitcount_hit_changelist'))
        # RequestFactory requests lack session/messages middleware, so fake
        # them by hand:
        # https://code.djangoproject.com/ticket/17971
        setattr(self.request, 'session', 'session')
        messages = FallbackStorage(self.request)
        setattr(self.request, '_messages', messages)
        post = Post.objects.create(title='my title', content='my text')
        hit_count = HitCount.objects.create(content_object=post)
        for x in range(10):
            Hit.objects.create(hitcount=hit_count, ip="127.0.0.%s" % x, user_agent="agent_%s" % x)
    def test_has_add_permission(self):
        """
        Should return False always.
        """
        self.assertFalse(self.admin.has_add_permission(self.factory))
    def test_get_actions(self):
        """
        Actions should be: ['blacklist_ips',
                            'blacklist_user_agents',
                            'blacklist_delete_ips',
                            'blacklist_delete_user_agents',
                            'delete_queryset',
                            ]
        """
        actions = ['blacklist_ips',
                   'blacklist_user_agents',
                   'blacklist_delete_ips',
                   'blacklist_delete_user_agents',
                   'delete_queryset',
                   ]
        self.assertEqual(actions, list(self.admin.get_actions(self.request).keys()))
    def test_blacklist_ips_single(self):
        """
        Test adding `blacklist_ips` via Admin action.
        """
        # add by hit object, should
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_ips(self.request, qs)
        ip = BlacklistIP.objects.get(pk=1)
        self.assertEqual(ip.ip, "127.0.0.5")
        self.assertEqual(len(BlacklistIP.objects.all()), 1)
    def test_blacklist_ips_multiple(self):
        """
        Test adding `blacklist_ips` via Admin action with multiple items.
        """
        qs = Hit.objects.all()[:5]
        self.admin.blacklist_ips(self.request, qs)
        ips = BlacklistIP.objects.values_list('ip', flat=True)
        # Default Hit ordering is newest first, so the oldest of the five
        # selected hits is 127.0.0.5.
        self.assertEqual(ips[4], '127.0.0.5')
        self.assertEqual(len(BlacklistIP.objects.all()), 5)
    def test_blacklist_ips_add_only_once(self):
        """
        Test adding `blacklist_ips` to ensure adding the same IP address more
        than once does not duplicate a record in the BlacklistIP table.
        """
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_ips(self.request, qs)
        self.assertEqual(len(BlacklistIP.objects.all()), 1)
        # adding a second time should not increase the list
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_ips(self.request, qs)
        self.assertEqual(len(BlacklistIP.objects.all()), 1)
    def test_blacklist_user_agents_single(self):
        """
        Test adding `blacklist_user_agent` via Admin action.
        """
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_user_agents(self.request, qs)
        ua = BlacklistUserAgent.objects.get(pk=1)
        self.assertEqual(ua.user_agent, 'agent_5')
        self.assertEqual(len(BlacklistUserAgent.objects.all()), 1)
    def test_blacklist_user_agents_multiple(self):
        """
        Test adding `blacklist_ips` via Admin action with multiple items.
        """
        qs = Hit.objects.all()[:5]
        self.admin.blacklist_user_agents(self.request, qs)
        uas = BlacklistUserAgent.objects.values_list('user_agent', flat=True)
        self.assertEqual(uas[2], 'agent_7')
        self.assertEqual(len(BlacklistUserAgent.objects.all()), 5)
    def test_blacklist_user_agents_add_only_once(self):
        """
        Test adding `blacklist_ips` to ensure adding the same user agent more
        than once does not duplicate a record in the BlacklistUserAgent table.
        """
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_user_agents(self.request, qs)
        self.assertEqual(len(BlacklistUserAgent.objects.all()), 1)
        # adding a second time should not increase the list
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.blacklist_user_agents(self.request, qs)
        self.assertEqual(len(BlacklistUserAgent.objects.all()), 1)
    def test_delete_queryset(self):
        """
        Test the `delete_queryset` action.
        """
        # Deletion requires an admin with the delete permission.
        my_admin = User.objects.create_superuser('myuser', 'myemail@example.com', '1234')
        self.request.user = my_admin
        qs = Hit.objects.all()[:5]
        self.admin.delete_queryset(self.request, qs)
        hit_count = HitCount.objects.get(pk=1)
        # Both the Hit rows and the aggregated counter must shrink.
        self.assertEqual(len(Hit.objects.all()), 5)
        self.assertEqual(hit_count.hits, 5)
    def test_delete_queryset_single_item(self):
        """
        Test the `delete_queryset` action against a single item.
        """
        my_admin = User.objects.create_superuser('myuser', 'myemail@example.com', '1234')
        self.request.user = my_admin
        qs = Hit.objects.filter(ip="127.0.0.5")
        self.admin.delete_queryset(self.request, qs)
        hit_count = HitCount.objects.get(pk=1)
        self.assertEqual(len(Hit.objects.all()), 9)
        self.assertEqual(hit_count.hits, 9)
    def test_delete_queryset_permission_denied(self):
        """
        Test the `delete_queryset` action against an unauthorized user.
        """
        # Plain (non-super) user: the action must refuse to delete.
        my_admin = User.objects.create_user('myuser', 'myemail@example.com', '1234')
        self.request.user = my_admin
        qs = Hit.objects.all()[:5]
        with self.assertRaises(PermissionDenied):
            self.admin.delete_queryset(self.request, qs)
    def test_blacklist_and_delete_ips(self):
        """
        Test the `blacklist_delete_ips` action.
        """
        my_admin = User.objects.create_superuser('myuser', 'myemail@example.com', '1234')
        self.request.user = my_admin
        qs = Hit.objects.all()[:5]
        self.admin.blacklist_delete_ips(self.request, qs)
        hit_count = HitCount.objects.get(pk=1)
        self.assertEqual(len(Hit.objects.all()), 5)
        self.assertEqual(hit_count.hits, 5)
        self.assertEqual(len(BlacklistIP.objects.all()), 5)
    def test_blacklist_and_delete_user_agents(self):
        """
        Test the `blacklist_delete_user_agents` action.
        """
        my_admin = User.objects.create_superuser('myuser', 'myemail@example.com', '1234')
        self.request.user = my_admin
        qs = Hit.objects.all()[:5]
        self.admin.blacklist_delete_user_agents(self.request, qs)
        hit_count = HitCount.objects.get(pk=1)
        self.assertEqual(len(Hit.objects.all()), 5)
        self.assertEqual(hit_count.hits, 5)
        self.assertEqual(len(BlacklistUserAgent.objects.all()), 5)
| mit |
trezorg/django | tests/regressiontests/defer_regress/tests.py | 53 | 5528 | from operator import attrgetter
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.backends.db import SessionStore
from django.db import connection
from django.db.models.loading import cache
from django.test import TestCase
from models import ResolveThis, Item, RelatedItem, Child, Leaf
class DeferRegressionTest(TestCase):
    """Regression tests for QuerySet.defer()/only() edge cases.

    The assertions below are order-dependent: each section builds on the
    database state created by the previous one.
    """

    def test_basic(self):
        # Deferred fields should really be deferred and not accidentally use
        # the field's default value just because they aren't passed to __init__
        Item.objects.create(name="first", value=42)
        obj = Item.objects.only("name", "other_value").get(name="first")
        # Accessing "name" doesn't trigger a new database query. Accessing
        # "value" or "text" should.
        def test():
            self.assertEqual(obj.name, "first")
            self.assertEqual(obj.other_value, 0)
        self.assertNumQueries(0, test)
        def test():
            self.assertEqual(obj.value, 42)
        self.assertNumQueries(1, test)
        def test():
            self.assertEqual(obj.text, "xyzzy")
        self.assertNumQueries(1, test)
        # A second access of the already-loaded deferred field hits the
        # instance cache, so no additional query is issued.
        def test():
            self.assertEqual(obj.text, "xyzzy")
        self.assertNumQueries(0, test)

        # Regression test for #10695. Make sure different instances don't
        # inadvertently share data in the deferred descriptor objects.
        i = Item.objects.create(name="no I'm first", value=37)
        items = Item.objects.only("value").order_by("-value")
        self.assertEqual(items[0].name, "first")
        self.assertEqual(items[1].name, "no I'm first")

        # Deferring a ForeignKey still exposes both the raw id and the
        # related instance.
        RelatedItem.objects.create(item=i)
        r = RelatedItem.objects.defer("item").get()
        self.assertEqual(r.item_id, i.id)
        self.assertEqual(r.item, i)

        # Some further checks for select_related() and inherited model
        # behaviour (regression for #10710).
        c1 = Child.objects.create(name="c1", value=42)
        c2 = Child.objects.create(name="c2", value=37)
        Leaf.objects.create(name="l1", child=c1, second_child=c2)

        obj = Leaf.objects.only("name", "child").select_related()[0]
        self.assertEqual(obj.child.name, "c1")

        self.assertQuerysetEqual(
            Leaf.objects.select_related().only("child__name", "second_child__name"), [
                "l1",
            ],
            attrgetter("name")
        )

        # Models instances with deferred fields should still return the same
        # content types as their non-deferred versions (bug #10738).
        ctype = ContentType.objects.get_for_model
        c1 = ctype(Item.objects.all()[0])
        c2 = ctype(Item.objects.defer("name")[0])
        c3 = ctype(Item.objects.only("name")[0])
        self.assertTrue(c1 is c2 is c3)

        # Regression for #10733 - only() can be used on a model with two
        # foreign keys.
        results = Leaf.objects.only("name", "child", "second_child").select_related()
        self.assertEqual(results[0].child.name, "c1")
        self.assertEqual(results[0].second_child.name, "c2")

        results = Leaf.objects.only("name", "child", "second_child", "child__name", "second_child__name").select_related()
        self.assertEqual(results[0].child.name, "c1")
        self.assertEqual(results[0].second_child.name, "c2")

        # Test for #12163 - Pickling error saving session with unsaved model
        # instances.
        SESSION_KEY = '2b1189a188b44ad18c35e1baac6ceead'

        item = Item()
        item._deferred = False
        s = SessionStore(SESSION_KEY)
        s.clear()
        s["item"] = item
        s.save()

        s = SessionStore(SESSION_KEY)
        s.modified = True
        s.save()

        # The instance survives the pickle/unpickle round trip with its
        # _deferred flag intact.
        i2 = s["item"]
        self.assertFalse(i2._deferred)

        # Regression for #11936 - loading.get_models should not return deferred
        # models by default.
        klasses = sorted(
            cache.get_models(cache.get_app("defer_regress")),
            key=lambda klass: klass.__name__
        )
        self.assertEqual(
            klasses, [
                Child,
                Item,
                Leaf,
                RelatedItem,
                ResolveThis,
            ]
        )

        # With include_deferred=True the auto-generated deferred proxy
        # classes (named after the deferred field combinations) are
        # returned as well.
        klasses = sorted(
            map(
                attrgetter("__name__"),
                cache.get_models(
                    cache.get_app("defer_regress"), include_deferred=True
                ),
            )
        )
        self.assertEqual(
            klasses, [
                "Child",
                "Child_Deferred_value",
                "Item",
                "Item_Deferred_name",
                "Item_Deferred_name_other_value_text",
                "Item_Deferred_name_other_value_value",
                "Item_Deferred_other_value_text_value",
                "Item_Deferred_text_value",
                "Leaf",
                "Leaf_Deferred_child_id_second_child_id_value",
                "Leaf_Deferred_name_value",
                "Leaf_Deferred_second_child_value",
                "Leaf_Deferred_value",
                "RelatedItem",
                "RelatedItem_Deferred_",
                "RelatedItem_Deferred_item_id",
                "ResolveThis",
            ]
        )

    def test_resolve_columns(self):
        # Regression test: deferring a field must not break column
        # resolution when rows are materialised.
        rt = ResolveThis.objects.create(num=5.0, name='Foobar')
        qs = ResolveThis.objects.defer('num')
        self.assertEqual(1, qs.count())
        self.assertEqual('Foobar', qs[0].name)
| bsd-3-clause |
kbrebanov/ansible | lib/ansible/modules/storage/netapp/netapp_e_volume_copy.py | 41 | 16492 | #!/usr/bin/python
# (c) 2016, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: netapp_e_volume_copy
short_description: Create volume copy pairs
description:
- Create and delete snapshots images on volume groups for NetApp E-series storage arrays.
version_added: '2.2'
author: Kevin Hulquest (@hulquest)
extends_documentation_fragment:
- netapp.eseries
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API, for example C(https://prod-1.wahoo.acme.com/devmgr/v2).
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
source_volume_id:
description:
- The id of the volume copy source.
- If used, must be paired with destination_volume_id
- Mutually exclusive with volume_copy_pair_id, and search_volume_id
destination_volume_id:
description:
- The id of the volume copy destination.
- If used, must be paired with source_volume_id
- Mutually exclusive with volume_copy_pair_id, and search_volume_id
volume_copy_pair_id:
description:
- The id of a given volume copy pair
- Mutually exclusive with destination_volume_id, source_volume_id, and search_volume_id
- Can use to delete or check presence of volume pairs
- Must specify this or (destination_volume_id and source_volume_id)
state:
description:
- Whether the specified volume copy pair should exist or not.
required: True
choices: ['present', 'absent']
create_copy_pair_if_does_not_exist:
description:
- Defines if a copy pair will be created if it does not exist.
- If set to True destination_volume_id and source_volume_id are required.
choices: [True, False]
default: True
start_stop_copy:
description:
- starts a re-copy or stops a copy in progress
- "Note: If you stop the initial file copy before it it done the copy pair will be destroyed"
- Requires volume_copy_pair_id
search_volume_id:
description:
- Searches for all valid potential target and source volumes that could be used in a copy_pair
- Mutually exclusive with volume_copy_pair_id, destination_volume_id and source_volume_id
"""
RESULTS = """
"""
EXAMPLES = """
---
msg:
description: Success message
returned: success
type: string
sample: Json facts for the volume copy that was created.
"""
RETURN = """
msg:
description: Success message
returned: success
type: string
sample: Created Volume Copy Pair with ID
"""
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.netapp import request
HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json",
}
def find_volume_copy_pair_id_from_source_volume_id_and_destination_volume_id(params):
    """Return the id of the copy pair matching BOTH the source and the
    destination volume ids in ``params``, or ``None`` if no pair matches.

    Queries ``GET storage-systems/<ssid>/volume-copy-jobs`` and scans the
    returned jobs.
    """
    get_status = 'storage-systems/%s/volume-copy-jobs' % params['ssid']
    url = params['api_url'] + get_status

    (rc, resp) = request(url, method='GET', url_username=params['api_username'],
                         url_password=params['api_password'], headers=HEADERS,
                         validate_certs=params['validate_certs'])

    volume_copy_pair_id = None
    for potential_copy_pair in resp:
        if potential_copy_pair['sourceVolume'] == params['source_volume_id']:
            # Bug fix: the original tested 'sourceVolume' twice, so any pair
            # sharing the source was returned regardless of its destination.
            # The destination must match too.  'targetVolume' mirrors the
            # 'targetId' key used when creating a pair -- confirm the exact
            # field name against the SANtricity API response schema.
            if potential_copy_pair['targetVolume'] == params['destination_volume_id']:
                volume_copy_pair_id = potential_copy_pair['id']

    return volume_copy_pair_id
def create_copy_pair(params):
    """POST a new volume copy job.

    Returns ``(success, (status_code, body))`` where success is True on
    HTTP 200.
    """
    endpoint = 'storage-systems/%s/volume-copy-jobs' % params['ssid']
    url = params['api_url'] + endpoint
    payload = {
        "sourceId": params['source_volume_id'],
        "targetId": params['destination_volume_id']
    }
    (rc, resp) = request(url, data=json.dumps(payload), ignore_errors=True, method='POST',
                         url_username=params['api_username'], url_password=params['api_password'],
                         headers=HEADERS, validate_certs=params['validate_certs'])
    # The proxy answers 200 on a successful create for this endpoint.
    return rc == 200, (rc, resp)
def delete_copy_pair_by_copy_pair_id(params):
    """DELETE a copy pair by id, discarding its repositories.

    Returns ``(success, (status_code, body))`` where success is True on
    HTTP 204 (No Content).
    """
    endpoint = 'storage-systems/%s/volume-copy-jobs/%s?retainRepositories=false' % (
        params['ssid'], params['volume_copy_pair_id'])
    url = params['api_url'] + endpoint
    (rc, resp) = request(url, ignore_errors=True, method='DELETE',
                         url_username=params['api_username'], url_password=params['api_password'],
                         headers=HEADERS, validate_certs=params['validate_certs'])
    return rc == 204, (rc, resp)
def find_volume_copy_pair_id_by_volume_copy_pair_id(params):
    """Check whether the copy pair ``params['volume_copy_pair_id']`` exists.

    Returns ``(True, (status_code, body))`` when the pair is found,
    otherwise ``(False, (status_code, body))`` (404 means "does not exist").

    Bug fix: the original issued ``DELETE .../<id>?retainRepositories=false``
    here -- i.e. merely *checking* for a pair destroyed it.  An existence
    check must be a side-effect-free GET.
    """
    get_status = 'storage-systems/%s/volume-copy-jobs/%s' % (
        params['ssid'], params['volume_copy_pair_id'])
    url = params['api_url'] + get_status

    (rc, resp) = request(url, ignore_errors=True, method='GET',
                         url_username=params['api_username'], url_password=params['api_password'],
                         headers=HEADERS,
                         validate_certs=params['validate_certs'])
    if rc != 200:
        return False, (rc, resp)
    else:
        return True, (rc, resp)
def start_stop_copy(params):
    """Issue a start or stop control request against an existing copy pair.

    The control verb comes from ``params['start_stop_copy']``.  Returns
    ``(True, percent_complete)`` on HTTP 200, otherwise ``(False, body)``.
    """
    endpoint = 'storage-systems/%s/volume-copy-jobs-control/%s?control=%s' % (
        params['ssid'], params['volume_copy_pair_id'], params['start_stop_copy'])
    url = params['api_url'] + endpoint
    (code, body) = request(url, ignore_errors=True, method='POST',
                           url_username=params['api_username'], url_password=params['api_password'],
                           headers=HEADERS,
                           validate_certs=params['validate_certs'])
    if code != 200:
        return False, body
    return True, body[0]['percentComplete']
def check_copy_status(params):
    """Report whether a copy job is currently running.

    Returns ``(True, percent)`` while a copy is in progress,
    ``(False, percent)`` when idle (the API reports -1), or
    ``(False, body)`` on a non-200 response.
    """
    endpoint = 'storage-systems/%s/volume-copy-jobs-control/%s' % (
        params['ssid'], params['volume_copy_pair_id'])
    url = params['api_url'] + endpoint
    (code, body) = request(url, ignore_errors=True, method='GET',
                           url_username=params['api_username'], url_password=params['api_password'],
                           headers=HEADERS,
                           validate_certs=params['validate_certs'])
    if code != 200:
        return False, body
    progress = body['percentComplete']
    # The API uses -1 as a sentinel meaning "no copy in progress".
    return (progress != -1), progress
def find_valid_copy_pair_targets_and_sources(params):
    """Classify every other volume on the array relative to
    ``params['search_volume_id']``.

    Volumes strictly larger than the search volume qualify as copy
    targets; the rest qualify as copy sources.  Volumes already acting as
    a copy source or target are excluded.  Returns
    ``(potential_targets, potential_sources)`` as lists of volume ids.
    Raises ``Exception`` on a non-200 API response.
    """
    endpoint = 'storage-systems/%s/volumes' % params['ssid']
    url = params['api_url'] + endpoint
    (code, volumes) = request(url, ignore_errors=True, method='GET',
                              url_username=params['api_username'], url_password=params['api_password'],
                              headers=HEADERS,
                              validate_certs=params['validate_certs'])
    if code != 200:
        raise Exception("Response [%s]" % code)

    # Separate the search volume (we only need its capacity) from the
    # candidate volumes.
    source_capacity = None
    candidates = []
    for volume in volumes:
        if volume['id'] == params['search_volume_id']:
            source_capacity = volume['capacity']
        else:
            candidates.append(volume)

    potential_targets = []
    potential_sources = []
    for volume in candidates:
        # Volumes already participating in a copy pair are ineligible.
        if volume['volumeCopyTarget'] or volume['volumeCopySource']:
            continue
        if volume['capacity'] > source_capacity:
            potential_targets.append(volume['id'])
        else:
            potential_sources.append(volume['id'])

    return potential_targets, potential_sources
def main():
    """Module entry point: search for copy candidates, start/stop a copy,
    or ensure a volume copy pair is present/absent.

    NOTE(review): the parameter is named ``status`` here although the
    DOCUMENTATION block describes it as ``state`` -- confirm which
    spelling playbooks actually use before renaming either side.
    NOTE(review): ``copy_priority``, ``targetWriteProtected`` and
    ``onlineCopy`` are accepted but never referenced in this function.
    """
    module = AnsibleModule(argument_spec=dict(
        source_volume_id=dict(type='str'),
        destination_volume_id=dict(type='str'),
        copy_priority=dict(required=False, default=0, type='int'),
        ssid=dict(required=True, type='str'),
        api_url=dict(required=True),
        api_username=dict(required=False),
        api_password=dict(required=False, no_log=True),
        validate_certs=dict(required=False, default=True),
        targetWriteProtected=dict(required=False, default=True, type='bool'),
        onlineCopy=dict(required=False, default=False, type='bool'),
        volume_copy_pair_id=dict(type='str'),
        status=dict(required=True, choices=['present', 'absent'], type='str'),
        create_copy_pair_if_does_not_exist=dict(required=False, default=True, type='bool'),
        start_stop_copy=dict(required=False, choices=['start', 'stop'], type='str'),
    ),
        # A pair may be addressed either by its id, by its two volume ids,
        # or (for searching) by a single search volume -- never a mix.
        mutually_exclusive=[['volume_copy_pair_id', 'destination_volume_id'],
                            ['volume_copy_pair_id', 'source_volume_id'],
                            ['volume_copy_pair_id', 'search_volume_id'],
                            ['search_volume_id', 'destination_volume_id'],
                            ['search_volume_id', 'source_volume_id'],
                            ],
        required_together=[['source_volume_id', 'destination_volume_id'],
                           ],
        required_if=[["create_copy_pair_if_does_not_exist", True, ['source_volume_id', 'destination_volume_id'], ],
                     ["start_stop_copy", 'stop', ['volume_copy_pair_id'], ],
                     ["start_stop_copy", 'start', ['volume_copy_pair_id'], ],
                     ]

    )
    params = module.params

    # Normalise the API url so endpoint paths can be appended directly.
    if not params['api_url'].endswith('/'):
        params['api_url'] += '/'

    # Check if we want to search
    if params['search_volume_id'] is not None:
        try:
            potential_targets, potential_sources = find_valid_copy_pair_targets_and_sources(params)
        except:
            e = get_exception()
            module.fail_json(msg="Failed to find valid copy pair candidates. Error [%s]" % str(e))

        # exit_json raises SystemExit, so nothing below runs in search mode.
        module.exit_json(changed=False,
                         msg=' Valid source devices found: %s Valid target devices found: %s' % (len(potential_sources), len(potential_targets)),
                         search_volume_id=params['search_volume_id'],
                         valid_targets=potential_targets,
                         valid_sources=potential_sources)

    # Check if we want to start or stop a copy operation
    if params['start_stop_copy'] == 'start' or params['start_stop_copy'] == 'stop':

        # Get the current status info
        currenty_running, status_info = check_copy_status(params)

        # If we want to start
        if params['start_stop_copy'] == 'start':

            # If we have already started
            if currenty_running is True:
                module.exit_json(changed=False, msg='Volume Copy Pair copy has started.',
                                 volume_copy_pair_id=params['volume_copy_pair_id'], percent_done=status_info)
            # If we need to start
            else:

                start_status, info = start_stop_copy(params)

                if start_status is True:
                    module.exit_json(changed=True, msg='Volume Copy Pair copy has started.',
                                     volume_copy_pair_id=params['volume_copy_pair_id'], percent_done=info)
                else:
                    module.fail_json(msg="Could not start volume copy pair Error: %s" % info)

        # If we want to stop
        else:
            # If it has already stopped
            if currenty_running is False:
                module.exit_json(changed=False, msg='Volume Copy Pair copy is stopped.',
                                 volume_copy_pair_id=params['volume_copy_pair_id'])

            # If we need to stop it
            else:
                start_status, info = start_stop_copy(params)

                if start_status is True:
                    module.exit_json(changed=True, msg='Volume Copy Pair copy has been stopped.',
                                     volume_copy_pair_id=params['volume_copy_pair_id'])
                else:
                    module.fail_json(msg="Could not stop volume copy pair Error: %s" % info)

    # If we want the copy pair to exist we do this stuff
    if params['status'] == 'present':

        # We need to check if it exists first
        if params['volume_copy_pair_id'] is None:
            params['volume_copy_pair_id'] = find_volume_copy_pair_id_from_source_volume_id_and_destination_volume_id(
                params)

        # If no volume copy pair is found we need need to make it.
        if params['volume_copy_pair_id'] is None:

            # In order to create we can not do so with just a volume_copy_pair_id
            copy_began_status, (rc, resp) = create_copy_pair(params)

            if copy_began_status is True:
                module.exit_json(changed=True, msg='Created Volume Copy Pair with ID: %s' % resp['id'])
            else:
                module.fail_json(msg="Could not create volume copy pair Code: %s Error: %s" % (rc, resp))

        # If it does exist we do nothing
        else:
            # We verify that it exists
            exist_status, (exist_status_code, exist_status_data) = find_volume_copy_pair_id_by_volume_copy_pair_id(
                params)

            if exist_status:
                module.exit_json(changed=False,
                                 msg=' Volume Copy Pair with ID: %s exists' % params['volume_copy_pair_id'])
            else:
                if exist_status_code == 404:
                    module.fail_json(
                        msg=' Volume Copy Pair with ID: %s does not exist. Can not create without source_volume_id and destination_volume_id' %
                            params['volume_copy_pair_id'])
                else:
                    module.fail_json(msg="Could not find volume copy pair Code: %s Error: %s" % (
                        exist_status_code, exist_status_data))

        # NOTE(review): unreachable -- every branch above exits via
        # exit_json/fail_json (both raise SystemExit).
        module.fail_json(msg="Done")

    # If we want it to not exist we do this
    else:

        if params['volume_copy_pair_id'] is None:
            params['volume_copy_pair_id'] = find_volume_copy_pair_id_from_source_volume_id_and_destination_volume_id(
                params)

        # We delete it by the volume_copy_pair_id
        delete_status, (delete_status_code, delete_status_data) = delete_copy_pair_by_copy_pair_id(params)

        if delete_status is True:
            module.exit_json(changed=True,
                             msg=' Volume Copy Pair with ID: %s was deleted' % params['volume_copy_pair_id'])
        else:
            if delete_status_code == 404:
                module.exit_json(changed=False,
                                 msg=' Volume Copy Pair with ID: %s does not exist' % params['volume_copy_pair_id'])
            else:
                module.fail_json(msg="Could not delete volume copy pair Code: %s Error: %s" % (
                    delete_status_code, delete_status_data))


if __name__ == '__main__':
    main()
| gpl-3.0 |
carlgao/lenga | images/lenny64-peon/usr/share/python-support/python-django/django/contrib/comments/templatetags/comments.py | 309 | 11845 | from django import template
from django.template.loader import render_to_string
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib import comments
from django.utils.encoding import smart_unicode
register = template.Library()
class BaseCommentNode(template.Node):
    """
    Base helper class (abstract) for handling the get_comment_* template tags.
    Looks a bit strange, but the subclasses below should make this a bit more
    obvious.
    """

    #@classmethod
    def handle_token(cls, parser, token):
        """Class method to parse get_comment_list/count/form and return a Node."""
        tokens = token.contents.split()
        if tokens[1] != 'for':
            raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])

        # {% get_whatever for obj as varname %}
        if len(tokens) == 5:
            if tokens[3] != 'as':
                raise template.TemplateSyntaxError("Third argument in %r must be 'as'" % tokens[0])
            return cls(
                object_expr = parser.compile_filter(tokens[2]),
                as_varname = tokens[4],
            )

        # {% get_whatever for app.model pk as varname %}
        elif len(tokens) == 6:
            if tokens[4] != 'as':
                raise template.TemplateSyntaxError("Fourth argument in %r must be 'as'" % tokens[0])
            return cls(
                ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
                object_pk_expr = parser.compile_filter(tokens[3]),
                as_varname = tokens[5]
            )

        else:
            raise template.TemplateSyntaxError("%r tag requires 4 or 5 arguments" % tokens[0])

    handle_token = classmethod(handle_token)

    #@staticmethod
    def lookup_content_type(token, tagname):
        """Resolve an 'app.model' token to a ContentType, with tag-friendly errors."""
        try:
            app, model = token.split('.')
            return ContentType.objects.get(app_label=app, model=model)
        except ValueError:
            raise template.TemplateSyntaxError("Third argument in %r must be in the format 'app.model'" % tagname)
        except ContentType.DoesNotExist:
            raise template.TemplateSyntaxError("%r tag has non-existant content-type: '%s.%s'" % (tagname, app, model))
    lookup_content_type = staticmethod(lookup_content_type)

    def __init__(self, ctype=None, object_pk_expr=None, object_expr=None, as_varname=None, comment=None):
        # The target may be given either as a literal object expression or
        # as a (content type, pk expression) pair -- never neither.
        if ctype is None and object_expr is None:
            raise template.TemplateSyntaxError("Comment nodes must be given either a literal object or a ctype and object pk.")
        self.comment_model = comments.get_model()
        self.as_varname = as_varname
        self.ctype = ctype
        self.object_pk_expr = object_pk_expr
        self.object_expr = object_expr
        self.comment = comment

    def render(self, context):
        # Store the subclass-specific value under the requested context
        # variable; the tag itself renders nothing.
        qs = self.get_query_set(context)
        context[self.as_varname] = self.get_context_value_from_queryset(context, qs)
        return ''

    def get_query_set(self, context):
        """Return the comments queryset for the resolved target object."""
        ctype, object_pk = self.get_target_ctype_pk(context)
        if not object_pk:
            return self.comment_model.objects.none()

        qs = self.comment_model.objects.filter(
            content_type = ctype,
            object_pk = smart_unicode(object_pk),
            site__pk = settings.SITE_ID,
        )

        # The is_public and is_removed fields are implementation details of the
        # built-in comment model's spam filtering system, so they might not
        # be present on a custom comment model subclass. If they exist, we
        # should filter on them.
        field_names = [f.name for f in self.comment_model._meta.fields]
        if 'is_public' in field_names:
            qs = qs.filter(is_public=True)
        if getattr(settings, 'COMMENTS_HIDE_REMOVED', True) and 'is_removed' in field_names:
            qs = qs.filter(is_removed=False)

        return qs

    def get_target_ctype_pk(self, context):
        """Resolve the target to a (ContentType, pk) pair, or (None, None)."""
        if self.object_expr:
            try:
                obj = self.object_expr.resolve(context)
            except template.VariableDoesNotExist:
                return None, None
            return ContentType.objects.get_for_model(obj), obj.pk
        else:
            return self.ctype, self.object_pk_expr.resolve(context, ignore_failures=True)

    def get_context_value_from_queryset(self, context, qs):
        """Subclasses should override this."""
        raise NotImplementedError
class CommentListNode(BaseCommentNode):
    """Insert a list of comments into the context."""

    def get_context_value_from_queryset(self, context, qs):
        # Materialise the queryset so the template receives a plain list.
        return [comment for comment in qs]
class CommentCountNode(BaseCommentNode):
    """Insert a count of comments into the context."""

    def get_context_value_from_queryset(self, context, qs):
        # Let the database do the counting rather than fetching rows.
        total = qs.count()
        return total
class CommentFormNode(BaseCommentNode):
    """Insert a form for the comment model into the context."""

    def get_form(self, context):
        """Build a comment form bound to the resolved target, or None."""
        ctype, object_pk = self.get_target_ctype_pk(context)
        if not object_pk:
            # Target object could not be resolved; no form can be built.
            return None
        target = ctype.get_object_for_this_type(pk=object_pk)
        return comments.get_form()(target)

    def render(self, context):
        context[self.as_varname] = self.get_form(context)
        return ''
class RenderCommentFormNode(CommentFormNode):
    """Render the comment form directly"""

    #@classmethod
    def handle_token(cls, parser, token):
        """Class method to parse render_comment_form and return a Node."""
        bits = token.contents.split()
        if bits[1] != 'for':
            raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % bits[0])

        # {% render_comment_form for obj %}
        if len(bits) == 3:
            return cls(object_expr=parser.compile_filter(bits[2]))

        # {% render_comment_form for app.models pk %}
        if len(bits) == 4:
            return cls(
                ctype=BaseCommentNode.lookup_content_type(bits[2], bits[0]),
                object_pk_expr=parser.compile_filter(bits[3])
            )
    handle_token = classmethod(handle_token)

    def render(self, context):
        ctype, object_pk = self.get_target_ctype_pk(context)
        if not object_pk:
            return ''
        # Most specific template first: per-model, per-app, then default.
        candidate_templates = [
            "comments/%s/%s/form.html" % (ctype.app_label, ctype.model),
            "comments/%s/form.html" % ctype.app_label,
            "comments/form.html"
        ]
        context.push()
        rendered = render_to_string(candidate_templates, {"form": self.get_form(context)}, context)
        context.pop()
        return rendered
class RenderCommentListNode(CommentListNode):
    """Render the comment list directly"""

    #@classmethod
    def handle_token(cls, parser, token):
        """Class method to parse render_comment_list and return a Node."""
        bits = token.contents.split()
        if bits[1] != 'for':
            raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % bits[0])

        # {% render_comment_list for obj %}
        if len(bits) == 3:
            return cls(object_expr=parser.compile_filter(bits[2]))

        # {% render_comment_list for app.models pk %}
        if len(bits) == 4:
            return cls(
                ctype=BaseCommentNode.lookup_content_type(bits[2], bits[0]),
                object_pk_expr=parser.compile_filter(bits[3])
            )
    handle_token = classmethod(handle_token)

    def render(self, context):
        ctype, object_pk = self.get_target_ctype_pk(context)
        if not object_pk:
            return ''
        # Most specific template first: per-model, per-app, then default.
        candidate_templates = [
            "comments/%s/%s/list.html" % (ctype.app_label, ctype.model),
            "comments/%s/list.html" % ctype.app_label,
            "comments/list.html"
        ]
        comment_qs = self.get_query_set(context)
        context.push()
        rendered = render_to_string(candidate_templates, {
            "comment_list": self.get_context_value_from_queryset(context, comment_qs)
        }, context)
        context.pop()
        return rendered
# We could just register each classmethod directly, but then we'd lose out on
# the automagic docstrings-into-admin-docs tricks. So each node gets a cute
# wrapper function that just exists to hold the docstring.

#@register.tag
def get_comment_count(parser, token):
    """
    Gets the comment count for the given params and populates the template
    context with a variable containing that value, whose name is defined by the
    'as' clause.

    Syntax::

        {% get_comment_count for [object] as [varname]  %}
        {% get_comment_count for [app].[model] [object_id] as [varname]  %}

    Example usage::

        {% get_comment_count for event as comment_count %}
        {% get_comment_count for calendar.event event.id as comment_count %}
        {% get_comment_count for calendar.event 17 as comment_count %}

    """
    # All parsing happens in the node class; see BaseCommentNode.handle_token.
    return CommentCountNode.handle_token(parser, token)

#@register.tag
def get_comment_list(parser, token):
    """
    Gets the list of comments for the given params and populates the template
    context with a variable containing that value, whose name is defined by the
    'as' clause.

    Syntax::

        {% get_comment_list for [object] as [varname]  %}
        {% get_comment_list for [app].[model] [object_id] as [varname]  %}

    Example usage::

        {% get_comment_list for event as comment_list %}
        {% for comment in comment_list %}
            ...
        {% endfor %}

    """
    return CommentListNode.handle_token(parser, token)

#@register.tag
def render_comment_list(parser, token):
    """
    Render the comment list (as returned by ``{% get_comment_list %}``)
    through the ``comments/list.html`` template

    Syntax::

        {% render_comment_list for [object] %}
        {% render_comment_list for [app].[model] [object_id] %}

    Example usage::

        {% render_comment_list for event %}

    """
    return RenderCommentListNode.handle_token(parser, token)

#@register.tag
def get_comment_form(parser, token):
    """
    Get a (new) form object to post a new comment.

    Syntax::

        {% get_comment_form for [object] as [varname] %}
        {% get_comment_form for [app].[model] [object_id] as [varname] %}
    """
    return CommentFormNode.handle_token(parser, token)

#@register.tag
def render_comment_form(parser, token):
    """
    Render the comment form (as returned by ``{% render_comment_form %}``) through
    the ``comments/form.html`` template.

    Syntax::

        {% render_comment_form for [object] %}
        {% render_comment_form for [app].[model] [object_id] %}
    """
    return RenderCommentFormNode.handle_token(parser, token)

#@register.simple_tag
def comment_form_target():
    """
    Get the target URL for the comment form.

    Example::

        <form action="{% comment_form_target %}" method="post">
    """
    return comments.get_form_target()
#@register.simple_tag
def get_comment_permalink(comment, anchor_pattern=None):
    """
    Get the permalink for a comment, optionally specifying the format of the
    named anchor to be appended to the end of the URL.

    Example::
        {{ get_comment_permalink comment "#c%(id)s-by-%(user_name)s" }}
    """
    # Only forward the anchor pattern when one was actually supplied.
    return (comment.get_absolute_url(anchor_pattern)
            if anchor_pattern
            else comment.get_absolute_url())
# Register the wrapper functions (rather than the node classmethods) so
# their docstrings show up in the admin documentation.
register.tag(get_comment_count)
register.tag(get_comment_list)
register.tag(get_comment_form)
register.tag(render_comment_form)
register.simple_tag(comment_form_target)
register.simple_tag(get_comment_permalink)
register.tag(render_comment_list)
| mit |
hardbyte/scipy-sim | scipysim/actors/io/bundle.py | 2 | 2225 |
import logging
from scipysim.actors import Actor, Channel
import unittest
import numpy
class Bundle(Actor):
    '''
    This buffering/compressing/bundling actor takes a source
    and waits for a preset number
    of events (or for the signal to finish) before passing them on in one.
    They get passed on as a special condensed packet.
    '''
    num_inputs = 1
    num_outputs = 1

    def __init__(self, input_channel, output_channel, bundle_size=None):
        """
        Constructor for a bundle block.

        @param input_channel: The input channel to be bundled

        @param output_channel: The output channel that has been bundled

        @param bundle_size: The max size of an output bundle. Default
        is to buffer the whole signal then output a single bundle.
        """
        super(Bundle, self).__init__(input_channel=input_channel, output_channel=output_channel)
        self.bundle_size = bundle_size
        # Events buffered since the last bundle was emitted.
        self.temp_data = []

    def process(self):
        """Send packets of events at one time"""
        logging.debug("Running buffer/bundle process")

        obj = self.input_channel.get(True)     # this is blocking
        if not obj.last:
            self.temp_data.append(obj)
        # Flush when the signal terminates, or when a max size is configured
        # and the buffer has reached it.  Note 'and' binds tighter than 'or':
        # obj.last OR (bundle_size set AND buffer full).
        if obj.last or self.bundle_size is not None and len(self.temp_data) >= self.bundle_size:
            self.send_bundle()
            if obj.last:
                self.output_channel.put(obj)    # Propagate termination
                self.stop = True
            else:
                # Start accumulating the next bundle.
                self.temp_data = []

    def send_bundle(self):
        '''
        Create a numpy data type that can carry all the
        information then add it to the output channel
        '''
        # Structured array with one (Tag, Value) record per buffered event.
        x = numpy.zeros(len(self.temp_data),
                        dtype=
                        {
                            'names': ["Tag", "Value"],
                            'formats': ['f8', 'f8'],
                            'titles': ['Domain', 'Name']   # This might not get used...
                        }
                        )
        x[:] = [ (element['tag'], element['value']) for element in self.temp_data if element is not None]
        self.output_channel.put(x)
| gpl-3.0 |
nburn42/tensorflow | tensorflow/contrib/gan/python/features/python/random_tensor_pool.py | 55 | 1552 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A tensor pool stores values from an input tensor and returns a stored one.
See the following papers for more details.
1) `Learning from simulated and unsupervised images through adversarial
training` (https://arxiv.org/abs/1612.07828).
2) `Unpaired Image-to-Image Translation using Cycle-Consistent Adversarial
Networks` (https://arxiv.org/abs/1703.10593).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.gan.python.features.python import random_tensor_pool_impl
# pylint: disable=wildcard-import
from tensorflow.contrib.gan.python.features.python.random_tensor_pool_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented

# Re-export exactly the implementation module's public API and strip every
# other name from this namespace.
__all__ = random_tensor_pool_impl.__all__

remove_undocumented(__name__, __all__)
| apache-2.0 |
boundlessgeo/qgis-geogig-plugin | geogig/gui/dialogs/remotesdialog.py | 1 | 7621 | # -*- coding: utf-8 -*-
#
# (c) 2016 Boundless, http://boundlessgeo.com
# This code is licensed under the GPL 2.0 license.
#
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class RemotesDialog(QtGui.QDialog):
    """Dialog listing a GeoGig repository's remotes, with add/edit/remove.

    ``self.changed`` becomes True once any remote has been modified, so the
    caller can tell after ``exec_()`` whether a refresh is needed.
    """

    def __init__(self, parent, repo):
        QtGui.QDialog.__init__(self, parent, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint)
        self.changed = False
        self.repo = repo
        # Mapping of remote name -> URL, kept in sync with the repository.
        self.remotes = repo.remotes
        self.setupUi()

    def setupUi(self):
        """Build the dialog: a two-column table plus a vertical button box."""
        self.resize(500, 350)
        self.setWindowTitle("Remotes manager")
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setSpacing(2)
        self.horizontalLayout.setMargin(0)
        self.buttonBox = QtGui.QDialogButtonBox()
        self.buttonBox.setOrientation(QtCore.Qt.Vertical)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close)
        self.table = QtGui.QTableWidget()
        self.table.verticalHeader().setVisible(False)
        # Whole-row, single selection so edit/remove act on one remote.
        self.table.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.addRowButton = QtGui.QPushButton()
        self.addRowButton.setText("Add remote")
        self.editRowButton = QtGui.QPushButton()
        self.editRowButton.setText("Edit remote")
        self.removeRowButton = QtGui.QPushButton()
        self.removeRowButton.setText("Remove remote")
        self.buttonBox.addButton(self.addRowButton, QtGui.QDialogButtonBox.ActionRole)
        self.buttonBox.addButton(self.editRowButton, QtGui.QDialogButtonBox.ActionRole)
        self.buttonBox.addButton(self.removeRowButton, QtGui.QDialogButtonBox.ActionRole)
        self.setTableContent()
        self.horizontalLayout.addWidget(self.table)
        self.horizontalLayout.addWidget(self.buttonBox)
        self.setLayout(self.horizontalLayout)
        self.buttonBox.rejected.connect(self.close)
        self.editRowButton.clicked.connect(self.editRow)
        self.addRowButton.clicked.connect(self.addRow)
        self.removeRowButton.clicked.connect(self.removeRow)
        QtCore.QMetaObject.connectSlotsByName(self)
        # Nothing is selected initially, so edit/remove start disabled.
        self.editRowButton.setEnabled(False)
        self.removeRowButton.setEnabled(False)

    def setTableContent(self):
        """(Re)populate the table from ``self.remotes``."""
        self.table.clear()
        self.table.setColumnCount(2)
        self.table.setColumnWidth(0, 200)
        self.table.setColumnWidth(1, 200)
        self.table.setHorizontalHeaderLabels(["Name", "URL"])
        self.table.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        self.table.setRowCount(len(self.remotes))
        for i, name in enumerate(self.remotes):
            url = self.remotes[name]
            self.table.setRowHeight(i, 22)
            # Read-only items: editing goes through the edit dialog instead.
            item = QtGui.QTableWidgetItem(name, 0)
            item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
            self.table.setItem(i, 0, item)
            item = QtGui.QTableWidgetItem(url, 0)
            item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
            self.table.setItem(i, 1, item)
        # NOTE(review): this connect runs on every repopulation, stacking a
        # duplicate signal connection each time the table is rebuilt --
        # consider connecting once in setupUi instead.
        self.table.itemSelectionChanged.connect(self.selectionChanged)

    def selectionChanged(self):
        # Edit/remove only make sense with a row selected.
        enabled = len(self.table.selectedItems()) > 0
        self.editRowButton.setEnabled(enabled)
        self.removeRowButton.setEnabled(enabled)

    def editRow(self):
        """Edit the selected remote: re-create it with the dialog's values."""
        item = self.table.item(self.table.currentRow(), 0)
        if item is not None:
            name = item.text()
            url = self.table.item(self.table.currentRow(), 1).text()
            dlg = NewRemoteDialog(name, url, self)
            dlg.exec_()
            if dlg.ok:
                # Remove-then-add so a rename is handled the same as an
                # in-place edit.
                self.repo.removeremote(name)
                self.repo.addremote(dlg.name, dlg.url, dlg.username, dlg.password)
                del self.remotes[name]
                self.remotes[dlg.name] = dlg.url
                self.setTableContent()
                self.changed = True

    def removeRow(self):
        """Remove the selected remote from the repository and the table."""
        item = self.table.item(self.table.currentRow(), 0)
        if item is not None:
            name = item.text()
            self.repo.removeremote(name)
            del self.remotes[name]
            self.setTableContent()
            self.changed = True

    def addRow(self):
        """Prompt for a new remote and add it to the repository."""
        dlg = NewRemoteDialog(parent = self)
        dlg.exec_()
        if dlg.ok:
            self.repo.addremote(dlg.name, dlg.url, dlg.username, dlg.password)
            self.remotes[dlg.name] = dlg.url
            self.setTableContent()
            self.changed = True
class NewRemoteDialog(QtGui.QDialog):
    """Dialog asking for the parameters of a remote (name, URL, credentials).

    After ``exec_()``, ``self.ok`` tells whether the user accepted the
    dialog; if so, ``name``, ``url``, ``username`` and ``password`` hold the
    values entered (username/password are None when left blank).
    """

    def __init__(self, name = None, url = None, parent = None):
        super(NewRemoteDialog, self).__init__(parent)
        self.ok = False
        # Pre-populated when editing an existing remote.
        self.name = name
        self.url = url
        self.initGui()

    def _addFieldRow(self, layout, labelText, editBox):
        """Append one 'label + line edit' row to *layout*.

        Factors out the layout code that was duplicated verbatim for each
        of the four input rows of this dialog.
        """
        row = QtGui.QHBoxLayout()
        row.setSpacing(30)
        row.setMargin(0)
        label = QtGui.QLabel(labelText)
        label.setMinimumWidth(120)
        label.setMaximumWidth(120)
        row.addWidget(label)
        row.addWidget(editBox)
        layout.addLayout(row)

    def initGui(self):
        """Build the form: name, URL, username and password fields."""
        self.setWindowTitle('New remote')
        layout = QtGui.QVBoxLayout()
        buttonBox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Close)
        self.nameBox = QtGui.QLineEdit()
        if self.name is not None:
            self.nameBox.setText(self.name)
        self._addFieldRow(layout, 'Name', self.nameBox)
        self.urlBox = QtGui.QLineEdit()
        if self.url is not None:
            self.urlBox.setText(self.url)
        self._addFieldRow(layout, 'URL', self.urlBox)
        self.usernameBox = QtGui.QLineEdit()
        self._addFieldRow(layout, 'Username', self.usernameBox)
        self.passwordBox = QtGui.QLineEdit()
        # Mask typed characters for the password field.
        self.passwordBox.setEchoMode(QtGui.QLineEdit.Password)
        self._addFieldRow(layout, 'Password', self.passwordBox)
        layout.addWidget(buttonBox)
        self.setLayout(layout)
        buttonBox.accepted.connect(self.okPressed)
        buttonBox.rejected.connect(self.cancelPressed)
        self.resize(400, 200)

    def okPressed(self):
        """Capture the entered values and close with ``ok = True``."""
        self.name = unicode(self.nameBox.text())
        self.url = unicode(self.urlBox.text())
        # Blank credentials collapse to None so callers can pass them
        # straight through to repo.addremote().
        self.username = unicode(self.usernameBox.text()).strip() or None
        self.password = unicode(self.passwordBox.text()).strip() or None
        self.ok = True
        self.close()

    def cancelPressed(self):
        """Close without accepting; ``self.ok`` stays False."""
        self.name = None
        self.url = None
        self.close()
| gpl-2.0 |
miselin/grpc | src/python/grpcio/grpc/framework/base/_ingestion.py | 2 | 16422 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""State and behavior for ingestion during an operation."""
import abc
import collections
import six
from grpc.framework.base import _constants
from grpc.framework.base import _interfaces
from grpc.framework.base import exceptions
from grpc.framework.base import interfaces
from grpc.framework.foundation import abandonment
from grpc.framework.foundation import callable_util
from grpc.framework.foundation import stream
_CREATE_CONSUMER_EXCEPTION_LOG_MESSAGE = 'Exception initializing ingestion!'
_CONSUME_EXCEPTION_LOG_MESSAGE = 'Exception during ingestion!'
class _ConsumerCreation(collections.namedtuple(
    '_ConsumerCreation', ('consumer', 'remote_error', 'abandoned'))):
  """A sum type for the outcome of ingestion initialization.

  Exactly one of the three cases holds: either consumer will be non-None,
  remote_error will be True, or abandoned will be True.

  Attributes:
    consumer: A stream.Consumer for ingesting payloads.
    remote_error: A boolean indicating that the consumer could not be created
      due to an error on the remote side of the operation.
    abandoned: A boolean indicating that the consumer creation was abandoned.
  """
class _EmptyConsumer(stream.Consumer):
  """A no-operative stream.Consumer that ignores all inputs and calls.

  Used on the front side when the serviced did not subscribe to results,
  so there is nothing to deliver payloads to.
  """

  def consume(self, value):
    """See stream.Consumer.consume for specification."""

  def terminate(self):
    """See stream.Consumer.terminate for specification."""

  def consume_and_terminate(self, value):
    """See stream.Consumer.consume_and_terminate for specification."""
class _ConsumerCreator(six.with_metaclass(abc.ABCMeta)):
  """Common specification of different consumer-creating behavior."""

  @abc.abstractmethod
  def create_consumer(self, requirement):
    """Creates the stream.Consumer to which customer payloads will be delivered.

    Any exceptions raised by this method should be attributed to and treated as
    defects in the serviced or servicer code called by this method.

    Args:
      requirement: A value required by this _ConsumerCreator for consumer
        creation.

    Returns:
      A _ConsumerCreation describing the result of consumer creation.
    """
    raise NotImplementedError()
class _FrontConsumerCreator(_ConsumerCreator):
  """A _ConsumerCreator appropriate for front-side use."""

  def __init__(self, subscription, operation_context):
    """Constructor.

    Args:
      subscription: The serviced's interfaces.ServicedSubscription for the
        operation.
      operation_context: The interfaces.OperationContext object for the
        operation.
    """
    self._subscription = subscription
    self._operation_context = operation_context

  def create_consumer(self, requirement):
    """See _ConsumerCreator.create_consumer for specification.

    The requirement argument is unused on the front side.
    """
    if self._subscription.kind is interfaces.ServicedSubscription.Kind.FULL:
      try:
        return _ConsumerCreation(
            self._subscription.ingestor.consumer(self._operation_context),
            False, False)
      except abandonment.Abandoned:
        return _ConsumerCreation(None, False, True)
    else:
      # The serviced did not subscribe to results; swallow payloads.
      return _ConsumerCreation(_EmptyConsumer(), False, False)
class _BackConsumerCreator(_ConsumerCreator):
  """A _ConsumerCreator appropriate for back-side use."""

  def __init__(self, servicer, operation_context, emission_consumer):
    """Constructor.

    Args:
      servicer: The interfaces.Servicer that will service the operation.
      operation_context: The interfaces.OperationContext object for the
        operation.
      emission_consumer: The stream.Consumer object to which payloads emitted
        from the operation will be passed.
    """
    self._servicer = servicer
    self._operation_context = operation_context
    self._emission_consumer = emission_consumer

  def create_consumer(self, requirement):
    """See _ConsumerCreator.create_consumer for full specification.

    Args:
      requirement: The name of the Servicer method to be called during this
        operation.

    Returns:
      A _ConsumerCreation describing the result of consumer creation.
    """
    try:
      return _ConsumerCreation(
          self._servicer.service(
              requirement, self._operation_context, self._emission_consumer),
          False, False)
    except exceptions.NoSuchMethodError:
      # The named method does not exist: a remote error, not an abandonment.
      return _ConsumerCreation(None, True, False)
    except abandonment.Abandoned:
      return _ConsumerCreation(None, False, True)
class _WrappedConsumer(object):
  """Shields callers from the exceptions a customer consumer may raise."""

  def __init__(self, consumer):
    """Constructor.

    Args:
      consumer: A stream.Consumer that may raise abandonment.Abandoned from any
        of its methods.
    """
    self._consumer = consumer

  def moar(self, payload, complete):
    """Makes progress with the wrapped consumer.

    Every exception the wrapped consumer is permitted to raise is caught
    here; anything else that escapes is a defect in the customer-supplied
    consumer.

    Args:
      payload: A customer-significant payload object. May be None only if
        complete is True.
      complete: Whether or not the end of the payload sequence has been reached.
        Must be True if payload is None.

    Returns:
      True if the wrapped consumer made progress or False if the wrapped
      consumer raised abandonment.Abandoned to indicate its abandonment of
      progress.
    """
    try:
      if payload is None:
        self._consumer.terminate()
      elif complete:
        self._consumer.consume_and_terminate(payload)
      else:
        self._consumer.consume(payload)
    except abandonment.Abandoned:
      return False
    else:
      return True
class _IngestionManager(_interfaces.IngestionManager):
  """An implementation of _interfaces.IngestionManager."""

  def __init__(
      self, lock, pool, consumer_creator, failure_outcome, termination_manager,
      transmission_manager):
    """Constructor.

    Args:
      lock: The operation-wide lock.
      pool: A thread pool in which to execute customer code.
      consumer_creator: A _ConsumerCreator wrapping the portion of customer code
        that when called returns the stream.Consumer with which the customer
        code will ingest payload values.
      failure_outcome: Whichever one of
        interfaces.Outcome.SERVICED_FAILURE or
        interfaces.Outcome.SERVICER_FAILURE describes local failure of
        customer code.
      termination_manager: The _interfaces.TerminationManager for the operation.
      transmission_manager: The _interfaces.TransmissionManager for the
        operation.
    """
    self._lock = lock
    self._pool = pool
    self._consumer_creator = consumer_creator
    self._failure_outcome = failure_outcome
    self._termination_manager = termination_manager
    self._transmission_manager = transmission_manager
    # Set later via set_expiration_manager (circular construction order).
    self._expiration_manager = None

    # _pending_ingestion is a list of buffered payloads while ingestion is
    # live, and None once ingestion has finished or been aborted.
    self._wrapped_ingestion_consumer = None
    self._pending_ingestion = []
    self._ingestion_complete = False
    # True while a pool-submitted task is (or will be) draining payloads.
    self._processing = False

  def set_expiration_manager(self, expiration_manager):
    # Must be called before any abort can happen; _abort_and_notify
    # dereferences it unconditionally.
    self._expiration_manager = expiration_manager

  def _abort_internal_only(self):
    # Drop references so no further ingestion can take place.
    self._wrapped_ingestion_consumer = None
    self._pending_ingestion = None

  def _abort_and_notify(self, outcome):
    # NOTE(review): assumes set_expiration_manager has already been called;
    # otherwise self._expiration_manager.abort() raises - confirm with callers.
    self._abort_internal_only()
    self._termination_manager.abort(outcome)
    self._transmission_manager.abort(outcome)
    self._expiration_manager.abort()

  def _next(self):
    """Computes the next step for ingestion.

    Returns:
      A payload, complete, continue triplet indicating what payload (if any) is
        available to feed into customer code, whether or not the sequence of
        payloads has terminated, and whether or not there is anything
        immediately actionable to call customer code to do.
    """
    if self._pending_ingestion is None:
      # Ingestion was aborted or finished; nothing to do.
      return None, False, False
    elif self._pending_ingestion:
      payload = self._pending_ingestion.pop(0)
      complete = self._ingestion_complete and not self._pending_ingestion
      return payload, complete, True
    elif self._ingestion_complete:
      # No payloads left but termination still needs to be delivered.
      return None, True, True
    else:
      return None, False, False

  def _process(self, wrapped_ingestion_consumer, payload, complete):
    """A method to call to execute customer code.

    This object's lock must *not* be held when calling this method.

    Args:
      wrapped_ingestion_consumer: The _WrappedConsumer with which to pass
        payloads to customer code.
      payload: A customer payload. May be None only if complete is True.
      complete: Whether or not the sequence of payloads to pass to the customer
        has concluded.
    """
    while True:
      # Customer code runs outside the lock; only state transitions are
      # performed while holding it.
      consumption_outcome = callable_util.call_logging_exceptions(
          wrapped_ingestion_consumer.moar, _CONSUME_EXCEPTION_LOG_MESSAGE,
          payload, complete)
      if consumption_outcome.exception is None:
        if consumption_outcome.return_value:
          with self._lock:
            if complete:
              self._pending_ingestion = None
              self._termination_manager.ingestion_complete()
              return
            else:
              payload, complete, moar = self._next()
              if not moar:
                self._processing = False
                return
        else:
          # The consumer abandoned ingestion (returned False).
          with self._lock:
            if self._pending_ingestion is not None:
              self._abort_and_notify(self._failure_outcome)
            self._processing = False
            return
      else:
        # Customer code raised an unexpected exception: local failure.
        with self._lock:
          self._abort_and_notify(self._failure_outcome)
          self._processing = False
          return

  def start(self, requirement):
    # NOTE(review): this and the other public methods appear to rely on
    # being called with the operation-wide lock already held - confirm
    # against the callers in the operation module.
    if self._pending_ingestion is not None:
      def initialize():
        # Runs in the pool: create the customer's consumer, then begin
        # draining any payloads buffered while creation was in flight.
        consumer_creation_outcome = callable_util.call_logging_exceptions(
            self._consumer_creator.create_consumer,
            _CREATE_CONSUMER_EXCEPTION_LOG_MESSAGE, requirement)
        if consumer_creation_outcome.return_value is None:
          # Customer code raised during consumer creation.
          with self._lock:
            self._abort_and_notify(self._failure_outcome)
            self._processing = False
        elif consumer_creation_outcome.return_value.remote_error:
          with self._lock:
            self._abort_and_notify(interfaces.Outcome.RECEPTION_FAILURE)
            self._processing = False
        elif consumer_creation_outcome.return_value.abandoned:
          with self._lock:
            if self._pending_ingestion is not None:
              self._abort_and_notify(self._failure_outcome)
            self._processing = False
        else:
          wrapped_ingestion_consumer = _WrappedConsumer(
              consumer_creation_outcome.return_value.consumer)
          with self._lock:
            self._wrapped_ingestion_consumer = wrapped_ingestion_consumer
            payload, complete, moar = self._next()
            if not moar:
              self._processing = False
              return
          # Lock released before running customer code in _process.
          self._process(wrapped_ingestion_consumer, payload, complete)
      self._pool.submit(
          callable_util.with_exceptions_logged(
              initialize, _constants.INTERNAL_ERROR_LOG_MESSAGE))
      self._processing = True

  def consume(self, payload):
    """See _interfaces.IngestionManager.consume for specification."""
    if self._ingestion_complete:
      # Payload after termination: protocol violation by the remote side.
      self._abort_and_notify(self._failure_outcome)
    elif self._pending_ingestion is not None:
      if self._processing:
        # A drain task is active; it will pick this payload up via _next.
        self._pending_ingestion.append(payload)
      else:
        self._pool.submit(
            callable_util.with_exceptions_logged(
                self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
            self._wrapped_ingestion_consumer, payload, False)
        self._processing = True

  def terminate(self):
    """See _interfaces.IngestionManager.terminate for specification."""
    if self._ingestion_complete:
      # Double termination: protocol violation.
      self._abort_and_notify(self._failure_outcome)
    else:
      self._ingestion_complete = True
      if self._pending_ingestion is not None and not self._processing:
        self._pool.submit(
            callable_util.with_exceptions_logged(
                self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
            self._wrapped_ingestion_consumer, None, True)
        self._processing = True

  def consume_and_terminate(self, payload):
    """See _interfaces.IngestionManager.consume_and_terminate for specification."""
    if self._ingestion_complete:
      self._abort_and_notify(self._failure_outcome)
    else:
      self._ingestion_complete = True
      if self._pending_ingestion is not None:
        if self._processing:
          self._pending_ingestion.append(payload)
        else:
          self._pool.submit(
              callable_util.with_exceptions_logged(
                  self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
              self._wrapped_ingestion_consumer, payload, True)
          self._processing = True

  def abort(self):
    """See _interfaces.IngestionManager.abort for specification."""
    self._abort_internal_only()
def front_ingestion_manager(
    lock, pool, subscription, termination_manager, transmission_manager,
    operation_context):
  """Creates an IngestionManager appropriate for front-side use.

  Args:
    lock: The operation-wide lock.
    pool: A thread pool in which to execute customer code.
    subscription: A interfaces.ServicedSubscription indicating the
      customer's interest in the results of the operation.
    termination_manager: The _interfaces.TerminationManager for the operation.
    transmission_manager: The _interfaces.TransmissionManager for the
      operation.
    operation_context: A interfaces.OperationContext for the operation.

  Returns:
    An IngestionManager appropriate for front-side use.
  """
  ingestion_manager = _IngestionManager(
      lock, pool, _FrontConsumerCreator(subscription, operation_context),
      interfaces.Outcome.SERVICED_FAILURE, termination_manager,
      transmission_manager)
  # The front side needs no requirement, so consumer creation can begin
  # immediately.
  ingestion_manager.start(None)
  return ingestion_manager
def back_ingestion_manager(
    lock, pool, servicer, termination_manager, transmission_manager,
    operation_context, emission_consumer):
  """Creates an IngestionManager appropriate for back-side use.

  Args:
    lock: The operation-wide lock.
    pool: A thread pool in which to execute customer code.
    servicer: A interfaces.Servicer for servicing the operation.
    termination_manager: The _interfaces.TerminationManager for the operation.
    transmission_manager: The _interfaces.TransmissionManager for the
      operation.
    operation_context: A interfaces.OperationContext for the operation.
    emission_consumer: The _interfaces.EmissionConsumer for the operation.

  Returns:
    An IngestionManager appropriate for back-side use.
  """
  # Unlike the front side, start() is not called here: the back side must
  # wait for the operation name (the "requirement") to arrive before it can
  # create its consumer.
  ingestion_manager = _IngestionManager(
      lock, pool, _BackConsumerCreator(
          servicer, operation_context, emission_consumer),
      interfaces.Outcome.SERVICER_FAILURE, termination_manager,
      transmission_manager)
  return ingestion_manager
| bsd-3-clause |
derricw/pyqtgraph | pyqtgraph/canvas/CanvasTemplate_pyside.py | 42 | 5513 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './pyqtgraph/canvas/CanvasTemplate.ui'
#
# Created: Mon Dec 23 10:10:52 2013
# by: pyside-uic 0.2.14 running on PySide 1.1.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
    """UI scaffolding for the pyqtgraph Canvas widget (PySide variant).

    Auto-generated by pyside-uic from CanvasTemplate.ui; per the header
    warning above, do not edit by hand - regenerate from the .ui file.
    """

    def setupUi(self, Form):
        """Instantiate all widgets and layouts and attach them to *Form*."""
        Form.setObjectName("Form")
        Form.resize(490, 414)
        self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        # Splitter: graphics view on the left, control column on the right.
        self.splitter = QtGui.QSplitter(Form)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setObjectName("splitter")
        self.view = GraphicsView(self.splitter)
        self.view.setObjectName("view")
        self.layoutWidget = QtGui.QWidget(self.splitter)
        self.layoutWidget.setObjectName("layoutWidget")
        self.gridLayout_2 = QtGui.QGridLayout(self.layoutWidget)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.storeSvgBtn = QtGui.QPushButton(self.layoutWidget)
        self.storeSvgBtn.setObjectName("storeSvgBtn")
        self.gridLayout_2.addWidget(self.storeSvgBtn, 1, 0, 1, 1)
        self.storePngBtn = QtGui.QPushButton(self.layoutWidget)
        self.storePngBtn.setObjectName("storePngBtn")
        self.gridLayout_2.addWidget(self.storePngBtn, 1, 1, 1, 1)
        self.autoRangeBtn = QtGui.QPushButton(self.layoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.autoRangeBtn.sizePolicy().hasHeightForWidth())
        self.autoRangeBtn.setSizePolicy(sizePolicy)
        self.autoRangeBtn.setObjectName("autoRangeBtn")
        self.gridLayout_2.addWidget(self.autoRangeBtn, 3, 0, 1, 2)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setSpacing(0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.redirectCheck = QtGui.QCheckBox(self.layoutWidget)
        self.redirectCheck.setObjectName("redirectCheck")
        self.horizontalLayout.addWidget(self.redirectCheck)
        self.redirectCombo = CanvasCombo(self.layoutWidget)
        self.redirectCombo.setObjectName("redirectCombo")
        self.horizontalLayout.addWidget(self.redirectCombo)
        self.gridLayout_2.addLayout(self.horizontalLayout, 6, 0, 1, 2)
        self.itemList = TreeWidget(self.layoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(100)
        sizePolicy.setHeightForWidth(self.itemList.sizePolicy().hasHeightForWidth())
        self.itemList.setSizePolicy(sizePolicy)
        self.itemList.setHeaderHidden(True)
        self.itemList.setObjectName("itemList")
        self.itemList.headerItem().setText(0, "1")
        self.gridLayout_2.addWidget(self.itemList, 7, 0, 1, 2)
        self.ctrlLayout = QtGui.QGridLayout()
        self.ctrlLayout.setSpacing(0)
        self.ctrlLayout.setObjectName("ctrlLayout")
        self.gridLayout_2.addLayout(self.ctrlLayout, 11, 0, 1, 2)
        self.resetTransformsBtn = QtGui.QPushButton(self.layoutWidget)
        self.resetTransformsBtn.setObjectName("resetTransformsBtn")
        self.gridLayout_2.addWidget(self.resetTransformsBtn, 8, 0, 1, 1)
        self.mirrorSelectionBtn = QtGui.QPushButton(self.layoutWidget)
        self.mirrorSelectionBtn.setObjectName("mirrorSelectionBtn")
        self.gridLayout_2.addWidget(self.mirrorSelectionBtn, 4, 0, 1, 1)
        self.reflectSelectionBtn = QtGui.QPushButton(self.layoutWidget)
        self.reflectSelectionBtn.setObjectName("reflectSelectionBtn")
        self.gridLayout_2.addWidget(self.reflectSelectionBtn, 4, 1, 1, 1)
        self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Set all user-visible strings (generated translation hook)."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.storeSvgBtn.setText(QtGui.QApplication.translate("Form", "Store SVG", None, QtGui.QApplication.UnicodeUTF8))
        self.storePngBtn.setText(QtGui.QApplication.translate("Form", "Store PNG", None, QtGui.QApplication.UnicodeUTF8))
        self.autoRangeBtn.setText(QtGui.QApplication.translate("Form", "Auto Range", None, QtGui.QApplication.UnicodeUTF8))
        self.redirectCheck.setToolTip(QtGui.QApplication.translate("Form", "Check to display all local items in a remote canvas.", None, QtGui.QApplication.UnicodeUTF8))
        self.redirectCheck.setText(QtGui.QApplication.translate("Form", "Redirect", None, QtGui.QApplication.UnicodeUTF8))
        self.resetTransformsBtn.setText(QtGui.QApplication.translate("Form", "Reset Transforms", None, QtGui.QApplication.UnicodeUTF8))
        self.mirrorSelectionBtn.setText(QtGui.QApplication.translate("Form", "Mirror Selection", None, QtGui.QApplication.UnicodeUTF8))
        self.reflectSelectionBtn.setText(QtGui.QApplication.translate("Form", "MirrorXY", None, QtGui.QApplication.UnicodeUTF8))
from ..widgets.TreeWidget import TreeWidget
from CanvasManager import CanvasCombo
from ..widgets.GraphicsView import GraphicsView
| mit |
Abi1ity/uniclust2.0 | flask/lib/python2.7/site-packages/decorator.py | 112 | 10639 | ########################## LICENCE ###############################
# Copyright (c) 2005-2012, Michele Simionato
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# Redistributions in bytecode form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
"""
Decorator module, see http://pypi.python.org/pypi/decorator
for the documentation.
"""
__version__ = '3.4.0'
__all__ = ["decorator", "FunctionMaker", "contextmanager"]
import sys, re, inspect
# Python 2/3 compatibility shims.
# NOTE(review): the string comparison works for major versions '2' and '3'
# but would misorder a hypothetical '10.x'; fine for the versions this
# module supports.
if sys.version >= '3':
    from inspect import getfullargspec

    def get_init(cls):
        # On Python 3, __init__ is a plain function.
        return cls.__init__
else:
    class getfullargspec(object):
        "A quick and dirty replacement for getfullargspec for Python 2.X"
        def __init__(self, f):
            self.args, self.varargs, self.varkw, self.defaults = \
                inspect.getargspec(f)
            # Python 2 has no keyword-only arguments.
            self.kwonlyargs = []
            self.kwonlydefaults = None

        def __iter__(self):
            # Iteration order mirrors the inspect.getargspec tuple.
            yield self.args
            yield self.varargs
            yield self.varkw
            yield self.defaults

    def get_init(cls):
        # On Python 2, __init__ is an unbound method; unwrap it.
        return cls.__init__.im_func
DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
# basic functionality
class FunctionMaker(object):
    """
    An object with the ability to create functions with a given signature.
    It has attributes name, doc, module, signature, defaults, dict and
    methods update and make.
    """
    def __init__(self, func=None, name=None, signature=None,
                 defaults=None, doc=None, module=None, funcdict=None):
        self.shortsignature = signature
        if func:
            # func can be a class or a callable, but not an instance method
            self.name = func.__name__
            if self.name == '<lambda>':  # small hack for lambda functions
                self.name = '_lambda_'
            self.doc = func.__doc__
            self.module = func.__module__
            if inspect.isfunction(func):
                argspec = getfullargspec(func)
                self.annotations = getattr(func, '__annotations__', {})
                for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
                          'kwonlydefaults'):
                    setattr(self, a, getattr(argspec, a))
                # arg0, arg1, ... attributes let templates reference
                # positional parameters by index.
                for i, arg in enumerate(self.args):
                    setattr(self, 'arg%d' % i, arg)
                if sys.version < '3':  # easy way
                    self.shortsignature = self.signature = \
                        inspect.formatargspec(
                            formatvalue=lambda val: "", *argspec)[1:-1]
                else:  # Python 3 way
                    # signature keeps '=None' placeholders for kwonly args;
                    # shortsignature is what a call site would look like.
                    allargs = list(self.args)
                    allshortargs = list(self.args)
                    if self.varargs:
                        allargs.append('*' + self.varargs)
                        allshortargs.append('*' + self.varargs)
                    elif self.kwonlyargs:
                        allargs.append('*')  # single star syntax
                    for a in self.kwonlyargs:
                        allargs.append('%s=None' % a)
                        allshortargs.append('%s=%s' % (a, a))
                    if self.varkw:
                        allargs.append('**' + self.varkw)
                        allshortargs.append('**' + self.varkw)
                    self.signature = ', '.join(allargs)
                    self.shortsignature = ', '.join(allshortargs)
                self.dict = func.__dict__.copy()
        # func=None happens when decorating a caller
        if name:
            self.name = name
        if signature is not None:
            self.signature = signature
        if defaults:
            self.defaults = defaults
        if doc:
            self.doc = doc
        if module:
            self.module = module
        if funcdict:
            self.dict = funcdict
        # check existence required attributes
        assert hasattr(self, 'name')
        if not hasattr(self, 'signature'):
            raise TypeError('You are decorating a non function: %s' % func)

    def update(self, func, **kw):
        "Update the signature of func with the data in self"
        func.__name__ = self.name
        func.__doc__ = getattr(self, 'doc', None)
        func.__dict__ = getattr(self, 'dict', {})
        # Python 2 spelling; on Python 3 this would be __defaults__.
        func.func_defaults = getattr(self, 'defaults', ())
        func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
        func.__annotations__ = getattr(self, 'annotations', None)
        # NOTE(review): walks three frames up to find the module of the code
        # that triggered the decoration; presumably fragile if the internal
        # call depth ever changes - confirm before refactoring call paths.
        callermodule = sys._getframe(3).f_globals.get('__name__', '?')
        func.__module__ = getattr(self, 'module', callermodule)
        func.__dict__.update(kw)

    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
        "Make a new function from a given template and update the signature"
        src = src_templ % vars(self)  # expand name and signature
        evaldict = evaldict or {}
        mo = DEF.match(src)
        if mo is None:
            raise SyntaxError('not a valid function template\n%s' % src)
        name = mo.group(1)  # extract the function name
        names = set([name] + [arg.strip(' *') for arg in
                              self.shortsignature.split(',')])
        for n in names:
            # _func_ and _call_ are reserved for the decorator machinery.
            if n in ('_func_', '_call_'):
                raise NameError('%s is overridden in\n%s' % (n, src))
        if not src.endswith('\n'):  # add a newline just for safety
            src += '\n'  # this is needed in old versions of Python
        try:
            code = compile(src, '<string>', 'single')
            # print >> sys.stderr, 'Compiling %s' % src
            # Python 2 exec-statement syntax; the generated function is
            # defined directly inside evaldict.
            exec code in evaldict
        except:
            print >> sys.stderr, 'Error in generated code:'
            print >> sys.stderr, src
            raise
        func = evaldict[name]
        if addsource:
            attrs['__source__'] = src
        self.update(func, **attrs)
        return func

    @classmethod
    def create(cls, obj, body, evaldict, defaults=None,
               doc=None, module=None, addsource=True, **attrs):
        """
        Create a function from the strings name, signature and body.
        evaldict is the evaluation dictionary. If addsource is true an attribute
        __source__ is added to the result. The attributes attrs are added,
        if any.
        """
        if isinstance(obj, str):  # "name(signature)"
            name, rest = obj.strip().split('(', 1)
            signature = rest[:-1]  # strip a right parens
            func = None
        else:  # a function
            name = None
            signature = None
            func = obj
        self = cls(func, name, signature, defaults, doc, module)
        # Indent the body so it nests inside the generated 'def'.
        ibody = '\n'.join('    ' + line for line in body.splitlines())
        return self.make('def %(name)s(%(signature)s):\n' + ibody,
                         evaldict, addsource, **attrs)
def decorator(caller, func=None):
    """
    decorator(caller) converts a caller function into a decorator;
    decorator(caller, func) decorates a function using a caller.
    """
    if func is not None:  # returns a decorated function
        evaldict = func.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['_func_'] = func
        # The generated wrapper preserves func's exact signature.
        return FunctionMaker.create(
            func, "return _call_(_func_, %(shortsignature)s)",
            evaldict, undecorated=func, __wrapped__=func)
    else:  # returns a decorator
        # The caller may be a class, a plain function, or a callable object;
        # in each case extract a name, a docstring, and the name of the
        # caller's first "decorated function" parameter.
        if inspect.isclass(caller):
            name = caller.__name__.lower()
            callerfunc = get_init(caller)
            doc = 'decorator(%s) converts functions/generators into ' \
                'factories of %s objects' % (caller.__name__, caller.__name__)
            fun = getfullargspec(callerfunc).args[1]  # second arg
        elif inspect.isfunction(caller):
            name = '_lambda_' if caller.__name__ == '<lambda>' \
                else caller.__name__
            callerfunc = caller
            doc = caller.__doc__
            fun = getfullargspec(callerfunc).args[0]  # first arg
        else:  # assume caller is an object with a __call__ method
            name = caller.__class__.__name__.lower()
            # Python 2: unwrap the bound method to get the function.
            callerfunc = caller.__call__.im_func
            doc = caller.__call__.__doc__
            fun = getfullargspec(callerfunc).args[1]  # second arg
        evaldict = callerfunc.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['decorator'] = decorator
        return FunctionMaker.create(
            '%s(%s)' % (name, fun),
            'return decorator(_call_, %s)' % fun,
            evaldict, undecorated=caller, __wrapped__=caller,
            doc=doc, module=caller.__module__)
######################### contextmanager ########################
# The module-level __call__ (and, on old Pythons, __init__) below are not
# stray methods: they are injected as methods into the dynamically built
# ContextManager class via type(...) further down.
def __call__(self, func):
    'Context manager decorator'
    return FunctionMaker.create(
        func, "with _self_: return _func_(%(shortsignature)s)",
        dict(_self_=self, _func_=func), __wrapped__=func)

try:  # Python >= 3.2
    from contextlib import _GeneratorContextManager
    ContextManager = type(
        'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))
except ImportError:  # Python >= 2.5
    from contextlib import GeneratorContextManager

    def __init__(self, f, *a, **k):
        # Old GeneratorContextManager takes the already-created generator.
        return GeneratorContextManager.__init__(self, f(*a, **k))
    ContextManager = type(
        'ContextManager', (GeneratorContextManager,),
        dict(__call__=__call__, __init__=__init__))

# A signature-preserving replacement for contextlib.contextmanager.
contextmanager = decorator(ContextManager)
| bsd-3-clause |
phyxl/subber | lib/werkzeug/security.py | 302 | 8407 | # -*- coding: utf-8 -*-
"""
werkzeug.security
~~~~~~~~~~~~~~~~~
Security related helpers such as secure password hashing tools.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import hmac
import hashlib
import posixpath
import codecs
from struct import Struct
from random import SystemRandom
from operator import xor
from itertools import starmap
from werkzeug._compat import range_type, PY2, text_type, izip, to_bytes, \
string_types, to_native
# Alphabet used by gen_salt(): ASCII letters and digits only.
SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
# Default PBKDF2 round count when the method string gives none.
DEFAULT_PBKDF2_ITERATIONS = 1000
# Packs the PBKDF2 block counter as a big-endian unsigned 32-bit int.
_pack_int = Struct('>I').pack
# hmac.compare_digest when available (Python >= 2.7.7/3.3), else None.
_builtin_safe_str_cmp = getattr(hmac, 'compare_digest', None)
# Cryptographically strong RNG for salt generation.
_sys_rng = SystemRandom()
# Path separators other than '/' on this OS; safe_join() rejects them.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
                    if sep not in (None, '/'))
def _find_hashlib_algorithms():
algos = getattr(hashlib, 'algorithms', None)
if algos is None:
algos = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
rv = {}
for algo in algos:
func = getattr(hashlib, algo, None)
if func is not None:
rv[algo] = func
return rv
_hash_funcs = _find_hashlib_algorithms()
def pbkdf2_hex(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS,
               keylen=None, hashfunc=None):
    """Derive a PBKDF2 key and return it as a hex-encoded native string.

    Thin wrapper around :func:`pbkdf2_bin`.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key; defaults to the
                   digest size of the hash function.
    :param hashfunc: hash function to use, either a hashlib function or
                     the string name of one. Defaults to sha1.
    """
    raw_key = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
    return to_native(codecs.encode(raw_key, 'hex_codec'))
def pbkdf2_bin(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS,
               keylen=None, hashfunc=None):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` time and produces a
    key of `keylen` bytes. By default SHA-1 is used as hash function,
    a different hashlib `hashfunc` can be provided.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key. If not provided
                   the digest size will be used.
    :param hashfunc: the hash function to use. This can either be the
                     string name of a known hash function or a function
                     from the hashlib module. Defaults to sha1.
    """
    # Resolve the hash function: a string name is looked up in the table
    # of available hashlib algorithms.
    if isinstance(hashfunc, string_types):
        hashfunc = _hash_funcs[hashfunc]
    elif not hashfunc:
        hashfunc = hashlib.sha1
    salt = to_bytes(salt)
    # PBKDF2's PRF: an HMAC keyed with the password (`data`); each block
    # is produced from a copy of this keyed state.
    mac = hmac.HMAC(to_bytes(data), None, hashfunc)
    if not keylen:
        keylen = mac.digest_size
    def _pseudorandom(x, mac=mac):
        h = mac.copy()
        h.update(x)
        return bytearray(h.digest())
    buf = bytearray()
    # -(-keylen // digest_size) is ceiling division: number of blocks
    # needed to cover keylen bytes.
    for block in range_type(1, -(-keylen // mac.digest_size) + 1):
        # U_1 = PRF(salt || INT(block)); subsequent U_i are chained and
        # XOR-folded into rv per RFC 2898.
        rv = u = _pseudorandom(salt + _pack_int(block))
        for i in range_type(iterations - 1):
            u = _pseudorandom(bytes(u))
            rv = bytearray(starmap(xor, izip(rv, u)))
        buf.extend(rv)
    # Truncate the concatenated blocks to the requested key length.
    return bytes(buf[:keylen])
def safe_str_cmp(a, b):
    """Compare two strings in roughly constant time.

    Requires that the length of at least one string is known in advance.
    Returns `True` if the two strings are equal, `False` otherwise.

    .. versionadded:: 0.7
    """
    # Prefer the C implementation (hmac.compare_digest) when present.
    if _builtin_safe_str_cmp is not None:
        return _builtin_safe_str_cmp(a, b)
    if len(a) != len(b):
        return False
    # On Python 3, iterating bytes yields ints; elsewhere ord() is needed.
    raw_ints = isinstance(a, bytes) and isinstance(b, bytes) and not PY2
    diff = 0
    for x, y in izip(a, b):
        if raw_ints:
            diff |= x ^ y
        else:
            diff |= ord(x) ^ ord(y)
    return diff == 0
def gen_salt(length):
    """Generate a random string of SALT_CHARS with specified ``length``."""
    if length <= 0:
        raise ValueError('requested salt of length <= 0')
    picks = [_sys_rng.choice(SALT_CHARS) for _ in range_type(length)]
    return ''.join(picks)
def _hash_internal(method, salt, password):
    """Internal password hash helper. Supports plaintext without salt,
    unsalted and salted passwords. In case salted passwords are used
    hmac is used.

    Returns a ``(hexdigest_or_plaintext, actual_method)`` tuple, where
    ``actual_method`` records the resolved method string (including the
    PBKDF2 iteration count) for embedding into the stored hash.
    """
    # 'plain' stores the password verbatim; nothing to derive.
    if method == 'plain':
        return password, method

    if isinstance(password, text_type):
        password = password.encode('utf-8')

    # Method strings of the form 'pbkdf2:<hash>[:<iterations>]'.
    if method.startswith('pbkdf2:'):
        args = method[7:].split(':')
        if len(args) not in (1, 2):
            raise ValueError('Invalid number of arguments for PBKDF2')
        method = args.pop(0)
        # Empty or '0' iteration counts fall back to the default.
        iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS
        is_pbkdf2 = True
        actual_method = 'pbkdf2:%s:%d' % (method, iterations)
    else:
        is_pbkdf2 = False
        actual_method = method

    hash_func = _hash_funcs.get(method)
    if hash_func is None:
        raise TypeError('invalid method %r' % method)

    if is_pbkdf2:
        if not salt:
            raise ValueError('Salt is required for PBKDF2')
        rv = pbkdf2_hex(password, salt, iterations,
                        hashfunc=hash_func)
    elif salt:
        # Salted non-PBKDF2 hash: HMAC keyed with the salt.
        if isinstance(salt, text_type):
            salt = salt.encode('utf-8')
        rv = hmac.HMAC(salt, password, hash_func).hexdigest()
    else:
        # Legacy unsalted hash: plain digest of the password.
        h = hash_func()
        h.update(password)
        rv = h.hexdigest()
    return rv, actual_method
def generate_password_hash(password, method='pbkdf2:sha1', salt_length=8):
    """Hash a password with the given method and salt with a string of
    the given length. The format of the string returned includes the method
    that was used so that :func:`check_password_hash` can check the hash.

    The format for the hashed string looks like this::

        method$salt$hash

    This method can **not** generate unsalted passwords but it is possible
    to set the method to plain to enforce plaintext passwords. If a salt
    is used, hmac is used internally to salt the password.

    If PBKDF2 is wanted it can be enabled by setting the method to
    ``pbkdf2:method:iterations`` where iterations is optional::

        pbkdf2:sha1:2000$salt$hash
        pbkdf2:sha1$salt$hash

    :param password: the password to hash
    :param method: the hash method to use (one that hashlib supports), can
                   optionally be in the format ``pbkdf2:<method>[:iterations]``
                   to enable PBKDF2.
    :param salt_length: the length of the salt in letters
    """
    # Conditional expression instead of the fragile `cond and x or y`
    # idiom (which breaks whenever x is falsy).
    salt = gen_salt(salt_length) if method != 'plain' else ''
    h, actual_method = _hash_internal(method, salt, password)
    return '%s$%s$%s' % (actual_method, salt, h)
def check_password_hash(pwhash, password):
    """Check a password against a given salted and hashed password value.

    In order to support unsalted legacy passwords this method supports
    plain text passwords, md5 and sha1 hashes (both salted and unsalted).

    Returns `True` if the password matched, `False` otherwise.

    :param pwhash: a hashed string like returned by
                   :func:`generate_password_hash`
    :param password: the plaintext password to compare against the hash
    """
    # A well-formed hash is 'method$salt$hexdigest'.
    if pwhash.count('$') < 2:
        return False
    method, salt, hashval = pwhash.split('$', 2)
    candidate, _ = _hash_internal(method, salt, password)
    return safe_str_cmp(candidate, hashval)
def safe_join(directory, filename):
    """Safely join `directory` and `filename`. If this cannot be done,
    this function returns ``None``.

    :param directory: the base directory.
    :param filename: the untrusted filename relative to that directory.
    """
    normalized = posixpath.normpath(filename)
    # Reject any alternate OS path separator smuggled into the name.
    if any(sep in normalized for sep in _os_alt_seps):
        return None
    # Reject absolute paths and attempts to escape the base directory.
    if os.path.isabs(normalized) or normalized.startswith('../'):
        return None
    return os.path.join(directory, normalized)
| gpl-2.0 |
HughP/stardict-3 | tools/src/lingea-trd-decoder.py | 44 | 28596 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Script for decoding Lingea Dictionary (.trd) file
# Result is <header>\t<definition> file, convertible easily
# by stardict-editor from package stardict-tools into native
# Stardict dictionary (stardict.sf.net and www.stardict.org)
#
# Copyright (C) 2007 - Klokan Petr Přidal (www.klokan.cz)
#
# Based on script CobuildConv.rb by Nomad
# http://hp.vector.co.jp/authors/VA005784/cobuild/cobuildconv.html
#
# Version history:
# 0.4 (30.10.2007) Patch by Petr Dlouhy, optional HTML generation
# 0.3 (28.10.2007) Patch by Petr Dlouhy, cleanup, bugfix. More dictionaries.
# 0.2 (19.7.2007) Changes, documentation, first 100% dictionary
# 0.1 (20.5.2006) Initial version based on Nomad specs
#
# Supported dictionaries:
# - Lingea Německý Kapesní slovník
# - Lingea Anglický Kapesní slovník
# - Lingea 2002 series (theoretically)
#
# Modified by:
# - Petr Dlouhy (petr.dlouhy | email.cz)
# Generalization of data block rules, sampleFlag 0x04, sound out fix, data phrase prefix with comment (0x04)
# HTML output, debugging patch, options on command line
#
# <write your name here>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# VERSION
VERSION = "0.4"
import getopt, sys
def usage():
print "Lingea Dictionary Decoder"
print "-------------------------"
print "Version: %s" % VERSION
print "Copyright (C) 2007 - Klokan Petr Pridal, Petr Dlouhy"
print
print "Usage: python lingea-trd-decoder.py DICTIONARY.trd > DICTIONARY.tab"
print "Result convertion by stardict-tools: /usr/lib/stardict-tools/tabfile"
print
print " -o <num> --out-style : Output style"
print " 0 no tags"
print " 1 \\n tags"
print " 2 html tags"
print " -h --help : Print this message"
print " -d --debug : Degub"
print " -r --debug-header : Degub - print headers"
print " -a --debug-all : Degub - print all records"
print " -l --debug-limit : Degub limit"
print
print "For HTML support in StarDict dictionary .ifo has to contain:"
print "sametypesequence=g"
print "!!! Change the .ifo file after generation by tabfile !!!"
print
try:
opts, args = getopt.getopt(sys.argv[1:], "hdo:ral:", ["help", "debug", "out-style=", "debug-header", "debug-all", "debug-limit="])
except getopt.GetoptError:
usage()
print "ERROR: Bad option"
sys.exit(2)
import locale
DEBUG = False
OUTSTYLE = 2
DEBUGHEADER = False
DEBUGALL = False
DEBUGLIMIT = 1
for o, a in opts:
if o in ("-d", "-debug"):
# DEBUGING !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
DEBUG = True
if o in ("-o", "--out-style"):
# output style
OUTSTYLE = locale.atoi(a)
if OUTSTYLE > 2:
usage()
print "ERROR: Output style not specified"
if o in ("-r", "--debug-header"):
# If DEBUG and DEBUGHEADER, then print just all header records
DEBUGHEADER = True
if o in ("-a", "--debug-all"):
# If DEBUG and DEBUGALL then print debug info for all records
DEBUGALL = True
if o in ("-h", "--help"):
usage()
sys.exit(0)
if o in ("-l", "--debug-limit"):
# Number of wrong records for printing to stop during debugging
DEBUGLIMIT = locale.atoi(a)
# FILENAME is a first parameter on the commandline now
if len(args) == 1:
FILENAME = args[0]
else:
usage()
print "ERROR: You have to specify .trd file to decode"
sys.exit(2)
from struct import *
import re
alpha = ['\x00', 'a','b','c','d','e','f','g','h','i',
'j','k','l','m','n','o','p','q','r','s',
't','u','v','w','x','y','z','#AL27#','#AL28#','#AL29#',
'#AL30#','#AL31#', ' ', '.', '<', '>', ',', ';', '-', '#AL39#',
'#GRAVE#', '#ACUTE#', '#CIRC#', '#TILDE#', '#UML#', '#AL45#', '#AL46#', '#CARON#', '#AL48#', '#CEDIL#',
'#AL50#', '#AL51#', '#GREEK#', '#AL53#', '#AL54#', '#AL55#', '#AL56#', '#AL57#', '#AL58#', '#SYMBOL#',
'#AL60#', '#UPCASE#', '#SPECIAL#', '#UNICODE#'] # 4 bytes after unicode
upcase = ['#UP0#','#UP1#','#UP2#','#UP3#','#UP4#','#UP5#','#UP6#','#UP7#','#UP8#','#UP9#',
'#UP10#','#UP11#','#UP12#','#UP13#','#UP14#','#UP15#','#UP16#','#UP17#','#UP18#','#UP19#',
'#UP20#','#UP21#','#UP22#','#UP23#','#UP24#','#UP25#','#UP26#','#UP27#','#UP28#','#UP29#',
'#UP30#','#UP31#','A','B','C','D','E','F','G','H',
'I','J','K','L','M','N','O','P','Q','R',
'S','T','U','V','W','X','Y','Z','#UP58#','#UP59#',
'#UP60#','#UP61#','#UP62#','#UP63#']
upcase_pron = ['#pr0#', '#pr1#','#pr2#','#pr3#','#pr4#','#pr5#','#pr6#','#pr7#','#pr8#','#pr9#',
'#pr10#', '#pr11#','#pr12#','#pr13#','#pr14#','#pr15#','#pr16#','#pr17#','#pr18#','#pr19#',
'#pr20#', '#pr21#','#pr22#','#pr23#','#pr24#','#pr25#','#pr26#','#pr27#','#pr28#','#pr29#',
'#pr30#', '#pr31#','ɑ','#pr33#','ʧ','ð','ə','ɜ','#pr38#','æ',
'ɪ', 'ɭ','#pr42#','ŋ','#pr44#','ɳ','ɔ','#pr47#','ɒ','ɽ',
'ʃ', 'θ','ʊ','ʌ','#pr54#','#pr55#','#pr56#','ʒ','#pr58#','#pr59#',
'#pr60#', '#pr61#','#pr62#','#pr63#']
symbol = ['#SY0#', '#SY1#','#SY2#','#SY3#','§','#SY5#','#SY6#','#SY7#','#SY8#','#SY9#',
'#SY10#', '#SY11#','#SY12#','#SY13#','#SY14#','™','#SY16#','#SY17#','¢','£',
'#SY20#', '#SY21#','#SY22#','#SY23#','©','#SY25#','#SY26#','#SY27#','®','°',
'#SY30#', '²','³','#SY33#','#SY34#','#SY35#','¹','#SY37#','#SY38#','#SY39#',
'½', '#SY41#','#SY42#','×','÷','#SY45#','#SY46#','#SY47#','#SY48#','#SY49#',
'#SY50#', '#SY51#','#SY52#','#SY53#','#SY54#','#SY55#','#SY56#','#SY57#','#SY58#','#SY59#',
'#SY60#', '#SY61#','#SY62#','#SY63#']
special = ['#SP0#', '!','"','#','$','%','&','\'','(',')',
'*', '+','#SP12#','#SP13#','#SP14#','/','0','1','2','3',
'4', '5','6','7','8','9',':',';','<','=',
'>', '?','@','[','\\',']','^','_','`','{',
'|', '}','~','#SP43#','#SP44#','#SP45#','#SP46#','#SP47#','#SP48#','#SP49#',
'#SP50#', '#SP51#','#SP52#','#SP53#','#SP54#','#SP55#','#SP56#','#SP57#','#SP58#','#SP59#',
'#SP60#', '#SP61#','#SP62#','#SP63#']
wordclass = ('#0#','n:','adj:','pron:','#4#','v:','adv:','prep:','#8#','#9#',
'intr:','phr:','#12#','#13#','#14#','#15#','#16#','#17#','#18#','#19#',
'#20#','#21#','#22#','#23#','#24#','#25#','#26#','#27#','#28#','#29#',
'#30#','#31#')
if OUTSTYLE == 0:
tag = {
'db':('' ,''), #Data begining
'rn':('' ,'\t'), #Record name
'va':('' ,' '), #Header variant
'wc':('(' ,')'), #WordClass
'pa':('' ,' '), #Header parts
'fo':('(' ,') '), #Header forms
'on':('(' ,')' ), #Header origin note
'pr':('[' ,']'), #Header pronunciation
'dv':('{' ,'} '), #Header dataVariant
'sa':('`' ,'`' ), #Data sample
'sw':('' ,''), #Data sample wordclass; is no printed by Lingea
'do':('`' ,'`' ), #Data origin note
'df':('' ,' '), #Data definition
'ps':('"' ,'" '), #Data phrase short form
'pg':('"' ,' = '), #Data phrase green
'pc':('`' ,'`'), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
'p1':('"' ,' = '), #Data phrase 1
'p2':('' ,'" ' ), #Data phrase 2
'sp':('"' ,' = ' ),#Data simple phrase
'b1':('"' ,' = '), #Data phrase (block) 1
'b2':('" ' ,''), #Data phrase (block) 2
}
if OUTSTYLE == 1:
tag = {
'db':('•' ,''), #Data begining
'rn':('' ,'\t'), #Record name
'va':('' ,' '), #Header variant
'wc':('' ,'\\n'), #WordClass
'pa':('' ,':\\n'), #Header parts
'fo':('(' ,') '), #Header forms
'on':('(' ,')\\n' ), #Header origin note
'pr':('[' ,']\\n'), #Header pronunciation
'dv':('{' ,'} '), #Header dataVariant
'sa':(' ' ,'\\n' ), #Data sample
'sw':('' ,''), #Data sample wordclass; is not printed by Lingea
'do':(' ' ,' ' ), #Data origin note
'df':(' ' ,'\\n'), #Data definition
'ps':(' ' ,'\\n'), #Data phrase short form
'pg':(' ' ,' '), #Data phrase green
'pc':(' ' ,' '), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
'p1':(' ' ,' '), #Data phrase 1
'p2':(' ' ,'\\n' ), #Data phrase 2
'sp':('' ,'\\n' ), #Data simple phrase
'b1':('"' ,' = '), #Data phrase (block) 1
'b2':('" ' ,''), #Data phrase (block) 2
}
if OUTSTYLE == 2:
tag = {
'db':('•' ,''), #Data begining
'rn':('' ,'\t'), #Record name
'va':('' ,' '), #Header variant
'wc':('<span size="larger" color="darkred" weight="bold">','</span>\\n'), #WordClass
'pa':('<span size="larger" color="darkred" weight="bold">',':</span>\\n'), #Header parts
'fo':('(' ,') '), #Header forms
'on':('<span color="blue">(' ,')</span>\\n' ), #Header origin note
'pr':('[' ,']\\n'), #Header pronunciation
'dv':('{' ,'} '), #Header dataVariant
'sa':(' <span color="darkred" weight="bold">' ,'</span>\\n' ), #Data sample
'sw':('' ,''), #Data sample wordclass; is not printed by Lingea
'do':(' <span color="darkred" weight="bold">' ,'</span> ' ), #Data origin note
'df':(' <span weight="bold">' ,'</span>\\n'), #Data definition
'ps':(' <span color="dimgray" weight="bold">' ,'</span>\\n'), #Data phrase short form
'pg':(' <span color="darkgreen" style="italic">' ,'</span> '), #Data phrase green
'pc':(' <span color="darkgreen" style="italic">' ,'</span> '), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
'p1':(' <span color="dimgray" style="italic">' ,'</span> '), #Data phrase 1
'p2':(' ' ,'\\n' ), #Data phrase 2
'sp':('<span color="cyan">' ,'</span>\\n' ), #Data simple phrase
'b1':('"' ,' = '), #Data phrase (block) 1
'b2':('" ' ,''), #Data phrase (block) 2
}
# ANSI color helpers for debug output (plain defs instead of assigned
# lambdas, per PEP 8 E731).
def purple(c):
    """Wrap *c* in bright-magenta ANSI escape codes."""
    return '\x1b[1;35m' + c + '\x1b[0m'

def blue(c):
    """Wrap *c* in bright-blue ANSI escape codes."""
    return '\x1b[1;34m' + c + '\x1b[0m'

def cyan(c):
    """Wrap *c* in cyan ANSI escape codes."""
    return '\x1b[36m' + c + '\x1b[0m'

def gray(c):
    """Wrap *c* in bold/bright ANSI escape codes."""
    return '\x1b[1m' + c + '\x1b[0m'
def getRec(n):
    """Get data stream for record of given number"""
    # Out-of-range record numbers yield an empty stream.
    if 0 <= n < entryCount:
        f.seek(index[n])
        return f.read(index[n + 1] - index[n])
    return ''
def decode_alpha( stream, nullstop=True):
    """Decode a 6-bit packed data stream from the beginning until the
    first NULL code (when ``nullstop`` is true).

    Every 3 input bytes carry four 6-bit codes; returns the decoded
    string plus the index of the last consumed byte.
    """
    offset = 0
    triple = 0
    result = []
    while triple < len( stream ):
        # Extract the next 6-bit code; its bit position within the
        # 3-byte group depends on offset % 4.
        if offset % 4 == 0:
            c = stream[triple] >> 2
            triple += 1
        if offset % 4 == 1:
            c = (stream[triple-1] & 3) << 4 | stream[triple] >> 4
            triple += 1
        if offset % 4 == 2:
            c = (stream[triple-1] & 15) << 2 | (stream[triple] & 192) >> 6
            triple += 1
        if offset % 4 == 3:
            c = stream[triple-1] & 63
        # Code 0 is the string terminator.
        if c == 0 and nullstop:
            break
        offset += 1
        # TODO: ENCODE UNICODE 4 BYTE STREAM!!! and but it after #UNICODE# as unichr()
        result.append(c)
    return decode_alpha_postprocessing(result), triple - 1
def decode_alpha_postprocessing( input ):
"""Lowlevel alphabet decoding postprocessing, combines tuples into one character"""
result = ""
input.extend([0x00]*5)
# UPCASE, UPCASE_PRON, SYMBOL, SPECIAL
skip = False
for i in range(0,len(input)-1):
if skip:
skip = False
continue
bc = input[i]
c = alpha[bc]
bc1 = input[i+1]
c1 = alpha[bc1]
if bc < 40:
result += c
else:
if c == "#GRAVE#":
if c1 == 'a': result += 'à'
else: result += '#GRAVE%s#' % c1
elif c == "#UML#":
if c1 == 'o': result += 'ö'
elif c1 == 'u': result += 'ü'
elif c1 == 'a': result += 'ä'
elif c1 == ' ': result += 'Ä'
elif c1 == '#AL46#': result += 'Ö'
elif c1 == '#GREEK#': result += 'Ü'
else: result += '#UML%s#' % c1
elif c == "#ACUTE#":
if c1 == 'a': result += 'á'
elif c1 == 'e': result += 'é'
elif c1 == 'i': result += 'í'
elif c1 == 'o': result += 'ó'
elif c1 == 'u': result += 'ú'
elif c1 == 'y': result += 'ý'
elif c1 == ' ': result += 'Á'
elif c1 == '#GRAVE#': result += 'Í'
else: result += '#ACUTE%s#' % c1
elif c == "#CARON#":
if c1 == 'r': result += 'ř'
elif c1 == 'c': result += 'č'
elif c1 == 's': result += 'š'
elif c1 == 'z': result += 'ž'
elif c1 == 'e': result += 'ě'
elif c1 == 'd': result += 'ď'
elif c1 == 't': result += 'ť'
elif c1 == 'a': result += 'å'
elif c1 == 'u': result += 'ů'
elif c1 == 'n': result += 'ň'
elif c1 == '<': result += 'Č'
elif c1 == '#CEDIL#': result += 'Ř'
elif c1 == '#AL50#': result += 'Š'
elif c1 == '#AL57#': result += 'Ž'
else: result += '#CARON%s#' % c1
elif c == "#UPCASE#":
result += upcase[bc1]
elif c == "#SYMBOL#":
result += symbol[bc1]
elif c == "#AL51#":
if c1 == 's': result += 'ß'
elif c == "#AL48#":
result += "#AL48#%s" % c1
elif c == "#SPECIAL#":
result += special[bc1]
elif c == "#UNICODE#":
result += '#UNICODE%s#' % bc1
elif c == "#CIRC#":
if c1 == 'a': result += 'â'
else: result += '#CARON%s#' % c1
else:
result += '%sX%s#' % (c[:-1], bc1)
skip = True
return result
def pronunciation_encode(s):
    """Encode pronunciation upcase symbols into IPA symbols"""
    # Both tables have 64 entries; substitute position-for-position.
    for plain, ipa in zip(upcase, upcase_pron):
        s = s.replace(plain, ipa)
    return s
re_d = re.compile(r'<d(.*?)>')
re_w = re.compile(r'<w(.*?)>')
re_y = re.compile(r'<y(.*?)>')
re_c = re.compile(r'<c(.*?)>')
def decode_tag_postprocessing(input):
    """Decode and replace tags used in lingea dictionaries; decode internal tags"""
    s = input
    # General information in http://www.david-zbiral.cz/El-slovniky-plnaverze.htm#_Toc151656799
    # TODO: Better output handling
    # The OUTSTYLE 0 and 1 branches were byte-identical duplicates; merged.
    if OUTSTYLE in (0, 1):
        # Plain / \n-tag output: reduce <d>/<w>/<y>/<c> markup to parentheses.
        s = re_d.sub(r'(\1)',s)
        s = re_w.sub(r'(\1)',s)
        s = re_y.sub(r'(\1)',s)
        s = re_c.sub(r'(\1)',s)
    if OUTSTYLE == 2:
        # HTML output: wrap tag contents in pango-style <span> markup.
        s = re_d.sub(r'<span size="small" color="blue">(\1)</span>',s)
        s = re_w.sub(r'<span size="small" color="blue" style="italic">\1</span>',s)
        s = re_y.sub(r'<span size="small" color="blue" style="italic">\1</span>',s)
        s = re_c.sub(r'<span size="small" color="blue" style="italic">\1</span>',s)
    return s
def toBin(b):
    """Prettify debug output format: hex(bin)dec.

    Renders *b* three ways: zero-padded hex, a decimal number whose
    digits spell the binary representation, and zero-padded decimal.
    """
    # bin(5) -> '0b101' -> pseudo-binary integer 101; zero maps to 0.
    pseudo_binary = int(bin(b)[2:]) if b > 0 else 0
    return "0x%02X(%08d)%03d" % (b, pseudo_binary, b)
def out( comment = "", skip = False):
    """Read next byte or string (with skip=True) and output DEBUG info.

    Advances the module-level cursor ``pos`` over the record stream
    ``bs``; with ``skip`` the whole decoded string is consumed, else a
    single byte.
    """
    global bs, pos
    # Decode the 6-bit string starting at the cursor (used for logging
    # and, when skip=True, as the return value).
    s, triple = decode_alpha(bs[pos:])
    s = s.split('\x00')[0] # give me string until first NULL
    # A '%' in the comment means it is a format string: fill it with the
    # decoded string (skip mode) or the raw byte value.
    if (comment.find('%') != -1):
        if skip:
            comment = comment % s
        else:
            comment = comment % bs[pos]
    if DEBUG: print "%03d %s %s | %s | %03d" % (pos, toBin(bs[pos]),comment, s, (triple + pos))
    if skip:
        pos += triple + 1
        return s.replace('`','') # Remove '`' character from words
    else:
        pos += 1
        return bs[pos-1]
# Plain defs instead of assigned lambdas (PEP 8 E731).
def outInt(c):
    """Read a single byte from the record stream, logging with *c*."""
    return out(c)

def outStr(c):
    """Read a NULL-terminated 6-bit string from the record stream."""
    return out(c, True)
def decode(stream):
"""Decode byte stream of one record, return decoded string with formatting in utf"""
result = ""
global bs, pos
# stream - data byte stream for one record
bs = unpack("<%sB" % len(stream), stream)
# bs - list of bytes from stream
pos = 0
itemCount = outInt("ItemCount: %s") # Number of blocks in the record
mainFlag = outInt("MainFlag: %s")
# HEADER BLOCK
# ------------
if mainFlag & 0x01:
headerFlag = outInt("HeaderFlag: %s") # Blocks in header
if headerFlag & 0x01:
result += tag['rn'][0] + outStr("Header record name: %s").replace('_','') + tag['rn'][1] # Remove character '_' from index
if headerFlag & 0x02:
result += tag['va'][0] + outStr("Header variant: %s") + tag['va'][1]
if headerFlag & 0x04:
s = outInt("Header wordclass: %s")
if s < 32:
result += tag['wc'][0] + wordclass[s] + tag['wc'][1]
else:
raise "Header wordclass out of range in: %s" % result
if headerFlag & 0x08:
result += tag['pa'][0] + outStr("Header parts: %s") + tag['pa'][1]
if headerFlag & 0x10:
result += tag['fo'][0] + outStr("Header forms: %s") + tag['fo'][1]
if headerFlag & 0x20:
result += tag['on'][0] + outStr("Header origin note: %s") + tag['on'][1]
if headerFlag & 0x80:
result += tag['pr'][0] + pronunciation_encode(outStr("Header pronunciation: %s")) + tag['pr'][1]
# Header data block
if mainFlag & 0x02:
headerFlag = outInt("Header dataFlag: %s") # Blocks in header
if headerFlag & 0x02:
result += tag['dv'][0] + outStr("Header dataVariant: %s")+ tag['dv'][1]
# ??? Link elsewhere
pass
# SOUND DATA REFERENCE
if mainFlag & 0x80:
outInt("Sound reference byte #1: %s")
outInt("Sound reference byte #2: %s")
outInt("Sound reference byte #3: %s")
outInt("Sound reference byte #4: %s")
outInt("Sound reference byte #5: %s")
#out("Sound data reference (5 bytes)", 6)
# TODO: Test all mainFlags in header!!!!
#result += ': '
li = 0
#print just every first word class identifier
# TODO: this is not systematic (should be handled by output)
global lastWordClass
lastWordClass = 0
# DATA BLOCK(S)
# -------------
for i in range(0, itemCount):
item = tag['db'][0] + tag['db'][1]
ol = False
dataFlag = outInt("DataFlag: %s -----------------------------")
if dataFlag & 0x01: # small index
sampleFlag = outInt("Data sampleFlag: %s")
if sampleFlag & 0x01:
result += tag['sa'][0] + outStr("Data sample: %s") + tag['sa'][1]
if sampleFlag & 0x04:
s = outInt("Data wordclass: %s")
if s != lastWordClass:
if s < 32:
result += tag['wc'][0] + wordclass[s] + tag['wc'][1]
else:
raise "Header wordclass out of range in: %s" % result
lastWordClass = s
if sampleFlag & 0x08:
result += tag['sw'][0] + outStr("Data sample wordclass: %s") + tag['sw'][1]
if sampleFlag & 0x10:
outInt("Data sample Int: %s")
outInt("Data sample Int: %s")
outInt("Data sample Int: %s")
if sampleFlag & 0x20:
item += tag['do'][0] + outStr("Data origin note: %s") + tag['do'][1]
if sampleFlag & 0x80:
item += " "
result += tag['pr'][0] + pronunciation_encode(outStr("Data sample pronunciation: %s")) + tag['pr'][1]
if dataFlag & 0x02:
item += " "
subFlag = outInt("Data subFlag: %s")
if subFlag == 0x80:
outStr("Data sub prefix: %s")
# It seams that data sub prefix content is ignored and there is a generated number for the whole block instead.
li += 1
ol = True
if dataFlag & 0x04: # chart
pass # ???
if dataFlag & 0x08: # reference
item += tag['df'][0] + outStr("Data definition: %s") + tag['df'][1]
if dataFlag & 0x10:
pass # ???
if dataFlag & 0x20: # phrase
phraseFlag1 = outInt("Data phraseFlag1: %s")
if phraseFlag1 & 0x01:
item += tag['ps'][0] + outStr("Data phrase short form: %s") + tag['ps'][1]
if phraseFlag1 & 0x02:
phraseCount = outInt("Data phraseCount: %s")
for i in range(0, phraseCount):
phraseComment = outInt("Data phrase prefix")
if phraseComment & 0x04:
item += tag['pc'][0] + outStr("Data phrase comment: %s") + tag['pc'][1]
item += tag['p1'][0] + outStr("Data phrase 1: %s") + tag['p1'][1]
item += tag['p2'][0] + outStr("Data phrase 2: %s") + tag['p2'][1]
if phraseFlag1 & 0x04:
phraseCount = outInt("Data phraseCount: %s")
for i in range(0, phraseCount):
phraseComment = outInt("Data phrase prefix")
if phraseComment & 0x04:
item += tag['pc'][0] + outStr("Data phrase 1: %s") + tag['pc'][1]
item += tag['pg'][0] + outStr("Data phrase comment: %s") + tag['pg'][1]
item += tag['p2'][0] + outStr("Data phrase 2: %s") + tag['p2'][1]
if phraseFlag1 & 0x08:
phraseCount = outInt("Data simple phraseCount: %s")
for i in range(0, phraseCount):
item += " "
item += tag['sp'][0] + outStr("Data simple phrase: %s") + tag['sp'][1]
if phraseFlag1 & 0x40:
item += tag['ps'][0] + outStr("Data phrase short form: %s") + tag['ps'][1]
# TODO: be careful in changing the rules, to have back compatibility!
if dataFlag & 0x40: # reference, related language
#0x01 synonym ?
#0x02 antonym ?
pass
if dataFlag & 0x80: # Phrase block
flags = [
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s"),
out("Data phrase block: %s")]
if flags == [0x80,0x80,0xF9,0xDF,0x9D,0x00,0x0B,0x01]:
result += "\\nphr: "
li = 1
ol = True
item += tag['b1'][0]+outStr("Data phrase 1: %s") + tag['b1'][1]
out("Data phrase block: %s")
out("Data phrase block: %s")
out("Data phrase block: %s")
out("Data phrase block: %s")
item += tag['ds'][0] + outStr("Data phrase 2: %s") + tag['ds'][1]
if flags == [0x80,0x80,0xF9,0xDF,0x9D,0x00,0x23,0x01]:
result += "\\nphr: "
li = 1
ol = True
item += tag['b1'][0]+outStr("Data phrase 1: %s") + tag['b1'][1]
out("Data phrase block: %s")
out("Data phrase block: %s")
out("Data phrase block: %s")
out("Data phrase block: %s")
out("Data phrase block: %s")
item += tag['ds'][0] + outStr("Data phrase 2: %s") + tag['ds'][1]
if ol:
result += "\\n%d. %s" % (li, item)
else:
result += item
ok = True
while pos < len(stream):
ok = (out() == 0x00) and ok
if ok:
result += '\n'
return decode_tag_postprocessing(result)
################################################################
# MAIN
################################################################
f = open(FILENAME,'rb')
# DECODE HEADER OF FILE
copyright = unpack("<64s",f.read(64))[0]
a = unpack("<16L",f.read(64))
entryCount = a[4]
indexBaseCount = a[6]
indexOffsetCount = a[7]
pos1 = a[8]
indexPos = a[9]
bodyPos = a[10]
smallIndex = (a[3] == 2052)
# DECODE INDEX STRUCTURE OF FILE
index = []
f.seek(indexPos)
bases = unpack("<%sL" % indexBaseCount, f.read(indexBaseCount * 4))
if smallIndex: # In small dictionaries every base is used 4-times
bases4 = []
for i in bases:
bases4.extend([i,i,i,i])
bases = bases4
for b in bases:
offsets = unpack("<64H", f.read(64*2))
for o in offsets:
if len(index) < indexOffsetCount:
#print "Index %s: %s + %s + %s * 4 = %s" % (len(index), bodyPos, b, o, toBin(bodyPos + b + o * 4))
index.append(bodyPos + b + o * 4)
# DECODE RECORDS
if DEBUG:
# PRINTOUT DEBUG OF FIRST <DEBUGLIMIT> WRONG RECORDS:
for i in range(1,entryCount):
if not DEBUGALL:
DEBUG = False
s = decode(getRec(i))
if DEBUGHEADER:
# print s.split('\t')[0]
print s
if DEBUGLIMIT > 0 and not s.endswith('\n'):
DEBUG = True
print "-"*80
print "%s) at address %s" % (i, toBin(index[i]))
print
s = decode(getRec(i))
print s
DEBUGLIMIT -= 1
DEBUG = True
else:
# DECODE EACH RECORD AND PRINT IT IN FORMAT FOR stardict-editor <term>\t<definition>
for i in range(1,entryCount):
s = decode(getRec(i))
if s.endswith('\n'):
print s,
else:
print s
print "!!! RECORD STRUCTURE DECODING ERROR !!!"
print "Please run this script in DEBUG mode and repair DATA BLOCK(S) section in function decode()"
print "If you succeed with whole dictionary send report (name of the dictionary and source code of script) to slovniky@googlegroups.com"
break
| gpl-3.0 |
windyuuy/opera | chromium/src/chrome/common/extensions/docs/server2/branch_utility.py | 3 | 6320 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import operator
from appengine_url_fetcher import AppEngineUrlFetcher
import url_constants
class ChannelInfo(object):
  '''Simple value object bundling a channel name with its branch and
  version numbers.
  '''
  def __init__(self, channel, branch, version):
    self.channel, self.branch, self.version = channel, branch, version
class BranchUtility(object):
  def __init__(self, fetch_url, history_url, fetcher, object_store_creator):
    # |fetcher| must expose FetchAsync(url); both fetches are started
    # eagerly here and consumed lazily via .Get() in the accessors below.
    self._fetcher = fetcher
    # BranchUtility is obviously cross-channel, so set the channel to None.
    self._branch_object_store = object_store_creator.Create(BranchUtility,
                                                            category='branch',
                                                            channel=None)
    self._version_object_store = object_store_creator.Create(BranchUtility,
                                                             category='version',
                                                             channel=None)
    self._fetch_result = self._fetcher.FetchAsync(fetch_url)
    self._history_result = self._fetcher.FetchAsync(history_url)
  @staticmethod
  def GetAllChannelNames():
    # Ordered from most to least stable; NewestChannel relies on this order.
    return ('stable', 'beta', 'dev', 'trunk')
@staticmethod
def NewestChannel(channels):
for channel in reversed(BranchUtility.GetAllChannelNames()):
if channel in channels:
return channel
  @staticmethod
  def Create(object_store_creator):
    # Convenience factory wiring in the production omahaproxy URLs and
    # the AppEngine fetcher.
    return BranchUtility(url_constants.OMAHA_PROXY_URL,
                         url_constants.OMAHA_DEV_HISTORY,
                         AppEngineUrlFetcher(),
                         object_store_creator)
@staticmethod
def SplitChannelNameFromPath(path):
'''Splits the channel name out of |path|, returning the tuple
(channel_name, real_path). If the channel cannot be determined then returns
(None, path).
'''
if '/' in path:
first, second = path.split('/', 1)
else:
first, second = (path, '')
if first in BranchUtility.GetAllChannelNames():
return (first, second)
return (None, path)
  def GetAllBranchNumbers(self):
    # Lazily yields (channel_name, branch_number) for every known channel.
    return ((channel, self.GetChannelInfo(channel).branch)
            for channel in BranchUtility.GetAllChannelNames())
  def GetAllVersionNumbers(self):
    # Lazily yields the version number of every known channel.
    return (self.GetChannelInfo(channel).version
            for channel in BranchUtility.GetAllChannelNames())
  def GetAllChannelInfo(self):
    # Lazily yields a ChannelInfo for every known channel.
    return (self.GetChannelInfo(channel)
            for channel in BranchUtility.GetAllChannelNames())
def GetChannelInfo(self, channel):
return ChannelInfo(channel,
self._ExtractFromVersionJson(channel, 'branch'),
self._ExtractFromVersionJson(channel, 'version'))
def _ExtractFromVersionJson(self, channel_name, data_type):
'''Returns the branch or version number for a channel name.
'''
if channel_name == 'trunk':
return 'trunk'
if data_type == 'branch':
object_store = self._branch_object_store
elif data_type == 'version':
object_store = self._version_object_store
data = object_store.Get(channel_name).Get()
if data is not None:
return data
try:
version_json = json.loads(self._fetch_result.Get().content)
except Exception as e:
# This can happen if omahaproxy is misbehaving, which we've seen before.
# Quick hack fix: just serve from trunk until it's fixed.
logging.error('Failed to fetch or parse branch from omahaproxy: %s! '
'Falling back to "trunk".' % e)
return 'trunk'
numbers = {}
for entry in version_json:
if entry['os'] not in ['win', 'linux', 'mac', 'cros']:
continue
for version in entry['versions']:
if version['channel'] != channel_name:
continue
if data_type == 'branch':
number = version['version'].split('.')[2]
elif data_type == 'version':
number = version['version'].split('.')[0]
if number not in numbers:
numbers[number] = 0
else:
numbers[number] += 1
sorted_numbers = sorted(numbers.iteritems(),
None,
operator.itemgetter(1),
True)
object_store.Set(channel_name, int(sorted_numbers[0][0]))
return int(sorted_numbers[0][0])
def GetBranchForVersion(self, version):
'''Returns the most recent branch for a given chrome version number using
data stored on omahaproxy (see url_constants).
'''
if version == 'trunk':
return 'trunk'
branch = self._branch_object_store.Get(version).Get()
if branch is not None:
return branch
version_json = json.loads(self._history_result.Get().content)
for entry in version_json['events']:
# Here, entry['title'] looks like: '<title> - <version>.##.<branch>.##'
version_title = entry['title'].split(' - ')[1].split('.')
if version_title[0] == str(version):
self._branch_object_store.Set(str(version), version_title[2])
return int(version_title[2])
raise ValueError('The branch for %s could not be found.' % version)
def GetChannelForVersion(self, version):
'''Returns the name of the development channel corresponding to a given
version number.
'''
for channel_info in self.GetAllChannelInfo():
if channel_info.channel == 'stable' and version <= channel_info.version:
return channel_info.channel
if version == channel_info.version:
return channel_info.channel
def GetLatestVersionNumber(self):
'''Returns the most recent version number found using data stored on
omahaproxy.
'''
latest_version = self._version_object_store.Get('latest').Get()
if latest_version is not None:
return latest_version
version_json = json.loads(self._history_result.Get().content)
latest_version = 0
for entry in version_json['events']:
version_title = entry['title'].split(' - ')[1].split('.')
version = int(version_title[0])
if version > latest_version:
latest_version = version
self._version_object_store.Set('latest', latest_version)
return latest_version
| bsd-3-clause |
uskudnik/ggrc-core | src/tests/ggrc/converters/test_audit.py | 2 | 3980 | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: silas@reciprocitylabs.com
# Maintained By: silas@reciprocitylabs.com
from datetime import date, datetime
from os.path import abspath, dirname, join
from mock import patch
from ggrc import db
from ggrc.models.all_models import Audit, Objective, Person, Program, Request
from ggrc.converters.import_helper import handle_csv_import
from ggrc.converters.common import ImportException
from ggrc.converters.requests import RequestsConverter
from tests.ggrc import TestCase
from nose.plugins.skip import SkipTest
# Directory containing the CSV fixtures used by these import tests.
THIS_ABS_PATH = abspath(dirname(__file__))
CSV_DIR = join(THIS_ABS_PATH, 'comparison_csvs/')


@SkipTest
class TestRequest(TestCase):
    """Tests CSV import of audit Requests (currently skipped wholesale)."""

    def setUp(self):
        """Builds a program/audit/request fixture and patches event logging."""
        super(TestRequest, self).setUp()
        self.patcher = patch('ggrc.converters.base.log_event')
        self.mock_log = self.patcher.start()
        self.date1 = date(2013, 9, 25)
        self.date2 = date(2013, 9, 26)
        self.date3 = date(2013, 9, 5)
        self.person3 = Person(name="Requestor Person", email="requester@example.com")
        self.objective1 = Objective(slug="OBJ-1", title="Objective 1")
        self.person1 = Person(name="Assignee Person", email="assignee@example.com")
        self.person2 = Person(name="Audit Contact Person", email="contact@example.com")
        self.prog1 = Program(slug="PROG-1", title="Program 1")
        self.audit1 = Audit(slug="AUD-1", title="Audit 1", status="Planned", program=self.prog1, contact=self.person2)
        self.request1 = Request(slug="REQ-1", title="Request 1", requestor=self.person3, assignee=self.person1, request_type=u'documentation', status='Draft', requested_on=self.date3, due_on=self.date2, audit=self.audit1)
        objs = [self.objective1, self.person1, self.person2, self.prog1, self.audit1, self.request1]
        # NOTE(review): side-effect-only comprehension; registers every fixture
        # with the session before the single commit below.
        [db.session.add(obj) for obj in objs]
        db.session.commit()
        # Re-query so the tests use the persisted rows (with ids assigned).
        self.db_program = Program.query.filter_by(slug="PROG-1").first()
        self.db_audit = Audit.query.filter_by(slug="AUD-1").first()
        # Keyword options handed to handle_csv_import in every test.
        self.options = {
            'program_id': self.db_program.id,
            'audit_id': self.db_audit.id,
            'dry_run': False,
        }

    def tearDown(self):
        self.patcher.stop()
        super(TestRequest, self).tearDown()

    def test_new_and_existing(self):
        """Importing should update the existing REQ-1 and create REQ-2."""
        csv_filename = join(CSV_DIR, "request_import.csv")
        expected_request_slugs = set(["REQ-1", "REQ-2"])
        expected_due_dates = set([self.date1])
        expected_statuses = set(["Amended Request", "Requested"])
        handle_csv_import(RequestsConverter, csv_filename, **self.options)
        actual_requests = set(self.db_program.audits[0].requests)
        actual_request_slugs = set([x.slug for x in actual_requests])
        actual_due_dates = set([x.due_on for x in actual_requests])
        actual_statuses = set([x.status for x in actual_requests])
        # Verify that first one is updated and second is added
        self.assertEqual(expected_request_slugs, actual_request_slugs)
        self.assertEqual(expected_due_dates, actual_due_dates)
        self.assertEqual(expected_statuses, actual_statuses)

    def test_no_objective(self):
        """A row without an objective should import with a mapping warning."""
        csv_filename = join(CSV_DIR, "request_import_no_objective.csv")
        expected_warning = 'An Objective will need to be mapped later'
        converter = handle_csv_import(
            RequestsConverter, csv_filename, **self.options)
        actual_warning = converter.objects[0].warnings_for('objective_id')[0]
        self.assertEqual(expected_warning, actual_warning)

    def test_bad_objective(self):
        """A row naming a nonexistent objective should produce an error."""
        csv_filename = join(CSV_DIR, "request_import_bad_objective.csv")
        # Make dry run since objective currently required at DB level
        self.options['dry_run'] = True
        expected_error = "Objective code 'OBJ-BAD' does not exist."
        converter = handle_csv_import(
            RequestsConverter, csv_filename, **self.options)
        actual_error = converter.objects[0].errors_for('objective_id')[0]
        self.assertEqual(expected_error, actual_error)
| apache-2.0 |
vdt/SimpleCV | SimpleCV/examples/util/ColorCube.py | 13 | 1901 | from SimpleCV import Image, Camera, Display, Color
import pygame as pg
import numpy as np
from pylab import *
from mpl_toolkits.mplot3d import axes3d
from matplotlib.backends.backend_agg import FigureCanvasAgg
import cv2
# Number of histogram bins per RGB channel (8**3 = 512 colour cells total).
bins = 8

# precompute: one (x, y, z, rgb) tuple per histogram cell, where rgb is the
# cell-centre colour normalised to [0, 1] for matplotlib.
idxs = []
colors = []  # NOTE(review): never populated or read; appears to be dead.
offset = bins / 2
skip = 255 / bins
for x in range(0, bins):
    for y in range(0, bins):
        for z in range(0, bins):
            b = ((x * skip) + offset) / 255.0
            g = ((y * skip) + offset) / 255.0
            r = ((z * skip) + offset) / 255.0
            idxs.append((x, y, z, (r, g, b)))

# plot points in 3D
cam = Camera()
disp = Display((800, 600))

fig = figure()
fig.set_size_inches((10, 7))
canvas = FigureCanvasAgg(fig)
azim = 0
while disp.isNotDone():
    ax = fig.gca(projection='3d')
    ax.set_xlabel('BLUE', color=(0, 0, 1))
    ax.set_ylabel('GREEN', color=(0, 1, 0))
    ax.set_zlabel('RED', color=(1, 0, 0))
    # Get the color histogram
    img = cam.getImage().scale(0.3)
    rgb = img.getNumpyCv2()
    hist = cv2.calcHist([rgb], [0, 1, 2], None, [bins, bins, bins], [0, 256, 0, 256, 0, 256])
    hist = hist / np.max(hist)
    # render everything
    # NOTE(review): side-effect-only comprehension; draws one dot per occupied
    # histogram cell, sized by the cell's relative frequency (min size 6).
    [ ax.plot([x], [y], [z], '.', markersize=max(hist[x, y, z] * 100, 6), color=color) for x, y, z, color in idxs if (hist[x][y][z] > 0) ]
    #[ ax.plot([x],[y],[z],'.',color=color) for x,y,z,color in idxs if(hist[x][y][z]>0) ]
    ax.set_xlim3d(0, bins - 1)
    ax.set_ylim3d(0, bins - 1)
    ax.set_zlim3d(0, bins - 1)
    # Slowly orbit the camera half a degree per frame.
    azim = (azim + 0.5) % 360
    ax.view_init(elev=35, azim=azim)
    ########### convert matplotlib to SimpleCV image
    canvas.draw()
    renderer = canvas.get_renderer()
    raw_data = renderer.tostring_rgb()
    size = canvas.get_width_height()
    surf = pg.image.fromstring(raw_data, size, "RGB")
    # NOTE(review): rebinding 'figure' shadows pylab.figure; harmless here
    # since figure() is only called once, before the loop.
    figure = Image(surf)
    ############ All done
    figure = figure.floodFill((0, 0), tolerance=5, color=Color.WHITE)
    result = figure.blit(img, pos=(20, 20))
    result.save(disp)
    fig.clf()
| bsd-3-clause |
lnhubbell/tweetTrack | streamScript/domain/analysis_playground.py | 1 | 1928 | """This file is intended to be used as a playground to analyze data pulled
from the database. The code below was used to analyze data from the Tweet200
database, but this is a good place for any sort of extra analysis."""
def count_unique_users(data, n=10000):
    u""" Uses blocks of tweets from single users per city.

    Takes in a raw dataset and an optional parameter to limit the feature
    set to n. Defaults to 10000. Returns a tuple containing a matrix of n
    features, a vector of labels, and a vocabulary list of the features
    examined.

    NOTE(review): despite the docstring, *n* never limits anything here —
    it is only echoed back as the third element of the return value.
    Actual behaviour: for each city key in *data*, concatenates up to 200
    consecutive tweets per user (lower-cased) into one document string,
    keeping at most 110 users per city. Each tweet is assumed to be a
    sequence where [1] is the user id and [2] is the tweet text — TODO
    confirm against the database schema.
    """
    user_matrix = []
    user_array = []
    for key, val in data.items():
        user_list = []
        count = 0
        user_count = 0
        # print key
        print len(val)
        for ind, tweet in enumerate(val):
            # print user_count
            # Cap the number of users sampled per city at 110.
            if user_count >= 110:
                # print "MORE THAN A THOUSAND!!!"
                continue
            if count == 0:
                this_user = tweet[1]
                our_string = ""
            if (tweet[1] == this_user) and (count < 200):
                our_string += tweet[2].lower()
                count += 1
            elif (tweet[1] != this_user):  # and len(our_string) >= 14000:
                # New user encountered: flush the previous user's document.
                count = 0
                user_count += 1
                print ind, tweet[1], this_user
                user_matrix.append(our_string)
                user_array.append(key)
                user_list.append(this_user)
            # elif tweet[1] != this_user:
            #     count = 0
        # print len(user_matrix)
        # print len(user_array)
        # print "----------Break---------"
        # last_user = None
        # unique_users = []
        # for user in user_list:
        #     if user != last_user:
        #         unique_users.append(user)
        #         last_user = user
        # print len(unique_users)
        # user_list = []
    return user_matrix, user_array, n
openfun/edx-platform | common/djangoapps/third_party_auth/tests/test_settings.py | 16 | 2420 | """Unit tests for settings.py."""
from third_party_auth import provider, settings
from third_party_auth.tests import testutil
import unittest
# Baseline values the fake Django settings start from; the tests assert how
# settings.apply_settings() mutates them.
_ORIGINAL_AUTHENTICATION_BACKENDS = ('first_authentication_backend',)
_ORIGINAL_INSTALLED_APPS = ('first_installed_app',)
_ORIGINAL_MIDDLEWARE_CLASSES = ('first_middleware_class',)
_ORIGINAL_TEMPLATE_CONTEXT_PROCESSORS = ('first_template_context_preprocessor',)
_SETTINGS_MAP = {
    'AUTHENTICATION_BACKENDS': _ORIGINAL_AUTHENTICATION_BACKENDS,
    'INSTALLED_APPS': _ORIGINAL_INSTALLED_APPS,
    'MIDDLEWARE_CLASSES': _ORIGINAL_MIDDLEWARE_CLASSES,
    'TEMPLATE_CONTEXT_PROCESSORS': _ORIGINAL_TEMPLATE_CONTEXT_PROCESSORS,
    'FEATURES': {},
}


class SettingsUnitTest(testutil.TestCase):
    """Unit tests for settings management code."""

    # Allow access to protected methods (or module-protected methods) under
    # test. pylint: disable-msg=protected-access
    # Suppress spurious no-member warning on fakes.
    # pylint: disable-msg=no-member

    def setUp(self):
        super(SettingsUnitTest, self).setUp()
        # Fresh fake settings per test so mutations do not leak between tests.
        self.settings = testutil.FakeDjangoSettings(_SETTINGS_MAP)

    def test_apply_settings_adds_exception_middleware(self):
        settings.apply_settings(self.settings)
        for middleware_name in settings._MIDDLEWARE_CLASSES:
            self.assertIn(middleware_name, self.settings.MIDDLEWARE_CLASSES)

    def test_apply_settings_adds_fields_stored_in_session(self):
        settings.apply_settings(self.settings)
        self.assertEqual(settings._FIELDS_STORED_IN_SESSION, self.settings.FIELDS_STORED_IN_SESSION)

    def test_apply_settings_adds_third_party_auth_to_installed_apps(self):
        settings.apply_settings(self.settings)
        self.assertIn('third_party_auth', self.settings.INSTALLED_APPS)

    @unittest.skipUnless(testutil.AUTH_FEATURE_ENABLED, 'third_party_auth not enabled')
    def test_apply_settings_enables_no_providers_by_default(self):
        # Providers are only enabled via ConfigurationModels in the database
        settings.apply_settings(self.settings)
        self.assertEqual([], provider.Registry.enabled())

    def test_apply_settings_turns_off_raising_social_exceptions(self):
        # Guard against submitting a conf change that's convenient in dev but
        # bad in prod.
        settings.apply_settings(self.settings)
        self.assertFalse(self.settings.SOCIAL_AUTH_RAISE_EXCEPTIONS)
| agpl-3.0 |
aaronzirbes/ansible | lib/ansible/errors/yaml_strings.py | 264 | 3419 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Public API: the canned error-message templates below.
__all__ = [
    'YAML_SYNTAX_ERROR',
    'YAML_POSITION_DETAILS',
    'YAML_COMMON_DICT_ERROR',
    'YAML_COMMON_UNQUOTED_VARIABLE_ERROR',
    'YAML_COMMON_UNQUOTED_COLON_ERROR',
    'YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR',
    'YAML_COMMON_UNBALANCED_QUOTES_ERROR',
]

# Generic header for any YAML parse failure.
YAML_SYNTAX_ERROR = """\
Syntax Error while loading YAML.
"""

# Formatted with (filename, line, column) of the parser's best-guess position.
YAML_POSITION_DETAILS = """\
The error appears to have been in '%s': line %s, column %s, but may
be elsewhere in the file depending on the exact syntax problem.
"""

# Hint for an unquoted '{{ ... }}' appearing right after a key's colon.
YAML_COMMON_DICT_ERROR = """\
This one looks easy to fix. YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{". Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead. The solution here is to
just quote the entire value.
For instance, if the original line was:
app_path: {{ base_path }}/foo
It should be written as:
app_path: "{{ base_path }}/foo"
"""

# Hint for a list item that begins with an unquoted template expression.
YAML_COMMON_UNQUOTED_VARIABLE_ERROR = """\
We could be wrong, but this one looks like it might be an issue with
missing quotes. Always quote template expression brackets when they
start a value. For instance:
with_items:
- {{ foo }}
Should be written as:
with_items:
- "{{ foo }}"
"""

# Hint for a value containing a second, unquoted colon.
YAML_COMMON_UNQUOTED_COLON_ERROR = """\
This one looks easy to fix. There seems to be an extra unquoted colon in the line
and this is confusing the parser. It was only expecting to find one free
colon. The solution is just add some quotes around the colon, or quote the
entire line after the first colon.
For instance, if the original line was:
copy: src=file.txt dest=/path/filename:with_colon.txt
It can be written as:
copy: src=file.txt dest='/path/filename:with_colon.txt'
Or:
copy: 'src=file.txt dest=/path/filename:with_colon.txt'
"""

# Hint for a value that opens with a quote but does not close with one.
YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR = """\
This one looks easy to fix. It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote. For instance:
when: "ok" in result.stdout
Could be written as:
when: '"ok" in result.stdout'
Or equivalently:
when: "'ok' in result.stdout"
"""

# Hint for mismatched opening/closing quotes in a value.
YAML_COMMON_UNBALANCED_QUOTES_ERROR = """\
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes. If starting a value with a quote, make sure the
line ends with the same set of quotes. For instance this arbitrary
example:
foo: "bad" "wolf"
Could be written as:
foo: '"bad" "wolf"'
"""
| gpl-3.0 |
google/adiantum | specification/python/formatperf.py | 1 | 2697 | # Copyright 2018 Google LLC
#
# Use of this source code is governed by an MIT-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/MIT.
import re
def parseperf(fn):
    """Yield (cipher, operation, cycles_per_byte) for each matching line of *fn*.

    *fn* is a pathlib.Path (or anything with ``.open()``). Lines that do not
    match the '<cipher> <op> (<impl>)  <float> cpb' format are ignored.
    """
    rexp = re.compile(r"(\S+) (encryption|decryption|hashing) \(\S+\)\s+(\d+\.\d+) cpb")
    # Use a context manager so the file handle is closed deterministically;
    # the previous version iterated fn.open() and leaked the handle.
    with fn.open() as handle:
        for l in handle:
            m = rexp.match(l)
            if m:
                yield (m.group(1), m.group(2), float(m.group(3)))
def gen_blockciphers():
    """Yield the names of the block ciphers under test."""
    yield "NOEKEON"
    yield "XTEA"
    for key_len in (128, 256):
        yield f"Speck128/{key_len}"
        yield f"AES-{key_len}"
def gen_interesting():
    """Yield every cipher construction whose results we want to report."""
    yield "NH"
    yield "Poly1305"
    for cipher in gen_blockciphers():
        yield f"{cipher}-XTS"
    for rounds in (8, 12, 20):
        yield f"ChaCha{rounds}"
        yield f"HPolyC-XChaCha{rounds}-AES"
        yield f"Adiantum-XChaCha{rounds}-AES"


# The full set of names worth keeping when reading perf logs.
interesting = set(gen_interesting())
def readperf(table, bufsize, fn):
    """Fold the best (lowest) cpb figures from one perf log into *table*.

    *table* maps cipher -> direction -> bufsize -> cycles per byte.
    """
    for cipher, direction, cpb in parseperf(fn):
        if cipher not in interesting:
            continue
        speeds = table.setdefault(cipher, {}).setdefault(direction, {})
        previous = speeds.get(bufsize)
        speeds[bufsize] = cpb if previous is None else min(previous, cpb)
def rowbounds(entry):
    """Return {bufsize: (min, max)} speed bounds across all directions."""
    bounds = {}
    for speeds in entry.values():
        for bufsize, speed in speeds.items():
            low, high = bounds.get(bufsize, (speed, speed))
            bounds[bufsize] = (min(low, speed), max(high, speed))
    return bounds
def boundstight(bounds):
    """Return True when every (min, max) pair agrees to within 2%."""
    return all(low * 1.02 >= high for low, high in bounds.values())
def readperfs(d):
    """Build the speed table from the per-buffer-size logs under directory *d*."""
    table = {}
    for bufsize in ("4096", "512"):
        readperf(table, bufsize, d / f"output{bufsize}")
    return table
def summarize(table):
    """Yield ([speeds], cipher, direction-or-None) rows for *table*.

    When encryption/decryption speeds agree within tolerance a single merged
    row (direction None, worst-case speeds) is produced; otherwise one row
    per direction.
    """
    sizes = ["4096", "512"]
    for cipher, entry in table.items():
        bounds = rowbounds(entry)
        if boundstight(bounds):
            yield [bounds[size][1] for size in sizes], cipher, None
        else:
            for direction, speeds in entry.items():
                yield [speeds[size] for size in sizes], cipher, direction
def formatperf(target, source):
    """Write LaTeX table rows for the perf logs in *source* into *target*.

    Rows are sorted by speed (fastest first); rows for the recommended
    XChaCha12-based constructions are rendered in bold.
    """
    perfs = list(summarize(readperfs(source)))
    perfs.sort()
    with target.open("w") as f:
        for speeds, cipher, dirn in perfs:
            # Merged rows (dirn None) show just the cipher name.
            if dirn is None:
                entries = [cipher]
            else:
                entries = [f"{cipher} ({dirn})"]
            entries += [f"{s:.1f}" for s in speeds]
            if "XChaCha12" in cipher:
                entries = [f"\\textbf{{{e}}}" for e in entries]
            # ' & ' separates LaTeX columns; the row ends with '\\'.
            f.write(" & ".join(entries) + " \\\\\n")
def main():
    """Regenerate work/performance.tex from the logs in performance/."""
    import pathlib
    repo_root = pathlib.Path(__file__).parent.resolve().parent
    formatperf(repo_root / "work" / "performance.tex", repo_root / "performance")
if __name__ == "__main__":
main()
| mit |
debomatic/debomatic | modules/RemovePackages.py | 1 | 2077 | # Deb-o-Matic - RemovePackages module
#
# Copyright (C) 2018-2021 Luca Falavigna
#
# Authors: Luca Falavigna <dktrkranz@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Remove packages built long ago
import os
from shutil import rmtree
from time import time
class DebomaticModule_RemovePackages:
    """Deb-o-Matic module that prunes per-suite package pools on a schedule."""

    def __init__(self):
        pass

    def periodic(self, args):
        """Remove package directories older than the configured number of days.

        Requires a [removepackages] config section with a 'days' option.
        Also removes pool/ directories that become empty, and suite
        directories left holding nothing besides 'dists' and 'logs'.
        """
        ctime = time()
        if args.opts.has_section('removepackages'):
            # Retention window in seconds.
            delta = args.opts.getint('removepackages', 'days') * 24 * 60 * 60
            if os.path.isdir(args.directory):
                for suite in os.listdir(args.directory):
                    element = os.path.join(args.directory, suite)
                    pool = os.path.join(element, 'pool')
                    if os.path.isdir(pool):
                        for package in os.listdir(pool):
                            package = os.path.join(pool, package)
                            if os.path.isdir(package):
                                ptime = os.stat(package).st_mtime
                                # Prune once the mtime falls outside the window.
                                if ptime + delta < ctime:
                                    rmtree(package)
                        if not os.listdir(pool):
                            rmtree(pool)
                    if os.path.isdir(element):
                        # Drop the whole suite tree when only 'dists'/'logs'
                        # entries remain.
                        if not [f for f in os.listdir(element)
                                if f != 'dists' and f != 'logs']:
                            rmtree(element)
| gpl-3.0 |
ARMmbed/yotta_osx_installer | workspace/lib/python2.7/site-packages/pip/operations/freeze.py | 284 | 3984 | from __future__ import absolute_import
import logging
import re
import pip
from pip.compat import stdlib_pkgs
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
logger = logging.getLogger(__name__)

# packages to exclude from freeze output
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']


def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        find_tags=False,
        default_vcs=None,
        isolated=False,
        wheel_cache=None):
    """Yield the lines of ``pip freeze``-style output.

    If *requirement* names a requirements file, its lines are echoed (with
    requirement lines replaced by the installed versions) and any installed
    packages it does not mention are appended after a marker comment;
    otherwise every installed distribution is yielded. *skip_regex* lines
    are passed through untouched.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex)

    dependency_links = []

    # Collect dependency links declared by installed distributions and by
    # any find-links URL that pins an egg.
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    # Map of project name -> FrozenRequirement for everything installed
    # (minus the excluded bootstrap packages).
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=freeze_excludes,
                                            user_only=user_only):
        req = pip.FrozenRequirement.from_dist(
            dist,
            dependency_links,
            find_tags=find_tags,
        )
        installations[req.name] = req
    if requirement:
        with open(requirement) as req_file:
            for line in req_file:
                # Blanks, comments, skip-regex matches and option lines are
                # passed through unchanged.
                if (not line.strip() or
                        line.strip().startswith('#') or
                        (skip_match and skip_match.search(line)) or
                        line.startswith((
                            '-r', '--requirement',
                            '-Z', '--always-unzip',
                            '-f', '--find-links',
                            '-i', '--index-url',
                            '--extra-index-url'))):
                    yield line.rstrip()
                    continue

                # Parse the line as an editable or plain requirement so we
                # can resolve the package name it refers to.
                if line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(
                        line,
                        default_vcs=default_vcs,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )
                else:
                    line_req = InstallRequirement.from_line(
                        line,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )

                if not line_req.name:
                    logger.info(
                        "Skipping line because it's not clear what it "
                        "would install: %s",
                        line.strip(),
                    )
                    logger.info(
                        " (add #egg=PackageName to the URL to avoid"
                        " this warning)"
                    )
                elif line_req.name not in installations:
                    logger.warning(
                        "Requirement file contains %s, but that package is"
                        " not installed",
                        line.strip(),
                    )
                else:
                    # Emit the installed version and mark it as handled.
                    yield str(installations[line_req.name]).rstrip()
                    del installations[line_req.name]

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Whatever is left was installed but not listed in the requirements file.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        yield str(installation).rstrip()
| apache-2.0 |
yocheah/DMF | idaes_dmf/base_model.py | 1 | 1598 | import os
import abc
import pwd
import sys
import logging
from metadata import Metadata
from datetime import datetime as dt
__author__ = "You-Wei Cheah"
__version__ = '0.1.0'
_log = logging.getLogger("BaseModel")


class BaseModel:
    '''Base model'''
    # NOTE(review): the abc.abstractmethod decorators below have no effect
    # because this class does not use ABCMeta as its metaclass; the methods
    # simply raise NotImplementedError when called on any subclass.

    def __init__(self, **metas):
        _log.info(self.__class__.__name__ + 'initialized')
        # Get submodule file path
        self.file_path = sys.modules[self.__module__].__file__
        # Arbitrary keyword metadata supplied by the concrete model.
        self.model_metas = metas

    def _populate_basic_meta(self, meta, info):
        # *info* is an os.stat_result; record file owner and last-modified time.
        meta.add_meta("User", pwd.getpwuid(info.st_uid).pw_name)
        meta.add_meta(
            "Last modified time", dt.fromtimestamp(info.st_mtime))

    @abc.abstractmethod
    def configure(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def save(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def display(self):
        raise NotImplementedError()

    def get_metadata(self):
        '''Return metadata about this model's source file (owner, mtime, size).

        Logs an error and returns whatever Metadata holds by default when the
        file path does not exist.
        '''
        file_meta = Metadata()
        if os.path.exists(self.file_path) and os.path.isfile(self.file_path):
            info = os.stat(self.file_path)
            self._populate_basic_meta(file_meta, info)
            file_meta.add_meta("File size", info.st_size)
        else:
            _log.error("File path does not exist: {f}".format(
                f=self.file_path))
        return file_meta.get_meta()


if __name__ == "__main__":
    # For simple testing only
    class TestModel(BaseModel):
        pass

    test_model = TestModel()
    print test_model.file_path
    print test_model.get_metadata()
    # NOTE(review): this raises NotImplementedError by design of the stub.
    test_model.save()
| mit |
lancezlin/ml_template_py | lib/python2.7/site-packages/mpl_toolkits/mplot3d/axis3d.py | 7 | 17489 | #!/usr/bin/python
# axis3d.py, original mplot3d version by John Porter
# Created: 23 Sep 2005
# Parts rewritten by Reinier Heeres <reinier@heeres.eu>
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
import math
import copy
from matplotlib import lines as mlines, axis as maxis, \
patches as mpatches
from . import art3d
from . import proj3d
import numpy as np
def get_flip_min_max(coord, index, mins, maxs):
    '''Return maxs[index] when coord sits at the minimum along *index*,
    otherwise mins[index].'''
    at_minimum = coord[index] == mins[index]
    return maxs[index] if at_minimum else mins[index]
def move_from_center(coord, centers, deltas, axmask=(True, True, True)):
    '''Return a coordinate that is moved by "deltas" away from the center.

    Axes whose axmask entry is False are left untouched.
    '''
    moved = copy.copy(coord)
    for axis in range(3):
        if not axmask[axis]:
            continue
        # Push away from the centre: down if below it, up otherwise.
        step = -deltas[axis] if moved[axis] < centers[axis] else deltas[axis]
        moved[axis] = moved[axis] + step
    return moved
def tick_update_position(tick, tickxs, tickys, labelpos):
    '''Update tick line and label position and style.'''
    # Reposition whichever of the two labels is enabled.
    label_states = ((tick.label1, tick.label1On), (tick.label2, tick.label2On))
    for label, enabled in label_states:
        if enabled:
            label.set_position(labelpos)
    # Only the first tick line is shown, drawn as a plain solid segment.
    tick.tick1On = True
    tick.tick2On = False
    tick.tick1line.set_linestyle('-')
    tick.tick1line.set_marker('')
    tick.tick1line.set_data(tickxs, tickys)
    tick.gridline.set_data(0, 0)
class Axis(maxis.XAxis):
# These points from the unit cube make up the x, y and z-planes
_PLANES = (
(0, 3, 7, 4), (1, 2, 6, 5), # yz planes
(0, 1, 5, 4), (3, 2, 6, 7), # xz planes
(0, 1, 2, 3), (4, 5, 6, 7), # xy planes
)
# Some properties for the axes
_AXINFO = {
'x': {'i': 0, 'tickdir': 1, 'juggled': (1, 0, 2),
'color': (0.95, 0.95, 0.95, 0.5)},
'y': {'i': 1, 'tickdir': 0, 'juggled': (0, 1, 2),
'color': (0.90, 0.90, 0.90, 0.5)},
'z': {'i': 2, 'tickdir': 0, 'juggled': (0, 2, 1),
'color': (0.925, 0.925, 0.925, 0.5)},
}
    def __init__(self, adir, v_intervalx, d_intervalx, axes, *args, **kwargs):
        '''Build one 3D axis.

        *adir* is 'x', 'y' or 'z'; *v_intervalx* and *d_intervalx* are the
        view and data intervals along that direction. Remaining arguments
        are forwarded to maxis.XAxis.
        '''
        # adir identifies which axes this is
        self.adir = adir
        # data and viewing intervals for this direction
        self.d_interval = d_intervalx
        self.v_interval = v_intervalx

        # This is a temporary member variable.
        # Do not depend on this existing in future releases!
        self._axinfo = self._AXINFO[adir].copy()
        # Layer shared style defaults on top of the per-axis info.
        self._axinfo.update({'label': {'va': 'center',
                                       'ha': 'center'},
                             'tick': {'inward_factor': 0.2,
                                      'outward_factor': 0.1},
                             'axisline': {'linewidth': 0.75,
                                          'color': (0, 0, 0, 1)},
                             'grid': {'color': (0.9, 0.9, 0.9, 1),
                                      'linewidth': 1.0},
                             })

        maxis.XAxis.__init__(self, axes, *args, **kwargs)
        self.set_rotate_label(kwargs.get('rotate_label', None))
    def init3d(self):
        '''Create the 2D artists (axis line, pane, gridlines, labels) that
        draw() reuses on every render.'''
        self.line = mlines.Line2D(xdata=(0, 0), ydata=(0, 0),
                                  linewidth=self._axinfo['axisline']['linewidth'],
                                  color=self._axinfo['axisline']['color'],
                                  antialiased=True,
                                  )

        # Store dummy data in Polygon object
        self.pane = mpatches.Polygon(np.array([[0, 0], [0, 1], [1, 0], [0, 0]]),
                                     closed=False,
                                     alpha=0.8,
                                     facecolor=(1, 1, 1, 0),
                                     edgecolor=(1, 1, 1, 0))
        self.set_pane_color(self._axinfo['color'])

        self.axes._set_artist_props(self.line)
        self.axes._set_artist_props(self.pane)

        self.gridlines = art3d.Line3DCollection([], )
        self.axes._set_artist_props(self.gridlines)
        self.axes._set_artist_props(self.label)
        self.axes._set_artist_props(self.offsetText)
        # Need to be able to place the label at the correct location
        self.label._transform = self.axes.transData
        self.offsetText._transform = self.axes.transData
def get_tick_positions(self):
majorLocs = self.major.locator()
self.major.formatter.set_locs(majorLocs)
majorLabels = [self.major.formatter(val, i) for i, val in enumerate(majorLocs)]
return majorLabels, majorLocs
def get_major_ticks(self, numticks=None):
ticks = maxis.XAxis.get_major_ticks(self, numticks)
for t in ticks:
t.tick1line.set_transform(self.axes.transData)
t.tick2line.set_transform(self.axes.transData)
t.gridline.set_transform(self.axes.transData)
t.label1.set_transform(self.axes.transData)
t.label2.set_transform(self.axes.transData)
return ticks
    def set_pane_pos(self, xys):
        '''Place the pane polygon at the given corner coordinates.

        Only the first two columns (x, y) are used; any z column is dropped.
        '''
        xys = np.asarray(xys)
        xys = xys[:, :2]
        self.pane.xy = xys
        self.stale = True
def set_pane_color(self, color):
'''Set pane color to a RGBA tuple'''
self._axinfo['color'] = color
self.pane.set_edgecolor(color)
self.pane.set_facecolor(color)
self.pane.set_alpha(color[-1])
self.stale = True
    def set_rotate_label(self, val):
        '''
        Whether to rotate the axis label: True, False or None.
        If set to None the label will be rotated if longer than 4 chars.
        '''
        self._rotate_label = val
        # Mark the axis stale so the new setting takes effect on next draw.
        self.stale = True
def get_rotate_label(self, text):
if self._rotate_label is not None:
return self._rotate_label
else:
return len(text) > 4
    def _get_coord_info(self, renderer):
        '''Return (mins, maxs, centers, deltas, tc, highs) for the view volume.

        mins/maxs are the padded world limits, centers their midpoints and
        deltas a per-axis padding unit; tc is the projected unit cube and
        highs marks, per axis, which of the two candidate planes has the
        larger summed projected z (i.e. is the far plane).
        '''
        minx, maxx, miny, maxy, minz, maxz = self.axes.get_w_lims()
        # Normalise inverted limits so mins <= maxs throughout.
        if minx > maxx:
            minx, maxx = maxx, minx
        if miny > maxy:
            miny, maxy = maxy, miny
        if minz > maxz:
            minz, maxz = maxz, minz
        mins = np.array((minx, miny, minz))
        maxs = np.array((maxx, maxy, maxz))
        centers = (maxs + mins) / 2.
        deltas = (maxs - mins) / 12.
        # Pad the volume by a quarter-delta on every side.
        mins = mins - deltas / 4.
        maxs = maxs + deltas / 4.

        vals = mins[0], maxs[0], mins[1], maxs[1], mins[2], maxs[2]
        tc = self.axes.tunit_cube(vals, renderer.M)
        # Summed projected z of each plane's four corners; consecutive pairs
        # in _PLANES are the two parallel planes per axis.
        avgz = [tc[p1][2] + tc[p2][2] + tc[p3][2] + tc[p4][2] for
                p1, p2, p3, p4 in self._PLANES]
        highs = np.array([avgz[2 * i] < avgz[2 * i + 1] for i in range(3)])
        return mins, maxs, centers, deltas, tc, highs
    def draw_pane(self, renderer):
        '''Draw the shaded background pane for this axis.'''
        renderer.open_group('pane3d')

        mins, maxs, centers, deltas, tc, highs = self._get_coord_info(renderer)

        info = self._axinfo
        index = info['i']
        # Pick whichever of the two parallel planes is at the back of the view.
        if not highs[index]:
            plane = self._PLANES[2 * index]
        else:
            plane = self._PLANES[2 * index + 1]
        xys = [tc[p] for p in plane]
        self.set_pane_pos(xys)
        self.pane.draw(renderer)

        renderer.close_group('pane3d')
def draw(self, renderer):
    """Draw the axis: main line, background info, tick marks, tick labels,
    grid lines, the axis label and the offset text.

    Mirrors the 2D ``XAxis.draw`` flow, but positions everything via
    3D -> 2D projection (``proj3d``) using the view cube computed by
    ``_get_coord_info``.
    """
    self.label._transform = self.axes.transData
    renderer.open_group('axis3d')

    # code from XAxis
    majorTicks = self.get_major_ticks()
    majorLocs = self.major.locator()

    info = self._axinfo
    index = info['i']

    # filter locations here so that no extra grid lines are drawn
    locmin, locmax = self.get_view_interval()
    if locmin > locmax:
        locmin, locmax = locmax, locmin

    # Rudimentary clipping
    majorLocs = [loc for loc in majorLocs if
                 locmin <= loc <= locmax]
    self.major.formatter.set_locs(majorLocs)
    majorLabels = [self.major.formatter(val, i)
                   for i, val in enumerate(majorLocs)]

    mins, maxs, centers, deltas, tc, highs = self._get_coord_info(renderer)

    # Determine grid lines: pick, per axis, the cube corner coordinate
    # (max when that face is "high", min otherwise).
    minmax = np.where(highs, maxs, mins)

    # Draw main axis line
    juggled = info['juggled']
    edgep1 = minmax.copy()
    edgep1[juggled[0]] = get_flip_min_max(edgep1, juggled[0], mins, maxs)

    edgep2 = edgep1.copy()
    edgep2[juggled[1]] = get_flip_min_max(edgep2, juggled[1], mins, maxs)
    pep = proj3d.proj_trans_points([edgep1, edgep2], renderer.M)
    centpt = proj3d.proj_transform(centers[0], centers[1], centers[2], renderer.M)
    self.line.set_data((pep[0][0], pep[0][1]), (pep[1][0], pep[1][1]))
    self.line.draw(renderer)

    # Grid points where the planes meet
    xyz0 = []
    for val in majorLocs:
        coord = minmax.copy()
        coord[index] = val
        xyz0.append(coord)

    # Draw labels
    peparray = np.asanyarray(pep)
    # The transAxes transform is used because the Text object
    # rotates the text relative to the display coordinate system.
    # Therefore, if we want the labels to remain parallel to the
    # axis regardless of the aspect ratio, we need to convert the
    # edge points of the plane to display coordinates and calculate
    # an angle from that.
    # TODO: Maybe Text objects should handle this themselves?
    dx, dy = (self.axes.transAxes.transform([peparray[0:2, 1]]) -
              self.axes.transAxes.transform([peparray[0:2, 0]]))[0]

    # Midpoint of the axis line is the label anchor.
    lxyz = 0.5 * (edgep1 + edgep2)

    # A rough estimate; points are ambiguous since 3D plots rotate
    ax_scale = self.axes.bbox.size / self.figure.bbox.size
    ax_inches = np.multiply(ax_scale, self.figure.get_size_inches())
    ax_points_estimate = sum(72. * ax_inches)
    deltas_per_point = 48. / ax_points_estimate
    default_offset = 21.
    labeldeltas = (self.labelpad + default_offset) * deltas_per_point \
        * deltas
    # Move the label away from the axis center along all axes except
    # this one (axmask[index] is False).
    axmask = [True, True, True]
    axmask[index] = False
    lxyz = move_from_center(lxyz, centers, labeldeltas, axmask)
    tlx, tly, tlz = proj3d.proj_transform(lxyz[0], lxyz[1], lxyz[2],
                                          renderer.M)
    self.label.set_position((tlx, tly))
    if self.get_rotate_label(self.label.get_text()):
        angle = art3d.norm_text_angle(math.degrees(math.atan2(dy, dx)))
        self.label.set_rotation(angle)
    self.label.set_va(info['label']['va'])
    self.label.set_ha(info['label']['ha'])
    self.label.draw(renderer)

    # Draw Offset text
    # Which of the two edge points do we want to
    # use for locating the offset text?
    if juggled[2] == 2:
        outeredgep = edgep1
        outerindex = 0
    else:
        outeredgep = edgep2
        outerindex = 1

    pos = copy.copy(outeredgep)
    pos = move_from_center(pos, centers, labeldeltas, axmask)
    olx, oly, olz = proj3d.proj_transform(pos[0], pos[1], pos[2], renderer.M)
    self.offsetText.set_text(self.major.formatter.get_offset())
    self.offsetText.set_position((olx, oly))
    angle = art3d.norm_text_angle(math.degrees(math.atan2(dy, dx)))
    self.offsetText.set_rotation(angle)
    # Must set rotation mode to "anchor" so that
    # the alignment point is used as the "fulcrum" for rotation.
    self.offsetText.set_rotation_mode('anchor')

    # ----------------------------------------------------------------------
    # Note: the following statement for determining the proper alignment of
    # the offset text. This was determined entirely by trial-and-error
    # and should not be in any way considered as "the way". There are
    # still some edge cases where alignment is not quite right, but
    # this seems to be more of a geometry issue (in other words, I
    # might be using the wrong reference points).
    #
    # (TT, FF, TF, FT) are the shorthand for the tuple of
    #   (centpt[info['tickdir']] <= peparray[info['tickdir'], outerindex],
    #    centpt[index] <= peparray[index, outerindex])
    #
    # Three-letters (e.g., TFT, FTT) are short-hand for the array
    # of bools from the variable 'highs'.
    # ----------------------------------------------------------------------
    if centpt[info['tickdir']] > peparray[info['tickdir'], outerindex]:
        # if FT and if highs has an even number of Trues
        if (centpt[index] <= peparray[index, outerindex]
                and ((len(highs.nonzero()[0]) % 2) == 0)):
            # Usually, this means align right, except for the FTT case,
            # in which offset for axis 1 and 2 are aligned left.
            if highs.tolist() == [False, True, True] and index in (1, 2):
                align = 'left'
            else:
                align = 'right'
        else:
            # The FF case
            align = 'left'
    else:
        # if TF and if highs has an even number of Trues
        if (centpt[index] > peparray[index, outerindex]
                and ((len(highs.nonzero()[0]) % 2) == 0)):
            # Usually mean align left, except if it is axis 2
            if index == 2:
                align = 'right'
            else:
                align = 'left'
        else:
            # The TT case
            align = 'right'

    self.offsetText.set_va('center')
    self.offsetText.set_ha(align)
    self.offsetText.draw(renderer)

    # Draw grid lines
    if len(xyz0) > 0:
        # Grid points at end of one plane
        xyz1 = copy.deepcopy(xyz0)
        newindex = (index + 1) % 3
        newval = get_flip_min_max(xyz1[0], newindex, mins, maxs)
        for i in range(len(majorLocs)):
            xyz1[i][newindex] = newval

        # Grid points at end of the other plane
        xyz2 = copy.deepcopy(xyz0)
        newindex = (index + 2) % 3
        newval = get_flip_min_max(xyz2[0], newindex, mins, maxs)
        for i in range(len(majorLocs)):
            xyz2[i][newindex] = newval

        # Each grid line is a 3-point polyline bending at the axis plane.
        lines = list(zip(xyz1, xyz0, xyz2))
        if self.axes._draw_grid:
            self.gridlines.set_segments(lines)
            self.gridlines.set_color([info['grid']['color']] * len(lines))
            self.gridlines.draw(renderer, project=True)

    # Draw ticks: tick marks extend along 'tickdir', outward/inward by the
    # factors configured in _axinfo, signed by which face is "high".
    tickdir = info['tickdir']
    tickdelta = deltas[tickdir]
    if highs[tickdir]:
        ticksign = 1
    else:
        ticksign = -1

    for tick, loc, label in zip(majorTicks, majorLocs, majorLabels):
        if tick is None:
            continue

        # Get tick line positions
        pos = copy.copy(edgep1)
        pos[index] = loc
        pos[tickdir] = edgep1[tickdir] + info['tick']['outward_factor'] * \
            ticksign * tickdelta
        x1, y1, z1 = proj3d.proj_transform(pos[0], pos[1], pos[2],
                                           renderer.M)
        pos[tickdir] = edgep1[tickdir] - info['tick']['inward_factor'] * \
            ticksign * tickdelta
        x2, y2, z2 = proj3d.proj_transform(pos[0], pos[1], pos[2],
                                           renderer.M)

        # Get position of label
        default_offset = 8.  # A rough estimate
        labeldeltas = (tick.get_pad() + default_offset) * deltas_per_point \
            * deltas

        axmask = [True, True, True]
        axmask[index] = False
        pos[tickdir] = edgep1[tickdir]
        pos = move_from_center(pos, centers, labeldeltas, axmask)
        lx, ly, lz = proj3d.proj_transform(pos[0], pos[1], pos[2],
                                           renderer.M)

        tick_update_position(tick, (x1, x2), (y1, y2), (lx, ly))
        tick.set_label1(label)
        tick.set_label2(label)
        tick.draw(renderer)

    renderer.close_group('axis3d')
    self.stale = False
def get_view_interval(self):
    """Return the ``(vmin, vmax)`` view-limit interval for this 3d axis.

    The interval is stored directly on the axis as ``v_interval`` and is
    updated by :meth:`set_view_interval`.
    """
    return self.v_interval
def set_view_interval(self, vmin, vmax, ignore=False):
    """Set the view limits of this 3d axis.

    With ``ignore=True`` the interval is replaced outright; otherwise the
    new limits are merged with (can only expand) the current interval.
    """
    if ignore:
        self.v_interval = vmin, vmax
        return
    old_min, old_max = self.get_view_interval()
    self.v_interval = min(vmin, old_min), max(vmax, old_max)
# TODO: Get this to work properly when mplot3d supports
# the transforms framework.
def get_tightbbox(self, renderer) :
    """Return None: tight-bbox computation is not supported for 3d axes.

    Currently returns None so that Axis.get_tightbbox
    doesn't return junk info.
    """
    return None
# Use classes to look at different data limits
class XAxis(Axis):
    """The 3d x-axis; data limits come from the shared xy data-limit box."""
    def get_data_interval(self):
        'return the Interval instance for this axis data limits'
        return self.axes.xy_dataLim.intervalx
class YAxis(Axis):
    """The 3d y-axis; data limits come from the shared xy data-limit box."""
    def get_data_interval(self):
        'return the Interval instance for this axis data limits'
        return self.axes.xy_dataLim.intervaly
class ZAxis(Axis):
    """The 3d z-axis; data limits live in the x slot of the zz data-limit box."""
    def get_data_interval(self):
        'return the Interval instance for this axis data limits'
        return self.axes.zz_dataLim.intervalx
| mit |
tango-controls/pytango | tango/attribute_proxy.py | 4 | 18343 | # ------------------------------------------------------------------------------
# This file is part of PyTango (http://pytango.rtfd.io)
#
# Copyright 2006-2012 CELLS / ALBA Synchrotron, Bellaterra, Spain
# Copyright 2013-2014 European Synchrotron Radiation Facility, Grenoble, France
#
# Distributed under the terms of the GNU Lesser General Public License,
# either version 3 of the License, or (at your option) any later version.
# See LICENSE.txt for more info.
# ------------------------------------------------------------------------------
"""
This is an internal PyTango module. It completes the binding of
:class:`tango.AttributeProxy`.
To access these members use directly :mod:`tango` module and NOT
tango.attribute_proxy.
"""
try:
import collections.abc as collections_abc # python 3.3+
except ImportError:
import collections as collections_abc
from ._tango import StdStringVector, DbData, DbDatum, DeviceProxy
from ._tango import __AttributeProxy as _AttributeProxy
from .utils import seq_2_StdStringVector, seq_2_DbData, DbData_2_dict
from .utils import is_pure_str, is_non_str_seq
from .green import green, get_green_mode
from .device_proxy import __init_device_proxy_internals as init_device_proxy
__all__ = ("AttributeProxy", "attribute_proxy_init", "get_attribute_proxy")
@green(consume_green_mode=False)
def get_attribute_proxy(*args, **kwargs):
    """
    get_attribute_proxy(full_attr_name, green_mode=None, wait=True, timeout=None) -> AttributeProxy
    get_attribute_proxy(device_proxy, attr_name, green_mode=None, wait=True, timeout=None) -> AttributeProxy

    Returns a new :class:`~tango.AttributeProxy`.

    There is no difference between using this function and the direct
    :class:`~tango.AttributeProxy` constructor if you use the default kwargs.

    The added value of this function becomes evident when you choose a green_mode
    to be *Futures* or *Gevent*. The AttributeProxy constructor internally makes some
    network calls which makes it *slow*. By using one of the *green modes* as
    green_mode you are allowing other python code to be executed in a cooperative way.

    :param full_attr_name: the full name of the attribute
    :type full_attr_name: str
    :param device_proxy: the :class:`~tango.DeviceProxy`
    :type device_proxy: DeviceProxy
    :param attr_name: attribute name for the given device proxy
    :type attr_name: str
    :param green_mode: determines the mode of execution of the device (including
                       the way it is created). Defaults to the current global
                       green_mode (check :func:`~tango.get_green_mode` and
                       :func:`~tango.set_green_mode`)
    :type green_mode: :obj:`~tango.GreenMode`
    :param wait: whether or not to wait for result.
                 Ignored when green_mode is Synchronous (always waits).
    :type wait: bool
    :param timeout: The number of seconds to wait for the result.
                    If None, then there is no limit on the wait time.
                    Ignored when green_mode is Synchronous or wait is False.
    :type timeout: float

    :returns:
        if green_mode is Synchronous or wait is True:
            :class:`~tango.AttributeProxy`
        else if green_mode is Futures:
            :class:`concurrent.futures.Future`
        else if green_mode is Gevent:
            :class:`gevent.event.AsyncResult`
    :throws:
        * a *DevFailed* if green_mode is Synchronous or wait is True
          and there is an error creating the attribute.
        * a *concurrent.futures.TimeoutError* if green_mode is Futures,
          wait is False, timeout is not None and the time to create the attribute
          has expired.
        * a *gevent.timeout.Timeout* if green_mode is Gevent, wait is False,
          timeout is not None and the time to create the attribute has expired.

    New in PyTango 8.1.0
    """
    # The @green decorator handles the wait/timeout/green_mode kwargs;
    # this body only needs to build the proxy.
    return AttributeProxy(*args, **kwargs)
def __AttributeProxy__get_property(self, propname, value=None):
    """
    get_property(self, propname, value=None) -> dict

    Get a (list) property(ies) for an attribute.

    This method accepts the following types as propname parameter:
    1. string [in] - single property data to be fetched
    2. sequence<string> [in] - several property data to be fetched
    3. tango.DbDatum [in] - single property data to be fetched
    4. tango.DbData [in,out] - several property data to be fetched.
    5. sequence<DbDatum> - several property data to be fetched

    Note: for cases 3, 4 and 5 the 'value' parameter if given, is IGNORED.

    If value is given it must be a tango.DbData that will be filled with the
    property values

    Parameters :
        - propname : (str) property(ies) name(s)
        - value : (tango.DbData) (optional, default is None meaning that the
                  method will create internally a tango.DbData and use it to
                  collect the property values)

    Return     : (dict) mapping property names to their values (as produced
                 by DbData_2_dict)

    Throws     : NonDbDevice, ConnectionFailed (with database),
                 CommunicationFailed (with database),
                 DevFailed from database device,
                 TypeError if propname is of an unsupported type
    """
    if is_pure_str(propname) or isinstance(propname, StdStringVector):
        # Single name (or C++ vector of names): fetch into value/new DbData.
        new_value = value
        if new_value is None:
            new_value = DbData()
        self._get_property(propname, new_value)
        return DbData_2_dict(new_value)
    elif isinstance(propname, DbDatum):
        # Wrap the single DbDatum in a DbData container.
        new_value = DbData()
        new_value.append(propname)
        self._get_property(new_value)
        return DbData_2_dict(new_value)
    elif isinstance(propname, collections_abc.Sequence):
        if isinstance(propname, DbData):
            # In/out container: filled in place.
            self._get_property(propname)
            return DbData_2_dict(propname)
        if is_pure_str(propname[0]):
            # Sequence of names: convert to the C++ string vector type.
            new_propname = StdStringVector()
            for i in propname:
                new_propname.append(i)
            new_value = value
            if new_value is None:
                new_value = DbData()
            self._get_property(new_propname, new_value)
            return DbData_2_dict(new_value)
        elif isinstance(propname[0], DbDatum):
            new_value = DbData()
            for i in propname:
                new_value.append(i)
            self._get_property(new_value)
            return DbData_2_dict(new_value)
    # Previously unsupported inputs silently returned None; fail loudly
    # instead so callers notice the mistake.
    raise TypeError(
        'propname must be a string, tango.DbDatum, tango.DbData, '
        'a sequence<string> or a sequence<DbDatum>')
def __AttributeProxy__put_property(self, value):
    """
    put_property(self, value) -> None

    Insert or update a list of properties for this attribute.

    This method accepts the following types as value parameter:
    1. tango.DbDatum - single property data to be inserted
    2. tango.DbData - several property data to be inserted
    3. sequence<DbDatum> - several property data to be inserted
    4. dict<str, DbDatum> - keys are property names and value has data to be inserted
    5. dict<str, seq<str>> - keys are property names and value has data to be inserted
    6. dict<str, obj> - keys are property names and str(obj) is property value

    Parameters :
        - value : can be one of the following:
            1. tango.DbDatum - single property data to be inserted
            2. tango.DbData - several property data to be inserted
            3. sequence<DbDatum> - several property data to be inserted
            4. dict<str, DbDatum> - keys are property names and value has data to be inserted
            5. dict<str, seq<str>> - keys are property names and value has data to be inserted
            6. dict<str, obj> - keys are property names and str(obj) is property value

    Return     : None

    Throws     : ConnectionFailed, CommunicationFailed
                 DevFailed from device (DB_SQLError),
                 TypeError
    """
    if isinstance(value, DbData):
        # Already in the container the C++ layer expects.
        pass
    elif isinstance(value, DbDatum):
        new_value = DbData()
        new_value.append(value)
        value = new_value
    elif is_non_str_seq(value):
        # BUG FIX: the converted DbData used to be assigned to a local
        # 'new_value' that was never used, so the raw python sequence was
        # passed on to _put_property. Rebind 'value' to the conversion.
        value = seq_2_DbData(value)
    elif isinstance(value, collections_abc.Mapping):
        new_value = DbData()
        for k, v in value.items():
            if isinstance(v, DbDatum):
                new_value.append(v)
                continue
            db_datum = DbDatum(k)
            if is_non_str_seq(v):
                seq_2_StdStringVector(v, db_datum.value_string)
            else:
                # Fallback: store str(obj) as the single property value.
                db_datum.value_string.append(str(v))
            new_value.append(db_datum)
        value = new_value
    else:
        raise TypeError(
            'Value must be a tango.DbDatum, tango.DbData, '
            'a sequence<DbDatum> or a dictionary')
    return self._put_property(value)
def __AttributeProxy__delete_property(self, value):
    """
    delete_property(self, value) -> None

    Delete the given property(ies) for this attribute.

    Accepted *value* types:
    1. string - single property name to be deleted
    2. tango.DbDatum - single property data to be deleted
    3. tango.DbData - several property data to be deleted
    4. sequence<string> - several property names to be deleted
    5. sequence<DbDatum> - several property data to be deleted
    6. dict<str, obj> - keys are property names to be deleted
       (values are ignored)
    7. dict<str, DbDatum> - the DbDatum values are the properties
       to be deleted (keys are ignored)

    Return     : None

    Throws     : ConnectionFailed, CommunicationFailed
                 DevFailed from device (DB_SQLError),
                 TypeError
    """
    if isinstance(value, (DbData, StdStringVector)) or is_pure_str(value):
        # Directly consumable by the C++ layer.
        new_value = value
    elif isinstance(value, DbDatum):
        new_value = DbData()
        new_value.append(value)
    elif isinstance(value, collections_abc.Sequence):
        new_value = DbData()
        for item in value:
            new_value.append(item if isinstance(item, DbDatum)
                             else DbDatum(str(item)))
    elif isinstance(value, collections_abc.Mapping):
        new_value = DbData()
        for key, item in value.items():
            # DbDatum values carry their own name; otherwise the key is
            # the property name to delete.
            new_value.append(item if isinstance(item, DbDatum)
                             else DbDatum(key))
    else:
        raise TypeError(
            'Value must be a string, tango.DbDatum, '
            'tango.DbData, a sequence or a dictionary')
    return self._delete_property(new_value)
# It is easier to reimplement AttributeProxy in python using DeviceProxy than
# wrapping C++ AttributeProxy. However I still rely on the original
# AttributeProxy for the constructor (parsing strings if necessary) and some
# other things. With the _method_* functions defined later it is really easy.
# One reason to do it this way: get_device_proxy() will always return the
# same tango.DeviceProxy with this implementation. And then we can trust
# its automatic event unsubscription to handle events.
class AttributeProxy(object):
    """
    AttributeProxy is the high level Tango object which provides the
    client with an easy-to-use interface to TANGO attributes.

    It is built from the complete attribute name, e.g.::

        att = AttributeProxy("tango/tangotest/1/long_scalar")

    Note: the PyTango AttributeProxy is partly a python reimplementation
    (on top of DeviceProxy) of the AttributeProxy found in the C++ API.
    """

    def __init__(self, *args, **kwds):
        green_mode = kwds.pop('green_mode', get_green_mode())
        self.__attr_proxy = _AttributeProxy(*args, **kwds)
        # get_device_proxy() hands back a different python object on each
        # call; keep a single instance so its automatic event
        # unsubscription can be relied upon.
        self.__dev_proxy = device = self.__attr_proxy.get_device_proxy()
        init_device_proxy(device)
        device.set_green_mode(green_mode)

    def get_device_proxy(self):
        """
        get_device_proxy(self) -> DeviceProxy

        Return the (cached) device associated with this attribute.

        Parameters : None
        Return     : (DeviceProxy)
        """
        return self.__dev_proxy

    def name(self):
        """
        name(self) -> str

        Return the attribute name.

        Parameters : None
        Return     : (str) the attribute name
        """
        return self.__attr_proxy.name()

    def __str__(self):
        return "AttributeProxy(%s)" % self.name()

    # repr and str render identically for this proxy.
    __repr__ = __str__
def _method_dev_and_name(dp_fn_name, doc=True):
def __new_fn(self, *args, **kwds):
return getattr(self._AttributeProxy__dev_proxy, dp_fn_name)(self.name(), *args, **kwds)
if doc:
__new_fn.__doc__ = "This method is a simple way to do:\n" + \
"\tself.get_device_proxy()." + dp_fn_name + \
"(self.name(), ...)\n\n" + \
"For convenience, here is the documentation of DeviceProxy." + \
dp_fn_name + "(...):\n" + \
str(getattr(DeviceProxy, dp_fn_name).__doc__)
__new_fn.__name__ = dp_fn_name
return __new_fn
def _method_device(dp_fn_name, doc=True):
def __new_fn(self, *args, **kwds):
return getattr(self._AttributeProxy__dev_proxy, dp_fn_name)(*args, **kwds)
if doc:
__new_fn.__doc__ = "This method is a simple way to do:\n" + \
"\tself.get_device_proxy()." + dp_fn_name + \
"(...)\n\n" + \
"For convenience, here is the documentation of DeviceProxy." + \
dp_fn_name + "(...):\n" + \
str(getattr(DeviceProxy, dp_fn_name).__doc__)
__new_fn.__name__ = dp_fn_name
return __new_fn
def _method_attribute(dp_fn_name, doc=True):
def __new_fn(self, *args, **kwds):
return getattr(self._AttributeProxy__attr_proxy, dp_fn_name)(*args, **kwds)
if doc:
__new_fn.__doc__ = getattr(_AttributeProxy, dp_fn_name).__doc__
__new_fn.__name__ = dp_fn_name
return __new_fn
def __init_AttributeProxy(doc=True):
    """Attach the python helpers to _AttributeProxy and populate the
    high-level AttributeProxy class with forwarding methods."""
    # Property helpers implemented in this module.
    _AttributeProxy.get_property = __AttributeProxy__get_property
    _AttributeProxy.put_property = __AttributeProxy__put_property
    _AttributeProxy.delete_property = __AttributeProxy__delete_property

    # Methods forwarded as DeviceProxy.<target>(...).
    _device_methods = (
        # General methods (AttributeProxy.name is defined manually)
        ('status', 'status'),
        ('state', 'state'),
        ('ping', 'ping'),
        ('get_transparency_reconnection', 'get_transparency_reconnection'),
        ('set_transparency_reconnection', 'set_transparency_reconnection'),
        # Attribute methods
        ('set_config', 'set_attribute_config'),
        # Asynchronous methods
        ('read_reply', 'read_attribute_reply'),
        ('write_asynch', 'write_attribute_asynch'),
        ('write_reply', 'write_attribute_reply'),
        # Event methods
        ('unsubscribe_event', 'unsubscribe_event'),
        ('get_events', 'get_events'),
        ('event_queue_size', 'event_queue_size'),
        ('get_last_event_date', 'get_last_event_date'),
        ('is_event_queue_empty', 'is_event_queue_empty'),
    )
    # Methods forwarded as DeviceProxy.<target>(self.name(), ...).
    _dev_and_name_methods = (
        # Attribute methods
        ('get_config', 'get_attribute_config'),
        ('write', 'write_attribute'),
        ('read', 'read_attribute'),
        ('write_read', 'write_read_attribute'),
        # History methods
        ('history', 'attribute_history'),
        # Polling administration methods
        ('poll', 'poll_attribute'),
        ('get_poll_period', 'get_attribute_poll_period'),
        ('is_polled', 'is_attribute_polled'),
        ('stop_poll', 'stop_poll_attribute'),
        # Asynchronous methods
        ('read_asynch', 'read_attribute_asynch'),
        # Event methods
        ('subscribe_event', 'subscribe_event'),
    )
    # Property methods forwarded to the underlying _AttributeProxy.
    _attribute_methods = ('get_property', 'put_property', 'delete_property')

    for name, target in _device_methods:
        setattr(AttributeProxy, name, _method_device(target, doc=doc))
    for name, target in _dev_and_name_methods:
        setattr(AttributeProxy, name, _method_dev_and_name(target, doc=doc))
    for name in _attribute_methods:
        setattr(AttributeProxy, name, _method_attribute(name, doc=doc))
def attribute_proxy_init(doc=True):
    """Initialize the AttributeProxy binding (internal PyTango use).

    :param doc: whether to attach DeviceProxy-derived docstrings to the
                generated forwarding methods.
    """
    __init_AttributeProxy(doc=doc)
| lgpl-3.0 |
sachinpro/sachinpro.github.io | tensorflow/python/ops/batch_norm_benchmark.py | 9 | 9579 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""End-to-end benchmark for batch normalization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import tensorflow as tf
from tensorflow.python.ops import gen_nn_ops
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_boolean("use_gpu", True, """Run GPU benchmarks.""")
def batch_norm_op(tensor, mean, variance, beta, gamma, scale):
  """Fused kernel for batch normalization.

  Args:
    tensor: input tensor to normalize.
    mean, variance: per-feature moments used for normalization.
    beta: shift parameter.
    gamma: scale parameter (only applied when `scale` is true).
    scale: bool, whether to multiply by gamma after normalization.

  Returns:
    The batch-normalized tensor produced by the fused kernel.
  """
  # _batch_norm_with_global_normalization is deprecated in v9
  # so pin the graph to producer version 8 to keep the op usable.
  tf.get_default_graph().graph_def_versions.producer = 8
  # pylint: disable=protected-access
  return gen_nn_ops._batch_norm_with_global_normalization(
      tensor, mean, variance, beta, gamma, 0.001, scale)
  # pylint: enable=protected-access
# Note that the naive implementation is much slower:
# batch_norm = (tensor - mean) * tf.rsqrt(variance + 0.001)
# if scale:
# batch_norm *= gamma
# return batch_norm + beta
def batch_norm_py(tensor, mean, variance, beta, gamma, scale):
  """Python implementation of batch normalization (composable tf.nn op)."""
  # tf.nn.batch_normalization skips the multiply when the scale arg is None.
  scale_arg = gamma if scale else None
  return tf.nn.batch_normalization(
      tensor, mean, variance, beta, scale_arg, 0.001)
def batch_norm_slow(tensor, mean, variance, beta, gamma, scale):
  """Naive batch normalization built from elementwise ops (slow baseline)."""
  normed = (tensor - mean) * tf.rsqrt(variance + 0.001)
  if scale:
    normed = normed * gamma
  return normed + beta
def build_graph(device, input_shape, axes, num_layers, mode, scale, train):
  """Build a graph containing a chain of batch normalizations.

  Args:
    device: string, the device to run on.
    input_shape: shape of the input tensor.
    axes: axes that are to be normalized across.
    num_layers: number of batch normalization layers in the graph.
    mode: "op", "py" or "slow" depending on the implementation.
    scale: scale after normalization.
    train: if true, also run backprop.

  Returns:
    An array of tensors to run()
  """
  # "py"/"slow" implementations broadcast against moments that keep the
  # reduced dimensions; the fused "op" kernel wants them squeezed out.
  keep_dims = mode in ("py", "slow")
  if keep_dims:
    moment_shape = [1 if axis in axes else input_shape[axis]
                    for axis in range(len(input_shape))]
  else:
    moment_shape = [input_shape[axis]
                    for axis in range(len(input_shape))
                    if axis not in axes]

  # Unknown modes leave the tensor untouched, matching the original
  # if/elif chain that had no else branch.
  impl = {"py": batch_norm_py, "op": batch_norm_op,
          "slow": batch_norm_slow}.get(mode)

  with tf.device("/%s:0" % device):
    tensor = tf.Variable(tf.truncated_normal(input_shape))
    for _ in range(num_layers):
      mean, variance = tf.nn.moments(tensor, axes, keep_dims=keep_dims)
      beta = tf.Variable(tf.zeros(moment_shape))
      gamma = tf.Variable(tf.constant(1.0, shape=moment_shape))
      if impl is not None:
        tensor = impl(tensor, mean, variance, beta, gamma, scale)
  if train:
    return tf.gradients([tensor], tf.trainable_variables())
  return [tensor]
def print_difference(mode, t1, t2):
  """Print the relative timing difference between two runs, in percent."""
  pct_change = (t2 - t1) / t1 * 100.0
  print("=== %s: %.1f%% ===" % (mode, pct_change))
class BatchNormBenchmark(tf.test.Benchmark):
  """Benchmark batch normalization implementations ("op", "py", "slow")."""

  def _run_graph(
      self, device, input_shape, axes, num_layers, mode, scale, train,
      num_iters):
    """Run the graph and print its execution time.

    Args:
      device: string, the device to run on.
      input_shape: shape of the input tensor.
      axes: axes that are to be normalized across.
      num_layers: number of batch normalization layers in the graph.
      mode: "op", "py" or "slow" depending on the implementation.
      scale: scale after normalization.
      train: if true, also run backprop.
      num_iters: number of steps to run.

    Returns:
      The duration of the run in seconds.
    """
    graph = tf.Graph()
    with graph.as_default():
      outputs = build_graph(device, input_shape, axes, num_layers, mode, scale,
                            train)
    with tf.Session(graph=graph) as session:
      tf.initialize_all_variables().run()
      _ = session.run([out.op for out in outputs])  # warm up.
      start_time = time.time()
      for _ in range(num_iters):
        _ = session.run([out.op for out in outputs])
      duration = time.time() - start_time
    print("%s shape:%d/%d #layers:%d mode:%s scale:%r train:%r - %f secs" %
          (device, len(input_shape), len(axes), num_layers, mode, scale, train,
           duration / num_iters))
    name_template = (
        "batch_norm_{device}_input_shape_{shape}_axes_{axes}_mode_{mode}_"
        "layers_{num_layers}_scale_{scale}_"
        "train_{train}")
    self.report_benchmark(
        name=name_template.format(
            device=device, mode=mode, num_layers=num_layers, scale=scale,
            train=train,
            shape=str(input_shape).replace(" ", ""),
            axes=str(axes)).replace(" ", ""),
        iters=num_iters, wall_time=duration / num_iters)
    return duration

  def benchmark_batch_norm(self):
    """Compare op/py/slow implementations over several shapes and modes."""
    print("Forward convolution (lower layers).")
    shape = [8, 128, 128, 32]
    axes = [0, 1, 2]
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward/backward convolution (lower layers).")
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, True, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, True, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 50)
      # BUG FIX: this result was previously assigned to t2, so the
      # "py vs slow" comparison below compared the GPU "py" timing
      # against a stale CPU t3 instead of the GPU "slow" timing.
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward convolution (higher layers).")
    shape = [256, 17, 17, 32]
    axes = [0, 1, 2]
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward/backward convolution (higher layers).")
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, True, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, True, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward fully-connected.")
    shape = [1024, 32]
    axes = [0]
    # The fused "op" kernel is convolution-specific, so only py/slow here.
    t1 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("py vs slow", t1, t2)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("py vs slow", t1, t2)
    print("Forward/backward fully-connected.")
    t1 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 50)
    t2 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 50)
    print_difference("py vs slow", t1, t2)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 5)
      t2 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 5)
      print_difference("py vs slow", t1, t2)
if __name__ == "__main__":
  # Run the benchmarks through the TensorFlow test/benchmark runner.
  tf.test.main()
| apache-2.0 |
nicolashainaux/mathmakerlib | mathmakerlib/geometry/bipoint.py | 1 | 8954 | # -*- coding: utf-8 -*-
# Mathmaker Lib offers lualatex-printable mathematical objects.
# Copyright 2006-2017 Nicolas Hainaux <nh.techn@gmail.com>
# This file is part of Mathmaker Lib.
# Mathmaker Lib is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
# Mathmaker Lib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Mathmaker Lib; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from mathmakerlib.exceptions import ZERO_OBJECTS_ERRORS, ZeroVector
from mathmakerlib.geometry.point import Point
from mathmakerlib.geometry.vector import Vector
from mathmakerlib.calculus.number import Number
from mathmakerlib.core.dimensional import Dimensional
from mathmakerlib.calculus.tools import is_number, is_integer
class Bipoint(Dimensional):
    """
    A pair of Points. Gather methods common to LineSegment, Line, Ray.
    Bipoints are quite close to, but not completely the same as, bound vectors.
    For free vectors, see Vector.
    Notice that if:
    A = Point(0, 0); B = Point(1, 0); C = Point(0, 1) and D = Point(1, 1),
    then: Bipoint(A, B) != Bipoint(C, D)
    but: Vector(A, B) == Vector(C, D)
    Also, note that contrary to LineSegments, Bipoint(A, B) != Bipoint(B, A).
    This class won't ever need to get Drawable, but can be instanciated.
    """
    def __init__(self, tail, head, allow_zero_length=True):
        """
        A Bipoint can be created from a pair of Points or a Point + a Vector.
        :param tail: the first Point of the Bipoint
        :type tail: Point
        :param head: the second Point of the Bipoint. If a Vector is provided,
        the second Point will be calculated using the first Point and this
        Vector.
        :type head: Point or Vector
        :param allow_zero_length: whether zero length Bipoints are allowed or
        not (default True).
        :type allow_zero_length: bool
        """
        if not isinstance(tail, Point):
            raise TypeError('First argument must be a Point, found {} '
                            'instead.'.format(repr(tail)))
        if not isinstance(head, (Point, Vector)):
            raise TypeError('Second argument must be a Point or a Vector, '
                            'found {} instead.'.format(repr(head)))
        # The Bipoint is considered three-dimensional as soon as either
        # endpoint is three-dimensional.
        self._three_dimensional = tail.three_dimensional \
            or head.three_dimensional
        if isinstance(head, Vector):
            # Translate tail by the Vector to build the actual head Point.
            # 'undefined' is the sentinel z value used for 2D objects
            # (see the same convention in midpoint() and point_at()).
            if self._three_dimensional:
                zval = tail.z + head.z
            else:
                zval = 'undefined'
            head = Point(tail.x + head.x, tail.y + head.y, zval)
        if (not allow_zero_length
            and tail.coordinates == head.coordinates):
            # Look up the concrete zero-length error for the actual subclass
            # (e.g. LineSegment, Line, Ray) in the project-wide registry.
            msg = 'Explicitly disallowed creation of a zero-length {}.'\
                .format(type(self).__name__)
            raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
        self._points = [tail, head]
        # Cache the coordinate differences head - tail once, so the Δx/Δy/Δz
        # properties are cheap reads.
        self._Δx = self.points[1].x - self.points[0].x
        self._Δy = self.points[1].y - self.points[0].y
        self._Δz = self.points[1].z - self.points[0].z
    def __repr__(self):
        """Unambiguous representation built from both endpoints."""
        return 'Bipoint({}, {})'.format(repr(self.tail), repr(self.head))
    def __eq__(self, other):
        # Order matters: Bipoint(A, B) != Bipoint(B, A) (see class docstring).
        # Any non-Bipoint compares unequal rather than raising.
        if isinstance(other, Bipoint):
            return self.head == other.head and self.tail == other.tail
        else:
            return False
    def __add__(self, other):
        """Operator form of add(); the new endpoint gets an automatic name."""
        return self.add(other)
    def add(self, other, new_endpoint_name='automatic'):
        """
        Return a new Bipoint: same tail, head translated by other's Δ values.

        :param other: the Bipoint whose coordinate differences are added
        :type other: Bipoint
        :param new_endpoint_name: name given to the newly created head Point
        :type new_endpoint_name: str
        """
        if not isinstance(other, Bipoint):
            raise TypeError('Can only add a Bipoint to another Bipoint. '
                            'Found {} instead.'.format(repr(other)))
        if self.three_dimensional:
            zval = self.points[1].z + other.Δz
        else:
            zval = 'undefined'
        return Bipoint(self.points[0],
                       Point(self.points[1].x + other.Δx,
                             self.points[1].y + other.Δy,
                             z=zval,
                             name=new_endpoint_name))
    @property
    def points(self):
        """The [tail, head] Points list."""
        return self._points
    @property
    def tail(self):
        """First Point (origin) of the Bipoint."""
        return self.points[0]
    @property
    def head(self):
        """Second Point (end) of the Bipoint."""
        return self.points[1]
    @property
    def Δx(self):
        """x-coordinate difference: head.x - tail.x."""
        return self._Δx
    @property
    def Δy(self):
        """y-coordinate difference: head.y - tail.y."""
        return self._Δy
    @property
    def Δz(self):
        """z-coordinate difference: head.z - tail.z."""
        return self._Δz
    @property
    def coordinates(self):
        """The (Δx, Δy, Δz) triple, i.e. the Bipoint seen as a vector."""
        return (self._Δx, self._Δy, self._Δz)
    @property
    def length(self):
        """Length between the two Points."""
        return Number(self.Δx ** 2 + self.Δy ** 2 + self.Δz ** 2)\
            .sqrt()
    @property
    def slope(self):
        """Slope of the pair of Points, from -180° to 180°."""
        # Delegate the angle computation to Vector; translate its ZeroVector
        # error into the zero-length error matching the concrete subclass.
        try:
            return Vector(self).slope
        except ZeroVector:
            msg = 'Cannot calculate the slope of a zero-length {}.'\
                .format(type(self).__name__)
            raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
    @property
    def slope360(self):
        """Slope of the pair of Points, from 0° to 360°."""
        try:
            return Vector(self).slope360
        except ZeroVector:
            msg = 'Cannot calculate the slope of a zero-length {}.'\
                .format(type(self).__name__)
            raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
    def midpoint(self, name='automatic'):
        """Bipoint's midpoint."""
        if self.three_dimensional:
            zval = (self.points[0].z + self.points[1].z) / 2
        else:
            zval = 'undefined'
        return Point((self.points[0].x + self.points[1].x) / 2,
                     (self.points[0].y + self.points[1].y) / 2,
                     z=zval,
                     name=name)
    def point_at(self, position, name='automatic'):
        """
        A Point aligned with the Bipoint, at provided position.
        The Bipoint's length is the length unit of position.
        Hence, position 0 matches points[0], position 1 matches points[1],
        position 0.5 matches the midpoint, position 0.75 is three quarters
        on the way from points[0] to points[1], position 2 is a Point that
        makes points[1] the middle between it and points[0], position -1 makes
        points[0] the middle between it and points[1].
        :param position: a number
        :type position: number
        :param name: the name to give to the Point
        :type name: str
        """
        if not is_number(position):
            raise TypeError('position must be a number, found {} instead.'
                            .format(type(position)))
        k = Number(position)
        # Positions 0 and 1 return the existing endpoint objects themselves
        # (not copies), so the requested name is ignored in those two cases.
        if k == 0:
            return self.points[0]
        elif k == 1:
            return self.points[1]
        else:
            if self.three_dimensional:
                zval = (self.points[0].z
                        + (self.points[1].z - self.points[0].z) * k)
            else:
                zval = 'undefined'
            return Point((self.points[0].x
                          + (self.points[1].x - self.points[0].x) * k),
                         (self.points[0].y
                          + (self.points[1].y - self.points[0].y) * k),
                         z=zval,
                         name=name)
    def dividing_points(self, n=None, prefix='a'):
        """
        Create the list of Points that divide the Bipoint in n parts.
        :param n: the number of parts (so it will create n - 1 points)
        n must be greater or equal to 1
        :type n: int
        """
        if not (is_number(n) and is_integer(n)):
            raise TypeError('n must be an integer')
        if not n >= 1:
            raise ValueError('n must be greater or equal to 1')
        # n == 1 yields an empty list: one part needs no dividing point.
        x0 = self.points[0].x
        x1 = self.points[1].x
        xstep = (x1 - x0) / n
        x_list = [x0 + (i + 1) * xstep for i in range(int(n - 1))]
        y0 = self.points[0].y
        y1 = self.points[1].y
        ystep = (y1 - y0) / n
        y_list = [y0 + (i + 1) * ystep for i in range(int(n - 1))]
        if self.three_dimensional:
            z0 = self.points[0].z
            z1 = self.points[1].z
            zstep = (z1 - z0) / n
            z_list = [z0 + (i + 1) * zstep for i in range(int(n - 1))]
        else:
            # Keep z 'undefined' for 2D Bipoints, one entry per created Point.
            z_list = ['undefined' for i in range(int(n - 1))]
        # Points are named prefix1, prefix2, ... in order from tail to head.
        return [Point(x, y, z, prefix + str(i + 1))
                for i, (x, y, z) in enumerate(zip(x_list, y_list, z_list))]
| gpl-3.0 |
Cryptcollector/ARG2.0 | qa/rpc-tests/listtransactions.py | 145 | 6081 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
def check_array_result(object_array, to_match, expected):
    """
    Search object_array (a list of JSON-style dicts) for every entry whose
    key/value pairs all satisfy to_match, and check that each such entry
    also carries every key/value pair in expected.

    Raises AssertionError if a matching entry disagrees with expected, or
    if no entry satisfies to_match at all.
    """
    matches = 0
    for candidate in object_array:
        # Evaluate every to_match constraint (eagerly, so a missing key in
        # the candidate raises KeyError just like a direct lookup would).
        checks = [candidate[key] == value for key, value in to_match.items()]
        if not all(checks):
            continue
        # The candidate matched: it must also agree with every expected pair.
        for key, value in expected.items():
            if candidate[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(candidate), str(key), str(value)))
        matches += 1
    if matches == 0:
        raise AssertionError("No objects matched %s"%(str(to_match)))
def run_test(nodes):
    """
    Exercise the listtransactions RPC against two connected nodes.

    Checks the reported category, account, amount and confirmation count
    for: a simple send, the same send after one confirmation, a
    send-to-self, and a sendmany fanning out to both nodes.
    """
    # Simple send, 0 to 1:
    txid = nodes[0].sendtoaddress(nodes[1].getnewaddress(), 0.1)
    sync_mempools(nodes)
    # Unconfirmed: sender sees a negative "send", receiver a positive
    # "receive", both with 0 confirmations.
    check_array_result(nodes[0].listtransactions(),
                       {"txid":txid},
                       {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
    check_array_result(nodes[1].listtransactions(),
                       {"txid":txid},
                       {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
    # mine a block, confirmations should change:
    nodes[0].setgenerate(True, 1)
    sync_blocks(nodes)
    check_array_result(nodes[0].listtransactions(),
                       {"txid":txid},
                       {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
    check_array_result(nodes[1].listtransactions(),
                       {"txid":txid},
                       {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
    # send-to-self:
    # A self-payment shows up twice on the same node: once as "send" and
    # once as "receive", for the same txid.
    txid = nodes[0].sendtoaddress(nodes[0].getnewaddress(), 0.2)
    check_array_result(nodes[0].listtransactions(),
                       {"txid":txid, "category":"send"},
                       {"amount":Decimal("-0.2")})
    check_array_result(nodes[0].listtransactions(),
                       {"txid":txid, "category":"receive"},
                       {"amount":Decimal("0.2")})
    # sendmany from node1: twice to self, twice to node2:
    # Distinct amounts (0.11/0.22/0.33/0.44) let each output be identified
    # unambiguously in the listtransactions results below.
    send_to = { nodes[0].getnewaddress() : 0.11, nodes[1].getnewaddress() : 0.22,
                nodes[0].getaccountaddress("from1") : 0.33, nodes[1].getaccountaddress("toself") : 0.44 }
    txid = nodes[1].sendmany("", send_to)
    sync_mempools(nodes)
    check_array_result(nodes[1].listtransactions(),
                       {"category":"send","amount":Decimal("-0.11")},
                       {"txid":txid} )
    check_array_result(nodes[0].listtransactions(),
                       {"category":"receive","amount":Decimal("0.11")},
                       {"txid":txid} )
    check_array_result(nodes[1].listtransactions(),
                       {"category":"send","amount":Decimal("-0.22")},
                       {"txid":txid} )
    check_array_result(nodes[1].listtransactions(),
                       {"category":"receive","amount":Decimal("0.22")},
                       {"txid":txid} )
    check_array_result(nodes[1].listtransactions(),
                       {"category":"send","amount":Decimal("-0.33")},
                       {"txid":txid} )
    # Payments to named accounts must report the receiving account name.
    check_array_result(nodes[0].listtransactions(),
                       {"category":"receive","amount":Decimal("0.33")},
                       {"txid":txid, "account" : "from1"} )
    check_array_result(nodes[1].listtransactions(),
                       {"category":"send","amount":Decimal("-0.44")},
                       {"txid":txid, "account" : ""} )
    check_array_result(nodes[1].listtransactions(),
                       {"category":"receive","amount":Decimal("0.44")},
                       {"txid":txid, "account" : "toself"} )
def main():
    """
    Drive the test: parse options, spin up two bitcoind nodes on a fresh
    (or cached) test chain, run run_test(), then tear everything down.

    Exits with status 0 on success, 1 on any assertion or unexpected error.
    """
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                      help="Leave bitcoinds and test.* datadir on exit or error")
    parser.add_option("--srcdir", dest="srcdir", default="../../src",
                      help="Source directory containing bitcoind/bitcoin-cli (default: %default%)")
    parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                      help="Root directory for datadirs")
    (options, args) = parser.parse_args()
    # Make the locally-built binaries win over any system-installed ones.
    os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
    check_json_precision()
    success = False
    nodes = []
    try:
        print("Initializing test directory "+options.tmpdir)
        if not os.path.isdir(options.tmpdir):
            os.makedirs(options.tmpdir)
        initialize_chain(options.tmpdir)
        nodes = start_nodes(2, options.tmpdir)
        connect_nodes(nodes[1], 0)
        sync_blocks(nodes)
        run_test(nodes)
        success = True
    except AssertionError as e:
        # str(e) works on both Python 2 and 3; the former e.message
        # attribute was removed in Python 3.
        print("Assertion failed: "+str(e))
    except Exception as e:
        print("Unexpected exception caught during testing: "+str(e))
        traceback.print_tb(sys.exc_info()[2])
    # Cleanup runs on failure too, unless --nocleanup asked to keep the
    # datadirs around for post-mortem inspection.
    if not options.nocleanup:
        print("Cleaning up")
        stop_nodes(nodes)
        wait_bitcoinds()
        shutil.rmtree(options.tmpdir)
    if success:
        print("Tests successful")
        sys.exit(0)
    else:
        print("Failed")
        sys.exit(1)
# Script entry point: run the listtransactions functional test.
if __name__ == '__main__':
    main()
| mit |
Dandandan/wikiprogramming | jsrepl/build/extern/python/unclosured/lib/python2.7/distutils/unixccompiler.py | 98 | 14432 | """distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
__revision__ = "$Id$"
import os, sys, re
from types import StringType, NoneType
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
def _darwin_compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.
    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.

    Returns a (possibly copied and modified) compiler command list;
    compiler_so itself is never mutated in place.
    """
    stripArch = stripSysroot = 0
    # Work on a copy so the caller's list is left untouched.
    compiler_so = list(compiler_so)
    kernel_version = os.uname()[2] # 8.4.3
    major_version = int(kernel_version.split('.')[0])
    # Darwin kernel 8 corresponds to OS X 10.4.
    if major_version < 8:
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args
    if stripArch or 'ARCHFLAGS' in os.environ:
        # Remove every '-arch ARCH' pair (there may be several).
        while 1:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break
    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also distutils.sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
    if stripSysroot:
        # Only the first '-isysroot PATH' pair is removed here; cc_args is
        # assumed to carry the authoritative sysroot.
        try:
            index = compiler_so.index('-isysroot')
            # Strip this argument and the next one:
            del compiler_so[index:index+2]
        except ValueError:
            pass
    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx+1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx+1]
    if sysroot and not os.path.isdir(sysroot):
        # Warn rather than fail: the compiler will produce its own error,
        # but this message points the user at the likely cause.
        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
                 sysroot)
        log.warn("Please check your Xcode installation")
    return compiler_so
class UnixCCompiler(CCompiler):
    """
    CCompiler implementation for "typical" Unix-style toolchains:
    cc (or similar) for compiling and linking, ar + optional ranlib for
    static libraries.  See the module docstring for the conventions
    assumed.  NOTE: this file uses Python 2 'except E, msg' / 'raise E,
    msg' syntax throughout.
    """
    compiler_type = 'unix'
    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set. The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler' : ["cc"],
                   'compiler_so' : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so' : ["cc", "-shared"],
                   'linker_exe' : ["cc"],
                   'archiver' : ["ar", "-cr"],
                   'ranlib' : None,
                   }
    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]
    # Needed for the filename generation methods provided by the base
    # class, CCompiler. NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!
    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"
    def preprocess(self, source,
                   output_file=None, macros=None, include_dirs=None,
                   extra_preargs=None, extra_postargs=None):
        """Run the preprocessor on 'source', writing to 'output_file' if
        given (stdout otherwise).  Raises CompileError on failure."""
        ignore, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)
        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                raise CompileError, msg
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file 'src' to object file 'obj'."""
        compiler_so = self.compiler_so
        # On OS X the compiler command may need -arch/-isysroot adjustments.
        if sys.platform == 'darwin':
            compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
        try:
            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError, msg:
            raise CompileError, msg
    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        """Archive 'objects' into a static library, running ranlib when
        configured.  Skipped when the target is already up to date."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)
        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)
            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does. Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError, msg:
                    raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link 'objects' into an executable or shared object, choosing
        linker_exe or linker_so by 'target_desc'.  Raises LinkError on
        failure; skipped when the target is already up to date."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if type(output_dir) not in (StringType, NoneType):
            raise TypeError, "'output_dir' must be a string or None"
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)
        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    # skip over environment variable settings if /usr/bin/env
                    # is used to set up the linker's environment.
                    # This is needed on OSX. Note: this assumes that the
                    # normal and C++ compiler have the same environment
                    # settings.
                    i = 0
                    if os.path.basename(linker[0]) == "env":
                        i = 1
                        while '=' in linker[i]:
                            i = i + 1
                    linker[i] = self.compiler_cxx[i]
                if sys.platform == 'darwin':
                    linker = _darwin_compiler_fixup(linker, ld_args)
                self.spawn(linker + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.
    def library_dir_option(self, dir):
        """Return the linker flag adding 'dir' to the library search path."""
        return "-L" + dir
    def _is_gcc(self, compiler_name):
        # Heuristic on the executable name only; a gcc installed under
        # another name (e.g. 'cc') is not detected.
        return "gcc" in compiler_name or "g++" in compiler_name
    def runtime_library_dir_option(self, dir):
        """Return the platform-specific flag(s) adding 'dir' to the runtime
        (rpath) search path.  May return a string or a list of strings."""
        # XXX Hackish, at the very least. See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        # ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought. GCC has to be told to pass the -R option through
        # to the linker, whereas other compilers just know this.
        # Other compilers may need something slightly different. At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(sysconfig.get_config_var("CC"))
        if sys.platform[:6] == "darwin":
            # MacOSX's linker doesn't understand the -R flag at all
            return "-L" + dir
        elif sys.platform[:5] == "hp-ux":
            if self._is_gcc(compiler):
                return ["-Wl,+s", "-L" + dir]
            return ["+s", "-L" + dir]
        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
            return ["-rpath", dir]
        elif self._is_gcc(compiler):
            return "-Wl,-R" + dir
        else:
            return "-R" + dir
    def library_option(self, lib):
        """Return the linker flag linking against library 'lib'."""
        return "-l" + lib
    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for library 'lib', preferring dylib, then shared,
        then static variants.  Returns the full path or None."""
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        static_f = self.library_filename(lib, lib_type='static')
        if sys.platform == 'darwin':
            # On OSX users can specify an alternate SDK using
            # '-isysroot', calculate the SDK root if it is specified
            # (and use it further on)
            cflags = sysconfig.get_config_var('CFLAGS')
            m = re.search(r'-isysroot\s+(\S+)', cflags)
            if m is None:
                sysroot = '/'
            else:
                sysroot = m.group(1)
        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)
            # System directories on OS X must be looked up under the SDK
            # root rather than the real filesystem root.
            if sys.platform == 'darwin' and (
                dir.startswith('/System/') or (
                dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
                shared = os.path.join(sysroot, dir[1:], shared_f)
                dylib = os.path.join(sysroot, dir[1:], dylib_f)
                static = os.path.join(sysroot, dir[1:], static_f)
            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do. And of course I'm
            # ignoring even GCC's "-static" option. So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static
        # Oops, didn't find it in *any* of 'dirs'
        return None
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.