content
stringlengths 0
1.05M
| origin
stringclasses 2
values | type
stringclasses 2
values |
|---|---|---|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import gym
import shutil
import tempfile
import ray
from ray.rllib.a3c import DEFAULT_CONFIG
from ray.rllib.a3c.a3c_evaluator import A3CEvaluator
from ray.rllib.dqn.dqn_evaluator import adjust_nstep
from ray.tune.registry import get_registry
class DQNEvaluatorTest(unittest.TestCase):
    """Unit test for the n-step reward adjustment used by the DQN evaluator."""

    def testNStep(self):
        """Check that adjust_nstep folds n=3 discounted rewards in place.

        adjust_nstep mutates its list arguments: rewards become up-to-3-step
        discounted returns (gamma=0.9), new_obs is moved n steps ahead, and
        the trailing steps that lack a full n-step horizon are dropped --
        hence the shorter expected lists below.
        """
        obs = [1, 2, 3, 4, 5, 6, 7]
        actions = ["a", "b", "a", "a", "a", "b", "a"]
        rewards = [10.0, 0.0, 100.0, 100.0, 100.0, 100.0, 100000.0]
        new_obs = [2, 3, 4, 5, 6, 7, 8]
        dones = [1, 0, 0, 0, 0, 1, 0]
        adjust_nstep(3, 0.9, obs, actions, rewards, new_obs, dones)
        # First entry stays 10.0 -- consistent with dones[0] == 1 stopping
        # reward accumulation at an episode boundary (presumed; confirm
        # against adjust_nstep's implementation).
        self.assertEqual(obs, [1, 2, 3, 4, 5])
        self.assertEqual(actions, ["a", "b", "a", "a", "a"])
        self.assertEqual(rewards, [10.0, 171.0, 271.0, 271.0, 190.0])
        self.assertEqual(new_obs, [2, 5, 6, 7, 7])
        self.assertEqual(dones, [1, 0, 0, 0, 0])
class A3CEvaluatorTest(unittest.TestCase):
    """Exercises filter bookkeeping (get / flush / sync) on an A3CEvaluator."""

    def setUp(self):
        # Single-CPU local ray instance; shut down in tearDown.
        ray.init(num_cpus=1)
        config = DEFAULT_CONFIG.copy()
        config["num_workers"] = 1
        config["observation_filter"] = "ConcurrentMeanStdFilter"
        config["reward_filter"] = "MeanStdFilter"
        config["batch_size"] = 2
        # Evaluator writes logs here; removed in tearDown.
        self._temp_dir = tempfile.mkdtemp("a3c_evaluator_test")
        self.e = A3CEvaluator(
            get_registry(),
            lambda config: gym.make("CartPole-v0"),
            config,
            logdir=self._temp_dir)

    def tearDown(self):
        ray.worker.cleanup()
        shutil.rmtree(self._temp_dir)

    def sample_and_flush(self):
        """Collect one sample batch, then return the flushed filters."""
        e = self.e
        self.e.sample()
        filters = e.get_filters(flush_after=True)
        obs_f = filters["obs_filter"]
        rew_f = filters["rew_filter"]
        # Sampling must have pushed data through both filters.
        self.assertNotEqual(obs_f.rs.n, 0)
        self.assertNotEqual(obs_f.buffer.n, 0)
        self.assertNotEqual(rew_f.rs.n, 0)
        self.assertNotEqual(rew_f.buffer.n, 0)
        return obs_f, rew_f

    def testGetFilters(self):
        """get_filters(flush_after=False) must not reset filter state."""
        e = self.e
        obs_f, rew_f = self.sample_and_flush()
        COUNT = obs_f.rs.n
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        NEW_COUNT = obs_f.rs.n
        # Running count only grows; buffer holds at most what arrived
        # since the last flush.
        self.assertGreaterEqual(NEW_COUNT, COUNT)
        self.assertLessEqual(obs_f.buffer.n, NEW_COUNT - COUNT)

    def testSyncFilter(self):
        """Show that sync_filters rebases own buffer over input"""
        e = self.e
        obs_f, _ = self.sample_and_flush()
        # Current State
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        rew_f = filters["rew_filter"]
        self.assertLessEqual(obs_f.buffer.n, 20)
        # Hand the evaluator a filter with an inflated running count...
        new_obsf = obs_f.copy()
        new_obsf.rs._n = 100
        e.sync_filters({"obs_filter": new_obsf, "rew_filter": rew_f})
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        # ...the synced count is adopted while the local buffer survives.
        self.assertGreaterEqual(obs_f.rs.n, 100)
        self.assertLessEqual(obs_f.buffer.n, 20)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Management of Redis server
==========================
.. versionadded:: 2014.7.0
:depends: - redis Python module
:configuration: See :py:mod:`salt.modules.redis` for setup instructions.
.. code-block:: yaml
key_in_redis:
redis.string:
- value: string data
The redis server information specified in the minion config file can be
overridden in states using the following arguments: ``host``, ``port``, ``db``,
``password``.
.. code-block:: yaml
key_in_redis:
redis.string:
- value: string data
- host: localhost
- port: 6379
- db: 0
- password: somuchkittycat
"""
from __future__ import absolute_import, print_function, unicode_literals
import copy
__virtualname__ = "redis"
def __virtual__():
    """
    Load this state module only when the redis execution module is usable.
    """
    if "redis.set_key" not in __salt__:
        return (False, "redis module could not be loaded")
    return __virtualname__
def string(name, value, expire=None, expireat=None, **connection_args):
    """
    Ensure that the key exists in redis with the value specified

    name
        Redis key to manage

    value
        Data to persist in key

    expire
        Sets time to live for key in seconds

    expireat
        Sets expiration time for key via UNIX timestamp, overrides `expire`
    """
    ret = {
        "name": name,
        "changes": {},
        "result": True,
        "comment": "Key already set to defined value",
    }
    old_key = __salt__["redis.get_key"](name, **connection_args)
    if old_key != value:
        __salt__["redis.set_key"](name, value, **connection_args)
        ret["changes"][name] = "Value updated"
        ret["comment"] = "Key updated to new value"
    # Expiration is (re)applied on every run, even when the value was
    # already correct, so the TTL stays in sync with the state definition.
    if expireat:
        __salt__["redis.expireat"](name, expireat, **connection_args)
        ret["changes"]["expireat"] = "Key expires at {0}".format(expireat)
    elif expire:
        __salt__["redis.expire"](name, expire, **connection_args)
        ret["changes"]["expire"] = "TTL set to {0} seconds".format(expire)
    return ret
def absent(name, keys=None, **connection_args):
    """
    Ensure key absent from redis

    name
        Key to ensure absent from redis

    keys
        list of keys to ensure absent, name will be ignored if this is used
    """
    ret = {
        "name": name,
        "changes": {},
        "result": True,
        "comment": "Key(s) specified already absent",
    }
    if keys:
        if not isinstance(keys, list):
            ret["result"] = False
            ret["comment"] = "`keys` not formed as a list type"
            return ret
        # Only delete keys that actually exist so `changes` reflects reality.
        delete_list = [
            key for key in keys if __salt__["redis.exists"](key, **connection_args)
        ]
        if not delete_list:
            return ret
        __salt__["redis.delete"](*delete_list, **connection_args)
        ret["changes"]["deleted"] = delete_list
        ret["comment"] = "Keys deleted"
        return ret
    # Single-key path: `name` is used only when `keys` was not given.
    if __salt__["redis.exists"](name, **connection_args):
        __salt__["redis.delete"](name, **connection_args)
        ret["comment"] = "Key deleted"
        ret["changes"]["deleted"] = [name]
    return ret
def slaveof(
    name,
    sentinel_host=None,
    sentinel_port=None,
    sentinel_password=None,
    **connection_args
):
    """
    Set this redis instance as a slave.

    .. versionadded: 2016.3.0

    name
        Master to make this a slave of

    sentinel_host
        Ip of the sentinel to check for the master

    sentinel_port
        Port of the sentinel to check for the master
    """
    ret = {
        "name": name,
        "changes": {},
        "result": False,
        "comment": "Failed to setup slave",
    }
    kwargs = copy.copy(connection_args)
    sentinel_master = __salt__["redis.sentinel_get_master_ip"](
        name, sentinel_host, sentinel_port, sentinel_password
    )
    # If the sentinel reports one of this minion's own IPs as master,
    # this node IS the master -- nothing to do.
    if sentinel_master["master_host"] in __salt__["network.ip_addrs"]():
        ret["result"] = True
        ret["comment"] = "Minion is the master: {0}".format(name)
        return ret
    first_master = __salt__["redis.get_master_ip"](**connection_args)
    if first_master == sentinel_master:
        ret["result"] = True
        ret["comment"] = "Minion already slave of master: {0}".format(name)
        return ret
    if __opts__["test"] is True:
        # NOTE(review): this reads sentinel_master["host"], while the check
        # above used "master_host" -- confirm which key
        # redis.sentinel_get_master_ip actually returns.
        ret["comment"] = "Minion will be made a slave of {0}: {1}".format(
            name, sentinel_master["host"]
        )
        ret["result"] = None
        return ret
    # Reconfigure replication, then re-read the master to verify it took.
    kwargs.update(**sentinel_master)
    __salt__["redis.slaveof"](**kwargs)
    current_master = __salt__["redis.get_master_ip"](**connection_args)
    if current_master != sentinel_master:
        return ret
    ret["result"] = True
    ret["changes"] = {
        "old": first_master,
        "new": current_master,
    }
    ret["comment"] = "Minion successfully connected to master: {0}".format(name)
    return ret
|
nilq/baby-python
|
python
|
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image and collapse it to a single grayscale intensity channel.
img = cv2.imread('canyon.png')
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# Count pixels per intensity level in one vectorized pass. The previous
# version scanned the whole image 256 times (np.where(...).sum() per
# level, O(256*N)); bincount produces the identical counts in O(N).
values = np.bincount(gray_img.ravel(), minlength=256)

plt.bar(range(256), height=values, width=1.)
plt.xlabel('intensity')
plt.ylabel('pixels')
plt.savefig('chart.png')
|
nilq/baby-python
|
python
|
"""
Profile ../profile-datasets-py/div83/023.py
file automatically created by prof_gen.py script
"""
self["ID"] = "../profile-datasets-py/div83/023.py"
self["Q"] = numpy.array([ 2.79844200e+00, 3.21561000e+00, 4.08284300e+00,
4.76402700e+00, 4.58551900e+00, 4.69499800e+00,
5.22973300e+00, 5.84156600e+00, 6.18858200e+00,
6.21784100e+00, 6.21911100e+00, 6.16721200e+00,
6.00577400e+00, 5.67508800e+00, 5.23121300e+00,
4.81113700e+00, 4.55030900e+00, 4.39987100e+00,
4.29931200e+00, 4.22282200e+00, 4.15552300e+00,
4.09809300e+00, 4.04698400e+00, 4.00343400e+00,
3.96829400e+00, 3.94125400e+00, 3.91683500e+00,
3.89245500e+00, 3.87021500e+00, 3.85012500e+00,
3.83217500e+00, 3.82010500e+00, 3.81511500e+00,
3.81423500e+00, 3.81682500e+00, 3.82262500e+00,
3.83055500e+00, 3.84032500e+00, 3.85492500e+00,
3.87799500e+00, 3.90433500e+00, 3.92437500e+00,
3.94598400e+00, 3.97470400e+00, 4.01590400e+00,
4.04815400e+00, 4.03697400e+00, 4.03236400e+00,
4.03976400e+00, 4.14153300e+00, 4.59046900e+00,
5.41755100e+00, 6.21536100e+00, 7.46027400e+00,
9.47053000e+00, 1.24248500e+01, 1.70469100e+01,
2.38214300e+01, 2.79383200e+01, 3.07220600e+01,
3.31475000e+01, 3.59911000e+01, 3.99466000e+01,
4.64855400e+01, 5.73845100e+01, 7.41050100e+01,
1.02426500e+02, 1.38336900e+02, 1.73875800e+02,
2.01908200e+02, 2.49736600e+02, 3.41031700e+02,
4.82377200e+02, 6.08759200e+02, 6.98893200e+02,
7.49942200e+02, 6.57168800e+02, 5.95259500e+02,
5.49234200e+02, 3.84860800e+02, 2.39486600e+02,
1.34069000e+02, 1.01649700e+02, 1.26293000e+02,
1.82372700e+02, 2.29509300e+02, 3.02564400e+02,
4.86049600e+02, 6.92652900e+02, 1.47797200e+03,
2.63994200e+03, 3.37610300e+03, 3.89369000e+03,
4.35807400e+03, 5.44476200e+03, 6.95739600e+03,
7.05270600e+03, 7.76016900e+03, 7.54917600e+03,
7.34658800e+03, 7.15201000e+03])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56505000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61260000e+01, 6.09895000e+01, 6.61253000e+01,
7.15398000e+01, 7.72396000e+01, 8.32310000e+01,
8.95204000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17778000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23442000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90893000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53628000e+02, 7.77790000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31524000e+02, 9.58591000e+02,
9.86067000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 375.11 , 375.1098, 375.1095, 375.1092, 375.1103, 375.1112,
375.102 , 375.0748, 375.0387, 375.0027, 374.9657, 374.9347,
374.9347, 374.9779, 375.066 , 375.2462, 375.4553, 375.5563,
375.5914, 375.6144, 375.6834, 375.7585, 375.8175, 375.8015,
375.8185, 375.8575, 375.9295, 376.0125, 376.1415, 376.2736,
376.3256, 376.3816, 376.4906, 376.6096, 376.7786, 376.9766,
377.1776, 377.3716, 377.5755, 377.9805, 378.4275, 378.9005,
379.4005, 379.9165, 379.9895, 380.0655, 380.1185, 380.1515,
380.1955, 380.3004, 380.4083, 380.4939, 380.5716, 380.6732,
380.8284, 380.9903, 381.1505, 381.3149, 381.4673, 381.6093,
381.7373, 381.8043, 381.8727, 381.8402, 381.7941, 381.6917,
381.5509, 381.4192, 381.3097, 381.212 , 381.1708, 381.112 ,
381.0581, 381.0109, 380.9795, 380.9641, 380.9895, 380.9991,
380.9846, 381.0113, 381.0177, 381.0059, 380.9633, 380.8999,
380.8305, 380.7756, 380.7188, 380.6409, 380.5542, 380.2432,
379.7768, 379.4616, 379.2207, 379.007 , 378.5665, 377.9679,
377.9128, 377.6306, 377.703 , 377.7771, 377.8511])
self["CO"] = numpy.array([ 0.2633713 , 0.2658481 , 0.2708949 , 0.2798357 , 0.2943857 ,
0.3169055 , 0.3270093 , 0.3025382 , 0.2106017 , 0.1228242 ,
0.07192025, 0.04720251, 0.02749763, 0.0176824 , 0.01330503,
0.01175584, 0.01216294, 0.01235085, 0.01191595, 0.01109635,
0.01048156, 0.01065176, 0.01107116, 0.01118836, 0.01095626,
0.01048936, 0.01037516, 0.01030036, 0.01028216, 0.01026766,
0.01031526, 0.01036646, 0.01061996, 0.01090526, 0.01135316,
0.01191435, 0.01257265, 0.01338555, 0.01430114, 0.01698863,
0.02058872, 0.0247199 , 0.02928728, 0.03486766, 0.03547156,
0.03611435, 0.03603435, 0.03538126, 0.03486586, 0.03537075,
0.03590374, 0.0365283 , 0.03721557, 0.03815132, 0.03968272,
0.04133349, 0.04397005, 0.04697288, 0.04988211, 0.05270888,
0.05543176, 0.05699425, 0.05865026, 0.05972622, 0.06078511,
0.06102698, 0.06077157, 0.0600102 , 0.05846103, 0.05707617,
0.05641971, 0.05576448, 0.05575019, 0.05573665, 0.05577429,
0.05582021, 0.05583748, 0.05584664, 0.05582452, 0.05580362,
0.05573265, 0.05565114, 0.05552915, 0.0553906 , 0.05522743,
0.05502187, 0.05479002, 0.05452069, 0.05428088, 0.05400267,
0.05363653, 0.05297076, 0.05227825, 0.0516777 , 0.0512194 ,
0.0509122 , 0.05080981, 0.05063281, 0.05016561, 0.04969551,
0.04922292])
self["T"] = numpy.array([ 197.556, 205.148, 219.995, 240.314, 257.486, 264.952,
260.228, 247.287, 234.817, 228.724, 228.789, 228.982,
226.537, 222.31 , 217.705, 214.27 , 211.775, 209.309,
207.224, 206.093, 206.618, 207.997, 209.315, 210.38 ,
211.099, 211.505, 211.873, 212.371, 213.051, 213.908,
214.837, 215.571, 216.029, 216.031, 215.881, 215.892,
216.224, 216.699, 216.908, 216.703, 216.372, 216.338,
216.472, 216.439, 216.209, 216.181, 216.454, 216.965,
217.694, 218.094, 218.032, 217.803, 217.179, 215.873,
214.502, 213.833, 213.502, 212.442, 212.136, 212.741,
213.98 , 215.429, 216.953, 218.865, 221.017, 223.335,
225.768, 228.287, 230.855, 233.441, 235.821, 238.02 ,
240.023, 242.129, 244.298, 246.531, 248.765, 250.979,
253.162, 255.179, 257.081, 258.906, 260.675, 262.335,
263.83 , 265.078, 265.993, 266.655, 267.483, 267.492,
267.496, 268.771, 270.518, 272.394, 274.203, 275.936,
278.184, 279.876, 279.876, 279.876, 279.876])
self["N2O"] = numpy.array([ 0.00066 , 0.00066 , 0.00066 , 0.00170999, 0.00157999,
0.00117 , 0.00102 , 0.00093999, 0.00109999, 0.00186999,
0.00278998, 0.00385998, 0.00502997, 0.00794995, 0.01126994,
0.01598992, 0.0223999 , 0.03255986, 0.04303981, 0.05860975,
0.07344969, 0.09338962, 0.1153595 , 0.1364095 , 0.1593194 ,
0.1816793 , 0.2032092 , 0.2216691 , 0.2387291 , 0.255199 ,
0.269519 , 0.2770789 , 0.2843989 , 0.2914989 , 0.2959989 ,
0.2990189 , 0.3018688 , 0.3031288 , 0.3042188 , 0.3052488 ,
0.3062088 , 0.3071688 , 0.3081388 , 0.3091188 , 0.3100888 ,
0.3110487 , 0.3119987 , 0.3129287 , 0.3138187 , 0.3146787 ,
0.3154986 , 0.3162583 , 0.316948 , 0.3175776 , 0.318107 ,
0.318326 , 0.3185246 , 0.3187024 , 0.3188511 , 0.3189802 ,
0.3190694 , 0.3191285 , 0.3191473 , 0.3191452 , 0.3191417 ,
0.3191363 , 0.3191273 , 0.3191158 , 0.3191045 , 0.3190956 ,
0.3190803 , 0.3190512 , 0.319006 , 0.3189657 , 0.3189369 ,
0.3189206 , 0.3189503 , 0.31897 , 0.3189847 , 0.3190372 ,
0.3190836 , 0.3191172 , 0.3191276 , 0.3191197 , 0.3191018 ,
0.3190867 , 0.3190634 , 0.3190049 , 0.3189389 , 0.3186883 ,
0.3183174 , 0.3180825 , 0.3179173 , 0.3177691 , 0.3174222 ,
0.3169395 , 0.3169091 , 0.3166833 , 0.3167506 , 0.3168153 ,
0.3168774 ])
self["O3"] = numpy.array([ 0.1701035 , 0.1775344 , 0.2070442 , 0.3108455 , 0.4857418 ,
0.7174716 , 1.135664 , 1.854109 , 2.782353 , 3.737137 ,
4.530402 , 5.187768 , 5.835555 , 6.478703 , 7.042913 ,
7.406004 , 7.476306 , 7.470997 , 7.415398 , 7.330429 ,
7.24153 , 7.137661 , 6.989612 , 6.787533 , 6.528764 ,
6.220455 , 5.884567 , 5.503819 , 5.09551 , 4.693582 ,
4.327893 , 3.985085 , 3.678286 , 3.335367 , 2.953589 ,
2.57246 , 2.217582 , 1.889313 , 1.586384 , 1.319205 ,
1.117156 , 1.004066 , 0.9006934 , 0.7929348 , 0.6598114 ,
0.5608037 , 0.5221579 , 0.4566192 , 0.3826315 , 0.3151257 ,
0.2497519 , 0.1938919 , 0.161567 , 0.1437669 , 0.1241348 ,
0.09875807, 0.08337688, 0.07254977, 0.06489109, 0.06011635,
0.0572758 , 0.05523301, 0.05391805, 0.05260935, 0.05118826,
0.04970942, 0.04921116, 0.04938217, 0.04948469, 0.04887103,
0.0496528 , 0.05139117, 0.05410789, 0.05309466, 0.05165747,
0.0498389 , 0.04871117, 0.04829424, 0.04853613, 0.04912888,
0.04907085, 0.0484611 , 0.04731189, 0.04690288, 0.04737696,
0.0483425 , 0.04885881, 0.04753838, 0.04404007, 0.0405377 ,
0.03945616, 0.04006498, 0.03954044, 0.03722645, 0.03488482,
0.03295968, 0.02738151, 0.02086214, 0.02086658, 0.02087084,
0.02087493])
self["CH4"] = numpy.array([ 0.00907582, 0.00907582, 0.07173381, 0.1168504 , 0.1393494 ,
0.1532193 , 0.1714531 , 0.1907549 , 0.2226026 , 0.2802003 ,
0.3650427 , 0.4563802 , 0.5494887 , 0.6393934 , 0.7256052 ,
0.8122181 , 0.9008319 , 0.9992386 , 1.092925 , 1.184485 ,
1.271705 , 1.344764 , 1.409534 , 1.471594 , 1.516804 ,
1.558394 , 1.598414 , 1.620794 , 1.637004 , 1.637534 ,
1.638104 , 1.638704 , 1.639354 , 1.635034 , 1.630864 ,
1.626924 , 1.623304 , 1.620094 , 1.624864 , 1.629904 ,
1.635204 , 1.640764 , 1.646614 , 1.680853 , 1.691653 ,
1.702943 , 1.710403 , 1.714873 , 1.719423 , 1.723453 ,
1.727652 , 1.729941 , 1.731429 , 1.732867 , 1.734094 ,
1.735358 , 1.73741 , 1.739609 , 1.741511 , 1.743156 ,
1.744602 , 1.745187 , 1.74578 , 1.745269 , 1.74462 ,
1.743181 , 1.741222 , 1.739449 , 1.738058 , 1.736819 ,
1.736406 , 1.735898 , 1.735642 , 1.735413 , 1.735316 ,
1.735308 , 1.735469 , 1.735556 , 1.735436 , 1.735492 ,
1.735344 , 1.735097 , 1.734644 , 1.734111 , 1.733554 ,
1.733122 , 1.732716 , 1.732248 , 1.73171 , 1.730099 ,
1.727667 , 1.725943 , 1.724649 , 1.723625 , 1.721685 ,
1.719006 , 1.718802 , 1.717547 , 1.717893 , 1.718243 ,
1.71858 ])
# Scalar / auxiliary profile fields. This file is generated by
# prof_gen.py -- do not hand-edit the values.
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
# 2-metre surface ("S2M") variables.
self["S2M"]["T"] = 279.876
self["S2M"]["Q"] = 7152.01027939
self["S2M"]["O"] = 0.0208749268383
self["S2M"]["P"] = 1007.99701
self["S2M"]["U"] = 0.0
self["S2M"]["V"] = 0.0
self["S2M"]["WFETC"] = 100000.0
# Skin (surface) properties; SURFTYPE/WATERTYPE semantics come from the
# consuming radiative-transfer code -- presumably RTTOV conventions
# (1 = sea); confirm against its user guide.
self["SKIN"]["SURFTYPE"] = 1
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 279.876
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
# Viewing / solar geometry (all zero: nadir view, zero azimuth).
self["ZENANGLE"] = 0.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = -50.931
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([2007, 6, 10])
self["TIME"] = numpy.array([0, 0, 0])
|
nilq/baby-python
|
python
|
"""
Outpost URL Configuration
"""
import django
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
from django.views.i18n import JavaScriptCatalog
from rest_framework.authtoken import views as authtoken
# Translation catalog configuration for the jsi18n view below.
js_info_dict = {
    'packages': ('recurrence', ),
}
urlpatterns = []
if settings.DEBUG:
    import debug_toolbar
    # Development only: serve media/static straight from Django and mount
    # the debug toolbar. In production a real web server handles these.
    urlpatterns.extend(
        [
            url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
            url(
                r"^static/(?P<path>.*)$", serve, {"document_root": settings.STATIC_ROOT}
            ),
        ]
    )
    urlpatterns.extend([url(r"^__debug__/", include(debug_toolbar.urls))])
urlpatterns.extend(
    [
        url(r"^admin/", admin.site.urls),
        url(r'^jsi18n/$', JavaScriptCatalog.as_view(), js_info_dict),
        url(r"^auth/api/", include("rest_framework.urls", namespace="rest_framework")),
        url(r"^prometheus/", include("django_prometheus.urls")),
        url(r"^auth/token/", authtoken.obtain_auth_token),
        # Django >= 2.1 requires the (module, app_namespace) 2-tuple form
        # when include() is given an instance namespace; older versions
        # accept the bare module path.
        url(
            r"^saml2/",
            include(
                ("djangosaml2.urls", "saml2")
                if django.VERSION >= (2, 1)
                else "djangosaml2.urls",
                namespace="saml2",
            ),
        ),
        url(
            r"^oauth2/",
            include(
                ("outpost.django.oauth2.urls", "oauth2")
                if django.VERSION >= (2, 1)
                else "outpost.django.oauth2.urls",
                namespace="oauth2",
            ),
        ),
        url(r"^lti/", include("outpost.django.lti.urls", namespace="lti")),
        url(
            r"^attendance/",
            include("outpost.django.attendance.urls", namespace="attendance"),
        ),
        url(
            r"^research/",
            include("outpost.django.research.urls", namespace="research"),
        ),
        url(
            r"^campusonline/",
            include("outpost.django.campusonline.urls", namespace="campusonline"),
        ),
        url(
            r"^networktoken/",
            include("outpost.django.networktoken.urls", namespace="networktoken"),
        ),
        url(r"^salt/", include("outpost.django.salt.urls", namespace="salt")),
        url(r"^typo3/", include("outpost.django.typo3.urls", namespace="typo3")),
        url(r"^borg/", include("outpost.django.borg.urls", namespace="borg")),
        url(r"^video/", include("outpost.django.video.urls", namespace="video")),
        url(
            r"^redirect/", include("outpost.django.redirect.urls", namespace="redirect")
        ),
        # Catch-all prefixes, matched in order: API, auth views, base app.
        url(r"^", include("outpost.django.api.urls", namespace="api")),
        url(
            r"^",
            include(("django.contrib.auth.urls", "accounts"), namespace="accounts"),
        ),
        url(r"^", include("outpost.django.base.urls", namespace="base")),
    ]
)
|
nilq/baby-python
|
python
|
import argparse
from flatdb import flatdb_app
from flatdb.app import define_urls
def get_options():
    """Return the parsed command-line options for the flatdb server."""
    cli = argparse.ArgumentParser()
    cli.add_argument('-d', '--debug', action='store_true', default=False)
    cli.add_argument('-b', '--database')
    cli.add_argument('-H', '--host', default='127.0.0.1')
    cli.add_argument('-p', '--port', type=int, default=7532)
    return cli.parse_args()
def configure_app(app, options):
    """Point *app* at the selected database file and register its routes."""
    db_path = options.database
    app.config['DB'] = db_path
    define_urls(app)
def dev_server():
    """Run the app under Flask's built-in development server."""
    opts = get_options()
    configure_app(flatdb_app, opts)
    flatdb_app.run(debug=opts.debug, port=opts.port, host=opts.host)
def run_server():
    """Serve the app through gevent's production WSGI server (blocking)."""
    opts = get_options()
    configure_app(flatdb_app, opts)
    # Imported lazily so the dev server works without gevent installed.
    from gevent.pywsgi import WSGIServer
    listener = (opts.host, opts.port)
    WSGIServer(listener, flatdb_app).serve_forever()
if __name__ == '__main__':
run_server()
|
nilq/baby-python
|
python
|
'''
the following import is only necessary because eip is not in this directory
'''
import sys
sys.path.append('..')
'''
The simplest example of reading a tag from a PLC
NOTE: You only need to call .Close() after you are done exchanging
data with the PLC. If you were going to read in a loop or read
more tags, you wouldn't want to call .Close() every time.
'''
from pylogix import PLC

# Connect to the controller at the given address and read a single tag.
comm = PLC()
comm.IPAddress = '192.168.1.9'
ret = comm.Read('CurrentScreen')
# ret is the pylogix response object; .value carries the tag's data.
print(ret.value)
comm.Close()
|
nilq/baby-python
|
python
|
from __future__ import unicode_literals
import frappe
from frappe.model.utils.rename_field import rename_field
def execute():
    """Rename Pricing Rule fields, tolerating an already-migrated schema."""
    frappe.reload_doc("accounts", "doctype", "pricing_rule")
    renames = (
        ("price_or_discount", "rate_or_discount"),
        ("price", "rate"),
    )
    try:
        for old_name, new_name in renames:
            rename_field("Pricing Rule", old_name, new_name)
    except Exception as e:
        # MySQL error 1054 (unknown column) means the source field is
        # already gone, i.e. the rename was done before -- ignore it.
        if e.args[0] != 1054:
            raise
|
nilq/baby-python
|
python
|
import logging
import os
import requests
import pickle
import json
from configparser import ConfigParser
logger = logging.getLogger(__name__)
project_dir = os.path.abspath(os.path.dirname(__file__)) + '/'
config = ConfigParser()
config.read(project_dir + '/config.cfg')
def get_or_download_file(filename, k, value, config):
    """Return the local path of *filename*, downloading it on first use.

    The destination folder is read from *config* section ``k`` option
    ``value``, relative to the package directory. The file is fetched from
    the ModelSEED data mirror once and cached on disk afterwards.
    """
    folder_path = f'{project_dir}/' + config.get(k, value)
    # BUG FIX: the cached path must be named after the requested file.
    # Previously a literal placeholder string was used here, so every
    # requested file mapped onto the same on-disk path.
    file_path = f'{folder_path}/{filename}'
    if not os.path.exists(folder_path):
        logger.warning('mkdir: %s', folder_path)
        os.makedirs(folder_path)
    if not os.path.exists(file_path):
        logger.warning('downloading data file to: %s', file_path)
        url = 'https://bioseed.mcs.anl.gov/~fxliu/modelseedpy/' + filename
        r = requests.get(url, allow_redirects=True)
        with open(file_path, 'wb') as fh:
            fh.write(r.content)
    return file_path
def get_file(filename, k, value):
    """Convenience wrapper around get_or_download_file using the module config."""
    return get_or_download_file(filename, k, value, config)
def get_classifier(classifier_id):
    """Load a genome classifier (pickled model + feature list) by id."""
    from modelseedpy.core.msgenomeclassifier import MSGenomeClassifier
    pickle_path = get_file(f'{classifier_id}.pickle', 'data', 'classifier_folder')
    features_path = get_file(f'{classifier_id}_features.json', 'data', 'classifier_folder')
    # NOTE: pickle.load runs arbitrary code -- only trusted model files
    # from the project's own mirror should ever be loaded here.
    with open(pickle_path, 'rb') as fh:
        model_filter = pickle.load(fh)
    with open(features_path, 'r') as fh:
        features = json.load(fh)
    return MSGenomeClassifier(model_filter, features)
def get_template(template_id):
    """Fetch a template by id and return its parsed JSON contents."""
    # we need a mstemplate object!
    path = get_file(f'{template_id}.json', 'data', 'template_folder')
    with open(path, 'r') as fh:
        return json.load(fh)
|
nilq/baby-python
|
python
|
from collections import namedtuple

# The typename should match the bound name: it is what shows up in the
# repr (Point(x=100, y=200)) and is required for pickling to round-trip.
Point = namedtuple('Point', 'x, y')

mouse_pos = Point(100, 200)
print("X Position of Mouse:", mouse_pos.x)
|
nilq/baby-python
|
python
|
import json
from graphql_relay import to_global_id
from tracker.api.services.auth import (
generate_auth_token,
)
from tracker.api.status_codes import StatusEnum
async def test_create_role_mutation(
    client,
    setup_project_list_test_retrun_auth_token
):
    """Role creation must be rejected as FORBIDDEN both without a PM token
    and (even with a PM token) for a non-existent project id."""
    pm_auth_token = setup_project_list_test_retrun_auth_token
    app = client.server.app
    # Token for a regular (non-PM) user.
    auth_token = generate_auth_token(app['config'], user_id=2)
    query = '''
        mutation RoleCreationMutation($input: RoleCreationInput!) {
            role {
                roleCreation(input: $input) {
                    roleCreationPayload {
                        duplicatedEmailList
                        status
                        errorList
                    }
                }
            }
        }
    '''

    async def post_and_expect_forbidden(variables, token):
        """POST the mutation and assert the server rejects it as FORBIDDEN."""
        response = await client.post(
            '/graphql',
            data=json.dumps({
                'query': query,
                'variables': json.dumps(variables),
            }),
            headers={
                'content-type': 'application/json',
                'Authorization': f'Bearer {token}'
            },
        )
        # Print the body so a failing assert shows the server's explanation.
        print(await response.text())
        assert response.status == 200
        data = await response.json()
        assert data['errors'][0]['status'] == StatusEnum.FORBIDDEN._name_

    # with no pm token
    await post_and_expect_forbidden({
        'input': {
            'projectId': to_global_id('ProjectType', 1),
            'role': 'team_member',
            'emailList': ['joke@thejoker5.com', 'joke2@thejoker5.com'],
        }
    }, auth_token)
    # with invalid project id
    await post_and_expect_forbidden({
        'input': {
            'projectId': to_global_id('ProjectType', 99999),
            'role': 'team_member',
            'emailList': ['joke@thejoker5.com', 'joke2@thejoker5.com'],
        }
    }, pm_auth_token)
|
nilq/baby-python
|
python
|
import pandas as pd
from .taxa_tree import NCBITaxaTree
from ..constants import MICROBE_DIR
MICROBE_DIR_COLS = [
'gram_stain',
'microbiome_location',
'antimicrobial_susceptibility',
'optimal_temperature',
'extreme_environment',
'biofilm_forming',
'optimal_ph',
'animal_pathogen',
'spore_forming',
'pathogenicity',
'plant_pathogen'
]
def annotate_taxa(taxa):
    """Return a pandas dataframe with annotations for the given taxa.

    Each taxon is annotated with its phylum (from the NCBI taxonomy tree,
    'unknown' when unresolved) and, where available, the trait columns of
    the microbe directory, indexed by taxon name.
    """
    tree = NCBITaxaTree.parse_files()
    frame = pd.DataFrame.from_dict(
        {
            'taxa': taxa,
            'phyla': [tree.phyla(taxon, 'unknown') for taxon in taxa],
        },
        orient='columns',
    ).set_index('taxa')
    directory = pd.read_csv(MICROBE_DIR).set_index('species')
    return frame.join(directory[MICROBE_DIR_COLS], how='left')
|
nilq/baby-python
|
python
|
import json
import os
from django.apps import apps
DJANGO_TAILWIND_APP_DIR = os.path.dirname(__file__)
def get_app_path(app_name):
    """Resolve the filesystem path of the installed Django app *app_name*."""
    label = app_name.rsplit(".", 1)[-1]
    return apps.get_app_config(label).path
def get_tailwind_src_path(app_name):
    """Return the app's Tailwind source directory (its static_src folder)."""
    app_dir = get_app_path(app_name)
    return os.path.join(app_dir, "static_src")
def get_package_json_path(app_name):
    """Return the path of the app's static_src/package.json file."""
    src_dir = os.path.join(get_app_path(app_name), "static_src")
    return os.path.join(src_dir, "package.json")
def get_package_json_contents(app_name):
    """Load and return the app's package.json as a Python object."""
    path = get_package_json_path(app_name)
    with open(path, "r") as f:
        return json.load(f)
def is_path_absolute(path):
    """Return True if *path* is root-relative ("/...") or a URL ("http...")."""
    # str.startswith accepts a tuple of prefixes -- one call, no `or` chain.
    return path.startswith(("/", "http"))
|
nilq/baby-python
|
python
|
from model_defs import *
from utils import *
from tensorflow.models.rnn.rnn_cell import *
###################################
# Building blocks #
###################################
# takes features and outputs potentials
def potentials_layer(in_layer, mask, config, params, reuse=False, name='Potentials'):
    """Turn per-token features into CRF log-potential tensors.

    in_layer: (batch, steps, features) feature tensor.
    mask:     (batch, steps) mask, 1 for real tokens, 0 for padding.
    Returns (pots_layer, W_pot, b_pot); pots_layer has shape
    (batch, steps, n_tags, ..., n_tags) with pot_window tag dimensions.
    W_pot / b_pot are placeholders (False) while the learned linear
    projection is stubbed out (the original "BOGUS" placeholder just sums
    feature slices per window position).
    """
    batch_size = int(in_layer.get_shape()[0])
    num_steps = int(in_layer.get_shape()[1])
    pot_shape = [config.n_tags] * config.pot_window
    out_shape = [batch_size, num_steps] + pot_shape
    # Placeholder implementation: no learned W/b yet.
    W_pot = False
    b_pot = False
    reshaped_in = tf.reshape(in_layer, [batch_size, num_steps, config.pot_window, -1])
    pre_scores = tf.reduce_sum(reshaped_in, 2)
    pots_layer = tf.reshape(pre_scores, out_shape)
    # define potentials for padding tokens: strongly favor tag index 0 at
    # the window's centre position.
    padding_pot = np.zeros(pot_shape)
    # FIX: floor division keeps `num` an int on Python 3 (`/` would yield
    # a float and break list multiplication), and numpy requires a tuple
    # (not a list) of slices for multi-axis indexing.
    num = config.pot_window // 2
    idx = tuple([slice(None)] * num + [0] + [slice(None)] * num)
    padding_pot[idx] += 10000
    pad_pot = tf.convert_to_tensor(padding_pot, tf.float32)
    pad_pots = tf.expand_dims(tf.expand_dims(pad_pot, 0), 0)
    pad_pots = tf.tile(pad_pots, [batch_size, num_steps] + [1] * config.pot_window)
    # expand the mask to the potentials' rank
    mask_a = mask
    for _ in range(config.pot_window):
        mask_a = tf.expand_dims(mask_a, -1)
    mask_a = tf.tile(mask_a, [1, 1] + pot_shape)
    # padded positions take the padding potentials; real ones keep theirs
    pots_layer = (pots_layer * mask_a + (1 - mask_a) * pad_pots)
    return (pots_layer, W_pot, b_pot)
# pseudo-likelihood criterion
def pseudo_likelihood(potentials, pot_indices, targets, config):
    """Compute the pseudo-log-likelihood training criterion.

    Returns (conditional, p_ll): the per-token conditional distribution of
    the current tag given its window, and the summed pseudo-log-likelihood
    of the target tags under those conditionals.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    # move the current tag to the last dimension
    # FIX: range() is not item-assignable on Python 3 -- materialize it;
    # floor division keeps `mid` an int.
    perm = list(range(len(potentials.get_shape())))
    mid = config.pot_window // 2
    perm[-1] = perm[-mid - 1]
    for i in range(-1, mid - 1):
        perm[-mid + i] = perm[-mid + i] + 1
    perm_potentials = tf.transpose(potentials, perm=perm)
    # conditional distribution of the current tag given its context window
    flat_pots = tf.reshape(perm_potentials, [-1, config.n_tags])
    flat_cond = tf.gather(flat_pots, pot_indices)
    pre_cond = tf.nn.softmax(flat_cond)
    conditional = tf.reshape(pre_cond, [batch_size, num_steps, -1])
    # compute pseudo-log-likelihood of sequence
    p_ll = tf.reduce_sum(targets * tf.log(conditional))
    return (conditional, p_ll)
# dynamic programming part 1: max sum
# dynamic programming part 1: max sum
class CRFMaxCell(RNNCell):
    """Dynamic programming for CRF"""
    def __init__(self, config):
        # One state per joint assignment of the (pot_window - 1) context tags.
        self._num_units = config.n_tags ** (config.pot_window - 1)
        self.n_tags = config.n_tags
    @property
    def input_size(self):
        return self._num_units
    @property
    def output_size(self):
        return self._num_units
    @property
    def state_size(self):
        return self._num_units
    def __call__(self, inputs, state, scope=None):
        """Summation for dynamic programming. Inputs are the
        log-potentials. States are the results of the summation at the
        last step"""
        with tf.variable_scope(scope or type(self).__name__):
            # add states and log-potentials
            multiples = [1] * (len(state.get_shape()) + 1)
            multiples[-1] = self.n_tags
            exp_state = tf.tile(tf.expand_dims(state, -1), multiples)
            added = exp_state + inputs
            # return maxes, arg_maxes along first dimension (after the batch dim)
            # max_id records the backpointers used by map_assignment's
            # backward pass.
            new_state = tf.reduce_max(added, 1)
            max_id = tf.argmax(added, 1)
            return new_state, max_id
# max a posteriori tags assignment: implement dynamic programming
def map_assignment(potentials, config):
    """Viterbi decoding: return the MAP tag sequence for each batch row.

    Runs a forward max-sum pass with CRFMaxCell, then follows the stored
    backpointers backwards to recover the best tag at each step.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    # FIX: materialize map() so the list concatenation below works on Python 3
    pots_shape = list(map(int, potentials.get_shape()[2:]))
    inputs_list = [tf.reshape(x, [batch_size] + pots_shape)
                   for x in tf.split(1, num_steps, potentials)]
    # forward pass
    max_cell = CRFMaxCell(config)
    max_ids = [0] * len(inputs_list)
    # initial state: starts at 0 - 0 - 0 etc...
    # NOTE(review): unlike log_partition, this state has no leading batch
    # dimension -- relies on broadcasting; confirm intended.
    state = tf.zeros(pots_shape[:-1])
    for t, input_ in enumerate(inputs_list):
        state, max_id = max_cell(inputs_list[t], state)
        max_ids[t] = max_id
    # backward pass: per-row offsets into the flattened backpointer tables
    powers = tf.to_int64(list(map(float, range(batch_size)))) * \
        (config.n_tags ** (config.pot_window - 1))
    outputs = [-1] * len(inputs_list)
    best_end = tf.argmax(tf.reshape(state, [batch_size, -1]), 1)
    current = best_end
    # FIX: floor division keeps max_pow an int on Python 3
    mid = config.pot_window // 2
    max_pow = (config.n_tags ** mid)
    for i, _ in enumerate(outputs):
        # NOTE(review): `/` on these int64 tensors relied on the original
        # TF integer-division semantics; left untouched to preserve behavior.
        outputs[-1 - i] = (current / max_pow)
        prev_best = tf.gather(tf.reshape(max_ids[-1 - i], [-1]), current + powers)
        current = prev_best * max_pow + (current / config.n_tags)
    map_tags = tf.transpose(tf.pack(outputs))
    return map_tags
# dynamic programming part 2: sum product
class CRFSumCell(RNNCell):
    """RNN cell computing one step of the CRF forward (sum-product)
    recursion in log space."""

    def __init__(self, config):
        # One unit per (pot_window - 1)-gram tag context.
        self._num_units = config.n_tags ** (config.pot_window - 1)
        self.n_tags = config.n_tags

    @property
    def input_size(self):
        return self._num_units

    @property
    def output_size(self):
        return self._num_units

    @property
    def state_size(self):
        return self._num_units

    def __call__(self, inputs, state, scope=None):
        """One log-sum-exp step: combine the running log-sums (`state`)
        with the current log-potentials (`inputs`) and marginalise out the
        oldest tag."""
        with tf.variable_scope(scope or type(self).__name__):
            # Align the previous state with the potentials by appending a
            # tag axis and tiling along it.
            tile_shape = [1] * len(state.get_shape()) + [self.n_tags]
            scores = tf.tile(tf.expand_dims(state, -1), tile_shape) + inputs
            # Numerically stable log-sum-exp over the first dimension after
            # the batch: shift by the max, exponentiate, sum, log, unshift.
            shift = tf.reduce_max(scores)
            return tf.log(tf.reduce_sum(tf.exp(scores - shift), 1)) + shift
# computing the log partition for a sequence of length config.num_steps
def log_partition(potentials, config):
    """Sum of the log-partition functions over the whole batch, computed by
    the forward (sum-product) recursion in log space."""
    n_batch = int(potentials.get_shape()[0])
    n_steps = int(potentials.get_shape()[1])
    tail_dims = map(int, potentials.get_shape()[2:])
    step_inputs = [tf.reshape(chunk, [n_batch] + tail_dims)
                   for chunk in tf.split(1, n_steps, potentials)]
    # forward recursion over time steps
    sum_cell = CRFSumCell(config)
    state = tf.zeros([n_batch] + tail_dims[:-1])
    partial_sums = [0] * len(step_inputs)
    for step, step_input in enumerate(step_inputs):
        state = sum_cell(step_input, state)
        partial_sums[step] = state
    # Final stable log-sum-exp over all remaining tag contexts, then sum
    # the per-example log-partitions.
    shift = tf.reduce_max(state)
    flat_exp = tf.reshape(tf.exp(state - shift), [n_batch, -1])
    log_part = tf.log(tf.reduce_sum(flat_exp, 1)) + shift
    return tf.reduce_sum(log_part)
# compute the log score of the gold tag windows (numerator of the likelihood)
def log_score(potentials, window_indices, mask, config):
    """Sum of the gold-sequence log-potentials over the batch.

    window_indices linearly indexes the flattened potentials tensor with the
    gold tag windows; mask zeroes out padding positions.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    # (removed unused local `pots_shape`)
    flat_pots = tf.reshape(potentials, [-1])
    flat_scores = tf.gather(flat_pots, window_indices)
    scores = tf.reshape(flat_scores, [batch_size, num_steps])
    # Zero out contributions from padding tokens.
    scores = tf.mul(scores, mask)
    return tf.reduce_sum(scores)
# TODO: alpha-beta rec
def marginals(potentials, config):
    """Unfinished alpha-beta (forward-backward) recursion for tag marginals.

    Only the partial forward and backward log-sums are computed; the
    marginals themselves are not implemented yet and 0 is returned.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])
    inputs_list = [tf.reshape(x, [batch_size] + pots_shape)
                   for x in tf.split(1, num_steps, potentials)]
    # forward and backward passes share the same cell type
    sum_cell_f = CRFSumCell(config)
    sum_cell_b = CRFSumCell(config)
    # NOTE(review): unlike log_partition(), these initial states have no
    # leading batch dimension -- confirm before finishing the implementation.
    state_f = tf.convert_to_tensor(np.zeros(pots_shape[:-1]))
    state_b = tf.convert_to_tensor(np.zeros(pots_shape[:-1]))
    partial_sums_f = [0] * len(inputs_list)
    partial_sums_b = [0] * len(inputs_list)
    for t, _ in enumerate(inputs_list):
        state_f = sum_cell_f(inputs_list[t], state_f)
        partial_sums_f[t] = state_f
        # NOTE(review): the backward pass consumes the inputs in forward
        # order here; a true backward recursion would read them reversed.
        state_b = sum_cell_b(inputs_list[t], state_b)
        partial_sums_b[-1 - t] = state_b
    # TODO: compute marginals
    marginals = 0
    return marginals
###################################
# Making a (deep) CRF #
###################################
class CRF:
    """Windowed linear-chain CRF on top of a learned feature layer.

    __init__ creates the input placeholders, make() builds the graph
    (features -> potentials -> losses/accuracies), and train_epoch() /
    validate_accuracy() provide simple training and evaluation loops.
    """

    def __init__(self, config):
        self.batch_size = config.batch_size
        self.num_steps = config.num_steps
        num_features = len(config.input_features)
        # input_ids <- batch.features
        self.input_ids = tf.placeholder(tf.int32, shape=[self.batch_size,
                                                         self.num_steps,
                                                         num_features])
        # mask <- batch.mask (1 for real tokens, 0 for padding)
        self.mask = tf.placeholder(tf.float32, [self.batch_size, self.num_steps])
        # pot_indices <- batch.tag_neighbours_lin
        self.pot_indices = tf.placeholder(tf.int32,
                                          [config.batch_size * config.num_steps])
        # targets <- batch.tags_one_hot
        self.targets = tf.placeholder(tf.float32, [config.batch_size,
                                                   config.num_steps,
                                                   config.n_tags])
        # window_indices <- batch.tag_windows_lin
        self.window_indices = tf.placeholder(tf.int32,
                                             [config.batch_size * config.num_steps])

    def make(self, config, params, reuse=False, name='CRF'):
        """Build the computation graph and attach its tensors to self."""
        # TODO: add marginal inference
        with tf.variable_scope(name):
            if reuse:
                tf.get_variable_scope().reuse_variables()
            # out_layer <- output of NN (TODO: add layers)
            (out_layer, embeddings) = feature_layer(self.input_ids,
                                                    config, params,
                                                    reuse=reuse)
            params.embeddings = embeddings
            if config.verbose:
                print('features layer done')
            self.out_layer = out_layer
            # pots_layer <- potentials
            (pots_layer, W_pot, b_pot) = potentials_layer(out_layer,
                                                          self.mask,
                                                          config, params,
                                                          reuse=reuse)
            params.W_pot = W_pot
            params.b_pot = b_pot
            if config.verbose:
                print('potentials layer done')
            self.pots_layer = pots_layer
            # pseudo-log-likelihood
            conditional, pseudo_ll = pseudo_likelihood(pots_layer,
                                                       self.pot_indices,
                                                       self.targets, config)
            self.pseudo_ll = pseudo_ll
            # accuracy of p(t_i | t_{i-1}, t_{i+1}); weighting by the one-hot
            # targets keeps padding rows out of the average.
            correct_cond_pred = tf.equal(tf.argmax(conditional, 2),
                                         tf.argmax(self.targets, 2))
            correct_cond_pred = tf.cast(correct_cond_pred, "float")
            cond_accuracy = tf.reduce_sum(correct_cond_pred *
                                          tf.reduce_sum(self.targets, 2)) /\
                tf.reduce_sum(self.targets)
            self.cond_accuracy = cond_accuracy
            # log-likelihood = gold score - log partition
            log_sc = log_score(self.pots_layer, self.window_indices,
                               self.mask, config)
            log_part = log_partition(self.pots_layer, config)
            log_likelihood = log_sc - log_part
            self.log_likelihood = log_likelihood
            # L1 regularization
            self.l1_norm = tf.reduce_sum(tf.zeros([1]))
            for feat in config.l1_list:
                self.l1_norm += config.l1_reg * \
                    tf.reduce_sum(tf.abs(params.embeddings[feat]))
            # L2 regularization
            self.l2_norm = tf.reduce_sum(tf.zeros([1]))
            for feat in config.l2_list:
                self.l2_norm += config.l2_reg * \
                    tf.reduce_sum(tf.mul(params.embeddings[feat],
                                         params.embeddings[feat]))
            # map assignment and accuracy of map assignment
            map_tags = map_assignment(self.pots_layer, config)
            correct_pred = tf.equal(map_tags, tf.argmax(self.targets, 2))
            correct_pred = tf.cast(correct_pred, "float")
            accuracy = tf.reduce_sum(correct_pred *
                                     tf.reduce_sum(self.targets, 2)) /\
                tf.reduce_sum(self.targets)
            self.map_tags = map_tags
            self.accuracy = accuracy

    def train_epoch(self, data, config, params, session, crit_type='likelihood'):
        """Run one training epoch; returns the average criterion value."""
        batch_size = config.batch_size
        if crit_type == 'pseudo':
            criterion = -self.pseudo_ll
        else:
            criterion = -self.log_likelihood
        # BUG FIX: the L2 term was previously scaled by l1_reg.
        criterion -= config.l1_reg * self.l1_norm + config.l2_reg * self.l2_norm
        train_step = tf.train.AdagradOptimizer(config.learning_rate).minimize(criterion)
        session.run(tf.initialize_all_variables())
        # TODO: gradient clipping
        total_crit = 0.
        # Floor division so the batch count is an int under Python 3 too.
        n_batches = len(data) // batch_size
        batch = Batch()
        for i in range(n_batches):
            batch.read(data, i * batch_size, config)
            f_dict = {self.input_ids: batch.features,
                      self.pot_indices: batch.tag_neighbours_lin,
                      self.window_indices: batch.tag_windows_lin,
                      self.mask: batch.mask,
                      self.targets: batch.tags_one_hot}
            train_step.run(feed_dict=f_dict)
            crit = criterion.eval(feed_dict=f_dict)
            total_crit += crit
            if i % 50 == 0:
                # (removed a duplicate Python-2-only `print` statement here)
                train_accuracy = self.accuracy.eval(feed_dict=f_dict)
                print("step %d of %d, training accuracy %f, criterion %f" %
                      (i, n_batches, train_accuracy, crit))
        print("total crit %f" % (total_crit / n_batches))
        return total_crit / n_batches

    def validate_accuracy(self, data, config):
        """Evaluate MAP / conditional accuracy and likelihoods on data.

        Returns (mean MAP accuracy, mean conditional accuracy).
        """
        batch_size = config.batch_size
        batch = Batch()
        total_accuracy = 0.
        total_cond_accuracy = 0.
        # BUG FIX: these totals were read below without being initialised.
        total_pll = 0.
        total_ll = 0.
        total = 0.
        for i in range(len(data) // batch_size):
            batch.read(data, i * batch_size, config)
            f_dict = {self.input_ids: batch.features,
                      self.targets: batch.tags_one_hot,
                      self.pot_indices: batch.tag_neighbours_lin,
                      # BUG FIX: mask and window_indices are required to
                      # evaluate accuracy and log_likelihood.
                      self.mask: batch.mask,
                      self.window_indices: batch.tag_windows_lin}
            dev_accuracy = self.accuracy.eval(feed_dict=f_dict)
            dev_cond_accuracy = self.cond_accuracy.eval(feed_dict=f_dict)
            pll = self.pseudo_ll.eval(feed_dict=f_dict)
            ll = self.log_likelihood.eval(feed_dict=f_dict)
            total_accuracy += dev_accuracy
            total_cond_accuracy += dev_cond_accuracy
            total_pll += pll
            total_ll += ll
            total += 1
            if i % 100 == 0:
                # BUG FIX: supply all six values the format string expects.
                print("%d of %d: \t map accuracy: %f \t cond accuracy: %f \
\t pseudo_ll: %f \t log_likelihood: %f" % (i, len(data) // batch_size,
                                           total_accuracy / total,
                                           total_cond_accuracy / total,
                                           total_pll / total,
                                           total_ll / total))
        return (total_accuracy / total, total_cond_accuracy / total)
|
nilq/baby-python
|
python
|
import networkx as nx
import numpy as np
import torch
from gym_ds3.envs.core.node import Node
from gym_ds3.envs.utils.helper_dict import OrderedSet
class JobDAG(object):
    """DAG view of a job: adjacency / dependency matrices plus Node wrappers."""

    def __init__(self, job):
        self.job = job
        self.jobID = self.job.task_list[0].jobID
        self.commvol = self.job.comm_vol
        self.tasks = self.job.task_list
        self.adj_mat = self.get_adj_mat(self.tasks)
        # Lifecycle flags and timestamps.
        self.arrived = False  # dag is arrived
        self.is_completed = False
        self.is_running = False
        self.start_exec_time = np.inf  # dag start time
        self.start_inject_time = np.inf  # dag inject time
        self.completion_time = np.inf  # dag finish time
        # Dependency graph (num_tasks, num_tasks)
        self.predecessor = self.predecessors(self.tasks)
        # Node features: jobID, taskID, status, deadline, start/finish, est
        self.nodes = self.get_nodes(self.tasks, self.adj_mat)
        self.num_nodes = len(self.job.task_list)
        self.frontier_nodes = OrderedSet()
        for node in self.nodes:
            if node.is_schedulable():
                self.frontier_nodes.add(node)

    def get_adj_mat(self, tasks):
        """Binary adjacency matrix derived from the communication-volume graph."""
        graph = nx.DiGraph(self.commvol)
        # Drop edges whose weight is '0.0': there is no placeholder for
        # "this edge doesn't exist".  NOTE(review): weights appear to be
        # stored as strings here -- confirm against the job format.
        zero_edges = [edge for edge in graph.edges()
                      if graph.get_edge_data(*edge)['weight'] == '0.0']
        graph.remove_edges_from(zero_edges)
        nx.relabel_nodes(graph, lambda idx: idx, copy=False)
        geo_data = from_networkx(graph)
        n = len(tasks)
        mat = np.zeros((n, n))
        # .T does not work with pytorch > v1.1, hence transpose(0, 1).
        index_list = geo_data['edge_index'].transpose(0, 1)
        for row in range(len(index_list)):
            mat[index_list[row][0]][index_list[row][1]] = 1
        return mat

    def get_nodes(self, tasks, adj_mat):
        """Wrap each task in a Node and wire parent/child links from adj_mat."""
        nodes = [Node(task) for task in tasks]
        n = len(tasks)
        for parent in range(n):
            for child in range(n):
                if adj_mat[parent, child] == 1:
                    nodes[parent].child_nodes.append(nodes[child])
                    nodes[child].parent_nodes.append(nodes[parent])
        return nodes

    def predecessors(self, tasks):
        """(num_tasks, num_tasks) matrix; row i marks task i's predecessors."""
        n = len(tasks)
        dependency = np.zeros((n, n))
        for row, task in enumerate(tasks):
            for pred_id in task.predecessors:
                dependency[row][pred_id % n] = 1.
        return dependency
# Modified from https://github.com/rusty1s/pytorch_geometric/blob/e6b8d6427ad930c6117298006d7eebea0a37ceac/torch_geometric/utils/convert.py#L108
def from_networkx(G):
    r"""Converts a :obj:`networkx.Graph` or :obj:`networkx.DiGraph` to a
    :class:`torch_geometric.data.Data` instance.
    Args:
        G (networkx.Graph or networkx.DiGraph): A networkx graph.
    """
    G = nx.convert_node_labels_to_integers(G)
    if not nx.is_directed(G):
        G = G.to_directed()
    edge_index = torch.LongTensor(list(G.edges)).t().contiguous()
    data = {}
    # Gather node attributes, then edge attributes, keyed by attribute name.
    # Index 0 (re)initialises each list, mirroring the upstream code.
    for i, (_, attrs) in enumerate(G.nodes(data=True)):
        for key, value in attrs.items():
            if i == 0:
                data[str(key)] = [value]
            else:
                data[str(key)] = data[str(key)] + [value]
    for i, (_, _, attrs) in enumerate(G.edges(data=True)):
        for key, value in attrs.items():
            if i == 0:
                data[str(key)] = [value]
            else:
                data[str(key)] = data[str(key)] + [value]
    # Tensorise what we can; ragged / non-numeric attributes stay as lists.
    for key, item in data.items():
        try:
            data[key] = torch.tensor(item)
        except ValueError:
            pass
    data['edge_index'] = edge_index.view(2, -1)
    data['num_nodes'] = G.number_of_nodes()
    return data
|
nilq/baby-python
|
python
|
from typing import List
import torch
from torch.nn import ParameterList, Parameter
from allennlp.common.checks import ConfigurationError
class ScalarMix(torch.nn.Module):
    """
    Computes a parameterised scalar mixture of N tensors,
    `mixture = gamma * sum(s_k * tensor_k)` where `s = softmax(w)`, with `w`
    and `gamma` scalar parameters.  If `do_layer_norm=True`, each tensor is
    layer-normalised (under the mask) before weighting.
    """

    def __init__(
        self,
        mixture_size: int,
        do_layer_norm: bool = False,
        initial_scalar_parameters: List[float] = None,
        trainable: bool = True,
    ) -> None:
        super().__init__()
        self.mixture_size = mixture_size
        self.do_layer_norm = do_layer_norm

        if initial_scalar_parameters is None:
            initial_scalar_parameters = [0.0] * mixture_size
        elif len(initial_scalar_parameters) != mixture_size:
            raise ConfigurationError(
                "Length of initial_scalar_parameters {} differs "
                "from mixture_size {}".format(initial_scalar_parameters, mixture_size)
            )
        # One scalar weight per input tensor, plus a global gamma.
        self.scalar_parameters = ParameterList(
            [Parameter(torch.FloatTensor([value]), requires_grad=trainable)
             for value in initial_scalar_parameters]
        )
        self.gamma = Parameter(torch.FloatTensor([1.0]), requires_grad=trainable)

    def forward(self, tensors: List[torch.Tensor], mask: torch.BoolTensor = None) -> torch.Tensor:
        """
        Compute a weighted average of `tensors` (all the same shape, at
        least two dimensions).  With `do_layer_norm=True` a `mask` shaped
        like the tensors minus their last dimension is required (e.g.
        `(batch_size, timesteps)` for `(batch_size, timesteps, dim)`
        tensors); otherwise `mask` is ignored.
        """
        if len(tensors) != self.mixture_size:
            raise ConfigurationError(
                "{} tensors were passed, but the module was initialized to "
                "mix {} tensors.".format(len(tensors), self.mixture_size)
            )

        def _do_layer_norm(tensor, broadcast_mask, num_elements_not_masked):
            # Mean / variance are computed over unmasked positions only.
            tensor_masked = tensor * broadcast_mask
            mean = torch.sum(tensor_masked) / num_elements_not_masked
            variance = (
                torch.sum(((tensor_masked - mean) * broadcast_mask) ** 2)
                / num_elements_not_masked
            )
            return (tensor - mean) / torch.sqrt(variance + 1e-12)

        weights = torch.nn.functional.softmax(
            torch.cat(list(self.scalar_parameters)), dim=0
        )
        weights = torch.split(weights, split_size_or_sections=1)

        if self.do_layer_norm:
            broadcast_mask = mask.unsqueeze(-1)
            num_elements_not_masked = torch.sum(mask) * tensors[0].size(-1)
            pieces = [
                weight * _do_layer_norm(tensor, broadcast_mask, num_elements_not_masked)
                for weight, tensor in zip(weights, tensors)
            ]
        else:
            pieces = [weight * tensor for weight, tensor in zip(weights, tensors)]
        return self.gamma * sum(pieces)
|
nilq/baby-python
|
python
|
import bpy,bmesh
import time,copy,mathutils,math
from mathutils import noise
# Shared mutable state for the wind-animation handlers below.
Context={
    "lasttick":0,      # wall-clock time of the last frame-change tick
    "running":False,   # True while the animation handler is active
    "store":{},        # per-object backup of original vertex coordinates
    "starttime":-1     # wall-clock time of the first tick (-1 = unset)
}
def timeNow():
    """Current wall-clock time in seconds."""
    return time.time()
def calcFrameTime():
    """Duration of one frame in seconds, from the scene's FPS (clamped to >= 1)."""
    return 1.0 / max(1.0, float(bpy.context.scene.render.fps))
def wind(v, co, col, noiseAmmount, timeN):
    """Return a wind-displaced copy of coordinate `co` for one vertex.

    v: the vertex itself (unused; kept for call compatibility)
    co: original (x, y, z) rest coordinate
    col: optional RGBA vertex colour modulating the effect (alpha scales it)
    noiseAmmount: per-axis (x, y, z) displacement strength
    timeN: animation time in seconds
    """
    pos = list(co)
    # (removed unused local `distorsion`)
    # Sample 3 noise values at time-shifted positions; the third coordinate
    # selects three different noise "planes".
    noisec = [pos[0] + timeN,
              pos[2] + timeN,
              0]
    windNoise = [0, 0, 0]
    windNoise[0] = mathutils.noise.noise(noisec)
    noisec[2] = 1000
    windNoise[1] = mathutils.noise.noise(noisec)
    noisec[2] = 9000
    windNoise[2] = mathutils.noise.noise(noisec)
    vcolor = col if col else (1, 1, 1, 1)
    # Sum three sine waves of different frequency, each gated by one colour
    # channel, scaled by alpha, the per-axis strength, and the noise.
    pos[2] += math.sin(timeN*20) * vcolor[3] * noiseAmmount[2] * vcolor[0] * windNoise[1]
    pos[2] += math.sin(timeN*15) * vcolor[3] * noiseAmmount[2] * vcolor[1] * windNoise[1]
    pos[2] += math.sin(timeN*25) * vcolor[3] * noiseAmmount[2] * vcolor[2] * windNoise[1]
    pos[0] += math.sin(timeN*20) * vcolor[3] * noiseAmmount[0] * vcolor[0] * windNoise[0]
    pos[0] += math.sin(timeN*15) * vcolor[3] * noiseAmmount[0] * vcolor[1] * windNoise[0]
    pos[0] += math.sin(timeN*25) * vcolor[3] * noiseAmmount[0] * vcolor[2] * windNoise[0]
    pos[1] += windNoise[0] * noiseAmmount[1] * vcolor[3] * vcolor[0]
    pos[1] += windNoise[1] * noiseAmmount[1] * vcolor[3] * vcolor[1]
    pos[1] += windNoise[2] * noiseAmmount[1] * vcolor[3] * vcolor[2]
    return pos
def preFrameChange(scene):
    """Frame-change handler: displace selected meshes' vertices by wind.

    Backs up each vertex's original coordinate in Context["store"] (restored
    later by resetAnim) and caches vertex colours in Context["cols"].
    """
    global Context
    timeN=timeNow()
    Context["running"]=True
    Context["lasttick"]=timeNow()
    if Context["starttime"]==-1:
        Context["starttime"]=Context["lasttick"]
    noiseAmmount=(1,1,1)
    # Animation time = seconds since the first tick.
    timeN=Context["lasttick"]-Context["starttime"]
    if not "cols" in Context:
        Context["cols"]={}
    if bpy.ops.object.mode_set.poll():
        for obj in bpy.context.scene.objects:
            if obj.select_get():
                if not obj in Context["store"]:
                    Context["store"][obj]={}
                if not obj in Context["cols"]:
                    Context["cols"][obj]={}
                src_mesh=obj.data
                # Cache per-vertex RGBA colours from the active colour layer.
                for i, poly in enumerate(src_mesh.polygons):
                    for k in poly.loop_indices:
                        vl = src_mesh.loops[k]
                        index=vl.vertex_index
                        if src_mesh.vertex_colors.active:
                            c=[]
                            c.extend(src_mesh.vertex_colors.active.data[k].color)
                            c.append(1.0)
                            Context["cols"][obj][index]=c
                bpy.ops.object.mode_set(mode='EDIT')
                mesh = bmesh.from_edit_mesh(obj.data)
                for vert in mesh.verts:
                    # Remember the rest position the first time a vertex is seen.
                    if not vert.index in Context["store"][obj]:
                        v=[]
                        v.append(vert.co[0])
                        v.append(vert.co[1])
                        v.append(vert.co[2])
                        Context["store"][obj][vert.index]=v
                        #print("Store "+str(vert.index)+str(v))
                    col=Context["cols"][obj][vert.index] if vert.index in Context["cols"][obj] else None
                    # Displace from the stored rest position, not the current one.
                    vert.co=wind(vert,tuple(Context["store"][obj][vert.index]),col,noiseAmmount,timeN)
                bmesh.update_edit_mesh(obj.data)
                bpy.ops.object.mode_set(mode='OBJECT')
def resetAnim():
    """Restore original vertex positions once playback appears stopped.

    Called periodically from the modal timer operator: if more than two
    frame intervals have passed since the last frame tick, playback is
    assumed stopped and all stored vertices are reset.  Returns the delay
    (seconds) until the next timer check.
    """
    global Context
    if Context["running"]:
        dtime=timeNow()-Context["lasttick"]
        frametime2=calcFrameTime()*2
        if dtime > frametime2:
            print("Reset Now")
            for obj in bpy.context.scene.objects:
                if obj in Context["store"]:
                    bpy.ops.object.mode_set(mode='EDIT')
                    mesh = bmesh.from_edit_mesh(obj.data)
                    for vert in mesh.verts:
                        if vert.index in Context["store"][obj]:
                            v=Context["store"][obj][vert.index]
                            #print("Reset "+str(vert.index)+" to "+str(v))
                            vert.co=v
                    bmesh.update_edit_mesh(obj.data)
                    bpy.ops.object.mode_set(mode='OBJECT')
                    del Context["store"][obj]
            Context["running"]=False
            # NOTE(review): assumes preFrameChange ran first and created
            # "cols"; otherwise this raises KeyError -- confirm.
            del Context["cols"]
    delay=calcFrameTime()
    return delay
class ModalTimerOperator(bpy.types.Operator):
    """Modal operator that drives resetAnim() on a repeating timer."""
    bl_idname = "wm.modal_timer_operator"
    bl_label = "Modal Timer Operator"
    _timer = None

    def modal(self, context, event):
        # Right-click or Escape stops the operator.
        if event.type in {'RIGHTMOUSE', 'ESC'}:
            self.cancel(context)
            return {'CANCELLED'}
        if event.type == 'TIMER':
            resetAnim()
        return {'PASS_THROUGH'}

    def execute(self, context):
        # Fire a timer event once per frame interval.
        wm = context.window_manager
        self._timer = wm.event_timer_add(calcFrameTime(), window=context.window)
        wm.modal_handler_add(self)
        return {'RUNNING_MODAL'}

    def cancel(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
def register():
    # Run preFrameChange before every frame and register the timer operator.
    bpy.app.handlers.frame_change_pre.append(preFrameChange)
    bpy.utils.register_class(ModalTimerOperator)

def unregister():
    bpy.app.handlers.frame_change_pre.remove(preFrameChange)

# Register and start the timer immediately on script execution.
register()
bpy.ops.wm.modal_timer_operator()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import os
from .. import StorageTests, get_server_mixin
# DAV server backend to test against; 'skip' disables these tests.
dav_server = os.environ.get('DAV_SERVER', 'skip')
ServerMixin = get_server_mixin(dav_server)

class DAVStorageTests(ServerMixin, StorageTests):
    # Expose the selected backend on the test class.
    dav_server = dav_server
|
nilq/baby-python
|
python
|
from __future__ import absolute_import, division, print_function
import os
import time
import numpy as np
import seaborn as sns
import tensorflow as tf
import tensorflow_probability as tfp
from matplotlib import pyplot as plt
from tensorflow import keras
from odin import visual as vs
from odin.bay import kl_divergence
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
tf.random.set_seed(8)
np.random.seed(8)
sns.set()
# ===========================================================================
# Helper functions
# ===========================================================================
def minimize(loss_func,
             params,
             verbose=False,
             print_params=True,
             learning_rate=0.1,
             epochs=500):
    """Minimise `loss_func` over `params` with Adam.

    Returns a history list with one entry per epoch:
    [loss, *param_values_after_that_epoch].
    """
    opt = tf.optimizers.Adam(learning_rate=learning_rate)
    benchmark = []
    history = []
    # BUG FIX: guard against ZeroDivisionError when verbose and epochs < 2.
    report_every = max(1, epochs // 2)
    for i in range(epochs):
        start_time = time.time()
        with tf.GradientTape() as tape:
            tape.watch(params)
            loss = tf.reduce_mean(loss_func())
        grad = tape.gradient(loss, params)
        benchmark.append(time.time() - start_time)
        if verbose and (i + 1) % report_every == 0:
            print("#%-4d Loss:%.4f (%.2f sec/100)" %
                  (i + 1, loss, np.mean(benchmark) * 100))
            if print_params:
                for p in params:
                    print(' * %s: %s' % (p.name, str(p.numpy())))
        history.append([loss.numpy()] + [p.numpy() for p in params])
        opt.apply_gradients(grads_and_vars=zip(grad, params))
    return history
def create_posterior():
    """Trainable Normal posterior with loc=0, scale=1 initialisation."""
    return tfp.distributions.Normal(
        loc=tf.Variable(0., dtype='float32', trainable=True, name='loc'),
        scale=tf.Variable(1., dtype='float32', trainable=True, name='scale'),
        name='Normal')
# NOTE: it is important to spread the locs wide enough to prevent mode
# collapse; however, the scale must be small enough not to explode gradients.
def create_mixture_posterior(n, loc_min=0, loc_max=100):
    """Trainable n-component equal-weight Normal mixture; locs spread
    linearly over [loc_min, loc_max], scales initialised to 1."""
    return tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(probs=[1. / n] * n),
        components_distribution=tfp.distributions.Normal(
            loc=tf.Variable(
                np.linspace(loc_min, loc_max, n),
                dtype='float32', trainable=True, name='loc'),
            scale=tf.Variable(
                [1.] * n, dtype='float32', trainable=True, name='scale')))
def plot_posteriors(posterior, prior, n=1000):
    # this is a very hard-coded function: `posterior` is a list of
    # (distribution, analytic, reverse, sample_shape) tuples as built below.
    plt.figure(figsize=(12, 8))
    sns.kdeplot(prior.sample(int(n)).numpy(), label="Prior")
    for post, analytic, reverse, sample_shape in posterior:
        # solid line = reverse KL(q||p), dashed = forward KL(p||q);
        # 'A' = analytic KL, 'S' = sampled (MCMC) estimate.
        sns.kdeplot(post.sample(int(n)).numpy(),
                    linestyle='-' if reverse else '--',
                    label='%s-%s mcmc:%d' % ('KL(q||p)' if reverse else 'KL(p||q)',
                                             'A' if analytic else 'S', sample_shape))
def plot_histories(posterior, histories):
    """One subplot per posterior: loc/scale trajectories plus loss (right axis)."""
    plt.figure(figsize=(24, 5))
    for idx, (post, analytic, reverse, sample_shape) in enumerate(posterior):
        ax = plt.subplot(1, len(posterior), idx + 1)
        hist = histories[idx]
        name = '%s-%s mcmc:%d' % \
            ('KL(q||p)' if reverse else 'KL(p||q)', 'A' if analytic else 'S', sample_shape)
        # history rows are [loss, loc, scale] (see minimize()).
        loc = np.asarray([i[1] for i in hist])
        plt.plot(loc, label='loc', linestyle='-' if reverse else '--')
        scale = np.asarray([i[2] for i in hist])
        plt.plot(scale, label='scale', linestyle='-' if reverse else '--')
        plt.legend()
        ax = ax.twinx()
        plt.plot([i[0] for i in hist], label='loss', color='r')
        plt.title(name)
    plt.tight_layout()
# ===========================================================================
# Can deep network fix posterior mode collapse due to loc initialization
# * Appropriate learning rate is essential
# * High amount of components help, but not too high
# * Too deep network will make overfitting to the first components.
# * If input features are useless, deep network cannot help
# * maximum likelihood might end up with more modes
# ===========================================================================
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[1.0 / 3] * 3),
    components_distribution=tfp.distributions.Normal(loc=[0, 25, 80],
                                                     scale=[1, 12, 4]))
n_components = 3
# Fixed network input; the linspace assignment overrides the zeros.
X = np.zeros(shape=(1, n_components)).astype('float32')
X = np.linspace(0, 80, num=n_components, dtype='float32')[None, :]
# X = np.random.rand(1, 3).astype('float32')
outputs = {}
for reverse in (True, False):
    # Posterior locs come from a small MLP; scales are free variables.
    loc = keras.Sequential([
        keras.layers.Dense(16, activation='relu', input_shape=(n_components,)),
        keras.layers.Dense(n_components,
                           activation='linear',
                           input_shape=(n_components,)),
    ])
    scale = tf.Variable([1.] * n_components,
                        dtype='float32',
                        trainable=True,
                        name='scale')
    history = minimize(lambda: kl_divergence(tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(
            probs=[1. / n_components] * n_components),
        components_distribution=tfp.distributions.Normal(loc=loc(X), scale=scale
                                                         )),
                                             prior,
                                             reverse=reverse,
                                             q_sample=100),
                       params=loc.trainable_variables + [scale],
                       verbose=True,
                       print_params=False,
                       learning_rate=0.01,
                       epochs=1200)
    posterior = tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(
            probs=[1. / n_components] * n_components),
        components_distribution=tfp.distributions.Normal(loc=loc(X), scale=scale))
    outputs[reverse] = [posterior, history]
# Compare the prior against both fitted posteriors, then plot the losses.
plt.figure(figsize=(18, 8))
plt.subplot(1, 2, 1)
sns.kdeplot(prior.sample(10000).numpy(), label='Prior')
sns.kdeplot(outputs[True][0].sample(10000).numpy().ravel(),
            label='Posterior-KL(q||p)')
sns.kdeplot(outputs[False][0].sample(10000).numpy().ravel(),
            label='Posterior-KL(p||q)',
            linestyle='--')
plt.legend()
ax = plt.subplot(1, 2, 2)
l1 = plt.plot([i[0] for i in outputs[True][1]], label='KL(q||p)')
ax.twinx()
l2 = plt.plot([i[0] for i in outputs[False][1]],
              label='KL(p||q)',
              linestyle='--')
plt.title("KL loss")
plt.legend(handles=[l1[0], l2[0]])
# ===========================================================================
# Mixture with Mixture Posterior
# ===========================================================================
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[1.0 / 3] * 3),
    components_distribution=tfp.distributions.Normal(loc=[0, 32, 80],
                                                     scale=[1, 12, 4]))
for n in [2, 3, 5]:
    # tuples are (posterior, analytic, reverse, nmcmc)
    posterior = [
        (create_mixture_posterior(n=n), False, True, 1),
        (create_mixture_posterior(n=n), False, False, 1),
        (create_mixture_posterior(n=n), False, True, 100),
        (create_mixture_posterior(n=n), False, False, 100),
    ]
    histories = []
    for post, analytic, reverse, sample_shape in posterior:
        print("Training:", analytic, reverse, sample_shape)
        h = minimize(lambda: kl_divergence(
            q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape), [
                post.components_distribution.loc, post.components_distribution.scale
            ],
                     verbose=False)
        histories.append(h)
    # for more complicated distribution, need more samples
    plot_posteriors(posterior, prior, n=10000)
    plt.title("Prior:3-mixture Posterior:%d-mixture" % n)
    plot_histories(posterior, histories)
vs.plot_save()
# NOTE(review): exit() here makes every experiment below unreachable.
exit()
# ===========================================================================
# Mixture with Normal Posterior (unreachable: see exit() above)
# ===========================================================================
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[0.5, 0.5]),
    components_distribution=tfp.distributions.Normal(loc=[2, 20], scale=[1, 4]))
posterior = [
    (create_posterior(), False, True, 1),  # analytic, reverse, nmcmc
    (create_posterior(), False, False, 1),
    (create_posterior(), False, True, 100),
    (create_posterior(), False, False, 100),
]
histories = []
for post, analytic, reverse, sample_shape in posterior:
    print("Training:", analytic, reverse, sample_shape)
    h = minimize(lambda: kl_divergence(
        q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape),
                 [post.loc, post.scale],
                 verbose=False)
    histories.append(h)
plot_posteriors(posterior, prior)
plt.title("Prior:2-mixture Posterior:Normal")
plot_histories(posterior, histories)
# ===========================================================================
# Simple distribution (unreachable: see exit() above)
# ===========================================================================
prior = tfp.distributions.Normal(loc=8, scale=12)
posterior = [
    (create_posterior(), True, True, 1),  # analytic, reverse, nmcmc
    (create_posterior(), True, False, 1),
    (create_posterior(), False, True, 1),
    (create_posterior(), False, True, 100),
    (create_posterior(), False, False, 1),
    (create_posterior(), False, False, 100)
]
histories = []
for post, analytic, reverse, sample_shape in posterior:
    print("Training:", analytic, reverse, sample_shape)
    h = minimize(lambda: kl_divergence(
        q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape),
                 [post.loc, post.scale],
                 verbose=False)
    histories.append(h)
plot_posteriors(posterior, prior)
plt.title("Prior:Normal Posterior:Normal")
plot_histories(posterior, histories)
|
nilq/baby-python
|
python
|
"""Script to convert MultiWOZ 2.2 from SGD format to MultiWOZ format."""
import glob
import json
import os
from absl import app
from absl import flags
from absl import logging
FLAGS = flags.FLAGS
flags.DEFINE_string("multiwoz21_data_dir", None,
                    "Path of the MultiWOZ 2.1 dataset.")
flags.DEFINE_string("output_file", None, "Output file path in MultiWOZ format.")
# Book slots that MultiWOZ 2.2 does not track; format_states leaves them alone.
_UNTRACKED_SLOTS = frozenset({
    "taxi-bookphone", "train-booktrainid", "taxi-booktype",
    "restaurant-bookreference", "hospital-bookreference", "hotel-bookreference",
    "train-bookreference", "hospital-booktime"
})
# Directory containing this script (and the SGD-format dialogue files).
_DIR_PATH = os.path.dirname(os.path.abspath(__file__))
flags.mark_flags_as_required(["multiwoz21_data_dir", "output_file"])
def get_slot_name(slot_name, service_name, in_book_field=False):
    """Get the slot name that is consistent with the schema file.

    Book-field slots are prefixed with "book" (except "department" and
    "name"), then joined with the service name and lowercased, e.g.
    ("day", "Hotel", True) -> "hotel-bookday".
    """
    if in_book_field and slot_name not in ("department", "name"):
        slot_name = "book" + slot_name
    return "-".join([service_name, slot_name]).lower()
def format_states(groundtruth_states, states_to_correct):
    """Correct the dialogue states in place.

    groundtruth_states: mapping "domain-slot" -> value list (MultiWOZ 2.2).
    states_to_correct: a MultiWOZ 2.1 "metadata" dict, modified in place;
    tracked slots receive the ground-truth value, other tracked slots are
    emptied, and slots in _UNTRACKED_SLOTS are left untouched.
    """
    for domain_name, domain_state in states_to_correct.items():
        # "book" section: list values hold booked-item dicts,
        # string values are plain booking slots.
        for key, value in domain_state["book"].items():
            if isinstance(value, list):
                for booked_item in value:
                    for slot in booked_item:
                        schema_name = get_slot_name(slot, domain_name,
                                                    in_book_field=True)
                        if schema_name in _UNTRACKED_SLOTS:
                            continue
                        booked_item[slot] = groundtruth_states.get(schema_name, [])
            if isinstance(value, str):
                schema_name = get_slot_name(key, domain_name, in_book_field=True)
                if schema_name in _UNTRACKED_SLOTS:
                    continue
                domain_state["book"][key] = groundtruth_states.get(schema_name, [])
        # "semi" section: every slot here is tracked.
        for slot in domain_state["semi"]:
            schema_name = get_slot_name(slot, domain_name)
            domain_state["semi"][slot] = groundtruth_states.get(schema_name, [])
def main(argv):
    """Merge MultiWOZ 2.2 corrections back into the MultiWOZ 2.1 data.json."""
    # Load the original MultiWOZ 2.1 corpus.
    data_path = os.path.join(FLAGS.multiwoz21_data_dir, "data.json")
    with open(data_path, "r") as f:
        multiwoz_data = json.load(f)
    # Load all SGD-format dialogue files shipped next to this script.
    file_pattern = os.path.join(_DIR_PATH, "*/dialogues_*.json")
    files = glob.glob(file_pattern)
    clean_data = {}
    for file_name in files:
        with open(file_name, "r") as f:
            dialogues = json.load(f)
            for dialogue in dialogues:
                clean_data[dialogue["dialogue_id"]] = dialogue
    # Load action file.
    action_file = os.path.join(_DIR_PATH, "dialog_acts.json")
    with open(action_file, "r") as f:
        action_data = json.load(f)
    dialogue_ids = list(multiwoz_data.keys())
    for dialogue_id in dialogue_ids:
        dialogue = multiwoz_data[dialogue_id]["log"]
        # Drop dialogues that were removed in MultiWOZ 2.2.
        if dialogue_id not in clean_data:
            logging.info("Dialogue %s doesn't exist in MultiWOZ 2.2.", dialogue_id)
            del multiwoz_data[dialogue_id]
            continue
        clean_dialogue = clean_data[dialogue_id]
        for i, turn in enumerate(dialogue):
            # Update the utterance.
            turn["text"] = clean_dialogue["turns"][i]["utterance"]
            dialog_act = {}
            span_info = []
            if str(i) in action_data[dialogue_id]:
                dialog_act = action_data[dialogue_id][str(i)]["dialog_act"]
                span_info = action_data[dialogue_id][str(i)]["span_info"]
            turn["dialog_act"] = dialog_act
            turn["span_info"] = span_info
            # Skip user turns because states are written in the system turns.
            if i % 2 == 0:
                continue
            # Collect corrected slot values from the preceding user turn's frames.
            clean_states = {}
            for frame in clean_dialogue["turns"][i - 1]["frames"]:
                clean_states.update(frame["state"]["slot_values"])
            format_states(clean_states, turn["metadata"])
    with open(FLAGS.output_file, "w") as f:
        json.dump(multiwoz_data, f, indent=2, separators=(",", ": "), sort_keys=True)
    logging.info("Finish writing %d dialogues", len(multiwoz_data))

if __name__ == "__main__":
    app.run(main)
|
nilq/baby-python
|
python
|
#SENSOR_DATA_TRANSFER
# Reads lines from an Arduino on /dev/ttyACM0 and forwards them to a single
# TCP client.
import socket
import serial
host = "192.168.137.54"  # address to bind the server socket to
port = 50007
import time
mySocket = socket.socket()
mySocket.bind((host,port))
mySocket.listen(1)
# Block until one client connects; all data goes to this connection.
conn, addr = mySocket.accept()
print ("Connection from: " + str(addr))
aD=serial.Serial('/dev/ttyACM0',9600)
while True:
    # Busy-wait until the Arduino has sent something.
    while (aD.inWaiting()==0):
        pass
    try:
        astring=str(aD.readline())
        #astring=str(aD.readline())
        # Strip the "b'" prefix and the trailing "\r\n'" of the bytes repr.
        astring=astring[2:]
        astring=astring[:-5]
        '''data = conn.recv(1024).decode()
        if not data:
            break
        print ("from connected user: " + str(data))'''
        #data = str(data).upper()
        #print ("sending: " + str(data))
        conn.send(astring.encode())
        time.sleep(0.09)
    except:
        # NOTE(review): bare except hides serial/socket errors -- consider
        # narrowing to (serial.SerialException, OSError).
        pass
conn.close()
|
nilq/baby-python
|
python
|
import time
from typing import Any, Union
from copy import deepcopy
import biorbd_casadi as biorbd
import numpy as np
from scipy import interpolate as sci_interp
from scipy.integrate import solve_ivp
from casadi import vertcat, DM, Function
from matplotlib import pyplot as plt
from ..dynamics.ode_solver import OdeSolver
from ..limits.path_conditions import InitialGuess, InitialGuessList
from ..misc.enums import ControlType, CostType, Shooting, InterpolationType, Solver
from ..misc.utils import check_version
from ..optimization.non_linear_program import NonLinearProgram
from ..optimization.optimization_variable import OptimizationVariableList, OptimizationVariable
class Solution:
"""
Data manipulation, graphing and storage
Attributes
----------
ocp: SimplifiedOCP
The OCP simplified
ns: list
The number of shooting point for each phase
is_interpolated: bool
If the current structure is interpolated
is_integrated: bool
If the current structure is integrated
is_merged: bool
If the phases were merged
vector: np.ndarray
The data in the vector format
_cost: float
The value of the cost function
constraints: list
The values of the constraint
lam_g: list
The Lagrange multiplier of the constraints
lam_p: list
The Lagrange multiplier of the parameters
lam_x: list
The Lagrange multiplier of the states and controls
inf_pr: list
The unscaled constraint violation at each iteration
inf_du: list
The scaled dual infeasibility at each iteration
solver_time_to_optimize: float
The total time to solve the program
iterations: int
The number of iterations that were required to solve the program
status: int
Optimization success status (Ipopt: 0=Succeeded, 1=Failed)
_states: list
The data structure that holds the states
_controls: list
The data structure that holds the controls
parameters: dict
The data structure that holds the parameters
phase_time: list
The total time for each phases
Methods
-------
copy(self, skip_data: bool = False) -> Any
Create a deepcopy of the Solution
@property
states(self) -> Union[list, dict]
Returns the state in list if more than one phases, otherwise it returns the only dict
@property
controls(self) -> Union[list, dict]
Returns the controls in list if more than one phases, otherwise it returns the only dict
integrate(self, shooting_type: Shooting = Shooting.MULTIPLE, keep_intermediate_points: bool = True,
merge_phases: bool = False, continuous: bool = True) -> Solution
Integrate the states
interpolate(self, n_frames: Union[int, list, tuple]) -> Solution
Interpolate the states
merge_phases(self) -> Solution
Get a data structure where all the phases are merged into one
_merge_phases(self, skip_states: bool = False, skip_controls: bool = False) -> tuple
Actually performing the phase merging
_complete_control(self)
Controls don't necessarily have dimensions that matches the states. This method aligns them
graphs(self, automatically_organize: bool, show_bounds: bool,
show_now: bool, shooting_type: Shooting)
Show the graphs of the simulation
animate(self, n_frames: int = 0, show_now: bool = True, **kwargs: Any) -> Union[None, list]
Animate the simulation
print(self, cost_type: CostType = CostType.ALL)
Print the objective functions and/or constraints to the console
"""
class SimplifiedOptimizationVariable:
    """
    Pickle-friendly snapshot of an OptimizationVariable.

    Only the plain attributes (name, index, mapping) are kept, none of
    which hold CasADi handles, which is what makes it serializable.
    """
    def __init__(self, other: OptimizationVariable):
        # Copy the three plain attributes from the full variable
        for attr in ("name", "index", "mapping"):
            setattr(self, attr, getattr(other, attr))
    def __len__(self):
        # A variable's length is the number of indices it spans
        return len(self.index)
class SimplifiedOptimizationVariableList:
    """
    Simplified version of OptimizationVariableList (compatible with pickle)
    """
    def __init__(self, other: Union[OptimizationVariableList, "Solution.SimplifiedOptimizationVariableList"]):
        """
        Parameters
        ----------
        other:
            Either a full OptimizationVariableList or an already simplified
            list (e.g. when re-wrapping the phases of a copied Solution)
        """
        self.elements = []
        if isinstance(other, Solution.SimplifiedOptimizationVariableList):
            # Already simplified: the shape was precomputed
            self.shape = other.shape
        else:
            self.shape = other.cx.shape[0]
        for elt in other:
            self.append(other[elt])
    def __getitem__(self, item):
        # Index either by position (int) or by variable name (str)
        if isinstance(item, int):
            return self.elements[item]
        elif isinstance(item, str):
            for elt in self.elements:
                if item == elt.name:
                    return elt
            raise KeyError(f"{item} is not in the list")
        else:
            raise ValueError("OptimizationVariableList can be sliced with int or str only")
    def append(self, other: OptimizationVariable):
        # Store a pickle-friendly snapshot rather than the live variable
        self.elements.append(Solution.SimplifiedOptimizationVariable(other))
    def __contains__(self, item):
        # for/else: the else branch runs only when no element matched
        for elt in self.elements:
            if item == elt.name:
                return True
        else:
            return False
    def keys(self):
        return [elt.name for elt in self]
    def __len__(self):
        return len(self.elements)
    def __iter__(self):
        # Iterating yields the variable names, mimicking dict iteration
        self._iter_idx = 0
        return self
    def __next__(self):
        self._iter_idx += 1
        if self._iter_idx > len(self):
            raise StopIteration
        return self[self._iter_idx - 1].name
class SimplifiedNLP:
    """
    A simplified version of the NonLinearProgram structure (compatible with pickle)
    Attributes
    ----------
    control_type: ControlType
        The control type for the current nlp
    dynamics: list[ODE_SOLVER]
        All the dynamics for each of the node of the phase
    g: list[list[Constraint]]
        All the constraints at each of the node of the phase
    J: list[list[Objective]]
        All the objectives at each of the node of the phase
    model: biorbd.Model
        A reference to the biorbd Model
    variable_mappings: dict
        All the BiMapping of the states and controls
    ode_solver: OdeSolverBase
        The number of finite element of the RK
    ns: int
        The number of shooting points
    """
    def __init__(self, nlp: NonLinearProgram):
        """
        Parameters
        ----------
        nlp: NonLinearProgram
            A reference to the NonLinearProgram to strip
        """
        self.phase_idx = nlp.phase_idx
        self.model = nlp.model
        # States and controls are converted to their pickle-friendly
        # counterparts; everything else is kept by reference
        self.states = Solution.SimplifiedOptimizationVariableList(nlp.states)
        self.controls = Solution.SimplifiedOptimizationVariableList(nlp.controls)
        self.dynamics = nlp.dynamics
        self.dynamics_func = nlp.dynamics_func
        self.ode_solver = nlp.ode_solver
        self.variable_mappings = nlp.variable_mappings
        self.control_type = nlp.control_type
        self.J = nlp.J
        self.J_internal = nlp.J_internal
        self.g = nlp.g
        self.g_internal = nlp.g_internal
        self.ns = nlp.ns
        self.parameters = nlp.parameters
class SimplifiedOCP:
    """
    A simplified version of the NonLinearProgram structure (compatible with pickle)
    Attributes
    ----------
    g: list
        Constraints that are not phase dependent (mostly parameters and continuity constraints)
    J: list
        Objective values that are not phase dependent (mostly parameters)
    nlp: NLP
        All the phases of the ocp
    phase_transitions: list[PhaseTransition]
        The list of transition constraint between phases
    prepare_plots: Callable
        The function to call to prepare the PlotOCP
    v: OptimizationVector
        The variable optimization holder
    """
    def __init__(self, ocp):
        """
        Parameters
        ----------
        ocp: OptimalControlProgram
            A reference to the ocp to strip
        """
        # Strip each phase into its pickle-friendly counterpart
        self.nlp = [Solution.SimplifiedNLP(nlp) for nlp in ocp.nlp]
        self.v = ocp.v
        self.J = ocp.J
        self.J_internal = ocp.J_internal
        self.g = ocp.g
        self.g_internal = ocp.g_internal
        self.phase_transitions = ocp.phase_transitions
        self.prepare_plots = ocp.prepare_plots
def __init__(self, ocp, sol: Union[dict, list, tuple, np.ndarray, DM, None]):
    """
    Parameters
    ----------
    ocp: OptimalControlProgram
        A reference to the ocp to strip
    sol: Union[dict, list, tuple, np.ndarray, DM]
        The values of a solution
    """
    self.ocp = Solution.SimplifiedOCP(ocp) if ocp else None
    # NOTE(review): if ocp is falsy, self.ocp is None and the next line raises
    # AttributeError — the `if ocp else None` guard looks ineffective; confirm
    self.ns = [nlp.ns for nlp in self.ocp.nlp]
    # Current internal state of the data
    self.is_interpolated = False
    self.is_integrated = False
    self.is_merged = False
    self.recomputed_time_steps = False
    self.vector = None
    self._cost = None
    self.constraints = None
    self.lam_g = None
    self.lam_p = None
    self.lam_x = None
    self.inf_pr = None
    self.inf_du = None
    self.solver_time_to_optimize = None
    self.real_time_to_optimize = None
    self.iterations = None
    self.status = None
    # Extract the data now for further use
    self._states, self._controls, self.parameters = {}, {}, {}
    self.phase_time = []
    def init_from_dict(_sol: dict):
        """
        Initialize all the attributes from an Ipopt-like dictionary data structure
        Parameters
        ----------
        _sol: dict
            The solution in a Ipopt-like dictionary
        """
        self.vector = _sol["x"]
        if _sol["solver"] == Solver.IPOPT:
            # Multipliers/infeasibilities are only reported by Ipopt
            self._cost = _sol["f"]
            self.constraints = _sol["g"]
            self.lam_g = _sol["lam_g"]
            self.lam_p = _sol["lam_p"]
            self.lam_x = _sol["lam_x"]
            self.inf_pr = _sol["inf_pr"]
            self.inf_du = _sol["inf_du"]
        self.solver_time_to_optimize = _sol["solver_time_to_optimize"]
        self.real_time_to_optimize = _sol["real_time_to_optimize"]
        self.iterations = _sol["iter"]
        self.status = _sol["status"]
        # Extract the data now for further use
        self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
        self._complete_control()
        self.phase_time = self.ocp.v.extract_phase_time(self.vector)
    def init_from_initial_guess(_sol: list):
        """
        Initialize all the attributes from a list of initial guesses (states, controls)
        Parameters
        ----------
        _sol: list
            The list of initial guesses
        """
        n_param = len(ocp.v.parameters_in_list)
        # Sanity checks
        for i in range(len(_sol)):  # Convert to list if necessary and copy for as many phases there are
            if isinstance(_sol[i], InitialGuess):
                tp = InitialGuessList()
                for _ in range(len(self.ns)):
                    tp.add(deepcopy(_sol[i].init), interpolation=_sol[i].init.type)
                _sol[i] = tp
        if sum([isinstance(s, InitialGuessList) for s in _sol]) != 2:
            raise ValueError(
                "solution must be a solution dict, "
                "an InitialGuess[List] of len 2 or 3 (states, controls, parameters), "
                "or a None"
            )
        # NOTE(review): `p != 3` is always true for a 2- or 3-element _sol, so the
        # parameters entry is never excluded; presumably `p != 2` was intended — confirm
        if sum([len(s) != len(self.ns) if p != 3 else False for p, s in enumerate(_sol)]) != 0:
            raise ValueError("The InitialGuessList len must match the number of phases")
        if n_param != 0:
            # NOTE(review): with `and`, a 2-element _sol evaluates len(_sol[2]) and
            # raises IndexError before reaching this message; `or` may have been intended — confirm
            if len(_sol) != 3 and len(_sol[2]) != 1 and _sol[2][0].shape != (n_param, 1):
                raise ValueError(
                    "The 3rd element is the InitialGuess of the parameter and "
                    "should be a unique vector of size equal to n_param"
                )
        self.vector = np.ndarray((0, 1))
        sol_states, sol_controls = _sol[0], _sol[1]
        for p, s in enumerate(sol_states):
            # EACH_FRAME guesses already contain the final node
            ns = self.ocp.nlp[p].ns + 1 if s.init.type != InterpolationType.EACH_FRAME else self.ocp.nlp[p].ns
            s.init.check_and_adjust_dimensions(self.ocp.nlp[p].states.shape, ns, "states")
            for i in range(self.ns[p] + 1):
                self.vector = np.concatenate((self.vector, s.init.evaluate_at(i)[:, np.newaxis]))
        for p, s in enumerate(sol_controls):
            control_type = self.ocp.nlp[p].control_type
            if control_type == ControlType.CONSTANT:
                off = 0
            elif control_type == ControlType.LINEAR_CONTINUOUS:
                # Linear controls also carry a value at the final node
                off = 1
            else:
                raise NotImplementedError(f"control_type {control_type} is not implemented in Solution")
            s.init.check_and_adjust_dimensions(self.ocp.nlp[p].controls.shape, self.ns[p], "controls")
            for i in range(self.ns[p] + off):
                self.vector = np.concatenate((self.vector, s.init.evaluate_at(i)[:, np.newaxis]))
        if n_param:
            sol_params = _sol[2]
            for p, s in enumerate(sol_params):
                self.vector = np.concatenate((self.vector, np.repeat(s.init, self.ns[p] + 1)[:, np.newaxis]))
        self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
        self._complete_control()
        self.phase_time = self.ocp.v.extract_phase_time(self.vector)
    def init_from_vector(_sol: Union[np.ndarray, DM]):
        """
        Initialize all the attributes from a vector of solution
        Parameters
        ----------
        _sol: Union[np.ndarray, DM]
            The solution in vector format
        """
        self.vector = _sol
        self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
        self._complete_control()
        self.phase_time = self.ocp.v.extract_phase_time(self.vector)
    # Dispatch on the type of the provided solution
    if isinstance(sol, dict):
        init_from_dict(sol)
    elif isinstance(sol, (list, tuple)) and len(sol) in (2, 3):
        init_from_initial_guess(sol)
    elif isinstance(sol, (np.ndarray, DM)):
        init_from_vector(sol)
    elif sol is None:
        # Empty shell (used by copy()); data attributes keep their defaults
        self.ns = []
    else:
        raise ValueError("Solution called with unknown initializer")
@property
def cost(self):
    """
    Total weighted cost of the solution, computed lazily.

    The value is cached in self._cost after the first evaluation.
    """
    if self._cost is not None:
        return self._cost
    total = 0
    # Phase-independent objectives (mostly parameters)
    for objective in self.ocp.J:
        total += self._get_penalty_cost(None, objective)[1]
    # Phase-specific objectives
    for nlp in self.ocp.nlp:
        for objective in nlp.J:
            total += self._get_penalty_cost(nlp, objective)[1]
    self._cost = total
    return self._cost
def copy(self, skip_data: bool = False) -> Any:
    """
    Create a deepcopy of the Solution
    Parameters
    ----------
    skip_data: bool
        If data should be ignored in the copy
    Returns
    -------
    Return a Solution data structure
    """
    # Solution(ocp, None) builds an empty shell with default flags
    new = Solution(self.ocp, None)
    new.vector = deepcopy(self.vector)
    new._cost = deepcopy(self._cost)
    new.constraints = deepcopy(self.constraints)
    new.lam_g = deepcopy(self.lam_g)
    new.lam_p = deepcopy(self.lam_p)
    new.lam_x = deepcopy(self.lam_x)
    new.inf_pr = deepcopy(self.inf_pr)
    new.inf_du = deepcopy(self.inf_du)
    new.solver_time_to_optimize = deepcopy(self.solver_time_to_optimize)
    new.real_time_to_optimize = deepcopy(self.real_time_to_optimize)
    new.iterations = deepcopy(self.iterations)
    # Bug fix: status and recomputed_time_steps were previously not copied, so
    # the constructor's defaults (None / False) silently replaced them; in
    # particular interpolate() reads recomputed_time_steps on copied solutions.
    new.status = deepcopy(self.status)
    new.recomputed_time_steps = deepcopy(self.recomputed_time_steps)
    new.is_interpolated = deepcopy(self.is_interpolated)
    new.is_integrated = deepcopy(self.is_integrated)
    new.is_merged = deepcopy(self.is_merged)
    new.phase_time = deepcopy(self.phase_time)
    new.ns = deepcopy(self.ns)
    if skip_data:
        new._states, new._controls, new.parameters = [], [], {}
    else:
        new._states = deepcopy(self._states)
        new._controls = deepcopy(self._controls)
        new.parameters = deepcopy(self.parameters)
    return new
@property
def states(self) -> Union[list, dict]:
    """
    Returns the state in list if more than one phases, otherwise it returns the only dict
    Returns
    -------
    The states data
    """
    if len(self._states) == 1:
        # Single phase: unwrap the list and return the dict directly
        return self._states[0]
    return self._states
@property
def controls(self) -> Union[list, dict]:
    """
    Returns the controls in list if more than one phases, otherwise it returns the only dict
    Returns
    -------
    The controls data
    """
    if not self._controls:
        # Controls are stripped by integrate()/interpolate()
        raise RuntimeError(
            "There is no controls in the solution. This may happen in "
            "previously integrated and interpolated structure"
        )
    return self._controls if len(self._controls) != 1 else self._controls[0]
def integrate(
    self,
    shooting_type: Shooting = Shooting.SINGLE_CONTINUOUS,
    keep_intermediate_points: bool = False,
    merge_phases: bool = False,
    continuous: bool = True,
    use_scipy_integrator: bool = False,
) -> Any:
    """
    Integrate the states
    Parameters
    ----------
    shooting_type: Shooting
        Which type of integration
    keep_intermediate_points: bool
        If the integration should returns the intermediate values of the integration [False]
        or only keep the node [True] effective keeping the initial size of the states
    merge_phases: bool
        If the phase should be merged in a unique phase
    continuous: bool
        If the arrival value of a node should be discarded [True] or kept [False]. The value of an integrated
        arrival node and the beginning of the next one are expected to be almost equal when the problem converged
    use_scipy_integrator: bool
        Ignore the dynamics defined by OCP and use an separate integrator provided by scipy
    Returns
    -------
    A Solution data structure with the states integrated. The controls are removed from this structure
    """
    # Sanity check
    if self.is_integrated:
        raise RuntimeError("Cannot integrate twice")
    if self.is_interpolated:
        raise RuntimeError("Cannot integrate after interpolating")
    if self.is_merged:
        raise RuntimeError("Cannot integrate after merging phases")
    if shooting_type == Shooting.MULTIPLE and not keep_intermediate_points:
        raise ValueError(
            "Shooting.MULTIPLE and keep_intermediate_points=False cannot be used simultaneously "
            "since it would do nothing"
        )
    if shooting_type == Shooting.SINGLE_CONTINUOUS and not continuous:
        raise ValueError(
            "Shooting.SINGLE_CONTINUOUS and continuous=False cannot be used simultaneously it is a contradiction"
        )
    out = self.__perform_integration(shooting_type, keep_intermediate_points, continuous, use_scipy_integrator)
    if merge_phases:
        if continuous:
            # Continuous data can simply be re-interpolated onto one common
            # time grid spanning every phase
            out = out.interpolate(sum(out.ns) + 1)
        else:
            # Otherwise concatenate the phases, keeping each arrival node
            out._states, _, out.phase_time, out.ns = out._merge_phases(skip_controls=True, continuous=continuous)
            out.is_merged = True
    out.is_integrated = True
    return out
def __perform_integration(
    self, shooting_type: Shooting, keep_intermediate_points: bool, continuous: bool, use_scipy_integrator: bool
):
    """
    Actually perform the integration of the states (see integrate() for the
    meaning of the parameters).
    Returns
    -------
    A copied Solution whose _states hold the re-integrated trajectories;
    controls are not copied into the result
    """
    # Direct collocation dynamics cannot be re-integrated natively in every mode
    n_direct_collocation = sum([nlp.ode_solver.is_direct_collocation for nlp in self.ocp.nlp])
    if n_direct_collocation > 0 and not use_scipy_integrator:
        if continuous:
            raise RuntimeError(
                "Integration with direct collocation must be not continuous if not use_scipy_integrator"
            )
        if shooting_type != Shooting.MULTIPLE:
            raise RuntimeError(
                "Integration with direct collocation must using shooting_type=Shooting.MULTIPLE "
                "if not use_scipy_integrator"
            )
    # Copy the data
    out = self.copy(skip_data=True)
    out.recomputed_time_steps = use_scipy_integrator
    out._states = []
    for _ in range(len(self._states)):
        out._states.append({})
    params = self.parameters["all"]
    x0 = self._states[0]["all"][:, 0]
    for p, nlp in enumerate(self.ocp.nlp):
        param_scaling = nlp.parameters.scaling
        n_states = self._states[p]["all"].shape[0]
        n_steps = nlp.ode_solver.steps_scipy if use_scipy_integrator else nlp.ode_solver.steps
        if not continuous:
            # One extra column per interval to keep the arrival node
            n_steps += 1
        if keep_intermediate_points:
            out.ns[p] *= n_steps
        out._states[p]["all"] = np.ndarray((n_states, out.ns[p] + 1))
        # Get the first frame of the phase
        if shooting_type == Shooting.SINGLE_CONTINUOUS:
            if p != 0:
                # Apply the inter-phase transition map to the carried-over state
                u0 = self._controls[p - 1]["all"][:, -1]
                val = self.ocp.phase_transitions[p - 1].function(vertcat(x0, x0), vertcat(u0, u0), params)
                if val.shape[0] != x0.shape[0]:
                    raise RuntimeError(
                        f"Phase transition must have the same number of states ({val.shape[0]}) "
                        f"when integrating with Shooting.SINGLE_CONTINUOUS. If it is not possible, "
                        f"please integrate with Shooting.SINGLE"
                    )
                x0 += np.array(val)[:, 0]
        else:
            col = slice(0, n_steps) if nlp.ode_solver.is_direct_collocation and not use_scipy_integrator else 0
            x0 = self._states[p]["all"][:, col]
        for s in range(self.ns[p]):
            if nlp.control_type == ControlType.CONSTANT:
                u = self._controls[p]["all"][:, s]
            elif nlp.control_type == ControlType.LINEAR_CONTINUOUS:
                u = self._controls[p]["all"][:, s : s + 2]
            else:
                raise NotImplementedError(f"ControlType {nlp.control_type} " f"not yet implemented in integrating")
            if use_scipy_integrator:
                # NOTE(review): dividing the cumulated phase time by nlp.ns to get
                # the interval bounds looks suspicious (t_init does not depend on s);
                # confirm against the intended time grid
                t_init = sum(out.phase_time[:p]) / nlp.ns
                t_end = sum(out.phase_time[: (p + 2)]) / nlp.ns
                n_points = n_steps + 1 if continuous else n_steps
                t_eval = np.linspace(t_init, t_end, n_points) if keep_intermediate_points else [t_init, t_end]
                integrated = solve_ivp(
                    lambda t, x: np.array(nlp.dynamics_func(x, u, params))[:, 0], [t_init, t_end], x0, t_eval=t_eval
                ).y
                next_state_col = (
                    (s + 1) * (nlp.ode_solver.steps + 1) if nlp.ode_solver.is_direct_collocation else s + 1
                )
                cols_in_out = [s * n_steps, (s + 1) * n_steps] if keep_intermediate_points else [s, s + 2]
            else:
                if nlp.ode_solver.is_direct_collocation:
                    # Collocation already stores the intermediate points: reuse them
                    if keep_intermediate_points:
                        integrated = x0  # That is only for continuous=False
                        cols_in_out = [s * n_steps, (s + 1) * n_steps]
                    else:
                        integrated = x0[:, [0, -1]]
                        cols_in_out = [s, s + 2]
                    next_state_col = slice((s + 1) * n_steps, (s + 2) * n_steps)
                else:
                    # Re-run the CasADi integrator of the interval
                    if keep_intermediate_points:
                        integrated = np.array(nlp.dynamics[s](x0=x0, p=u, params=params / param_scaling)["xall"])
                        cols_in_out = [s * n_steps, (s + 1) * n_steps]
                    else:
                        integrated = np.concatenate(
                            (x0[:, np.newaxis], nlp.dynamics[s](x0=x0, p=u, params=params / param_scaling)["xf"]),
                            axis=1,
                        )
                        cols_in_out = [s, s + 2]
                    next_state_col = s + 1
            cols_in_out = slice(
                cols_in_out[0], cols_in_out[1] + 1 if continuous and keep_intermediate_points else cols_in_out[1]
            )
            out._states[p]["all"][:, cols_in_out] = integrated
            # MULTIPLE shooting restarts each interval from the optimized state,
            # otherwise the integration is chained from the integrated arrival
            x0 = (
                np.array(self._states[p]["all"][:, next_state_col])
                if shooting_type == Shooting.MULTIPLE
                else integrated[:, -1]
            )
        if not continuous:
            # Overwrite the last node with the optimized (non-integrated) value
            out._states[p]["all"][:, -1] = self._states[p]["all"][:, -1]
        # Dispatch the integrated values to all the keys
        for key in nlp.states:
            out._states[p][key] = out._states[p]["all"][nlp.states[key].index, :]
    return out
def interpolate(self, n_frames: Union[int, list, tuple]) -> Any:
    """
    Interpolate the states
    Parameters
    ----------
    n_frames: Union[int, list, tuple]
        If the value is an int, the Solution returns merges the phases,
        otherwise, it interpolates them independently
    Returns
    -------
    A Solution data structure with the states integrated. The controls are removed from this structure
    """
    out = self.copy(skip_data=True)
    # Build the time vector of each phase (collocation nodes are not equally spaced)
    t_all = []
    for p, data in enumerate(self._states):
        nlp = self.ocp.nlp[p]
        if nlp.ode_solver.is_direct_collocation and not self.recomputed_time_steps:
            time_offset = sum(out.phase_time[: p + 1])
            step_time = np.array(nlp.dynamics[0].step_time)
            dt = out.phase_time[p + 1] / nlp.ns
            t_tp = np.array([step_time * dt + s * dt + time_offset for s in range(nlp.ns)]).reshape(-1, 1)
            # Duplicate the last sample so the vector covers the final node
            t_all.append(np.concatenate((t_tp, [[t_tp[-1, 0]]]))[:, 0])
        else:
            t_all.append(np.linspace(sum(out.phase_time[: p + 1]), sum(out.phase_time[: p + 2]), out.ns[p] + 1))
    if isinstance(n_frames, int):
        # A single int means all the phases are merged onto one time grid
        data_states, _, out.phase_time, out.ns = self._merge_phases(skip_controls=True)
        t_all = [np.concatenate((np.concatenate([_t[:-1] for _t in t_all]), [t_all[-1][-1]]))]
        n_frames = [n_frames]
        out.is_merged = True
    elif isinstance(n_frames, (list, tuple)) and len(n_frames) == len(self._states):
        data_states = self._states
    else:
        raise ValueError(
            "n_frames should either be a int to merge_phases phases "
            "or a list of int of the number of phases dimension"
        )
    out._states = []
    for _ in range(len(data_states)):
        out._states.append({})
    for p in range(len(data_states)):
        x_phase = data_states[p]["all"]
        n_elements = x_phase.shape[0]
        t_phase = t_all[p]
        # Duplicated time samples (phase boundaries) must be removed before splining
        t_phase, time_index = np.unique(t_phase, return_index=True)
        t_int = np.linspace(t_phase[0], t_phase[-1], n_frames[p])
        x_interpolate = np.ndarray((n_elements, n_frames[p]))
        for j in range(n_elements):
            # Linear (k=1) spline through the unique time samples
            s = sci_interp.splrep(t_phase, x_phase[j, time_index], k=1)
            x_interpolate[j, :] = sci_interp.splev(t_int, s)
        out._states[p]["all"] = x_interpolate
        # Redistribute the rows of "all" back to the individual state keys
        offset = 0
        for key in data_states[p]:
            if key == "all":
                continue
            n_elements = data_states[p][key].shape[0]
            out._states[p][key] = out._states[p]["all"][offset : offset + n_elements]
            offset += n_elements
    out.is_interpolated = True
    return out
def merge_phases(self) -> Any:
    """
    Get a data structure where all the phases are merged into one
    Returns
    -------
    The new data structure with the phases merged
    """
    merged = self.copy(skip_data=True)
    # Parameters are phase-independent and can be copied verbatim
    merged.parameters = deepcopy(self.parameters)
    merged._states, merged._controls, merged.phase_time, merged.ns = self._merge_phases()
    merged.is_merged = True
    return merged
def _merge_phases(self, skip_states: bool = False, skip_controls: bool = False, continuous: bool = True) -> tuple:
    """
    Actually performing the phase merging
    Parameters
    ----------
    skip_states: bool
        If the merge should ignore the states
    skip_controls: bool
        If the merge should ignore the controls
    continuous: bool
        If the last frame of each phase should be kept [False] or discard [True]
    Returns
    -------
    A tuple containing the new states, new controls, the recalculated phase time
    and the new number of shooting points
    """
    if self.is_merged:
        # Already merged: hand back copies of the current data unchanged
        return deepcopy(self._states), deepcopy(self._controls), deepcopy(self.phase_time), deepcopy(self.ns)
    def _merge(data: list, is_control: bool) -> Union[list, dict]:
        """
        Merge the phases of a states or controls data structure
        Parameters
        ----------
        data: list
            The data to structure to merge the phases
        is_control: bool
            If the current data is a control
        Returns
        -------
        The data merged
        """
        if isinstance(data, dict):
            return data
        # Sanity check (all phases must contain the same keys with the same dimensions)
        keys = data[0].keys()
        sizes = [data[0][d].shape[0] for d in data[0]]
        for d in data:
            if d.keys() != keys or [d[key].shape[0] for key in d] != sizes:
                raise RuntimeError("Program dimension must be coherent across phases to merge_phases them")
        data_out = [{}]
        for i, key in enumerate(keys):
            data_out[0][key] = np.ndarray((sizes[i], 0))
        # add == 1 keeps each phase's arrival node (continuous=False, states only)
        add = 0 if is_control or continuous else 1
        for p in range(len(data)):
            d = data[p]
            for key in d:
                if self.ocp.nlp[p].ode_solver.is_direct_collocation and not is_control:
                    # Collocation stores (steps + 1) columns per shooting interval
                    steps = self.ocp.nlp[p].ode_solver.steps + 1
                    data_out[0][key] = np.concatenate(
                        (data_out[0][key], d[key][:, : self.ns[p] * steps + add]), axis=1
                    )
                else:
                    data_out[0][key] = np.concatenate((data_out[0][key], d[key][:, : self.ns[p] + add]), axis=1)
        if add == 0:
            # Append the final node of the last phase, dropped by the slicing above
            for key in data[-1]:
                data_out[0][key] = np.concatenate((data_out[0][key], data[-1][key][:, -1][:, np.newaxis]), axis=1)
        return data_out
    if len(self._states) == 1:
        out_states = deepcopy(self._states)
    else:
        out_states = _merge(self.states, is_control=False) if not skip_states and self._states else None
    if len(self._controls) == 1:
        out_controls = deepcopy(self._controls)
    else:
        out_controls = _merge(self.controls, is_control=True) if not skip_controls and self._controls else None
    # The merged phase spans the total duration of all the original phases
    phase_time = [0] + [sum([self.phase_time[i + 1] for i in range(len(self.phase_time) - 1)])]
    ns = [sum(self.ns)]
    return out_states, out_controls, phase_time, ns
def _complete_control(self):
    """
    Controls don't necessarily have dimensions that matches the states. This method aligns them
    """
    for p, nlp in enumerate(self.ocp.nlp):
        if nlp.control_type == ControlType.LINEAR_CONTINUOUS:
            # Linear controls already carry ns + 1 columns, nothing to do
            continue
        if nlp.control_type == ControlType.CONSTANT:
            # Pad each control with a trailing NaN column so controls and
            # states share the same number of columns (ns + 1)
            for key in self._controls[p]:
                n_rows = self._controls[p][key].shape[0]
                padding = np.nan * np.zeros((n_rows, 1))
                self._controls[p][key] = np.concatenate((self._controls[p][key], padding), axis=1)
        else:
            raise NotImplementedError(f"ControlType {nlp.control_type} is not implemented in _complete_control")
def graphs(
    self,
    automatically_organize: bool = True,
    show_bounds: bool = False,
    show_now: bool = True,
    shooting_type: Shooting = Shooting.MULTIPLE,
    use_scipy_integrator: bool = False,
):
    """
    Show the graphs of the simulation
    Parameters
    ----------
    automatically_organize: bool
        If the figures should be spread on the screen automatically
    show_bounds: bool
        If the plot should adapt to bounds (True) or to data (False)
    show_now: bool
        If the show method should be called. This is blocking
    shooting_type: Shooting
        The type of interpolation
    use_scipy_integrator: bool
        Use the scipy solve_ivp integrator for RungeKutta 45 instead of currently defined integrator
    """
    if self.is_merged or self.is_interpolated or self.is_integrated:
        raise NotImplementedError("It is not possible to graph a modified Solution yet")
    # Build the plotter from the (simplified) ocp and feed it the raw solution vector
    plot_ocp = self.ocp.prepare_plots(automatically_organize, show_bounds, shooting_type, use_scipy_integrator)
    plot_ocp.update_data(self.vector)
    if show_now:
        # plt.show() blocks until every figure window is closed
        plt.show()
def animate(
    self, n_frames: int = 0, shooting_type: Shooting = None, show_now: bool = True, **kwargs: Any
) -> Union[None, list]:
    """
    Animate the simulation
    Parameters
    ----------
    n_frames: int
        The number of frames to interpolate to. If the value is 0, the data are merged to a one phase if possible.
        If the value is -1, the data is not merge in one phase
    shooting_type: Shooting
        The Shooting type to animate
    show_now: bool
        If the bioviz exec() function should be called automatically. This is blocking method
    kwargs: Any
        Any parameters to pass to bioviz
    Returns
    -------
    A list of bioviz structures (one for each phase). So one can call exec() by hand
    """
    # bioviz is an optional dependency, imported lazily on purpose
    try:
        import bioviz
    except ModuleNotFoundError:
        raise RuntimeError("bioviz must be install to animate the model")
    check_version(bioviz, "2.1.1", "2.2.0")
    data_to_animate = self.integrate(shooting_type=shooting_type) if shooting_type else self.copy()
    if n_frames == 0:
        # Best effort: try merging the phases into a single animation
        try:
            data_to_animate = data_to_animate.interpolate(sum(self.ns))
        except RuntimeError:
            pass
    elif n_frames > 0:
        data_to_animate = data_to_animate.interpolate(n_frames)
    states = data_to_animate.states
    if not isinstance(states, (list, tuple)):
        # Single phase: states is the bare dict, normalize to a list
        states = [states]
    all_bioviz = []
    for idx_phase, data in enumerate(states):
        # Convert parameters to actual values
        nlp = self.ocp.nlp[idx_phase]
        for param in nlp.parameters:
            if param.function:
                param.function(nlp.model, self.parameters[param.name], **param.params)
        all_bioviz.append(bioviz.Viz(self.ocp.nlp[idx_phase].model.path().absolutePath().to_string(), **kwargs))
        all_bioviz[-1].load_movement(self.ocp.nlp[idx_phase].variable_mappings["q"].to_second.map(data["q"]))
    if show_now:
        # Keep refreshing every viewer until the user has closed them all (blocking)
        b_is_visible = [True] * len(all_bioviz)
        while sum(b_is_visible):
            for i, b in enumerate(all_bioviz):
                if b.vtk_window.is_active:
                    b.update()
                else:
                    b_is_visible[i] = False
        return None
    else:
        return all_bioviz
def _get_penalty_cost(self, nlp, penalty):
    """
    Evaluate a single penalty (objective or constraint) over all its nodes.
    Parameters
    ----------
    nlp:
        The (simplified) phase the penalty belongs to.
        NOTE(review): callers such as cost/print pass None for phase-independent
        penalties, but nlp.phase_idx below would then raise — presumably those
        penalty lists are empty in practice; confirm.
    penalty:
        The penalty to evaluate
    Returns
    -------
    A tuple (raw value, weighted value), each summed (NaN-ignoring) over the nodes
    """
    phase_idx = nlp.phase_idx
    # Collocation stores (steps + 1) state columns per shooting interval
    steps = nlp.ode_solver.steps + 1 if nlp.ode_solver.is_direct_collocation else 1
    val = []
    val_weighted = []
    p = self.parameters["all"]
    # dt may itself be a function of the (optimized) time parameter
    dt = (
        Function("time", [nlp.parameters.cx], [penalty.dt])(self.parameters["time"])
        if "time" in self.parameters
        else penalty.dt
    )
    for idx in penalty.node_idx:
        x = []
        u = []
        target = []
        if nlp is not None:
            if penalty.transition:
                # Transition penalties join the end of this phase to the start of the next
                phase_post = (phase_idx + 1) % len(self._states)
                x = np.concatenate((self._states[phase_idx]["all"][:, -1], self._states[phase_post]["all"][:, 0]))
                u = np.concatenate(
                    (self._controls[phase_idx]["all"][:, -1], self._controls[phase_post]["all"][:, 0])
                )
            else:
                # Integrated penalties consume every column of the interval
                col_x_idx = list(range(idx * steps, (idx + 1) * steps)) if penalty.integrate else [idx]
                col_u_idx = [idx]
                if penalty.derivative or penalty.explicit_derivative:
                    # Derivative penalties also need the following node
                    col_x_idx.append((idx + 1) * steps)
                    col_u_idx.append((idx + 1))
                x = self._states[phase_idx]["all"][:, col_x_idx]
                u = self._controls[phase_idx]["all"][:, col_u_idx]
                target = penalty.target[:, penalty.node_idx.index(idx)] if penalty.target is not None else []
        val.append(penalty.function(x, u, p))
        val_weighted.append(penalty.weighted_function(x, u, p, penalty.weight, target, dt))
    # nansum ignores the NaN padding column added by _complete_control
    val = np.nansum(val)
    val_weighted = np.nansum(val_weighted)
    return val, val_weighted
def print(self, cost_type: CostType = CostType.ALL):
    """
    Print the objective functions and/or constraints to the console
    Parameters
    ----------
    cost_type: CostType
        The type of cost to console print
    """
    # The method name shadows builtin print only as a class attribute; inside
    # these bodies, print(...) still resolves to the builtin.
    def print_penalty_list(nlp, penalties, print_only_weighted):
        # Print each penalty of the list and return the sum of the weighted values
        running_total = 0
        for penalty in penalties:
            if not penalty:
                continue
            val, val_weighted = self._get_penalty_cost(nlp, penalty)
            running_total += val_weighted
            if print_only_weighted:
                print(f"{penalty.name}: {val_weighted}")
            else:
                print(f"{penalty.name}: {val: .2f} (weighted {val_weighted})")
        return running_total
    def print_objective_functions(ocp):
        """
        Print the values of each objective function to the console
        """
        print(f"\n---- COST FUNCTION VALUES ----")
        running_total = print_penalty_list(None, ocp.J_internal, False)
        running_total += print_penalty_list(None, ocp.J, False)
        if running_total:
            print("")
        for nlp in ocp.nlp:
            print(f"PHASE {nlp.phase_idx}")
            running_total += print_penalty_list(nlp, nlp.J_internal, False)
            running_total += print_penalty_list(nlp, nlp.J, False)
            print("")
        print(f"Sum cost functions: {running_total}")
        print(f"------------------------------")
    def print_constraints(ocp, sol):
        """
        Print the values of each constraints with its lagrange multiplier to the console
        """
        if sol.constraints is None:
            return
        # Todo, min/mean/max
        print(f"\n--------- CONSTRAINTS ---------")
        if print_penalty_list(None, ocp.g_internal, True) + print_penalty_list(None, ocp.g, True):
            print("")
        for idx_phase, nlp in enumerate(ocp.nlp):
            print(f"PHASE {idx_phase}")
            print_penalty_list(nlp, nlp.g_internal, True)
            print_penalty_list(nlp, nlp.g, True)
            print("")
        print(f"------------------------------")
    if cost_type == CostType.OBJECTIVES:
        print_objective_functions(self.ocp)
    elif cost_type == CostType.CONSTRAINTS:
        print_constraints(self.ocp, self)
    elif cost_type == CostType.ALL:
        # ALL prints the timing header, then recurses for both categories
        print(
            f"Solver reported time: {self.solver_time_to_optimize} sec\n"
            f"Real time: {self.real_time_to_optimize} sec"
        )
        self.print(CostType.OBJECTIVES)
        self.print(CostType.CONSTRAINTS)
    else:
        raise ValueError("print can only be called with CostType.OBJECTIVES or CostType.CONSTRAINTS")
|
nilq/baby-python
|
python
|
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to
import views
# URL routes for the MIB browser (legacy Django 1.x `patterns` API).
urlpatterns = patterns('',
    # Landing page
    (r'^$', views.home),
    # A bare OID (optionally prefixed with "o/." or ".") is redirected to the
    # canonical /mibbinator/o/<oid> URL.
    # would like to avoid hardcoding mibbinator here
    (r'^(o/\.|\.?)(?P<oid>[0-9.]*)$', redirect_to, { 'url': '/mibbinator/o/%(oid)s' }),
    # Lookup by numeric OID, e.g. o/1.3.6.1
    (r'^o/(?P<oid>[0-9.]+)$', views.byoid),
    # Lookup by MIB module name
    (r'^m/(?P<module>[\w-]+)$', views.bymodule),
    # Fallback: lookup by object name
    (r'^(?P<name>\w+)$', views.byname),
)
|
nilq/baby-python
|
python
|
# coding=utf-8
# Copyright 2021 The OneFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from abc import ABCMeta, abstractmethod
from typing import Any, Dict
import oneflow as flow
from libai.config import LazyConfig, try_get_key
from libai.engine import DefaultTrainer
from libai.utils import distributed as dist
from libai.utils.checkpoint import Checkpointer
from libai.utils.logger import setup_logger
logger = setup_logger(distributed_rank=dist.get_rank())
# NOTE(review): the line below immediately rebinds `logger`, discarding the
# logger returned by setup_logger above — confirm whether "libai.inference"
# is configured by setup_logger or whether the first binding is dead code.
logger = logging.getLogger("libai.inference")
class BasePipeline(metaclass=ABCMeta):
    """
    Base class for all task pipelines.

    Subclasses implement ``_parse_parameters``, ``preprocess``, ``forward``
    and ``postprocess``; ``__call__`` chains them under ``flow.no_grad()``
    and collects outputs on the main process only.
    """

    def __init__(
        self,
        config_file,
        data_parallel=None,
        tensor_parallel=None,
        pipeline_parallel=None,
        **kwargs,
    ):
        # init cfg
        self.cfg = LazyConfig.load(config_file)
        flow.boxing.nccl.set_fusion_threshold_mbytes(
            try_get_key(self.cfg, "train.nccl_fusion_threshold_mb", default=16)
        )
        flow.boxing.nccl.set_fusion_max_ops_num(
            try_get_key(self.cfg, "train.nccl_fusion_max_ops", default=24)
        )
        self.update_cfg(data_parallel, tensor_parallel, pipeline_parallel)
        dist.setup_dist_util(self.cfg.train.dist)
        assert (
            self.cfg.train.dist.data_parallel_size == 1
        ), "not support data parallel yet, only support tensor and pipeline parallel"
        logger.info(self.cfg.train.dist)
        # initial and load model
        self.model = DefaultTrainer.build_model(self.cfg).eval()
        self.load_pretrain_weight(self.model, self.cfg)
        # initial tokenizer
        self.tokenizer = self.build_tokenizer(self.cfg)
        # set parameters
        (
            self._preprocess_params,
            self._forward_params,
            self._postprocess_params,
        ) = self._parse_parameters(**kwargs)

    def update_cfg(
        self,
        data_parallel=1,
        tensor_parallel=1,
        pipeline_parallel=1,
    ):
        """Write the parallelism sizes into ``self.cfg.train.dist``.

        ``__init__`` forwards ``None`` for arguments the caller did not
        supply, so fall back to the documented default of 1 in that case.
        (Previously ``None`` was written straight into the config, which
        made the ``> 1`` comparison below raise ``TypeError``.)
        """
        self.cfg.train.dist.data_parallel_size = (
            1 if data_parallel is None else data_parallel
        )
        self.cfg.train.dist.tensor_parallel_size = (
            1 if tensor_parallel is None else tensor_parallel
        )
        self.cfg.train.dist.pipeline_parallel_size = (
            1 if pipeline_parallel is None else pipeline_parallel
        )
        if self.cfg.train.dist.pipeline_parallel_size > 1:
            assert (
                try_get_key(self.cfg.train.dist, "pipeline_num_layers") is not None
            ), "cfg.train.dist.pipeline_num_layers must be set when run pipeline parallel"

    def load_pretrain_weight(self, model, cfg):
        """Load ``cfg.train.load_weight`` into *model* (no training resume)."""
        Checkpointer(model, save_dir=cfg.train.output_dir).resume_or_load(
            cfg.train.load_weight, resume=False
        )

    def build_tokenizer(self, cfg):
        """Return a tokenizer built from ``cfg.tokenization``, or None if unset."""
        tokenizer = None
        if try_get_key(cfg, "tokenization") is not None:
            tokenizer = DefaultTrainer.build_tokenizer(cfg)
        return tokenizer

    @abstractmethod
    def _parse_parameters(self, **pipeline_parameters):
        """Split kwargs into (preprocess, forward, postprocess) param dicts."""
        raise NotImplementedError("_parse_parameters not implemented")

    def __call__(self, inputs, *args, batch_size=None, **kwargs) -> dict:
        """Run the full preprocess -> forward -> postprocess pipeline."""
        preprocess_params, forward_params, postprocess_params = self._parse_parameters(
            **kwargs
        )  # noqa
        # Fuse __init__ params and __call__ params without modifying the __init__ ones.
        preprocess_params = {**self._preprocess_params, **preprocess_params}
        forward_params = {**self._forward_params, **forward_params}
        postprocess_params = {**self._postprocess_params, **postprocess_params}
        with flow.no_grad():
            model_inputs_dict = self.preprocess(inputs, **preprocess_params)
            model_outputs_dict = self.forward(model_inputs_dict, **forward_params)
            model_outputs_dict = self.to_local(model_outputs_dict)
            # Only rank 0 postprocesses; every other rank returns an empty dict.
            if dist.is_main_process():
                outputs_dict = self.postprocess(model_outputs_dict, **postprocess_params)
            else:
                outputs_dict = {}
            dist.synchronize()
            return outputs_dict

    def to_local(self, model_outputs_dict):
        """Convert global tensors in the output dict to local tensors (rank 0)."""
        for key, value in model_outputs_dict.items():
            if isinstance(value, flow.Tensor) and value.is_global:
                model_outputs_dict[key] = dist.ttol(
                    value, ranks=[0] if value.placement.ranks.ndim == 1 else [[0]]
                )
        if flow.cuda.is_available():
            dist.synchronize()
        return model_outputs_dict

    @abstractmethod
    def preprocess(self, input_: Any, **preprocess_parameters: Dict) -> dict:
        raise NotImplementedError("preprocess not implemented")

    @abstractmethod
    def forward(self, **kwargs: Dict) -> dict:
        raise NotImplementedError("forward not implemented")

    @abstractmethod
    def postprocess(self, **kwargs: Dict) -> dict:
        raise NotImplementedError("postprocess not implemented")
|
nilq/baby-python
|
python
|
# Generated by Django 3.1 on 2020-10-19 16:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make Restaurant.business_number and Restaurant.open_time nullable."""

    dependencies = [
        ('shop', '0013_auto_20201020_0122'),
    ]

    operations = [
        # Allow restaurants without a registered business number.
        migrations.AlterField(
            model_name='restaurant',
            name='business_number',
            field=models.CharField(max_length=30, null=True),
        ),
        # Allow restaurants without declared opening hours.
        migrations.AlterField(
            model_name='restaurant',
            name='open_time',
            field=models.CharField(max_length=13, null=True),
        ),
    ]
|
nilq/baby-python
|
python
|
from jax import numpy as jnp
from typing import Callable
def weighted_dot(sigma_i: float, sigma_r: float, sigma_b: float) -> Callable:
"""Defines weighed dot product, i.e. <u, v> with u = [i, j]
Returns function which calculates the product given the weights.
"""
def dot(gram: jnp.ndarray, kernel: jnp.ndarray) -> jnp.ndarray:
return sigma_i ** 2 * gram + sigma_r ** 2 * kernel + sigma_b ** 2
return dot
|
nilq/baby-python
|
python
|
import requests,json
from HelperTools import Debug
import os
# Endpoint listing the source streams, and the API that resolves non-fixed
# stream URLs; both are injected through environment variables.
sourceStreamInfoListUrl = os.getenv('SourceStream')
GetNotFixedStreamAPIUrl = os.getenv("NotFixedStreamAPI")
# Source-stream info for streams that need proxying (keyed by stream URL).
sourceStreamInfoList = {}
def GetSourceStreamInfoList():
    """Fetch the source-stream list and rebuild the module-level cache.

    Returns the refreshed ``sourceStreamInfoList`` dict mapping stream URL
    to {"bFixedUrl": bool, "url": camera index code}, or None when the
    endpoint is not configured.
    """
    # Identity comparison (`is None`) instead of `== None` for the unset check.
    if sourceStreamInfoListUrl is None or sourceStreamInfoListUrl == "default_source_stream_url":
        Debug.Log("SourceStream is null,so return GetSourceStreamInfoList")
        return None
    resultData = requests.get(sourceStreamInfoListUrl)
    resultJson = json.loads(resultData.text)
    sourceStreamInfoList.clear()
    for oneStream in resultJson:
        # "FIXEDURL" arrives as "0"/"1"; coerce via int -> bool.
        sourceStreamInfoList[oneStream["url"]] = {
            "bFixedUrl": bool(int(oneStream["FIXEDURL"])),
            "url": oneStream["CAMERAINDEXCODE"],
        }
    Debug.Log(f"GetSourceStreamInfoList:{sourceStreamInfoList}")
    return sourceStreamInfoList
GetSourceStreamInfoList()
def GetUnFixedUrl(UnFixedStreamUrl):
    """Resolve a non-fixed stream URL through the NotFixedStream API.

    Returns the resolved URL string, or None when the API is not
    configured or reports a failure.
    """
    # Identity comparison (`is None`) instead of `== None` for the unset check.
    if GetNotFixedStreamAPIUrl is None or GetNotFixedStreamAPIUrl == "default_notfixed_stream_url":
        Debug.Log("NotFixedStreamAPI is null,so return GetUnFixedUrl")
        return None
    requestData = {
        "url": UnFixedStreamUrl
    }
    resultData = requests.get(GetNotFixedStreamAPIUrl, params=requestData)
    resultJson = json.loads(resultData.text)
    if int(resultJson["code"]) == 0 and resultJson["msg"] == "success":
        return resultJson["data"]["url"]
    # Explicitly signal failure instead of silently falling off the end.
    return None
|
nilq/baby-python
|
python
|
import pygame as p
import dartboard
import buttonPanel
import statPanel
from dartboard import PERCENTAGES, LASTBULLSEYE
WIDTH = 800                   # window width in pixels
HEIGHT = 700                  # window height in pixels
BACKGROUND = p.Color("red")   # window fill colour
MAX_FPS = 30                  # frame-rate cap for the main loop
def main():
    """Event loop: draw the board, handle start/stop/reset buttons, throw darts."""
    global throwing
    p.init()
    screen = p.display.set_mode((WIDTH, HEIGHT))
    screen.fill(BACKGROUND)
    p.display.set_caption("Dart Sim V. 1")
    clock = p.time.Clock()
    dartboard.draw(screen)
    buttons = buttonPanel.draw(screen)
    running = True
    throwing = False
    while running:
        for e in p.event.get():
            if e.type == p.QUIT:
                running = False
            if e.type == p.MOUSEBUTTONDOWN:
                if buttons[0].collidepoint(e.pos): # start button
                    throwing = True
                    # recolor the buttons to reflect the "running" state
                    p.draw.rect(screen, p.Color("light green"), buttons[0])
                    p.draw.rect(screen, p.Color("red"), buttons[1])
                    p.draw.rect(screen, p.Color("blue"), buttons[2])
                elif buttons[1].collidepoint(e.pos): # stop button
                    throwing = False
                    p.draw.rect(screen, p.Color("green"), buttons[0])
                    p.draw.rect(screen, p.Color("tomato"), buttons[1])
                    p.draw.rect(screen, p.Color("blue"), buttons[2])
                elif buttons[2].collidepoint(e.pos): # reset button
                    reset(screen)
                    p.draw.rect(screen, p.Color("green"), buttons[0])
                    p.draw.rect(screen, p.Color("red"), buttons[1])
                    p.draw.rect(screen, p.Color("cyan"), buttons[2])
        if throwing:
            # one random dart per frame while running
            dartboard.throwDartRandomly(screen)
        statPanel.draw(screen)
        buttonPanel.draw(screen)
        p.display.flip()
        clock.tick(MAX_FPS)
def reset(screen):
    """Redraw the board, stop throwing, and zero all dartboard statistics."""
    global throwing #add this to main()
    dartboard.draw(screen)
    throwing = False
    # NOTE(review): these rebind dartboard's module attributes; code that did
    # `from dartboard import PERCENTAGES` keeps the old objects — confirm
    # dartboard reads these through module attribute access.
    dartboard.NUM_DARTS = 0
    dartboard.LASTBULLSEYE = 0
    dartboard.PERCENTAGES = [0, 0, 0, 0, 0, 0]
    dartboard.SPOTS = [0, 0, 0, 0, 0, 0] #reset count in all rings
main()
|
nilq/baby-python
|
python
|
import copy
import inspect
import math
import numpy as np
import random
def create_new_children_through_cppn_mutation(pop, print_log, new_children=None, mutate_network_probs=None,
                                              max_mutation_attempts=1500):
    """Create copies, with modification, of existing individuals in the population.

    Parameters
    ----------
    pop : Population class
        This provides the individuals to mutate.
    print_log : PrintLog()
        For logging
    new_children : a list of new children created outside this function (may be empty)
        This is useful if creating new children through multiple functions, e.g. Crossover and Mutation.
    mutate_network_probs : probability, float between 0 and 1 (inclusive)
        The probability of mutating each network.
    max_mutation_attempts : int
        Maximum number of invalid mutation attempts to allow before giving up on mutating a particular individual.

    Returns
    -------
    new_children : list
        A list of new individual SoftBots.
    """
    if new_children is None:
        new_children = []
    random.shuffle(pop.individuals)
    while len(new_children) < pop.pop_size:
        for ind in pop:
            clone = copy.deepcopy(ind)
            if mutate_network_probs is None:
                required = 0
            else:
                required = mutate_network_probs.count(1)
            selection = []
            # resample until at least the mandatory number of networks is selected
            while np.sum(selection) <= required:
                if mutate_network_probs is None:
                    # uniformly select networks
                    selection = np.random.random(len(clone.genotype)) < 1 / float(len(clone.genotype))
                else:
                    # use probability distribution
                    selection = np.random.random(len(clone.genotype)) < mutate_network_probs
                # don't select any frozen networks (used to freeze aspects of genotype during evolution)
                for idx in range(len(selection)):
                    if clone.genotype[idx].freeze:
                        selection[idx] = False
            selected_networks = np.arange(len(clone.genotype))[selection].tolist()
            # remember parent's objective values before they are reset below
            for rank, goal in pop.objective_dict.items():
                setattr(clone, "parent_{}".format(goal["name"]), getattr(clone, goal["name"]))
            clone.parent_genotype = ind.genotype
            clone.parent_id = clone.id
            for name, details in clone.genotype.to_phenotype_mapping.items():
                details["old_state"] = copy.deepcopy(details["state"])
            for selected_net_idx in selected_networks:
                mutation_counter = 0
                done = False
                while not done:
                    mutation_counter += 1
                    candidate = copy.deepcopy(clone)
                    # perform mutation(s)
                    for _ in range(candidate.genotype[selected_net_idx].num_consecutive_mutations):
                        if not clone.genotype[selected_net_idx].direct_encoding:
                            # using CPPNs
                            # getfullargspec replaces getargspec, which was deprecated
                            # and removed in Python 3.11 (same .args attribute).
                            mut_func_args = inspect.getfullargspec(candidate.genotype[selected_net_idx].mutate)
                            mut_func_args = [0 for _ in range(1, len(mut_func_args.args))]
                            choice = random.choice(range(len(mut_func_args)))
                            mut_func_args[choice] = 1
                            variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate(*mut_func_args)
                        else:
                            # direct encoding with possibility of evolving mutation rate
                            # TODO: enable cppn mutation rate evolution
                            rate = None
                            for net in clone.genotype:
                                if "mutation_rate" in net.output_node_names:
                                    rate = net.values  # evolved mutation rates, one for each voxel
                            if "mutation_rate" not in candidate.genotype[selected_net_idx].output_node_names:
                                # use evolved mutation rates
                                variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate(rate)
                            else:
                                # this is the mutation rate itself (use predefined meta-mutation rate)
                                variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate()
                    if variation_degree != "":
                        candidate.variation_type = "{0}({1})".format(variation_type, variation_degree)
                    else:
                        candidate.variation_type = str(variation_type)
                    candidate.genotype.express()
                    if candidate.genotype[selected_net_idx].allow_neutral_mutations:
                        done = True
                        clone = copy.deepcopy(candidate)  # SAM: ensures change is made to every net
                        break
                    else:
                        for name, details in candidate.genotype.to_phenotype_mapping.items():
                            new = details["state"]
                            old = details["old_state"]
                            # np.bool alias was removed in NumPy 1.24; builtin bool is equivalent
                            changes = np.array(new != old, dtype=bool)
                            if np.any(changes) and candidate.phenotype.is_valid():
                                done = True
                                clone = copy.deepcopy(candidate)  # SAM: ensures change is made to every net
                                break
                        # for name, details in candidate.genotype.to_phenotype_mapping.items():
                        #     if np.sum( details["old_state"] != details["state"] ) and candidate.phenotype.is_valid():
                        #         done = True
                        #         break
                    if mutation_counter > max_mutation_attempts:
                        print_log.message("Couldn't find a successful mutation in {} attempts! "
                                          "Skipping this network.".format(max_mutation_attempts))
                        num_edges = len(clone.genotype[selected_net_idx].graph.edges())
                        num_nodes = len(clone.genotype[selected_net_idx].graph.nodes())
                        print_log.message("num edges: {0}; num nodes {1}".format(num_edges, num_nodes))
                        break
                # end while
                if not clone.genotype[selected_net_idx].direct_encoding:
                    # NOTE(review): `.graph.node` is the networkx 1.x API (renamed
                    # `.nodes` in 2.x) — confirm the pinned networkx version.
                    for output_node in clone.genotype[selected_net_idx].output_node_names:
                        clone.genotype[selected_net_idx].graph.node[output_node]["old_state"] = ""
            # reset all objectives we calculate in VoxCad to unevaluated values
            for rank, goal in pop.objective_dict.items():
                if goal["tag"] is not None:
                    setattr(clone, goal["name"], goal["worst_value"])
            clone.id = pop.max_id
            pop.max_id += 1
            new_children.append(clone)
    return new_children
def mutate_controllers(pop, children, crossover_rate=0.4):
    """Crossover controller parameters between random parents, then mutate all.

    crossover_rate : float
        Fraction of the population size whose children receive an averaged
        (two-parent) controller before the mutation pass.
    """
    # controllers crossover
    random.shuffle(children)
    for i in range(0, int(math.floor(crossover_rate*pop.pop_size))):
        indices = random.sample(range(len(pop)), 2)
        contr_1 = pop[indices[0]].genotype.controller
        contr_2 = pop[indices[1]].genotype.controller
        child_contr = children[i].genotype.controller
        # NOTE(review): keys come from __dict__ but values are read/written with
        # [] item access — assumes the controller type maps __getitem__/__setitem__
        # onto its attributes; confirm against the controller class.
        for attr in child_contr.__dict__.keys():
            child_contr[attr] = (contr_1[attr]+contr_2[attr])/2
    random.shuffle(children)
    # every child's controller is mutated, crossed-over or not
    for child in children:
        child.genotype.controller.mutate()
    return children
def mutate_new_materials(pop, children, crossover_rate=0.4):
    """Crossover material properties between random parents, then mutate.

    Averages young_modulus / density / cte of two random parents into a
    fraction of the children, then mutates every child's materials.
    Material "9" is kept as the counterphase twin of material "8".
    """
    # new materials crossover
    random.shuffle(children)
    for i in range(0, int(math.floor(crossover_rate * pop.pop_size))):
        indices = random.sample(range(len(pop)), 2)
        new_materials_1 = pop[indices[0]].genotype.materials
        new_materials_2 = pop[indices[1]].genotype.materials
        child_new_materials = children[i].genotype.materials
        # average each physical property of the two parents
        for material_idx in child_new_materials.keys():
            child_new_materials[material_idx].young_modulus = (new_materials_1.get(
                material_idx).young_modulus + new_materials_2.get(material_idx).young_modulus) / 2
            child_new_materials[material_idx].density = (new_materials_1.get(
                material_idx).density + new_materials_2.get(material_idx).density) / 2
            child_new_materials[material_idx].cte = (new_materials_1.get(
                material_idx).cte + new_materials_2.get(material_idx).cte) / 2
    random.shuffle(children)
    for child in children:
        for material_idx in child.genotype.materials.keys():
            if material_idx == "9":
                # counterphase actuation: mirror material "8" with negated cte
                child.genotype.materials[material_idx].young_modulus = child.genotype.materials["8"].young_modulus
                child.genotype.materials[material_idx].density = child.genotype.materials["8"].density
                child.genotype.materials[material_idx].cte = -child.genotype.materials["8"].cte
            else:
                child.genotype.materials[material_idx].mutate()
    return children
def create_new_children(pop, print_log):
    """Create copies, with modification, of existing individuals in the population.

    Parameters
    ----------
    pop : Population class
        This provides the individuals to mutate.
    print_log : PrintLog()
        For logging
    """
    children = create_new_children_through_cppn_mutation(pop, print_log)
    genotype = pop[0].genotype
    # Controller evolution takes precedence over material evolution.
    if hasattr(genotype, "controller"):
        return mutate_controllers(pop, children)
    if hasattr(genotype, "materials"):
        return mutate_new_materials(pop, children)
    return children
def genome_wide_mutation(pop, print_log):
    """Mutate every network of every individual (selection probability 1)."""
    all_networks_selected = [1] * len(pop[0].genotype)
    return create_new_children_through_cppn_mutation(
        pop, print_log, mutate_network_probs=all_networks_selected)
|
nilq/baby-python
|
python
|
import sys
def solution(A):
    """Minimal |sum(left) - sum(right)| over all non-empty splits of A.

    With prefix p and total t, the difference at a split is |t - 2p|.
    """
    total = sum(A)
    prefix = 0
    best = sys.maxsize
    for value in A[:-1]:  # split point never lies after the last element
        prefix += value
        best = min(best, abs(total - 2 * prefix))
    return best
def test_solution():
    # Best split is [3, 1, 2 | 4, 3] -> |6 - 7| == 1
    assert solution([3, 1, 2, 4, 3]) == 1
|
nilq/baby-python
|
python
|
import json
def readJsonFromFile(filename):
    """Parse the JSON document stored in *filename* and return the result.

    The file is opened through a context manager so the handle is closed
    even if parsing raises (the original leaked the handle on a parse
    error), and json.load replaces the read-then-loads round trip.
    """
    with open(filename, 'r') as fp:
        return json.load(fp)
|
nilq/baby-python
|
python
|
from enum import Enum
class TaskState(Enum):
    """Lifecycle states a task can be in."""
    # static states:
    # a task can either succeed or fail
    VALID = 0x0
    INVALID = 0x1
    # actionable states
    DROP = 0x10   # task should be discarded
    DONE = 0x99   # task finished processing
class TaskConfigState(Enum):
    """Validation outcome for a task's configuration."""
    VALID = 0x0
    INVALID = 0x1
|
nilq/baby-python
|
python
|
import os
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Download (if needed) and load MNIST with one-hot encoded labels.
mnist = input_data.read_data_sets('/tmp/data', one_hot=True)
# Path to Computation graphs
LOGDIR = './graphs'
# Start session
sess = tf.Session()
# Hyper parameters
LEARNING_RATE = 0.01
BATCH_SIZE = 1000
EPOCHS = 10
# Hidden Layers
HL_1 = 1000
HL_2 = 500
# Other parameters
INPUT_SIZE = 28 * 28  # flattened 28x28 grayscale image
N_CLASSES = 10        # digits 0-9
with tf.name_scope('input'):
    # Placeholders fed with image batches and their one-hot labels.
    images = tf.placeholder(tf.float32, [None, INPUT_SIZE], name="images")
    labels = tf.placeholder(tf.float32, [None, N_CLASSES], name="labels")
def fc_layer(x, layer, size_out, activation=None):
    """Fully connected layer: activation(x @ W + b).

    x          : input tensor of shape (batch, size_in)
    layer      : name scope for the layer's ops/variables
    size_out   : number of output units
    activation : optional op (e.g. tf.nn.relu); pre-activation returned if None
    """
    with tf.name_scope(layer):
        size_in = int(x.shape[1])
        W = tf.Variable(tf.random_normal([size_in, size_out]), name="weights")
        # NOTE(review): biases start at -1, which is unusual (0 or a small
        # positive constant is typical before ReLU) — confirm intentional.
        b = tf.Variable(tf.constant(-1, dtype=tf.float32, shape=[size_out]), name="biases")
        wx_plus_b = tf.add( tf.matmul(x, W), b)
        if activation:
            return activation(wx_plus_b)
        return wx_plus_b
# Network: 784 -> 1000 (relu) -> 500 (relu) -> dropout -> 10 logits
fc_1 = fc_layer(images, 'fc_1', HL_1, tf.nn.relu)
fc_2 = fc_layer(fc_1, 'fc_2', HL_2, tf.nn.relu)
#to prevent overfitting
dropped = tf.nn.dropout(fc_2, keep_prob=0.9)
# output layer
y = fc_layer(dropped, 'output', N_CLASSES)
with tf.name_scope('loss'):
    # softmax cross-entropy on the raw logits, averaged over the batch
    loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=labels))
    tf.summary.scalar('loss', loss)
with tf.name_scope('optimizer'):
    train = tf.train.AdamOptimizer(LEARNING_RATE).minimize(loss)
with tf.name_scope('evaluation'):
    # fraction of predictions matching the one-hot labels
    correct = tf.equal( tf.argmax(y, 1 ), tf.argmax(labels, 1))
    accuracy = tf.reduce_mean( tf.cast(correct, dtype=tf.float32))
    tf.summary.scalar('accuracy', accuracy)
# separate writers so TensorBoard overlays train vs. test curves
train_writer = tf.summary.FileWriter(os.path.join(LOGDIR, "train"), sess.graph)
test_writer = tf.summary.FileWriter(os.path.join(LOGDIR, "test"), sess.graph)
summary_op = tf.summary.merge_all()
init = tf.global_variables_initializer()
sess.run(init)
with tf.name_scope('training'):
    step = 0
    for epoch in range(EPOCHS):
        print("epoch ", epoch, "\n-----------\n")
        for batch in range(int(mnist.train.labels.shape[0]/BATCH_SIZE)):
            step += 1
            batch_xs, batch_ys = mnist.train.next_batch(BATCH_SIZE)
            summary_result, _ = sess.run( [summary_op, train], feed_dict={images: batch_xs, labels: batch_ys} )
            train_writer.add_summary(summary_result, step)
            # evaluate on the full test set after every batch
            summary_result, acc = sess.run( [summary_op, accuracy], feed_dict={images: mnist.test.images, labels: mnist.test.labels} )
            test_writer.add_summary(summary_result, step)
            print("Batch ", batch, ": accuracy = ", acc)
train_writer.close()
test_writer.close()
sess.close()
|
nilq/baby-python
|
python
|
#:::::::::::::::::::::::::
#::
#:: ProjectDependencies/check.py
#::_______________________
#::
#:: Author: Clement BERTHAUD
#::
#:: MIT License
#:: Copyright (c) 2018 ProjectDependencies - Clément BERTHAUD
#::
#:: Permission is hereby granted, free of charge, to any person obtaining a copy
#:: of this software and associated documentation files (the "Software"), to deal
#:: in the Software without restriction, including without limitation the rights
#:: to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#:: copies of the Software, and to permit persons to whom the Software is
#:: furnished to do so, subject to the following conditions:
#::
#:: The above copyright notice and this permission notice shall be included in all
#:: copies or substantial portions of the Software.
#::
#:: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#:: IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#:: FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#:: AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#:: LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#:: OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#:: SOFTWARE.
#::
#:::::::::::::::::::::::::
import os
import ProjectDependencies.utils
from colorama import Fore, Back, Style
from colorama import init as init_colorama
init_colorama()
def command( iArgs, iFiles, iConfig, iDirs, iKeys ):
    """Check indexed files against the working tree and report missing ones."""
    ProjectDependencies.utils.notify_ignore_args( iArgs )
    ProjectDependencies.utils.smart_gather_wtree_resolve_all_hash_inconsistencies( iDirs, iFiles )
    indexed_entries = ProjectDependencies.utils.gather_list_with_hash( iFiles["index"] )
    # Collect every indexed entry whose file no longer exists under the root.
    missing_entries = [ entry for entry in indexed_entries
                        if not os.path.exists( iDirs["root"] + entry["file"] ) ]
    if missing_entries:
        print( "Yeah, you should run download again." )
        print( "Here is the list of missing indexed files:" )
        print( Fore.RED )
        for entry in missing_entries:
            print( ProjectDependencies.utils.make_offset( 4 ) + "missing: " + entry["file"] )
        print(Style.RESET_ALL)
    else:
        print( "Everything's fine, chill out." )
|
nilq/baby-python
|
python
|
"""
We presume that Channels is operating over a Redis channel_layer here, and use it
explicitly.
"""
from base64 import b64encode
from json import dumps
def message_to_hash(message):
    """Return a redis semaphore key for *message* (base64 of its JSON form)."""
    digest = b64encode(dumps(message).encode("utf-8"))
    return b"semaphore:" + digest
async def get_set_message_semaphore(channel_layer, message):
    """Set a semaphore in redis.

    Used to prevent sending the same message twice within 2 seconds.
    Returns the redis SET result — presumably truthy only when the key
    was newly set (SET-if-not-exist with a 2s expiry); confirm against
    the channel layer's redis client.
    """
    msg_hash = message_to_hash(message)
    async with channel_layer.connection(0) as connection:
        return await connection.set(msg_hash, 1, expire=2, exist="SET_IF_NOT_EXIST")
async def clear_message_semaphore(channel_layer, message):
    """Delete the semaphore for *message* so it may be sent again immediately."""
    msg_hash = message_to_hash(message)
    async with channel_layer.connection(0) as connection:
        return await connection.delete(msg_hash)
|
nilq/baby-python
|
python
|
"""
Configuration for the integration issues tests
"""
import pytest
@pytest.fixture(scope="package", autouse=True)
def xfail():
    # Auto-applied to every test in this package: mark them all as
    # expected failures until the issues tests are refactored.
    pytest.xfail("Issues tests need refactored")
|
nilq/baby-python
|
python
|
import argparse
import json
import logging
import os
import subprocess
import tqdm
import wget
from collections import defaultdict
from datetime import datetime
from pathlib import Path
import numpy as np
import torch
import random
def extracting_log_info(log_files, experiment, logging):
    """Aggregate t2v/v2t retrieval metrics across per-seed evaluation logs.

    Parses the last 14 lines of each log under logs_eval/, accumulates each
    metric per retrieval direction, then logs and writes "mean, stddev"
    summaries to logs_eval/<experiment>_summary.txt.
    """
    metrics_t2v = defaultdict(list)
    metrics_v2t = defaultdict(list)
    for file_name in log_files:
        output_string = f"{experiment}:\n"
        with open(Path("logs_eval") / file_name, 'r') as f:
            content_lines = f.read().splitlines()
            # only the final metric block of the log is relevant
            content_lines = content_lines[-14:]
            for line in content_lines:
                if 't2v' in line:
                    metric_entry = line.split('test_t2v_metrics_')[1].split(':')[0]
                    metrics_t2v[metric_entry].append(float(line.split('test_t2v_metrics_')[1].split(':')[1]))
                elif 'v2t' in line:
                    metric_entry = line.split('test_v2t_metrics_')[1].split(':')[0]
                    metrics_v2t[metric_entry].append(float(line.split('test_v2t_metrics_')[1].split(':')[1]))
    keys = list(metrics_t2v.keys())
    # sample standard deviation (ddof=1) across seeds
    for key in keys:
        output_string += f"{key}_t2v: {np.mean(metrics_t2v[key]):.1f}, {np.std(metrics_t2v[key], ddof=1):.1f}\n"
    for key in keys:
        output_string += f"{key}_v2t: {np.mean(metrics_v2t[key]):.1f}, {np.std(metrics_v2t[key], ddof=1):.1f}\n"
    logging.info(output_string)
    with open(Path("logs_eval") / f"{experiment}_summary.txt", 'w') as f:
        f.write(output_string)
def run_exp(experiments, logging):
    """Evaluate every experiment listed in *experiments*, in order."""
    for experiment_name in experiments:
        logging.info(f"Now running {experiment_name}")
        run_one_exp(experiment_name, experiments, logging)
def download_configs(experiment, trained_model_path, group_id, seed, timestamp):
    """Download the experiment's config.json next to the model and patch its seed."""
    new_folder = str(trained_model_path).split('/trained_model.pth')[0]
    url_config = f"http://www.robots.ox.ac.uk/~vgg/research/collaborative-experts/data/models/{experiment}/{group_id}/{seed}/{timestamp}/config.json"
    config_path = Path(new_folder) / 'config.json'
    wget.download(url_config, out=str(config_path))
    with open(config_path, 'r') as f:
        config_content = json.load(f)
    # NOTE(review): takes only the final character of the seed string —
    # assumes seeds are named like "seed_0".."seed_9"; confirm.
    config_content['seed'] = int(seed[-1])
    with open(config_path, 'w') as f:
        json.dump(config_content, f)
def download_models(experiment, logging, trained_model_path,
                    group_id, seed, timestamp):
    """Fetch the trained model (and its config) for one seed, unless cached."""
    new_folder = str(trained_model_path).split('/trained_model.pth')[0]
    if os.path.exists(trained_model_path) is False:
        logging.info(f"Downloading model for {seed} since it does not exist on the local machine")
        url = f"http://www.robots.ox.ac.uk/~vgg/research/collaborative-experts/data/models/{experiment}/{group_id}/{seed}/{timestamp}/trained_model.pth"
        # import pdb; pdb.set_trace()
        Path(new_folder).mkdir(exist_ok=True, parents=True)
        wget.download(url, out=str(trained_model_path))
    else:
        logging.info(f"Model already downloaded for {experiment} seed {seed}")
    # the config is fetched separately so a cached model still gets one
    if os.path.exists(Path(new_folder) / 'config.json') is False:
        download_configs(experiment, trained_model_path, group_id, seed, timestamp)
    else:
        logging.info(f"Config already downloaded for {experiment} seed {seed}")
def run_one_exp(experiment, experiments, logging):
    """Download and evaluate every (group, seed, timestamp) of one experiment,
    then average the per-seed metrics."""
    # NOTE(review): this initial group_id is immediately shadowed by the loop
    # unpacking below — confirm whether it is dead code.
    group_id = experiments[experiment][0]
    with open('exp_to_seed_time.json', 'r') as f:
        json_dict = json.load(f)
    log_files = []
    for (group_id, seed, timestamp) in json_dict[experiment]:
        group_id_path = Path("data/saved/models") / experiment / group_id
        logging.info("Running evaluation on existent seeds")
        (Path("logs_eval")).mkdir(exist_ok=True, parents=True)
        trained_model_path = group_id_path / seed / timestamp / 'trained_model.pth'
        download_models(experiment, logging, trained_model_path,
                        group_id, seed, timestamp)
        config_path = group_id_path / seed / timestamp / 'config.json'
        # tee the evaluation output into a per-seed log for later parsing
        cmd = f"python test.py --config {config_path} --resume {trained_model_path} --device 0 --eval_from_training_config >&1 | tee logs_eval/log_{group_id}_{seed}.txt"
        log_files.append(f"log_{group_id}_{seed}.txt")
        logging.info(cmd)
        subprocess.call(cmd, shell=True)
    logging.info("Now averaging results")
    extracting_log_info(log_files, experiment, logging)
def main():
    """CLI entry point: evaluate one experiment (--experiment) or all of them."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--experiments_path", default="misc/experiments-audiocaps.json")
    parser.add_argument("--experiment", type=str, default=None)
    parser.add_argument(
        "--data_dir",
        type=Path,
        default="data",
    )
    parser.add_argument(
        "--dataset",
        type=str,
        default="data",
    )
    parser.add_argument(
        "--refresh",
        action="store_true",
    )
    args = parser.parse_args()
    # log both to a timestamped file and to the console
    os.makedirs('logs', exist_ok=True)
    logging.basicConfig(filename=f"logs/{datetime.now().strftime(r'%m%d_%H%M%S')}.log",
                        level=logging.INFO)
    logging.getLogger().addHandler(logging.StreamHandler())
    logging.info(args)
    with open(args.experiments_path, "r") as f:
        experiments = json.load(f)
    if args.experiment is None:
        run_exp(experiments, logging)
    else:
        run_one_exp(args.experiment, experiments, logging)
if __name__ == "__main__":
    main()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
from __future__ import print_function
"""
test_split_regex_and_collate.py
"""
JOBS_PER_TASK = 5  # number of split files simulated per original input
import os
# per-test temporary working directory derived from this script's own name
tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
import sys
import re
# add grandparent to search path for testing
grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
sys.path.insert(0, grandparent_dir)
# module name = script name without extension
module_name = os.path.splitext(os.path.basename(__file__))[0]
# funky code to import by file name
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
import ruffus
from ruffus import pipeline_run, pipeline_printout, Pipeline, suffix, regex, formatter, originate, follows, merge, mkdir, posttask, subdivide, transform, collate, split
from ruffus.ruffus_exceptions import RethrownJobError
from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
from ruffus.combinatorics import *
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# options
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
try:
from StringIO import StringIO
except:
from io import StringIO
import shutil
import unittest
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# imports
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# use simplejson in place of json for python < 2.6
try:
import json
except ImportError:
import simplejson
json = simplejson
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# Main logic
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# Three starting files
#
original_files = [tempdir + "/original_%d.fa" % d for d in range(3)]  # three empty FASTA seed files
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# Tasks
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@mkdir(tempdir)
@originate(original_files)
def generate_initial_files(out_name):
    # Create an empty seed file; contents are irrelevant to the pipeline test.
    with open(out_name, 'w') as outfile:
        pass
#
# split_fasta_file
#
@posttask(lambda: sys.stderr.write("\tSplit into %d files each\n" % JOBS_PER_TASK))
@subdivide(generate_initial_files,
           regex(r".*\/original_(\d+).fa"),                       # match original files
           [tempdir + r"/files.split.\1.success",                 # flag file for each original file
            tempdir + r"/files.split.\1.*.fa"],                   # glob pattern
           r"\1")                                                 # index of original file
def split_fasta_file (input_file, outputs, original_index):
    """Simulate splitting one FASTA file into JOBS_PER_TASK pieces plus a flag."""
    #
    #   remove previous fasta files
    #
    success_flag = outputs[0]
    output_file_names = outputs[1:]
    for f in output_file_names:
        os.unlink(f)
    #
    #   create as many files as we are simulating in JOBS_PER_TASK
    #
    for i in range(JOBS_PER_TASK):
        with open(tempdir + "/files.split.%s.%03d.fa" % (original_index, i), "w") as oo:
            pass
    with open(success_flag, "w") as oo:
        pass
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# align_sequences
#
@posttask(lambda: sys.stderr.write("\tSequences aligned\n"))
@transform(split_fasta_file, suffix(".fa"), ".aln")  # fa -> aln
def align_sequences (input_file, output_filename):
    # Stand-in for a real aligner: write the output's own name as content.
    with open(output_filename, "w") as oo:
        oo.write("%s\n" % output_filename)
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# percentage_identity
#
@posttask(lambda: sys.stderr.write("\t%Identity calculated\n"))
@transform(align_sequences,             # find all results from align_sequences
           suffix(".aln"),              # replace suffix with:
           [r".pcid",                   # .pcid suffix for the result
            r".pcid_success"])          # .pcid_success to indicate job completed
def percentage_identity (input_file, output_files):
    """Simulate a %identity computation: write the result file plus a success flag."""
    (output_filename, success_flag_filename) = output_files
    with open(output_filename, "w") as oo:
        oo.write("%s\n" % output_filename)
    with open(success_flag_filename, "w") as oo:
        pass
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# combine_results
#
@posttask(lambda: sys.stderr.write("\tResults recombined\n"))
@collate(percentage_identity, regex(r".*files.split\.(\d+)\.\d+.pcid"),
         [tempdir + r"/\1.all.combine_results",
          tempdir + r"/\1.all.combine_results_success"])
def combine_results (input_files, output_files):
    """
    Combine all
    """
    (output_filename, success_flag_filename) = output_files
    with open(output_filename, "w") as out:
        # each input is a (.pcid, .pcid_success) pair; only the result is read
        for inp, flag in input_files:
            with open(inp) as ii:
                out.write(ii.read())
    with open(success_flag_filename, "w") as oo:
        pass
class Test_ruffus(unittest.TestCase):
    def setUp(self):
        # Start from a fresh working directory containing the three
        # (empty) original input files.
        import os
        try:
            shutil.rmtree(tempdir)
        except:
            pass
        os.makedirs(tempdir)
        for f in original_files:
            with open(f, "w") as p: pass
    def cleanup_tmpdir(self):
        # Remove all pipeline outputs plus the ruffus checksum/history file.
        os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
#___________________________________________________________________________
#
# test product() pipeline_printout and pipeline_run
#___________________________________________________________________________
def test_collate(self):
self.cleanup_tmpdir()
s = StringIO()
pipeline_printout(s, [combine_results], verbose=5, wrap_width = 10000, pipeline= "main")
self.assertTrue(re.search('Job needs update:.*Missing files.*', s.getvalue(), re.DOTALL) is not None)
#print s.getvalue()
pipeline_run([combine_results], verbose=0, pipeline= "main")
def test_newstyle_collate (self):
"""
As above but create pipeline on the fly using object orientated syntax rather than decorators
"""
#
# Create pipeline on the fly, joining up tasks
#
test_pipeline = Pipeline("test")
test_pipeline.originate(task_func = generate_initial_files,
output = original_files)\
.mkdir(tempdir, tempdir+"/test")
test_pipeline.subdivide( task_func = split_fasta_file,
input = generate_initial_files,
filter = regex(r".*\/original_(\d+).fa"), # match original files
output = [tempdir + r"/files.split.\1.success", # flag file for each original file
tempdir + r"/files.split.\1.*.fa"], # glob pattern
extras = [r"\1"])\
.posttask(lambda: sys.stderr.write("\tSplit into %d files each\n" % JOBS_PER_TASK))
test_pipeline.transform(task_func = align_sequences,
input = split_fasta_file,
filter = suffix(".fa"),
output = ".aln") \
.posttask(lambda: sys.stderr.write("\tSequences aligned\n"))
test_pipeline.transform(task_func = percentage_identity,
input = align_sequences, # find all results from align_sequences
filter = suffix(".aln"), # replace suffix with:
output = [r".pcid", # .pcid suffix for the result
r".pcid_success"] # .pcid_success to indicate job completed
)\
.posttask(lambda: sys.stderr.write("\t%Identity calculated\n"))
test_pipeline.collate(task_func = combine_results,
input = percentage_identity,
filter = regex(r".*files.split\.(\d+)\.\d+.pcid"),
output = [tempdir + r"/\1.all.combine_results",
tempdir + r"/\1.all.combine_results_success"])\
.posttask(lambda: sys.stderr.write("\tResults recombined\n"))
#
# Cleanup, printout and run
#
self.cleanup_tmpdir()
s = StringIO()
test_pipeline.printout(s, [combine_results], verbose=5, wrap_width = 10000)
self.assertTrue(re.search('Job needs update:.*Missing files.*', s.getvalue(), re.DOTALL) is not None)
test_pipeline.run(verbose=0)
#___________________________________________________________________________
#
# cleanup
#___________________________________________________________________________
def tearDown(self):
shutil.rmtree(tempdir)
#
#   Necessary to protect the "entry point" of the program under windows.
#   see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
#
if __name__ == '__main__':
    # Discover and run every unittest.TestCase defined in this module.
    unittest.main()
|
nilq/baby-python
|
python
|
from django.views.generic import View
from core.models import Cardapio
from django.shortcuts import render
from core.outros.categoriasCardapio import categoriasCardapio
import json
class CardapioView(View):
    """Menu ("cardapio") page view.

    GET parameters:
        categoria:        optional category to display; when absent, the
                          category index page is rendered instead.
        item_adicionado:  flag set when an item was just added to the cart.
        id:               id of the Cardapio item to append to the session cart.
    """

    def get(self, request, *args, **kwargs):
        categoria = request.GET.get('categoria')
        item_adicionado = request.GET.get('item_adicionado')
        # Renamed from `id` to avoid shadowing the builtin.
        item_id = request.GET.get('id')

        # The cart lives in the session as a JSON-encoded list of dicts.
        carrinho = []
        if request.session.get('carrinho'):
            carrinho = json.loads(request.session['carrinho'])

        if categoria is None:
            context = {
                'categoriasCardapio': categoriasCardapio,
                'carrinhoTamanho': len(carrinho),
            }
            return render(request, 'core/cardapio.html', context)

        pratosQuery = Cardapio.objects.filter(categoria=categoria)
        # Build a matrix (rows of up to 3 dishes) for template rendering.
        # BUGFIX: the original called `cardapioCatArray.push(...)` — Python
        # lists have no push(), so any category with 3+ dishes raised
        # AttributeError. Chunking by slicing is equivalent and correct.
        pratos = list(pratosQuery)
        cardapioCatArray = [pratos[i:i + 3] for i in range(0, len(pratos), 3)]

        if item_adicionado is not None:
            itemCardapio = Cardapio.objects.get(id=item_id)
            item = {
                'id': str(itemCardapio.id),
                'nome': str(itemCardapio.nome),
                'fotoUrl': str(itemCardapio.foto.url),
                'valor': str(itemCardapio.valor),
                'descricao': str(itemCardapio.descricao),
            }
            carrinho.append(item)
            request.session['carrinho'] = json.dumps(carrinho)

        context = {
            'categoria': categoria,
            'pratos': cardapioCatArray,
            'item_adicionado': item_adicionado,
            'carrinhoTamanho': len(carrinho),
        }
        return render(request, 'core/cardapio.html', context)
|
nilq/baby-python
|
python
|
import argparse
import logging
import os
from sentiment_analysis.src.managers.survey_replies_manager import SurveyRepliesManager
from utils.data_connection.api_data_manager import APISourcesFetcher
from utils.data_connection.source_manager import Connector
from utils.gcloud.nlp_client import NLPGoogleClient
from utils.utilities import get_last_week, create_list_weeks_years, extract_first_last_weeks, custom_year_week_format
from google.cloud.language_v1 import LanguageServiceClient
from utils.data_connection.factory.redis_factory import RedisFactory
from utils.data_connection.redis_manager import RedisManager
from nested_lookup import nested_lookup
# Module-level logger (root logger; handlers are configured by the runtime).
logger = logging.getLogger()
def inject_year_week_sentiment_analysis(db_connector: Connector,
                                        google_client: LanguageServiceClient,
                                        redis_manager: RedisManager,
                                        list_week_year: list,
                                        company_id: str) -> dict:
    """
    Run sentiment scoring over the survey replies of one company for the
    given span of weeks.
    :param db_connector: connector
    :param google_client: google client
    :param redis_manager: redis manager
    :param list_week_year: list of weeks years
    :param company_id: company target
    :return: processing results keyed by company
    """
    # Derive the analysis window from the first and last entries of the list.
    first_week, first_year, last_week, last_year = extract_first_last_weeks(list_week_year)
    period = {
        "start_year": first_year,
        "start_week": first_week,
        "end_year": last_year,
        "end_week": last_week,
    }
    manager = SurveyRepliesManager(api_manager=APISourcesFetcher(db_connector=db_connector),
                                   google_client=google_client,
                                   redis_manager=redis_manager,
                                   period=period,
                                   company_ids=[company_id])
    manager.fetch_data()
    manager.process_replies(process_scores_only=True)
    return manager.get_results()
def persist_result_redis(company_id: str, processing_result: dict, redis_manager) -> None:
    """
    Append newly computed sentiment scores for *company_id* to Redis.
    :param company_id: str of company id
    :param processing_result: dict of processing results to persist
    :param redis_manager: redis manager
    :return:
    """
    score_field = "score"
    # Nothing to do when processing produced no data.
    if not processing_result:
        logger.warning(msg="No data to be persisted in Redis.")
        return
    existing_scores = redis_manager.retrieve(key=company_id, field=score_field)
    # Collect every nested "score" value from this company's results.
    new_scores = nested_lookup(score_field, processing_result[company_id])
    redis_manager.persist(key=company_id,
                          field=score_field,
                          data=existing_scores + new_scores)
if __name__ == "__main__":
    # CLI entry point: fetch survey replies over a window of weeks, run
    # sentiment scoring, and append the resulting scores to Redis.
    parser = argparse.ArgumentParser(description='Extract surveys replies from DB and inject to REDIS')
    parser.add_argument('--year', help='year of the survey')
    parser.add_argument('--week', help='week of the survey')
    parser.add_argument("--number_week_to_insert", help="number of week to agglomerate", default=1)
    parser.add_argument('--company_id', help='target company id', required=True)
    args = parser.parse_args()
    target_year = args.year
    target_week = args.week
    number_week_to_insert = args.number_week_to_insert
    company_id = args.company_id
    # Default to the most recently completed week when no period is given.
    if target_week is None or target_year is None:
        target_year, target_week = get_last_week()
    # DB credentials are taken from the environment.
    connector = Connector(os.getenv("DB_USER"),
                          os.getenv("DB_PASSWORD"),
                          os.getenv("DB_HOST"),
                          os.getenv("DB_PORT"))
    g_client = NLPGoogleClient.open_client()
    list_week_year = create_list_weeks_years(week=int(target_week),
                                             year=int(target_year),
                                             number_weeks_analyze=int(number_week_to_insert))
    # NOTE(review): `weeks` is computed but never used below — confirm intent.
    weeks = custom_year_week_format(year_weeks=list_week_year)
    redis_manager = RedisFactory.build()
    processing_result = inject_year_week_sentiment_analysis(db_connector=connector,
                                                            google_client=g_client,
                                                            redis_manager=redis_manager,
                                                            list_week_year=list_week_year,
                                                            company_id=company_id)
    persist_result_redis(company_id=company_id,
                         redis_manager=redis_manager,
                         processing_result=processing_result)
|
nilq/baby-python
|
python
|
import numpy as np
import os
# CSV of CMIP6 solar forcing data shipped alongside this module.
forcing_filename = os.path.join(os.path.dirname(__file__), 'cmip6_solar.csv')
class Forcing:
    """Solar forcing table, loaded once at import time (7 header rows skipped)."""
    # Column 0: year; column 1: solar forcing value — units not stated here,
    # see the CSV header (TODO confirm).
    forcing = np.loadtxt(forcing_filename, skiprows=7, delimiter=',')
    year = forcing[:, 0]
    solar = forcing[:, 1]
|
nilq/baby-python
|
python
|
import os
import numpy as np
from play_model import PlayModel
class PlayAgent:
    """Inference-time agent: wraps a PlayModel and picks greedy actions."""

    def __init__(self, params):
        self.parameters = params
        # Reduce TensorFlow log output before the model is constructed.
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(self.parameters['tf_log_level'])
        self.model = PlayModel(self.parameters)

    def get_action(self, state):
        """Return the greedy action (argmax of predicted Q-values) for *state*."""
        q_values = self.model.predict([state])
        return np.argmax(q_values)

    def __enter__(self):
        self.model.__enter__()
        # BUGFIX: return self so `with PlayAgent(...) as agent:` binds the
        # agent (previously returned None).
        return self

    def __exit__(self, ty, value, tb):
        # BUGFIX: delegate teardown to the model — __enter__ was delegated
        # but __exit__ discarded it, leaking the model's resources.
        # NOTE(review): assumes PlayModel implements __exit__ to pair with
        # its __enter__ — confirm against play_model.py.
        return self.model.__exit__(ty, value, tb)
|
nilq/baby-python
|
python
|
import pytest
import asyncio
from async_v20.client import OandaClient
@pytest.yield_fixture
@pytest.mark.asyncio
async def client():
    """Yield an OandaClient whose REST, stream and health endpoints all point
    at a local stub server on 127.0.0.1:8080 (plain HTTP)."""
    oanda_client = OandaClient(rest_host='127.0.0.1', rest_port=8080, rest_scheme='http',
                               stream_host='127.0.0.1', stream_port=8080, stream_scheme='http',
                               health_host='127.0.0.1', health_port=8080, health_scheme='http')
    yield oanda_client
    # Teardown: close the client, then yield control once so any pending
    # close callbacks on the event loop get a chance to run.
    await oanda_client.close()
    await asyncio.sleep(0)
|
nilq/baby-python
|
python
|
"""
Defines the blueprint for the auth
"""
import uuid
import datetime
from flasgger import swag_from
from flask import Blueprint, request
from flask.json import jsonify
from flask_bcrypt import generate_password_hash, check_password_hash
from flask_jwt_extended import (create_access_token, get_jwt_identity)
from repositories import AuthRepository, UserRepository
# Blueprint grouping the /auth endpoints; registered by the app factory.
AUTH_BLUEPRINT = Blueprint("auth", __name__)
@AUTH_BLUEPRINT.route('/auth', methods=['POST'])
@swag_from("../swagger/auth/POST.yml")
def create():
    """Authenticate a user, creating the account on first login.

    Expects a JSON body with "username" and "password". On first login the
    credentials and a user profile are created; otherwise the password is
    verified. Always responds HTTP 200, with {"success": bool, ...}.
    """
    body = request.get_json()
    auth = AuthRepository.getByUsername(body["username"].lower())

    if not auth:
        # First login: create credentials and the matching user profile.
        pw_hash = generate_password_hash(body["password"], 10)
        auth = AuthRepository.create(
            body["username"].lower(), pw_hash
        )
        UserRepository.create(
            body["username"], None, None
        )
        # CLEANUP: the original computed `expires = timedelta(days=1)` twice
        # but never passed it to create_access_token, so tokens use the
        # default expiry. TODO(review): pass expires_delta if a 1-day expiry
        # was intended.
        access_token = create_access_token(identity={"username": auth.username, "id": auth.id})
        response = jsonify({"success": True, "access_token": access_token})
        response.status_code = 200
        return response

    validatePassword = check_password_hash(auth.password, body["password"])
    if not validatePassword:
        # NOTE(review): failures deliberately return HTTP 200 with
        # success=False; existing clients depend on this shape.
        response = jsonify({"success": False, "message": "Invalid username or password"})
        response.status_code = 200
        return response

    access_token = create_access_token(identity={"username": auth.username, "id": auth.id})
    AuthRepository.updateToken(id=auth.id, token=access_token)
    response = jsonify({"success": True, "access_token": access_token})
    response.status_code = 200
    return response
|
nilq/baby-python
|
python
|
import numpy as np
def main():
    """Demonstrate 3-D <-> linear index round-tripping for a 2x3x4 grid.

    Prints, for each linear index i, its (x, y, z) decomposition and the
    re-encoded linear index (which must equal i).
    """
    nx = 2
    ny = 3
    nz = 4

    def do_it(i):
        # Linear index -> [x, y, z] with x the fastest-varying axis.
        # Floor division (//) keeps this correct on Python 3, where / is
        # true division.
        return [i % nx, (i // nx) % ny, i // (nx * ny)]

    def undo_it(x, y, z):
        # (x, y, z) -> linear index; exact inverse of do_it.
        # BUGFIX: the z stride is nx * ny, so the inner factor must be ny.
        # The original `nx * ((nx * z) + y) + x` only inverts do_it when
        # nx == ny.
        return nx * ((ny * z) + y) + x

    for i in range(nx * ny * nz):
        xyz = do_it(i)
        print('{}: {}'.format(i, xyz))
        uuzz = np.array(xyz)
        print('{}: {}'.format(i, undo_it(*uuzz)))


main()
|
nilq/baby-python
|
python
|
import utils
import euclides
def esCarmichael(p):
    """Return True if p is a Carmichael number, False otherwise (p > 0).

    Checks that every base coprime with p passes the Fermat test
    base^(p-1) ≡ 1 (mod p), and that p is square-free with at least three
    prime factors.
    """
    for base in range(1, p):
        # Only bases coprime with p are subject to the Fermat condition.
        if euclides.sonCoprimers(base, p):
            if utils.potencia_modular_eficient(base, p - 1, p) != 1:
                return False
    # Every coprime base passed: p is Carmichael iff it is square-free
    # with at least three prime factors.
    factors = utils.factors_primers(p)
    return len(set(factors)) == len(factors) and len(factors) >= 3
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
from __future__ import unicode_literals
import datetime
import json
from django.conf import settings
from django.http import JsonResponse
from django.views.generic import View
from app.models import App, SecureInfo
from common.constants import ModeEnum, ModeNameDict
from common.exceptions import BadRequestException
from common.log import logger
from common.mixins.base import AppDeveloperRequiredMixin
from common.responses import FailJsonResponse, OKJsonResponse
from common.views.mako import JsonView, MakoTemplateView
from components.engine import get_event_log
from engine.deploy import (app_to_offline_task, app_to_online_task,
app_to_test_task)
from engine.models import BkServer, ThirdServer
from release.constants import (APP_DID_OPERATE_ID_LIST, OPERATE_CODE_LIST, EventStatusEnum,
OperateIDEnum, UserOperateTypeEnum, DeployPageTypeEnum)
from release.models import Record, Version
from release.utils import get_event_status, get_release_home_page_data
from release.utils import record_user_release_operate as _r
class HomeView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Release & deploy — home page."""
    template_name = 'release/home.html'

    def get_context_data(self, **kwargs):
        context = super(HomeView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        username = self.request.user.username
        context.update(get_release_home_page_data(app_code, username))
        return context
class RecordPageView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Release-records page shell (the list itself is loaded separately)."""
    template_name = 'release/record.html'

    def get_context_data(self, **kwargs):
        context = super(RecordPageView, self).get_context_data(**kwargs)
        context.update({"app_code": self.kwargs["app_code"], "tab": "record"})
        return context
class AppRecordView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Render the release-record list fragment for an app.

    operate_id mapping: 0 = all, 1 = deploy-to-test, 2 = go online,
    3 = take offline.
    """
    template_name = 'release/record_list.part'

    def get_context_data(self, **kwargs):
        context = super(AppRecordView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        operate_code = self.kwargs["operate_code"]
        if operate_code not in OPERATE_CODE_LIST:
            raise BadRequestException("operate_code is invalid")
        # Latest 100 records for this app and operation type.
        query = Record.objects.query_records(app_code, operate_code, size=100)
        record_list = []
        for _record in query:
            # Extra display information attached to the record.
            extra_data = _record.get_extra_data()
            if not extra_data:
                task_detail = ''
                extra_msg = '--'
            else:
                task_detail = extra_data.get("task_detail", "")
                if _record.operate_id in [OperateIDEnum.IN_OFFLINE.value, OperateIDEnum.TO_OFFLINE.value]:
                    # For offline operations, show which environment was taken down.
                    _extra_data_mode = extra_data.get("mode", ModeEnum.ALL.value)
                    _env = ModeNameDict.get(_extra_data_mode, "--")
                    extra_msg = "选择下架环境:{}".format(_env)
                else:
                    extra_msg = "--"
            # "Done" means the record has reached a terminal operation state.
            is_done = _record.operate_id in APP_DID_OPERATE_ID_LIST
            record_list.append({
                "operate_type": _record.get_operate_id_display(),
                "operate_user": _record.operate_user,
                "is_success": _record.is_success,
                "is_done": is_done,
                "operate_time": _record.operate_time_display,
                "extra_data": extra_msg,
                "detail": _record.message.replace('\n', '<br/>') if _record.message else "没有返回信息!",
                "task_detail": task_detail
            })
        context.update({
            "record_list": record_list,
            "app_code": app_code,
        })
        return context
class ReleaseVersion(AppDeveloperRequiredMixin, MakoTemplateView):
    """Version-info page: the app's historical release versions."""
    template_name = 'release/version.html'

    def get_context_data(self, **kwargs):
        context = super(ReleaseVersion, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        target_app = App.objects.get(code=app_code)
        context.update({
            "app_code": app_code,
            "version_list": Version.objects.get_version_list(target_app),
            "tab": "version",
        })
        return context
class DeployPageView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Render the deploy-page fragment for the requested page type."""

    def get_template_names(self):
        # Template is selected dynamically from the page type in the URL.
        page_type = self.kwargs["page_type"]
        tpl = "release/{}.part".format(page_type)
        return [tpl]

    def get_context_data(self, **kwargs):
        context = super(DeployPageView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        page_type = self.kwargs["page_type"]
        app = App.objects.get(code=app_code)
        vcs_info = SecureInfo.objects.get_vcs_info(app_code)
        vcs_url = vcs_info.get("VCS_PATH") if vcs_info else '--'
        data = {"app": app,
                "vcs_url": vcs_url,
                "app_code": app_code,
                }
        if page_type == DeployPageTypeEnum.TEST.value:
            # The test page additionally needs the RabbitMQ service status.
            data.update({"is_service_rabbitmq_active": ThirdServer.objects.is_rabbitmq_active()})
        context.update(data)
        return context
class ReleaseTestView(AppDeveloperRequiredMixin, View):
    """Deploy an app to the test environment ("提测")."""

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[测试部署]...", app_code)
        app = App.objects.get(code=app_code)
        # A test server must be registered and active before deploying.
        is_test_app_deployable = BkServer.objects.check_test_app_deployable()
        if not is_test_app_deployable:
            message = "当前没有可用的[测试服务器], 无法进行提测操作. 请到<a href='/engine/server/'> [开发者中心-服务器信息] </a> 注册并激活服务器"
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Only offline/dev/test/online states may be deployed to test.
        can_be_test, message = app.can_be_test()
        if not can_be_test:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Optionally toggle the celery / celery-beat services from form data.
        form_data = request.POST.get("form_data", None)
        if form_data:
            try:
                form_data = json.loads(request.POST.get("form_data"))
            except Exception as e:
                message = "参数错误!"
                logger.exception("[app:%s] %s error=%s", app_code, message, str(e))
                # BUGFIX: raise the exception rather than returning it — a
                # Django view must return an HttpResponse, and this class is
                # raised (not returned) elsewhere in this module.
                raise BadRequestException(message)
            is_use_celery = (form_data.get("is_use_celery") == "checked")
            is_use_celery_beat = (form_data.get("is_use_celery_beat") == "checked")
            try:
                app.trigger_celery(is_use_celery, is_use_celery_beat)
                logger.info("update app info [is_use_celery=%s, is_use_celery_beat=%s]",
                            app.is_use_celery, app.is_use_celery_beat)
            except Exception:
                logger.exception("Update is_use_celery/is_use_celery_beat fail!")
        # Submit the deploy-to-test task.
        ok, event_id, message = app_to_test_task(app_code, app, username)
        # Audit log of the user operation.
        extra_data = {"username": username, "is_success": ok, "event_id": event_id}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_TEST.value, extra_data)
        if ok:
            message = "测试部署事件提交成功!"
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return OKJsonResponse(message, event_id=event_id)
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return FailJsonResponse(message, event_id=event_id)
class ReleaseProductionView(AppDeveloperRequiredMixin, View):
    """Deploy an app to the production environment ("上线")."""

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[正式部署]...", app_code)
        app = App.objects.get(code=app_code)
        try:
            form_data = json.loads(request.POST.get("form_data", '{}'))
        except Exception as e:
            message = "参数错误!"
            logger.exception("[app:%s] %s error=%s", app_code, message, e)
            # BUGFIX: raise instead of return — a Django view must return an
            # HttpResponse; this exception class is raised elsewhere in the
            # module.
            raise BadRequestException(message)
        # A production server must be registered and active.
        is_prod_app_deployable = BkServer.objects.check_prod_app_deployable()
        if not is_prod_app_deployable:
            message = "当前没有可用的[正式服务器], 无法进行提测操作. 请到<a href='/engine/server/'> [开发者中心-服务器信息] </a> 注册并激活服务器"
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Only pass whitelisted front-end fields onward.
        is_tips = form_data.get("is_tips", 0)
        features = form_data.get("features", "")
        bugs = form_data.get("bugs", "")
        can_be_online, message = app.can_be_online()
        if not can_be_online:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Submit the go-online task.
        ok, event_id, message = app_to_online_task(app_code, app, username, is_tips, features, bugs)
        # Audit log of the user operation.
        extra_data = {"username": username, "form_data": form_data}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_ONLINE.value, extra_data)
        if not ok:
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return FailJsonResponse(message, event_id=event_id)
        message = "正式部署事件提交成功!"
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return OKJsonResponse(message, event_id=event_id)
class ReleaseOfflineView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Take an app offline ("下架").

    GET renders the offline home page; POST submits the offline task.
    """
    template_name = 'release/home.html'

    def get_context_data(self, **kwargs):
        context = super(ReleaseOfflineView, self).get_context_data(**kwargs)
        request = self.request
        app_code = self.kwargs["app_code"]
        username = request.user.username
        data = get_release_home_page_data(app_code, username, page="unrelease")
        context.update(data)
        return context

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[下架]...", app_code)
        try:
            form_data = json.loads(request.POST.get("form_data", '{}'))
        except Exception:
            message = "参数错误!"
            logger.exception("[app:%s] %s", app_code, message)
            # BUGFIX: raise instead of return — a Django view must return an
            # HttpResponse; this exception class is raised elsewhere in the
            # module.
            raise BadRequestException(message)
        # No server check for offline: an app that was deployed can always
        # be taken down.
        app = App.objects.get(code=app_code)
        # State check for the requested environment ("all" by default).
        mode = form_data.get("mode", "all")
        can_be_offline, message = app.can_be_offline(mode)
        if not can_be_offline:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Submit the offline task.
        app_old_state = app.state
        auth_token = app.auth_token
        ok, event_id = app_to_offline_task(app_code, auth_token, username, mode, app_old_state)
        # Audit log of the user operation.
        extra_data = {"username": username, "form_data": form_data}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_OFFLINE.value, extra_data)
        if ok:
            message = "下架事件提交成功!"
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return OKJsonResponse(message, event_id=event_id)
        message = "下架事件提交失败!"
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return FailJsonResponse(message, event_id=event_id)
class ApplicationDeleteView(AppDeveloperRequiredMixin, View):
    """Delete an application (only when its state permits deletion)."""

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        logger.info("[app:%s] 开始进行[删除]...", app_code)
        username = request.user.username
        app = App.objects.get(code=app_code)
        can_be_deleted, message = app.can_be_deleted(username)
        if not can_be_deleted:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        try:
            SecureInfo.objects.filter(app_code=app_code).delete()
            App.objects.filter(code=app_code).delete()
            # Mark the app's release records as belonging to a previous life
            # so that re-creating the same app code shows no stale records.
            Record.objects.filter(app_code=app_code).update(version='last')
        except Exception:
            message = "删除失败!"
            logger.exception("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Audit log of the user operation.
        extra_data = {"username": username}
        _r(app_code, username, UserOperateTypeEnum.APP_DELETE.value, extra_data)
        message = "删除成功!"
        logger.info("[app:%s] %s", app_code, message)
        return OKJsonResponse(message)
class EventStatusView(AppDeveloperRequiredMixin, View):
    """Poll the status of a deploy event (test / online / offline task).

    result codes: 0 = failed, 1 = succeeded, 2 = still running.
    """

    def get(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        event_id = request.GET.get("event_id", '')
        ok, message, data = get_event_status(event_id, app_code, request=request)
        return JsonResponse({
            "result": ok,
            "message": message,
            "data": data,
        })
class UnfinishedTaskView(AppDeveloperRequiredMixin, View):
    """Check the last 10 unfinished tasks against app engine and update them."""

    def get(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        app = App.objects.get(code=app_code)
        records = Record.objects.get_last_ongoing_records(app_code, size=10)
        for record in records:
            event_id = record.event_id
            event_ids = [event_id]
            if record.operate_id == OperateIDEnum.IN_OFFLINE.value:
                # An offline operation may span several engine events.
                try:
                    event_ids = json.loads(record.extra_data).get("event_ids", [])
                except Exception:
                    event_ids = [event_id]
            ok, data = get_event_log(app_code=app_code, auth_token=app.auth_token, event_ids=event_ids)
            if not ok:
                continue
            status = data.get("status")
            # If the event has been pending longer than the configured limit
            # without succeeding, treat it as timed out and mark it failed.
            expire_seconds = (datetime.datetime.now() - record.operate_time).total_seconds()
            if (expire_seconds > settings.HISTORY_EVENT_STATE_EXPIRE_SECONDS
                    and status != EventStatusEnum.SUCCESS.value):
                message = "check_unfinished_task, 事件超时({}s), 设置为失败".format(settings.HISTORY_EVENT_STATE_EXPIRE_SECONDS)  # noqa
                logger.info("[app:%s] %s, event_id:%s", app_code, message, event_id)
                record.message = message
                status = EventStatusEnum.FAILURE.value
            if status in (EventStatusEnum.SUCCESS.value, EventStatusEnum.FAILURE.value):
                record.is_success = (status == EventStatusEnum.SUCCESS.value)
                # Transition IN_* (running) states to the matching TO_*
                # (finished) states; other states pass through unchanged.
                to_operate_id = {OperateIDEnum.IN_TEST.value: OperateIDEnum.TO_TEST.value,
                                 OperateIDEnum.IN_ONLINE.value: OperateIDEnum.TO_ONLINE.value,
                                 OperateIDEnum.IN_OFFLINE.value: OperateIDEnum.TO_OFFLINE.value
                                 }.get(record.operate_id, record.operate_id)
                record.operate_id = to_operate_id
                record.save()
        return OKJsonResponse("success")
class LastReleaseRecordView(AppDeveloperRequiredMixin, JsonView):
    """Return the latest deploy record, so the page can resume status
    polling after a refresh."""

    def get_context_data(self, **kwargs):
        context = super(LastReleaseRecordView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        try:
            # The newest record in an "ongoing" state is the app's current one.
            record = Record.objects.get_app_newest_record(app_code)
            context.update({
                "result": True,
                "message": "success",
                "data": {
                    "record_id": record.id,
                    "event_id": record.event_id
                }
            })
            return context
        except Exception:
            message = "[app:{}] {}".format(app_code, "get_last_release_record 查询错误!")
            logger.exception(message)
            context.update({
                "result": False,
                "message": message,
                "data": {}
            })
            return context
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf8
from __future__ import print_function, division
from pyksc import dist
import glob
import numpy as np
import os
import plac
import sys
def main(tseries_fpath, in_folder):
    """Measure clustering agreement across cross-validation folds.

    For every pair of folds, each centroid of fold i is ranked against the
    centroids of fold j by shape distance; ids shared with the closest
    centroid's cluster count as agreement, ids shared with any other
    cluster as disagreement.
    NOTE: Python 2 code (uses `xrange`).
    """
    # First column of the time-series file is the item id.
    ids = []
    with open(tseries_fpath) as tseries_file:
        for l in tseries_file:
            ids.append(l.split()[0])
    ids = np.array(ids)
    folders = glob.glob(os.path.join(in_folder, 'fold-*/ksc'))
    num_folders = len(folders)
    agree = 0
    diff = 0
    for i in xrange(num_folders):
        base_i = os.path.dirname(folders[i])
        Ci = np.loadtxt(os.path.join(folders[i], 'cents.dat'))
        train_i = np.loadtxt(os.path.join(base_i, 'train.dat'), dtype='bool')
        assign_i = np.loadtxt(os.path.join(folders[i], 'assign.dat'))
        for j in xrange(i, num_folders):
            base_j = os.path.dirname(folders[j])
            Cj = np.loadtxt(os.path.join(folders[j], 'cents.dat'))
            # Shape distances between all centroid pairs with rolling
            # (shift-invariant) alignment; row k holds the distances from
            # centroid k of fold i to every centroid of fold j.
            dists = dist.dist_all(Ci, Cj, rolling=True)[0]
            argsrt = dists.argsort(axis=1)
            train_j = np.loadtxt(os.path.join(base_j, 'train.dat'), dtype='bool')
            assign_j = np.loadtxt(os.path.join(folders[j], 'assign.dat'))
            for k in xrange(argsrt.shape[0]):
                first = True
                for o in argsrt[k]:
                    ids_k = set(ids[train_i][assign_i == k])
                    ids_o = set(ids[train_j][assign_j == o])
                    n_inter = len(ids_k.intersection(ids_o))
                    if first:
                        # Closest centroid: shared ids count as agreement.
                        first = False
                        agree += n_inter
                    else:
                        diff += n_inter
    print('AgreedProb = ', agree / (agree + diff))
    print('DisagreeProb = ', diff / (agree + diff))


if __name__ == '__main__':
    sys.exit(plac.call(main))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Processor for CRGA models"""
import otbApplication
from decloud.core import system
import pyotb
import unittest
from decloud.production import crga_processor
from decloud.production.inference import inference
from .decloud_unittest import DecloudTest
import datetime
def get_timestamp(yyyymmdd):
    """Convert a 'YYYYMMDD' date string to its UTC POSIX timestamp, as a string."""
    parsed = datetime.datetime.strptime(yyyymmdd, '%Y%m%d')
    aware = parsed.replace(tzinfo=datetime.timezone.utc)
    return str(aware.timestamp())
class InferenceTest(DecloudTest):
    def test_inference_with_mosaic(self):
        """Reconstruct a cloud-free S2 image from pre-mosaicked S1/S2 inputs
        at t-1, t and t+1, then compare the result against the baseline."""
        # Logger
        system.basic_logging_init()
        # Baseline
        baseline_path = self.get_path("baseline/reconstructed_baseline_w_mosaic.tif")
        # Model
        model_path = self.get_path("models/crga_os2david_occitanie_pretrained")
        # Input sources: S1/S2 acquisitions before (tm1), at (t) and after
        # (tp1) the target date.
        s1_tm1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20200929t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20200930txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201001txxxxxx_from-10to3dB.tif')]
        s2_tm1 = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2_FRE_10m.tif'),
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        s1_t = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201011t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201013txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201012txxxxxx_from-10to3dB.tif')]
        s2_t = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        s1_tp1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201025txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201024txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201023t060008_from-10to3dB.tif')]
        s2_tp1 = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2_FRE_10m.tif'),
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        # Mosaic each source; nodata is 0 for S1, -10000 for S2.
        sources = {'s1_tm1': pyotb.Mosaic(il=s1_tm1, nodata=0),
                   's2_tm1': pyotb.Mosaic(il=s2_tm1, nodata=-10000),
                   's1_tp1': pyotb.Mosaic(il=s1_tp1, nodata=0),
                   's2_tp1': pyotb.Mosaic(il=s2_tp1, nodata=-10000),
                   's1_t': pyotb.Mosaic(il=s1_t, nodata=0),
                   's2_t': pyotb.Mosaic(il=s2_t, nodata=-10000),
                   'dem': self.get_path('baseline/PREPARE/DEM_PREPARE/T31TEJ.tif')}
        # Sources scales (the DEM is at half resolution).
        sources_scales = {"dem": 2}
        # Inference
        out_tensor = "s2_estim"
        outpath = '/tmp/reconstructed_w_mosaic.tif'
        processor = inference(sources=sources, sources_scales=sources_scales, pad=64,
                              ts=256, savedmodel_dir=model_path, out_tensor=out_tensor, out_nodatavalue=-10000,
                              out_pixeltype=otbApplication.ImagePixelType_int16,
                              nodatavalues={"s1_tm1": 0, "s2_tm1": -10000, "s1_tp1": 0,
                                            "s2_tp1": -10000, "s1_t": 0, "s2_t": -10000})
        processor.write(out=outpath, filename_extension="&streaming:type=tiled&streaming:sizemode=height&"
                                                        "streaming:sizevalue=256&"
                                                        "gdal:co:COMPRESS=DEFLATE&gdal:co:TILED=YES")
        # Just a dummy test
        self.assertTrue(system.file_exists(outpath))
        self.compare_images(outpath, baseline_path)
        self.compare_raster_metadata(outpath, baseline_path)
def test_inference_with_generic_preprocessor(self):
# Logger
system.basic_logging_init()
# Baseline
baseline_path = self.get_path("baseline/reconstructed_baseline_w_preprocessor.tif")
# Model
model_path = self.get_path("models/crga_os2david_occitanie_pretrained")
# Input sources
s1_tm1 = [
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20200929t060008_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20200930txxxxxx_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201001txxxxxx_from-10to3dB.tif')]
s2_tm1 = [
self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2/'),
self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2/')]
s1_t = [
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201011t060008_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201013txxxxxx_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201012txxxxxx_from-10to3dB.tif')]
s2_t = self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2')
s1_tp1 = [
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201025txxxxxx_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201024txxxxxx_from-10to3dB.tif'),
self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201023t060008_from-10to3dB.tif')]
s2_tp1 = [
self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2'),
self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2/')]
outpath = '/tmp/reconstructed_w_preprocessor.tif'
crga_processor.crga_processor(il_s1before=s1_tm1, il_s2before=s2_tm1,
il_s1=s1_t, in_s2=s2_t,
il_s1after=s1_tp1, il_s2after=s2_tp1,
dem=self.get_path('baseline/PREPARE/DEM_PREPARE/T31TEJ.tif'),
output=outpath, maxgap=48, savedmodel=model_path)
# Just a dummy test
self.assertTrue(system.file_exists(outpath))
self.compare_images(outpath, baseline_path)
self.compare_raster_metadata(outpath, baseline_path)
# Run the unittest test runner only when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# Copyright (c) 2018 Harold Wang, Ryan L. Collins, and the Talkowski Lab
# Distributed under terms of the MIT License (see LICENSE)
# Contact: Ryan L. Collins <rlcollins@g.harvard.edu>
# gnomAD credits: http://gnomad.broadinstitute.org/
"""
Helper script for workflow to calculates B-allele frequencies
per sample from an input VCF file
"""
#Import libraries
import argparse
from collections import deque
import numpy as np
import pandas as pd
import pysam
import boto3
import sys
#Function to load an S3-hosted VCF
def load_s3vcf(bucket, vcf_path, index_filename=None):
    """
    Open a VCF hosted on S3 through a presigned URL.

    Parameters
    ----------
    bucket : str
        Name of the S3 bucket.
    vcf_path : str
        Key of the VCF object within the bucket.
    index_filename : str, optional
        Index location forwarded to ``pysam.VariantFile``.

    Returns
    -------
    vcf : pysam.VariantFile
    """
    # A presigned URL (valid 24h) lets pysam stream the VCF over HTTP
    # without requiring local credentials at read time.
    presigned = boto3.client('s3').generate_presigned_url(
        ClientMethod='get_object',
        Params={'Bucket': bucket, 'Key': vcf_path},
        ExpiresIn=86400)
    return pysam.VariantFile(presigned, index_filename=index_filename)
#Function to filter VCF records
def filter_records(record):
    """
    Return `record` if it is informative for BAF genotyping, else None.

    A record is kept only when all of the following hold:
    1) Biallelic (exactly one REF and one ALT allele)
    2) SNP (both alleles of length 1)
    3) FILTER == PASS

    Parameters
    ----------
    record : pysam.VariantRecord

    Returns
    -------
    record : pysam.VariantRecord or None
    """
    # Restrict to biallelic sites. Using != 2 (rather than the original > 2)
    # also rejects records carrying a REF allele only, which would otherwise
    # crash the two-way unpack below with a ValueError.
    if len(record.alleles) != 2:
        return
    # Restrict to variants which PASS
    if record.filter.keys() != ['PASS']:
        return
    # Restrict to SNPs
    ref, alt = record.alleles
    if len(ref) > 1 or len(alt) > 1:
        return
    return record
#Function to calculate BAF per VCF record
def calc_BAF(record, samples=None):
    """
    Compute per-sample B-allele frequencies for one VCF record.

    Parameters
    ----------
    record : pysam.VariantRecord
    samples : list of str, optional
        Subset of samples in record to consider. Defaults to all samples.

    Returns
    -------
    bafs : np.ndarray of float, shape (1, n_samples)
        BAF at the site for each sample; NaN for samples that are not
        heterozygous (GT != 0/1) or lack sufficient depth.
    """
    def _is_het(sample):
        return record.samples[sample]['GT'] == (0, 1)
    def _calc_BAF(sample):
        if not _is_het(sample):
            return np.nan
        DP = record.samples[sample]['DP']
        AD = record.samples[sample]['AD']
        # SNP sites with >10 DP are included in the BAF profile.
        # `is not None` instead of `!= None` per PEP 8.
        if DP is not None and DP > 10:
            return AD[0] / DP
        else:
            return np.nan
    if samples is None:
        samples = record.samples.keys()
    # `np.float` was removed in NumPy 1.24; the builtin `float` is the
    # documented equivalent and yields the same float64 array.
    bafs = np.atleast_2d(np.array([_calc_BAF(sample) for sample in samples], dtype=float))
    return bafs
#Function to normalize BAF estimations
def normalize_bafs(bafs, max_std=0.2):
    """
    Normalize BAFs and exclude outlying sites.

    Normalize so the per-site median BAF == 0.5. Sites whose BAF standard
    deviation across samples exceeds `max_std` are dropped; sites observed
    in a single sample (SD is NA) are kept.

    Parameters
    ----------
    bafs : pd.DataFrame (n_sites x n_samples)
        NOTE: must be a DataFrame, not an ndarray as the original docstring
        claimed — `.loc`, `.isnull` and `.median` are used below.
    max_std : float, optional
        Maximum standard deviation permitted at a site.

    Returns
    -------
    normalized_bafs : pd.DataFrame (n_samples x n_sites)
    """
    # Work as n_samples x n_sites so that sites are columns.
    bafs = bafs.transpose()
    # Remove sites not informative in any sample (all NA).
    bafs = bafs.loc[:, ~bafs.isnull().all()]
    # Remove sites with excessive variance across samples;
    # permit sites with a single sample (SD is NA).
    std = bafs.std()
    bafs = bafs.loc[:, ((std < max_std) | std.isnull())]
    # Center each site's median BAF at 0.5.
    bafs = bafs - bafs.median() + 0.5
    return bafs
#Main function
def main():
    """Read a VCF from stdin and stream per-sample BAF rows to stdout.

    For each biallelic PASS SNP (see `filter_records`), computes the
    per-sample BAF (`calc_BAF`), normalizes it across samples
    (`normalize_bafs`) and appends tab-separated
    ``chrom pos baf sample`` rows to stdout.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # Kept for CLI backward compatibility; currently unused in this script.
    parser.add_argument('-b', '--batch', default='batch.txt')
    args = parser.parse_args()
    vcf = pysam.VariantFile(sys.stdin)
    # Sample names are constant for the whole VCF; read them once
    # instead of once per record.
    samples = list(vcf.header.samples)
    # pysam.VariantFile is an iterator; a for-loop replaces the original
    # while True / next() / StopIteration pattern.
    for record in vcf:
        record = filter_records(record)
        if record is None:
            continue
        # `np.int` was removed in NumPy 1.24; the builtin `int` is equivalent.
        site = np.array([record.pos], dtype=int)
        baf = pd.DataFrame(calc_BAF(record))
        baf.columns = samples
        baf = baf.set_index(site)
        baf = normalize_bafs(baf)
        # After normalize_bafs the rows are samples; name the index so
        # reset_index/melt produce a 'sample' column.
        baf.index.name = 'sample'
        baf = baf.reset_index()
        bf = pd.melt(baf, id_vars=['sample'], var_name='pos', value_name='baf')
        # .copy() avoids pandas SettingWithCopy warnings on the assignments below.
        called_bafs = bf.loc[~bf.baf.isnull()].copy()
        called_bafs['chrom'] = record.chrom
        called_bafs['pos'] = called_bafs.pos.astype(int)
        cols = 'chrom pos baf sample'.split()
        called_bafs = called_bafs[cols]
        if not called_bafs.empty:
            called_bafs[cols].to_csv(sys.stdout, index=False, mode='a', header=False, sep='\t')
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
nilq/baby-python
|
python
|
"""Lib module for daq sub units"""
pass
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
r"""Aggregate, create, and save 1D and 2D histograms and binned plots.
"""
# Import the implementation submodules of this package.
from . import agg_plot
from . import hist1d
from . import hist2d
# Convenience aliases so callers can use e.g. `package.Hist1D` directly
# instead of `package.hist1d.Hist1D`.
AggPlot = agg_plot.AggPlot
Hist1D = hist1d.Hist1D
Hist2D = hist2d.Hist2D
# import pdb # noqa: F401
# import logging
# import numpy as np
# import pandas as pd
# import matplotlib as mpl
# from types import FunctionType
# from numbers import Number
# from matplotlib import pyplot as plt
# from abc import abstractproperty, abstractmethod
# from collections import namedtuple
# from scipy.signal import savgol_filter
# try:
# from astropy.stats import knuth_bin_width
# except ModuleNotFoundError:
# pass
# from . import tools
# from . import base
# from . import labels as labels_module
# # import os
# # import psutil
# # def log_mem_usage():
# # usage = psutil.Process(os.getpid()).memory_info()
# # usage = "\n".join(
# # ["{} {:.3f} GB".format(k, v * 1e-9) for k, v in usage._asdict().items()]
# # )
# # logging.getLogger("main").warning("Memory usage\n%s", usage)
# class AggPlot(base.Base):
# r"""ABC for aggregating data in 1D and 2D.
# Properties
# ----------
# logger, data, bins, clip, cut, logx, labels.x, labels.y, clim, agg_axes
# Methods
# -------
# set_<>:
# Set property <>.
# calc_bins, make_cut, agg, clip_data, make_plot
# Abstract Properties
# -------------------
# path, _gb_axes
# Abstract Methods
# ----------------
# __init__, set_labels.y, set_path, set_data, _format_axis, make_plot
# """
# @property
# def edges(self):
# return {k: v.left.union(v.right) for k, v in self.intervals.items()}
# @property
# def categoricals(self):
# return dict(self._categoricals)
# @property
# def intervals(self):
# # return dict(self._intervals)
# return {k: pd.IntervalIndex(v) for k, v in self.categoricals.items()}
# @property
# def cut(self):
# return self._cut
# @property
# def clim(self):
# return self._clim
# @property
# def agg_axes(self):
# r"""The axis to aggregate into, e.g. the z variable in an (x, y, z) heatmap.
# """
# tko = [c for c in self.data.columns if c not in self._gb_axes]
# assert len(tko) == 1
# tko = tko[0]
# return tko
# @property
# def joint(self):
# r"""A combination of the categorical and continuous data for use in `Groupby`.
# """
# # cut = self.cut
# # tko = self.agg_axes
# # self.logger.debug(f"Joining data ({tko}) with cat ({cut.columns.values})")
# # other = self.data.loc[cut.index, tko]
# # # joint = pd.concat([cut, other.to_frame(name=tko)], axis=1, sort=True)
# # joint = cut.copy(deep=True)
# # joint.loc[:, tko] = other
# # joint.sort_index(axis=1, inplace=True)
# # return joint
# cut = self.cut
# tk_target = self.agg_axes
# target = self.data.loc[cut.index, tk_target]
# mi = pd.MultiIndex.from_frame(cut)
# target.index = mi
# return target
# @property
# def grouped(self):
# r"""`joint.groupby` with appropriate axes passes.
# """
# # tko = self.agg_axes
# # gb = self.data.loc[:, tko].groupby([v for k, v in self.cut.items()], observed=False)
# # gb = self.joint.groupby(list(self._gb_axes))
# # cut = self.cut
# # tk_target = self.agg_axes
# # target = self.data.loc[cut.index, tk_target]
# # mi = pd.MultiIndex.from_frame(cut)
# # target.index = mi
# target = self.joint
# gb_axes = list(self._gb_axes)
# gb = target.groupby(gb_axes, axis=0, observed=True)
# # agg_axes = self.agg_axes
# # gb = (
# # self.joint.set_index(gb_axes)
# # .loc[:, agg_axes]
# # .groupby(gb_axes, axis=0, observed=False)
# # )
# return gb
# @property
# def axnorm(self):
# r"""Data normalization in plot.
# Not `mpl.colors.Normalize` instance. That is passed as a `kwarg` to
# `make_plot`.
# """
# return self._axnorm
# # Old version that cuts at percentiles.
# @staticmethod
# def clip_data(data, clip):
# q0 = 0.0001
# q1 = 0.9999
# pct = data.quantile([q0, q1])
# lo = pct.loc[q0]
# up = pct.loc[q1]
# if isinstance(data, pd.Series):
# ax = 0
# elif isinstance(data, pd.DataFrame):
# ax = 1
# else:
# raise TypeError("Unexpected object %s" % type(data))
# if isinstance(clip, str) and clip.lower()[0] == "l":
# data = data.clip_lower(lo, axis=ax)
# elif isinstance(clip, str) and clip.lower()[0] == "u":
# data = data.clip_upper(up, axis=ax)
# else:
# data = data.clip(lo, up, axis=ax)
# return data
# # New version that uses binning to cut.
# # @staticmethod
# # def clip_data(data, bins, clip):
# # q0 = 0.001
# # q1 = 0.999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# # lo = bins.iloc[0]
# # up = bins.iloc[-1]
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up)
# # else:
# # data = data.clip(lo, up)
# # return data
# def set_clim(self, lower=None, upper=None):
# f"""Set the minimum (lower) and maximum (upper) alowed number of
# counts/bin to return aftter calling :py:meth:`{self.__class__.__name__}.add()`.
# """
# assert isinstance(lower, Number) or lower is None
# assert isinstance(upper, Number) or upper is None
# self._clim = (lower, upper)
# def calc_bins_intervals(self, nbins=101, precision=None):
# r"""
# Calculate histogram bins.
# nbins: int, str, array-like
# If int, use np.histogram to calculate the bin edges.
# If str and nbins == "knuth", use `astropy.stats.knuth_bin_width`
# to calculate optimal bin widths.
# If str and nbins != "knuth", use `np.histogram(data, bins=nbins)`
# to calculate bins.
# If array-like, treat as bins.
# precision: int or None
# Precision at which to store intervals. If None, default to 3.
# """
# data = self.data
# bins = {}
# intervals = {}
# if precision is None:
# precision = 5
# gb_axes = self._gb_axes
# if isinstance(nbins, (str, int)) or (
# hasattr(nbins, "__iter__") and len(nbins) != len(gb_axes)
# ):
# # Single paramter for `nbins`.
# nbins = {k: nbins for k in gb_axes}
# elif len(nbins) == len(gb_axes):
# # Passed one bin spec per axis
# nbins = {k: v for k, v in zip(gb_axes, nbins)}
# else:
# msg = f"Unrecognized `nbins`\ntype: {type(nbins)}\n bins:{nbins}"
# raise ValueError(msg)
# for k in self._gb_axes:
# b = nbins[k]
# # Numpy and Astropy don't like NaNs when calculating bins.
# # Infinities in bins (typically from log10(0)) also create problems.
# d = data.loc[:, k].replace([-np.inf, np.inf], np.nan).dropna()
# if isinstance(b, str):
# b = b.lower()
# if isinstance(b, str) and b == "knuth":
# try:
# assert knuth_bin_width
# except NameError:
# raise NameError("Astropy is unavailable.")
# dx, b = knuth_bin_width(d, return_bins=True)
# else:
# try:
# b = np.histogram_bin_edges(d, b)
# except MemoryError:
# # Clip the extremely large values and extremely small outliers.
# lo, up = d.quantile([0.0005, 0.9995])
# b = np.histogram_bin_edges(d.clip(lo, up), b)
# except AttributeError:
# c, b = np.histogram(d, b)
# assert np.unique(b).size == b.size
# try:
# assert not np.isnan(b).any()
# except TypeError:
# assert not b.isna().any()
# b = b.round(precision)
# zipped = zip(b[:-1], b[1:])
# i = [pd.Interval(*b0b1, closed="right") for b0b1 in zipped]
# bins[k] = b
# # intervals[k] = pd.IntervalIndex(i)
# intervals[k] = pd.CategoricalIndex(i)
# bins = tuple(bins.items())
# intervals = tuple(intervals.items())
# # self._intervals = intervals
# self._categoricals = intervals
# def make_cut(self):
# r"""Calculate the `Categorical` quantities for the aggregation axes.
# """
# intervals = self.intervals
# data = self.data
# cut = {}
# for k in self._gb_axes:
# d = data.loc[:, k]
# i = intervals[k]
# if self.clip:
# d = self.clip_data(d, self.clip)
# c = pd.cut(d, i)
# cut[k] = c
# cut = pd.DataFrame.from_dict(cut, orient="columns")
# self._cut = cut
# def _agg_runner(self, cut, tko, gb, fcn, **kwargs):
# r"""Refactored out the actual doing of the aggregation so that :py:class:`OrbitPlot`
# can aggregate (Inbound, Outbound, and Both).
# """
# self.logger.debug(f"aggregating {tko} data along {cut.columns.values}")
# if fcn is None:
# other = self.data.loc[cut.index, tko]
# if other.dropna().unique().size == 1:
# fcn = "count"
# else:
# fcn = "mean"
# agg = gb.agg(fcn, **kwargs) # .loc[:, tko]
# c0, c1 = self.clim
# if c0 is not None or c1 is not None:
# cnt = gb.agg("count") # .loc[:, tko]
# tk = pd.Series(True, index=agg.index)
# # tk = pd.DataFrame(True,
# # index=agg.index,
# # columns=agg.columns
# # )
# if c0 is not None:
# tk = tk & (cnt >= c0)
# if c1 is not None:
# tk = tk & (cnt <= c1)
# agg = agg.where(tk)
# # # Using `observed=False` in `self.grouped` raised a TypeError because mixed Categoricals and np.nans. (20200229)
# # # Ensure all bins are represented in the data. (20190605)
# # # for k, v in self.intervals.items():
# # for k, v in self.categoricals.items():
# # # if > 1 intervals, pass level. Otherwise, don't as this raises a NotImplementedError. (20190619)
# # agg = agg.reindex(index=v, level=k if agg.index.nlevels > 1 else None)
# return agg
# def _agg_reindexer(self, agg):
# # Using `observed=False` in `self.grouped` raised a TypeError because mixed Categoricals and np.nans. (20200229)
# # Ensure all bins are represented in the data. (20190605)
# # for k, v in self.intervals.items():
# for k, v in self.categoricals.items():
# # if > 1 intervals, pass level. Otherwise, don't as this raises a NotImplementedError. (20190619)
# agg = agg.reindex(index=v, level=k if agg.index.nlevels > 1 else None)
# return agg
# def agg(self, fcn=None, **kwargs):
# r"""Perform the aggregation along the agg axes.
# If either of the count limits specified in `clim` are not None, apply them.
# `fcn` allows you to specify a specific function for aggregation. Otherwise,
# automatically choose "count" or "mean" based on the uniqueness of the aggregated
# values.
# """
# cut = self.cut
# tko = self.agg_axes
# self.logger.info(
# f"Starting {self.__class__.__name__!s} aggregation of ({tko}) in ({cut.columns.values})\n%s",
# "\n".join([f"{k!s}: {v!s}" for k, v in self.labels._asdict().items()]),
# )
# gb = self.grouped
# agg = self._agg_runner(cut, tko, gb, fcn, **kwargs)
# return agg
# def get_plotted_data_boolean_series(self):
# f"""A boolean `pd.Series` identifing each measurement that is plotted.
# Note: The Series is indexed identically to the data stored in the :py:class:`{self.__class__.__name__}`.
# To align with another index, you may want to use:
# tk = {self.__class__.__name__}.get_plotted_data_boolean_series()
# idx = tk.replace(False, np.nan).dropna().index
# """
# agg = self.agg().dropna()
# cut = self.cut
# tk = pd.Series(True, index=cut.index)
# for k, v in cut.items():
# chk = agg.index.get_level_values(k)
# # Use the codes directly because the categoricals are
# # failing with some Pandas numpy ufunc use. (20200611)
# chk = pd.CategoricalIndex(chk)
# tk_ax = v.cat.codes.isin(chk.codes)
# tk = tk & tk_ax
# self.logger.info(
# f"Taking {tk.sum()!s} ({100*tk.mean():.1f}%) {self.__class__.__name__} spectra"
# )
# return tk
# # Old version that cuts at percentiles.
# # @staticmethod
# # def clip_data(data, clip):
# # q0 = 0.0001
# # q1 = 0.9999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# #
# # if isinstance(data, pd.Series):
# # ax = 0
# # elif isinstance(data, pd.DataFrame):
# # ax = 1
# # else:
# # raise TypeError("Unexpected object %s" % type(data))
# #
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo, axis=ax)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up, axis=ax)
# # else:
# # data = data.clip(lo, up, axis=ax)
# # return data
# #
# # New version that uses binning to cut.
# # @staticmethod
# # def clip_data(data, bins, clip):
# # q0 = 0.001
# # q1 = 0.999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# # lo = bins.iloc[0]
# # up = bins.iloc[-1]
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up)
# # else:
# # data = data.clip(lo, up)
# # return data
# @abstractproperty
# def _gb_axes(self):
# r"""The axes or columns over which the `groupby` aggregation takes place.
# 1D cases aggregate over `x`. 2D cases aggregate over `x` and `y`.
# """
# pass
# @abstractmethod
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# """
# pass
# class Hist1D(AggPlot):
# r"""Create 1D plot of `x`, optionally aggregating `y` in bins of `x`.
# Properties
# ----------
# _gb_axes, path
# Methods
# -------
# set_path, set_data, agg, _format_axis, make_plot
# """
# def __init__(
# self,
# x,
# y=None,
# logx=False,
# axnorm=None,
# clip_data=False,
# nbins=101,
# bin_precision=None,
# ):
# r"""
# Parameters
# ----------
# x: pd.Series
# Data from which to create bins.
# y: pd.Series, None
# If not None, the values to aggregate in bins of `x`. If None,
# aggregate counts of `x`.
# logx: bool
# If True, compute bins in log-space.
# axnorm: None, str
# Normalize the histogram.
# key normalization
# --- -------------
# t total
# d density
# clip_data: bool
# If True, remove the extreme values at 0.001 and 0.999 percentiles
# before calculating bins or aggregating.
# nbins: int, str, array-like
# Dispatched to `np.histogram_bin_edges` or `pd.cut` depending on
# input type and value.
# """
# super(Hist1D, self).__init__()
# self.set_log(x=logx)
# self.set_axnorm(axnorm)
# self.set_data(x, y, clip_data)
# self.set_labels(x="x", y=labels_module.Count(norm=axnorm) if y is None else "y")
# self.calc_bins_intervals(nbins=nbins, precision=bin_precision)
# self.make_cut()
# self.set_clim(None, None)
# @property
# def _gb_axes(self):
# return ("x",)
# def set_path(self, new, add_scale=True):
# path, x, y, z, scale_info = super(Hist1D, self).set_path(new, add_scale)
# if new == "auto":
# path = path / x / y
# else:
# assert x is None
# assert y is None
# if add_scale:
# assert scale_info is not None
# scale_info = scale_info[0]
# path = path / scale_info
# self._path = path
# set_path.__doc__ = base.Base.set_path.__doc__
# def set_data(self, x, y, clip):
# data = pd.DataFrame({"x": np.log10(np.abs(x)) if self.log.x else x})
# if y is None:
# y = pd.Series(1, index=x.index)
# data.loc[:, "y"] = y
# self._data = data
# self._clip = clip
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# ===== =============================================================
# key description
# ===== =============================================================
# d Density normalize
# t Total normalize
# ===== =============================================================
# """
# if new is not None:
# new = new.lower()[0]
# assert new == "d"
# ylbl = self.labels.y
# if isinstance(ylbl, labels_module.Count):
# ylbl.set_axnorm(new)
# ylbl.build_label()
# self._axnorm = new
# def construct_cdf(self, only_plotted=True):
# r"""Convert the obsered measuremets.
# Returns
# -------
# cdf: pd.DataFrame
# "x" column is the value of the measuremnt.
# "position" column is the normalized position in the cdf.
# To plot the cdf:
# cdf.plot(x="x", y="cdf")
# """
# data = self.data
# if not data.loc[:, "y"].unique().size <= 2:
# raise ValueError("Only able to convert data to a cdf if it is a histogram.")
# tk = self.cut.loc[:, "x"].notna()
# if only_plotted:
# tk = tk & self.get_plotted_data_boolean_series()
# x = data.loc[tk, "x"]
# cdf = x.sort_values().reset_index(drop=True)
# if self.log.x:
# cdf = 10.0 ** cdf
# cdf = cdf.to_frame()
# cdf.loc[:, "position"] = cdf.index / cdf.index.max()
# return cdf
# def _axis_normalizer(self, agg):
# r"""Takes care of row, column, total, and density normaliation.
# Written basically as `staticmethod` so that can be called in `OrbitHist2D`, but
# as actual method with `self` passed so we have access to `self.log` for density
# normalization.
# """
# axnorm = self.axnorm
# if axnorm is None:
# pass
# elif axnorm == "d":
# n = agg.sum()
# dx = pd.Series(pd.IntervalIndex(agg.index).length, index=agg.index)
# if self.log.x:
# dx = 10.0 ** dx
# agg = agg.divide(dx.multiply(n))
# elif axnorm == "t":
# agg = agg.divide(agg.max())
# else:
# raise ValueError("Unrecognized axnorm: %s" % axnorm)
# return agg
# def agg(self, **kwargs):
# if self.axnorm == "d":
# fcn = kwargs.get("fcn", None)
# if (fcn != "count") & (fcn is not None):
# raise ValueError("Unable to calculate a PDF with non-count aggregation")
# agg = super(Hist1D, self).agg(**kwargs)
# agg = self._axis_normalizer(agg)
# agg = self._agg_reindexer(agg)
# return agg
# def set_labels(self, **kwargs):
# if "z" in kwargs:
# raise ValueError(r"{} doesn't have a z-label".format(self))
# y = kwargs.pop("y", self.labels.y)
# if isinstance(y, labels_module.Count):
# y.set_axnorm(self.axnorm)
# y.build_label()
# super(Hist1D, self).set_labels(y=y, **kwargs)
# def make_plot(self, ax=None, fcn=None, **kwargs):
# f"""Make a plot.
# Parameters
# ----------
# ax: None, mpl.axis.Axis
# If `None`, create a subplot axis.
# fcn: None, str, aggregative function, or 2-tuple of strings
# Passed directly to `{self.__class__.__name__}.agg`. If
# None, use the default aggregation function. If str or a
# single aggregative function, use it.
# kwargs:
# Passed directly to `ax.plot`.
# """
# agg = self.agg(fcn=fcn)
# x = pd.IntervalIndex(agg.index).mid
# if fcn is None or isinstance(fcn, str):
# y = agg
# dy = None
# elif len(fcn) == 2:
# f0, f1 = fcn
# if isinstance(f0, FunctionType):
# f0 = f0.__name__
# if isinstance(f1, FunctionType):
# f1 = f1.__name__
# y = agg.loc[:, f0]
# dy = agg.loc[:, f1]
# else:
# raise ValueError(f"Unrecognized `fcn` ({fcn})")
# if ax is None:
# fig, ax = plt.subplots()
# if self.log.x:
# x = 10.0 ** x
# drawstyle = kwargs.pop("drawstyle", "steps-mid")
# pl, cl, bl = ax.errorbar(x, y, yerr=dy, drawstyle=drawstyle, **kwargs)
# self._format_axis(ax)
# return ax
# class Hist2D(base.Plot2D, AggPlot):
# r"""Create a 2D histogram with an optional z-value using an equal number
# of bins along the x and y axis.
# Parameters
# ----------
# x, y: pd.Series
# x and y data to aggregate
# z: None, pd.Series
# If not None, the z-value to aggregate.
# axnorm: str
# Normalize the histogram.
# key normalization
# --- -------------
# c column
# r row
# t total
# d density
# logx, logy: bool
# If True, log10 scale the axis.
# Properties
# ----------
# data:
# bins:
# cut:
# axnorm:
# log<x,y>:
# <x,y,z>label:
# path: None, Path
# Methods
# -------
# calc_bins:
# calculate the x, y bins.
# make_cut:
# Utilize the calculated bins to convert (x, y) into pd.Categoral
# or pd.Interval values used in aggregation.
# set_[x,y,z]label:
# Set the x, y, or z label.
# agg:
# Aggregate the data in the bins.
# If z-value is None, count the number of points in each bin.
# If z-value is not None, calculate the mean for each bin.
# make_plot:
# Make a 2D plot of the data with an optional color bar.
# """
# def __init__(
# self,
# x,
# y,
# z=None,
# axnorm=None,
# logx=False,
# logy=False,
# clip_data=False,
# nbins=101,
# bin_precision=None,
# ):
# super(Hist2D, self).__init__()
# self.set_log(x=logx, y=logy)
# self.set_data(x, y, z, clip_data)
# self.set_labels(
# x="x", y="y", z=labels_module.Count(norm=axnorm) if z is None else "z"
# )
# self.set_axnorm(axnorm)
# self.calc_bins_intervals(nbins=nbins, precision=bin_precision)
# self.make_cut()
# self.set_clim(None, None)
# @property
# def _gb_axes(self):
# return ("x", "y")
# def _maybe_convert_to_log_scale(self, x, y):
# if self.log.x:
# x = 10.0 ** x
# if self.log.y:
# y = 10.0 ** y
# return x, y
# # def set_path(self, new, add_scale=True):
# # # Bug: path doesn't auto-set log information.
# # path, x, y, z, scale_info = super(Hist2D, self).set_path(new, add_scale)
# # if new == "auto":
# # path = path / x / y / z
# # else:
# # assert x is None
# # assert y is None
# # assert z is None
# # if add_scale:
# # assert scale_info is not None
# # scale_info = "-".join(scale_info)
# # if bool(len(path.parts)) and path.parts[-1].endswith("norm"):
# # # Insert <norm> at end of path so scale order is (x, y, z).
# # path = path.parts
# # path = path[:-1] + (scale_info + "-" + path[-1],)
# # path = Path(*path)
# # else:
# # path = path / scale_info
# # self._path = path
# # set_path.__doc__ = base.Base.set_path.__doc__
# def set_labels(self, **kwargs):
# z = kwargs.pop("z", self.labels.z)
# if isinstance(z, labels_module.Count):
# try:
# z.set_axnorm(self.axnorm)
# except AttributeError:
# pass
# z.build_label()
# super(Hist2D, self).set_labels(z=z, **kwargs)
# # def set_data(self, x, y, z, clip):
# # data = pd.DataFrame(
# # {
# # "x": np.log10(np.abs(x)) if self.log.x else x,
# # "y": np.log10(np.abs(y)) if self.log.y else y,
# # }
# # )
# #
# #
# # if z is None:
# # z = pd.Series(1, index=x.index)
# #
# # data.loc[:, "z"] = z
# # data = data.dropna()
# # if not data.shape[0]:
# # raise ValueError(
# # "You can't build a %s with data that is exclusively NaNs"
# # % self.__class__.__name__
# # )
# #
# # self._data = data
# # self._clip = clip
# def set_data(self, x, y, z, clip):
# super(Hist2D, self).set_data(x, y, z, clip)
# data = self.data
# if self.log.x:
# data.loc[:, "x"] = np.log10(np.abs(data.loc[:, "x"]))
# if self.log.y:
# data.loc[:, "y"] = np.log10(np.abs(data.loc[:, "y"]))
# self._data = data
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# ===== =============================================================
# key description
# ===== =============================================================
# c Column normalize
# d Density normalize
# r Row normalize
# t Total normalize
# ===== =============================================================
# """
# if new is not None:
# new = new.lower()[0]
# assert new in ("c", "r", "t", "d")
# zlbl = self.labels.z
# if isinstance(zlbl, labels_module.Count):
# zlbl.set_axnorm(new)
# zlbl.build_label()
# self._axnorm = new
# def _axis_normalizer(self, agg):
# r"""Takes care of row, column, total, and density normaliation.
# Written basically as `staticmethod` so that can be called in `OrbitHist2D`, but
# as actual method with `self` passed so we have access to `self.log` for density
# normalization.
# """
# axnorm = self.axnorm
# if axnorm is None:
# pass
# elif axnorm == "c":
# agg = agg.divide(agg.max(level="x"), level="x")
# elif axnorm == "r":
# agg = agg.divide(agg.max(level="y"), level="y")
# elif axnorm == "t":
# agg = agg.divide(agg.max())
# elif axnorm == "d":
# N = agg.sum().sum()
# x = pd.IntervalIndex(agg.index.get_level_values("x").unique())
# y = pd.IntervalIndex(agg.index.get_level_values("y").unique())
# dx = pd.Series(
# x.length, index=x
# ) # dx = pd.Series(x.right - x.left, index=x)
# dy = pd.Series(
# y.length, index=y
# ) # dy = pd.Series(y.right - y.left, index=y)
# if self.log.x:
# dx = 10.0 ** dx
# if self.log.y:
# dy = 10.0 ** dy
# agg = agg.divide(dx, level="x").divide(dy, level="y").divide(N)
# elif hasattr(axnorm, "__iter__"):
# kind, fcn = axnorm
# if kind == "c":
# agg = agg.divide(agg.agg(fcn, level="x"), level="x")
# elif kind == "r":
# agg = agg.divide(agg.agg(fcn, level="y"), level="y")
# else:
# raise ValueError(f"Unrecognized axnorm with function ({kind}, {fcn})")
# else:
# raise ValueError(f"Unrecognized axnorm ({axnorm})")
# return agg
# def agg(self, **kwargs):
# agg = super(Hist2D, self).agg(**kwargs)
# agg = self._axis_normalizer(agg)
# agg = self._agg_reindexer(agg)
# return agg
# def _make_cbar(self, mappable, **kwargs):
# ticks = kwargs.pop(
# "ticks",
# mpl.ticker.MultipleLocator(0.1) if self.axnorm in ("c", "r") else None,
# )
# return super(Hist2D, self)._make_cbar(mappable, ticks=ticks, **kwargs)
# def _limit_color_norm(self, norm):
# if self.axnorm in ("c", "r"):
# # Don't limit us to (1%, 99%) interval.
# return None
# pct = self.data.loc[:, "z"].quantile([0.01, 0.99])
# v0 = pct.loc[0.01]
# v1 = pct.loc[0.99]
# if norm.vmin is None:
# norm.vmin = v0
# if norm.vmax is None:
# norm.vmax = v1
# norm.clip = True
# def make_plot(
# self,
# ax=None,
# cbar=True,
# limit_color_norm=False,
# cbar_kwargs=None,
# fcn=None,
# alpha_fcn=None,
# **kwargs,
# ):
# r"""
# Make a 2D plot on `ax` using `ax.pcolormesh`.
# Paremeters
# ----------
# ax: mpl.axes.Axes, None
# If None, create an `Axes` instance from `plt.subplots`.
# cbar: bool
# If True, create color bar with `labels.z`.
# limit_color_norm: bool
# If True, limit the color range to 0.001 and 0.999 percentile range
# of the z-value, count or otherwise.
# cbar_kwargs: dict, None
# If not None, kwargs passed to `self._make_cbar`.
# fcn: FunctionType, None
# Aggregation function. If None, automatically select in :py:meth:`agg`.
# alpha_fcn: None, str
# If not None, the function used to aggregate the data for setting alpha
# value.
# kwargs:
# Passed to `ax.pcolormesh`.
# If row or column normalized data, `norm` defaults to `mpl.colors.Normalize(0, 1)`.
# Returns
# -------
# ax: mpl.axes.Axes
# Axes upon which plot was made.
# cbar_or_mappable: colorbar.Colorbar, mpl.collections.QuadMesh
#         If `cbar` is True, return the colorbar. Otherwise, return the `QuadMesh` used
# to create the colorbar.
# """
# agg = self.agg(fcn=fcn).unstack("x")
# x = self.edges["x"]
# y = self.edges["y"]
# # assert x.size == agg.shape[1] + 1
# # assert y.size == agg.shape[0] + 1
# # HACK: Works around `gb.agg(observed=False)` pandas bug. (GH32381)
# if x.size != agg.shape[1] + 1:
# # agg = agg.reindex(columns=self.intervals["x"])
# agg = agg.reindex(columns=self.categoricals["x"])
# if y.size != agg.shape[0] + 1:
# # agg = agg.reindex(index=self.intervals["y"])
# agg = agg.reindex(index=self.categoricals["y"])
# if ax is None:
# fig, ax = plt.subplots()
# # if self.log.x:
# # x = 10.0 ** x
# # if self.log.y:
# # y = 10.0 ** y
# x, y = self._maybe_convert_to_log_scale(x, y)
# axnorm = self.axnorm
# norm = kwargs.pop(
# "norm",
# mpl.colors.BoundaryNorm(np.linspace(0, 1, 11), 256, clip=True)
# if axnorm in ("c", "r")
# else None,
# )
# if limit_color_norm:
# self._limit_color_norm(norm)
# C = np.ma.masked_invalid(agg.values)
# XX, YY = np.meshgrid(x, y)
# pc = ax.pcolormesh(XX, YY, C, norm=norm, **kwargs)
# cbar_or_mappable = pc
# if cbar:
# if cbar_kwargs is None:
# cbar_kwargs = dict()
# if "cax" not in cbar_kwargs.keys() and "ax" not in cbar_kwargs.keys():
# cbar_kwargs["ax"] = ax
# # Pass `norm` to `self._make_cbar` so that we can choose the ticks to use.
# cbar = self._make_cbar(pc, norm=norm, **cbar_kwargs)
# cbar_or_mappable = cbar
# self._format_axis(ax)
# color_plot = self.data.loc[:, self.agg_axes].dropna().unique().size > 1
# if (alpha_fcn is not None) and color_plot:
# self.logger.warning(
# "Make sure you verify alpha actually set. I don't yet trust this."
# )
# alpha_agg = self.agg(fcn=alpha_fcn)
# alpha_agg = alpha_agg.unstack("x")
# alpha_agg = np.ma.masked_invalid(alpha_agg.values.ravel())
# # Feature scale then invert so smallest STD
# # is most opaque.
# alpha = 1 - mpl.colors.Normalize()(alpha_agg)
# self.logger.warning("Scaling alpha filter as alpha**0.25")
# alpha = alpha ** 0.25
# # Set masked values to zero. Otherwise, masked
# # values are rendered as black.
# alpha = alpha.filled(0)
# # Must draw to initialize `facecolor`s
# plt.draw()
# # Remove `pc` from axis so we can redraw with std
# # pc.remove()
# colors = pc.get_facecolors()
# colors[:, 3] = alpha
# pc.set_facecolor(colors)
# # ax.add_collection(pc)
# elif alpha_fcn is not None:
# self.logger.warning("Ignoring `alpha_fcn` because plotting counts")
# return ax, cbar_or_mappable
# def get_border(self):
# r"""Get the top and bottom edges of the plot.
# Returns
# -------
# border: namedtuple
# Contains "top" and "bottom" fields, each with a :py:class:`pd.Series`.
# """
# Border = namedtuple("Border", "top,bottom")
# top = {}
# bottom = {}
# for x, v in self.agg().unstack("x").items():
# yt = v.last_valid_index()
# if yt is not None:
# z = v.loc[yt]
# top[(yt, x)] = z
# yb = v.first_valid_index()
# if yb is not None:
# z = v.loc[yb]
# bottom[(yb, x)] = z
# top = pd.Series(top)
# bottom = pd.Series(bottom)
# for edge in (top, bottom):
# edge.index.names = ["y", "x"]
# border = Border(top, bottom)
# return border
# def _plot_one_edge(
# self,
# ax,
# edge,
# smooth=False,
# sg_kwargs=None,
# xlim=(None, None),
# ylim=(None, None),
# **kwargs,
# ):
# x = edge.index.get_level_values("x").mid
# y = edge.index.get_level_values("y").mid
# if sg_kwargs is None:
# sg_kwargs = dict()
# if smooth:
# wlength = sg_kwargs.pop("window_length", int(np.floor(y.shape[0] / 10)))
# polyorder = sg_kwargs.pop("polyorder", 3)
# if not wlength % 2:
# wlength -= 1
# y = savgol_filter(y, wlength, polyorder, **sg_kwargs)
# if self.log.x:
# x = 10.0 ** x
# if self.log.y:
# y = 10.0 ** y
# x0, x1 = xlim
# y0, y1 = ylim
# tk = np.full_like(x, True, dtype=bool)
# if x0 is not None:
# tk = tk & (x0 <= x)
# if x1 is not None:
# tk = tk & (x <= x1)
# if y0 is not None:
# tk = tk & (y0 <= y)
# if y1 is not None:
# tk = tk & (y <= y1)
# # if (~tk).any():
# x = x[tk]
# y = y[tk]
# return ax.plot(x, y, **kwargs)
# def plot_edges(self, ax, smooth=True, sg_kwargs=None, **kwargs):
# r"""Overplot the edges.
# Parameters
# ----------
# ax:
# Axis on which to plot.
# smooth: bool
# If True, apply a Savitzky-Golay filter (:py:func:`scipy.signal.savgol_filter`)
# to the y-values before plotting to smooth the curve.
# sg_kwargs: dict, None
# If not None, dict of kwargs passed to Savitzky-Golay filter. Also allows
# for setting of `window_length` and `polyorder` as kwargs. They default to
# 10\% of the number of observations (`window_length`) and 3 (`polyorder`).
# Note that because `window_length` must be odd, if the 10\% value is even, we
# take 1-window_length.
# kwargs:
# Passed to `ax.plot`
# """
# top, bottom = self.get_border()
# color = kwargs.pop("color", "cyan")
# label = kwargs.pop("label", None)
# etop = self._plot_one_edge(
# ax, top, smooth, sg_kwargs, color=color, label=label, **kwargs
# )
# ebottom = self._plot_one_edge(
# ax, bottom, smooth, sg_kwargs, color=color, **kwargs
# )
# return etop, ebottom
# def _get_contour_levels(self, levels):
# if (levels is not None) or (self.axnorm is None):
# pass
# elif (levels is None) and (self.axnorm == "t"):
# levels = [0.01, 0.1, 0.3, 0.7, 0.99]
# elif (levels is None) and (self.axnorm == "d"):
# levels = [3e-5, 1e-4, 3e-4, 1e-3, 1.7e-3, 2.3e-3]
# elif (levels is None) and (self.axnorm in ["r", "c"]):
# levels = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
# else:
# raise ValueError(
# f"Unrecognized axis normalization {self.axnorm} for default levels."
# )
# return levels
# def _verify_contour_passthrough_kwargs(
# self, ax, clabel_kwargs, edges_kwargs, cbar_kwargs
# ):
# if clabel_kwargs is None:
# clabel_kwargs = dict()
# if edges_kwargs is None:
# edges_kwargs = dict()
# if cbar_kwargs is None:
# cbar_kwargs = dict()
# if "cax" not in cbar_kwargs.keys() and "ax" not in cbar_kwargs.keys():
# cbar_kwargs["ax"] = ax
# return clabel_kwargs, edges_kwargs, cbar_kwargs
# def plot_contours(
# self,
# ax=None,
# label_levels=True,
# cbar=True,
# limit_color_norm=False,
# cbar_kwargs=None,
# fcn=None,
# plot_edges=True,
# edges_kwargs=None,
# clabel_kwargs=None,
# skip_max_clbl=True,
# use_contourf=False,
# gaussian_filter_std=0,
# gaussian_filter_kwargs=None,
# **kwargs,
# ):
# f"""Make a contour plot on `ax` using `ax.contour`.
#     Paremeters
# ----------
# ax: mpl.axes.Axes, None
# If None, create an `Axes` instance from `plt.subplots`.
# label_levels: bool
# If True, add labels to contours with `ax.clabel`.
# cbar: bool
# If True, create color bar with `labels.z`.
# limit_color_norm: bool
# If True, limit the color range to 0.001 and 0.999 percentile range
# of the z-value, count or otherwise.
# cbar_kwargs: dict, None
# If not None, kwargs passed to `self._make_cbar`.
# fcn: FunctionType, None
# Aggregation function. If None, automatically select in :py:meth:`agg`.
# plot_edges: bool
# If True, plot the smoothed, extreme edges of the 2D histogram.
# edges_kwargs: None, dict
# Passed to {self.plot_edges!s}.
# clabel_kwargs: None, dict
# If not None, dictionary of kwargs passed to `ax.clabel`.
# skip_max_clbl: bool
# If True, don't label the maximum contour. Primarily used when the maximum
# contour is, effectively, a point.
# maximum_color:
# The color for the maximum of the PDF.
# use_contourf: bool
# If True, use `ax.contourf`. Else use `ax.contour`.
# gaussian_filter_std: int
# If > 0, apply `scipy.ndimage.gaussian_filter` to the z-values using the
# standard deviation specified by `gaussian_filter_std`.
# gaussian_filter_kwargs: None, dict
# If not None and gaussian_filter_std > 0, passed to :py:meth:`scipy.ndimage.gaussian_filter`
# kwargs:
# Passed to :py:meth:`ax.pcolormesh`.
# If row or column normalized data, `norm` defaults to `mpl.colors.Normalize(0, 1)`.
# """
# levels = kwargs.pop("levels", None)
# cmap = kwargs.pop("cmap", None)
# norm = kwargs.pop(
# "norm",
# mpl.colors.BoundaryNorm(np.linspace(0, 1, 11), 256, clip=True)
# if self.axnorm in ("c", "r")
# else None,
# )
# linestyles = kwargs.pop(
# "linestyles",
# [
# "-",
# ":",
# "--",
# (0, (7, 3, 1, 3, 1, 3, 1, 3, 1, 3)),
# "--",
# ":",
# "-",
# (0, (7, 3, 1, 3, 1, 3)),
# ],
# )
# if ax is None:
# fig, ax = plt.subplots()
# clabel_kwargs, edges_kwargs, cbar_kwargs = self._verify_contour_passthrough_kwargs(
# ax, clabel_kwargs, edges_kwargs, cbar_kwargs
# )
# inline = clabel_kwargs.pop("inline", True)
# inline_spacing = clabel_kwargs.pop("inline_spacing", -3)
# fmt = clabel_kwargs.pop("fmt", "%s")
# agg = self.agg(fcn=fcn).unstack("x")
# x = self.intervals["x"].mid
# y = self.intervals["y"].mid
# # assert x.size == agg.shape[1]
# # assert y.size == agg.shape[0]
# # HACK: Works around `gb.agg(observed=False)` pandas bug. (GH32381)
# if x.size != agg.shape[1]:
# # agg = agg.reindex(columns=self.intervals["x"])
# agg = agg.reindex(columns=self.categoricals["x"])
# if y.size != agg.shape[0]:
# # agg = agg.reindex(index=self.intervals["y"])
# agg = agg.reindex(index=self.categoricals["y"])
# x, y = self._maybe_convert_to_log_scale(x, y)
# XX, YY = np.meshgrid(x, y)
# C = agg.values
# if gaussian_filter_std:
# from scipy.ndimage import gaussian_filter
# if gaussian_filter_kwargs is None:
# gaussian_filter_kwargs = dict()
# C = gaussian_filter(C, gaussian_filter_std, **gaussian_filter_kwargs)
# C = np.ma.masked_invalid(C)
# assert XX.shape == C.shape
# assert YY.shape == C.shape
# class nf(float):
# # Source: https://matplotlib.org/3.1.0/gallery/images_contours_and_fields/contour_label_demo.html
# # Define a class that forces representation of float to look a certain way
# # This remove trailing zero so '1.0' becomes '1'
# def __repr__(self):
# return str(self).rstrip("0")
# levels = self._get_contour_levels(levels)
# contour_fcn = ax.contour
# if use_contourf:
# contour_fcn = ax.contourf
# if levels is None:
# args = [XX, YY, C]
# else:
# args = [XX, YY, C, levels]
# qset = contour_fcn(*args, linestyles=linestyles, cmap=cmap, norm=norm, **kwargs)
# try:
# args = (qset, levels[:-1] if skip_max_clbl else levels)
# except TypeError:
# # None can't be subscripted.
# args = (qset,)
# lbls = None
# if label_levels:
# qset.levels = [nf(level) for level in qset.levels]
# lbls = ax.clabel(
# *args, inline=inline, inline_spacing=inline_spacing, fmt=fmt
# )
# if plot_edges:
# etop, ebottom = self.plot_edges(ax, **edges_kwargs)
# cbar_or_mappable = qset
# if cbar:
# # Pass `norm` to `self._make_cbar` so that we can choose the ticks to use.
# cbar = self._make_cbar(qset, norm=norm, **cbar_kwargs)
# cbar_or_mappable = cbar
# self._format_axis(ax)
# return ax, lbls, cbar_or_mappable, qset
# def project_1d(self, axis, only_plotted=True, project_counts=False, **kwargs):
#     f"""Make a `Hist1D` from the data stored in this `Hist2D`.
# Parameters
# ----------
# axis: str
# "x" or "y", specifying the axis to project into 1D.
# only_plotted: bool
# If True, only pass data that appears in the {self.__class__.__name__} plot
# to the :py:class:`Hist1D`.
# project_counts: bool
# If True, only send the variable plotted along `axis` to :py:class:`Hist1D`.
# Otherwise, send both axes (but not z-values).
# kwargs:
# Passed to `Hist1D`. Primarily to allow specifying `bin_precision`.
# Returns
# -------
# h1: :py:class:`Hist1D`
# """
# axis = axis.lower()
# assert axis in ("x", "y")
# data = self.data
# if data.loc[:, "z"].unique().size >= 2:
# # Either all 1 or 1 and NaN.
# other = "z"
# else:
# possible_axes = {"x", "y"}
# possible_axes.remove(axis)
# other = possible_axes.pop()
# logx = self.log._asdict()[axis]
# x = self.data.loc[:, axis]
# if logx:
# # Need to convert back to regular from log-space for data setting.
# x = 10.0 ** x
# y = self.data.loc[:, other] if not project_counts else None
# logy = False # Defined b/c project_counts option.
# if y is not None:
# # Only select y-values plotted.
# logy = self.log._asdict()[other]
# yedges = self.edges[other].values
# y = y.where((yedges[0] <= y) & (y <= yedges[-1]))
# if logy:
# y = 10.0 ** y
# if only_plotted:
# tk = self.get_plotted_data_boolean_series()
# x = x.loc[tk]
# if y is not None:
# y = y.loc[tk]
# h1 = Hist1D(
# x,
# y=y,
# logx=logx,
# clip_data=False, # Any clipping will be addressed by bins.
# nbins=self.edges[axis].values,
# **kwargs,
# )
# h1.set_log(y=logy) # Need to propagate logy.
# h1.set_labels(x=self.labels._asdict()[axis])
# if not project_counts:
# h1.set_labels(y=self.labels._asdict()[other])
# h1.set_path("auto")
# return h1
# class GridHist2D(object):
# r"""A grid of 2D heatmaps separating the data based on a categorical value.
# Properties
# ----------
# data: pd.DataFrame
# axnorm: str or None
# Specify if column, row, total, or density normalization should be used.
# log: namedtuple
# Contains booleans identifying axes to log-scale.
# nbins: int or str
# Pass to `np.histogram_bin_edges` or `astropy.stats.knuth_bin_width`
# depending on the input.
# labels: namedtuple
#         Contains axis labels. Recommend using `labels.TeXlabel`.
#     grouped: pd.GroupBy
# The data grouped by the categorical.
# hist2ds: pd.Series
# The `Hist2D` objects created for each axis. Index is the unique
# categorical values.
# fig: mpl.figure.Figure
# The figure upon which the axes are placed.
# axes: pd.Series
# Contains the mpl axes upon which plots are drawn. Index should be
# identical to `hist2ds`.
# cbars: pd.Series
# Contains the colorbar instances. Similar to `hist2ds` and `axes`.
# cnorms: mpl.color.Normalize or pd.Series
# mpl.colors.Normalize instance or a pd.Series of them with one for
# each unique categorical value.
# use_gs: bool
# An attempt at the code is written, but not implemented because some
# minor details need to be worked out. Ideally, if True, use a single
# colorbar for the entire grid.
# Methods
# -------
# set_<>: setters
# For data, nbins, axnorm, log, labels, cnorms.
# make_h2ds:
# Make the `Hist2D` objects.
# make_plots:
# Make the `Hist2D` plots.
# """
# def __init__(self, x, y, cat, z=None):
# r"""Create 2D heatmaps of x, y, and optional z data in a grid for which
# each unique element in `cat` specifies one plot.
# Parameters
# ----------
# x, y, z: pd.Series or np.array
# The data to aggregate. pd.Series is prefered.
#         cat: pd.Categorical
#             The categorical series used to create subsets of the data for each
#             grid element.
# """
# self.set_nbins(101)
# self.set_axnorm(None)
# self.set_log(x=False, y=False)
# self.set_data(x, y, cat, z)
# self._labels = base.AxesLabels("x", "y") # Unsure how else to set defaults.
# self.set_cnorms(None)
# @property
# def data(self):
# return self._data
# @property
# def axnorm(self):
# r"""Axis normalization."""
# return self._axnorm
# @property
# def logger(self):
# return self._log
# @property
# def nbins(self):
# return self._nbins
# @property
# def log(self):
# r"""LogAxes booleans.
# """
# return self._log
# @property
# def labels(self):
# return self._labels
# @property
# def grouped(self):
# return self.data.groupby("cat")
# @property
# def hist2ds(self):
# try:
# return self._h2ds
# except AttributeError:
# return self.make_h2ds()
# @property
# def fig(self):
# try:
# return self._fig
# except AttributeError:
# return self.init_fig()[0]
# @property
# def axes(self):
# try:
# return self._axes
# except AttributeError:
# return self.init_fig()[1]
# @property
# def cbars(self):
# return self._cbars
# @property
# def cnorms(self):
# r"""Color normalization (mpl.colors.Normalize instance)."""
# return self._cnorms
# @property
# def use_gs(self):
# return self._use_gs
# @property
# def path(self):
# raise NotImplementedError("Just haven't sat down to write this.")
# def _init_logger(self):
# self._logger = logging.getLogger(
# "{}.{}".format(__name__, self.__class__.__name__)
# )
# def set_nbins(self, new):
# self._nbins = new
# def set_axnorm(self, new):
# self._axnorm = new
# def set_cnorms(self, new):
# self._cnorms = new
# def set_log(self, x=None, y=None):
# if x is None:
# x = self.log.x
# if y is None:
# y = self.log.y
# log = base.LogAxes(x, y)
# self._log = log
# def set_data(self, x, y, cat, z):
# data = {"x": x, "y": y, "cat": cat}
# if z is not None:
# data["z"] = z
# data = pd.concat(data, axis=1)
# self._data = data
# def set_labels(self, **kwargs):
# r"""Set or update x, y, or z labels. Any label not specified in kwargs
# is propagated from `self.labels.<x, y, or z>`.
# """
# x = kwargs.pop("x", self.labels.x)
# y = kwargs.pop("y", self.labels.y)
# z = kwargs.pop("z", self.labels.z)
# if len(kwargs.keys()):
# raise KeyError("Unexpected kwarg: {}".format(kwargs.keys()))
# self._labels = base.AxesLabels(x, y, z)
# def set_fig_axes(self, fig, axes, use_gs=False):
# self._set_fig(fig)
# self._set_axes(axes)
# self._use_gs = bool(use_gs)
# def _set_fig(self, new):
# self._fig = new
# def _set_axes(self, new):
# if new.size != len(self.grouped.groups.keys()) + 1:
# msg = "Number of axes must match number of Categoricals + 1 for All."
# raise ValueError(msg)
# keys = ["All"] + sorted(self.grouped.groups.keys())
# axes = pd.Series(new.ravel(), index=pd.CategoricalIndex(keys))
# self._axes = axes
# def init_fig(self, use_gs=False, layout="auto", scale=1.5):
# if layout == "auto":
# raise NotImplementedError(
# """Need some densest packing algorithm I haven't
# found yet"""
# )
# assert len(layout) == 2
# nrows, ncols = layout
# if use_gs:
# raise NotImplementedError(
# """Unsure how to consistently store single cax or
# deal with variable layouts."""
# )
# fig = plt.figure(figsize=np.array([8, 6]) * scale)
# gs = mpl.gridspec.GridSpec(
# 3,
# 5,
# width_ratios=[1, 1, 1, 1, 0.1],
# height_ratios=[1, 1, 1],
# hspace=0,
# wspace=0,
# figure=fig,
# )
# axes = np.array(12 * [np.nan], dtype=object).reshape(3, 4)
# sharer = None
# for i in np.arange(0, 3):
# for j in np.arange(0, 4):
# if i and j:
# a = plt.subplot(gs[i, j], sharex=sharer, sharey=sharer)
# else:
# a = plt.subplot(gs[i, j])
# sharer = a
# axes[i, j] = a
# others = axes.ravel().tolist()
# a0 = others.pop(8)
# a0.get_shared_x_axes().join(a0, *others)
# a0.get_shared_y_axes().join(a0, *others)
# for ax in axes[:-1, 1:].ravel():
# # All off
# ax.tick_params(labelbottom=False, labelleft=False)
# ax.xaxis.label.set_visible(False)
# ax.yaxis.label.set_visible(False)
# for ax in axes[:-1, 0].ravel():
# # 0th column x-labels off.
# ax.tick_params(which="x", labelbottom=False)
# ax.xaxis.label.set_visible(False)
# for ax in axes[-1, 1:].ravel():
# # Nth row y-labels off.
# ax.tick_params(which="y", labelleft=False)
# ax.yaxis.label.set_visible(False)
# # cax = plt.subplot(gs[:, -1])
# else:
# fig, axes = tools.subplots(
# nrows=nrows, ncols=ncols, scale_width=scale, scale_height=scale
# )
# # cax = None
# self.set_fig_axes(fig, axes, use_gs)
# return fig, axes
# def _build_one_hist2d(self, x, y, z):
# h2d = Hist2D(
# x,
# y,
# z=z,
# logx=self.log.x,
# logy=self.log.y,
# clip_data=False,
# nbins=self.nbins,
# )
# h2d.set_axnorm(self.axnorm)
# xlbl, ylbl, zlbl = self.labels.x, self.labels.y, self.labels.z
# h2d.set_labels(x=xlbl, y=ylbl, z=zlbl)
# return h2d
# def make_h2ds(self):
# grouped = self.grouped
# # Build case that doesn't include subgroups.
# x = self.data.loc[:, "x"]
# y = self.data.loc[:, "y"]
# try:
# z = self.data.loc[:, "z"]
# except KeyError:
# z = None
# hall = self._build_one_hist2d(x, y, z)
# h2ds = {"All": hall}
# for k, g in grouped:
# x = g.loc[:, "x"]
# y = g.loc[:, "y"]
# try:
# z = g.loc[:, "z"]
# except KeyError:
# z = None
# h2ds[k] = self._build_one_hist2d(x, y, z)
# h2ds = pd.Series(h2ds)
# self._h2ds = h2ds
# return h2ds
# @staticmethod
# def _make_axis_text_label(key):
#         r"""Format the `key` identifying the Categorical group for this axis. To modify,
#         subclass `GridHist2D` and redefine this staticmethod.
# """
# return key
# def _format_axes(self):
# axes = self.axes
# for k, ax in axes.items():
# lbl = self._make_axis_text_label(k)
# ax.text(
# 0.025,
# 0.95,
# lbl,
# transform=ax.transAxes,
# va="top",
# fontdict={"color": "k"},
# bbox={"color": "wheat"},
# )
# # ax.set_xlim(-1, 1)
# # ax.set_ylim(-1, 1)
# def make_plots(self, **kwargs):
# h2ds = self.hist2ds
# axes = self.axes
# cbars = {}
# cnorms = self.cnorms
# for k, h2d in h2ds.items():
# if isinstance(cnorms, mpl.colors.Normalize) or cnorms is None:
# cnorm = cnorms
# else:
# cnorm = cnorms.loc[k]
# ax = axes.loc[k]
# ax, cbar = h2d.make_plot(ax=ax, norm=cnorm, **kwargs)
# if not self.use_gs:
# cbars[k] = cbar
# else:
# raise NotImplementedError(
# "Unsure how to handle `use_gs == True` for color bars."
# )
# cbars = pd.Series(cbars)
# self._format_axes()
# self._cbars = cbars
|
nilq/baby-python
|
python
|
#-- GAUDI jobOptions generated on Mon Oct 12 10:07:37 2020
#-- Contains event types :
#-- 90000000 - 3737 files - 56787251 events - 2862.54 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass: '/Real Data/Reco14/Stripping21r1'
#-- StepId : 127013
#-- StepName : Stripping21r1-Merging-DV-v36r1
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v36r1
#-- OptionFiles : $APPCONFIGOPTS/Merging/DV-Stripping-Merging.py
#-- DDDB : dddb-20130929
#-- CONDDB : cond-20141107
#-- ExtraPackages : AppConfig.v3r203;Det/SQLDDDB.v7r10
#-- Visible : N
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles([
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006868_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059937_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063290_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030702_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042868_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036167_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067774_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016694_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060617_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059726_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009523_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020814_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069095_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022839_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053873_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021866_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004716_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057952_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016554_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005542_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041461_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038766_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003700_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050702_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047734_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032450_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051461_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065927_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002874_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061722_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032689_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067885_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023054_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005594_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054902_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037774_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062488_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043645_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009476_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008602_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027463_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037859_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004075_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022700_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041898_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002327_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002542_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065782_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028847_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042088_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043354_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019938_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028965_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000126_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017282_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051828_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045460_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059005_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059477_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067038_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003538_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000371_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058389_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039377_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031251_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049207_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040535_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001534_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042173_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015951_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002903_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040402_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058835_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060564_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058832_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014599_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027401_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068073_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045852_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049394_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047521_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001381_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002048_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027659_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027723_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060827_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059523_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038234_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027688_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042428_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067898_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033976_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010460_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032594_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044984_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003350_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021249_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036501_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057721_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063007_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040614_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020605_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060053_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036057_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015439_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023216_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068670_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057498_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026818_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028388_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031794_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055251_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033582_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026704_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026637_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016122_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021823_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021417_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049025_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065394_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028054_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058371_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036645_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062682_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002511_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031641_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002439_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055486_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053535_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054786_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065614_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033265_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042869_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040951_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016060_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003686_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049790_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012641_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027191_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000505_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026505_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020205_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066861_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029038_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056924_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067035_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004911_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036218_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013134_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017324_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007253_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013938_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021608_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067967_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021504_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052625_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057795_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008945_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063861_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022728_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009248_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064726_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026569_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043972_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007354_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008357_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024240_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063442_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058370_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064238_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007869_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039365_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061504_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043511_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019599_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042791_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056416_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031795_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047840_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004109_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009419_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024365_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060075_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038762_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000402_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017280_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027544_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059825_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019839_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023097_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024239_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054727_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051617_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011109_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009417_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044290_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059543_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042087_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065928_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036822_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003957_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040659_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042089_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059967_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064647_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067034_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018082_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061132_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046134_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053670_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047434_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047862_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022500_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039370_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059713_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001463_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004593_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022742_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069019_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026835_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008885_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012499_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001604_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027089_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055945_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035243_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061375_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041077_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069006_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020915_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037538_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065377_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019569_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068367_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029138_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063434_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065781_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030593_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050822_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022375_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046324_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020206_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058544_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054358_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006722_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027819_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041597_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024928_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063357_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066712_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065785_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060175_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043704_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044138_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014419_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002358_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036637_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027250_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059437_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026110_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031682_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007428_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050766_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057096_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001781_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018689_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039982_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039547_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002588_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016558_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034306_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057713_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021794_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001981_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032052_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020560_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062376_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029965_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046487_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039623_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017623_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008647_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060222_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067578_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010723_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046326_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028292_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024718_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031640_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015715_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035035_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024927_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037234_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068821_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058097_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008248_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040021_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010937_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049096_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046862_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038022_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027818_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063848_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054208_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053874_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067297_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028373_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059082_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025389_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035191_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036073_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043306_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011206_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065276_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003859_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052025_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032694_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042272_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059564_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022262_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031850_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013782_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002167_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027866_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012500_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031149_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002640_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041132_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023742_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066280_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020392_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033296_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000915_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036985_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044077_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042560_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010486_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067867_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062924_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046327_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022501_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056832_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064554_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051477_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034243_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003204_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033149_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027602_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020335_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032712_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037060_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026638_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018346_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061409_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040852_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037409_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059785_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064961_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053067_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062240_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034982_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066282_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008718_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057447_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069388_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068110_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020606_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011207_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042558_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050828_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050819_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005009_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002297_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017811_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050048_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020204_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001464_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041272_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006662_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060488_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037981_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044260_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009596_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062381_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060984_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022727_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043578_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055133_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018376_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031019_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000416_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022126_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051186_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048957_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019796_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010811_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036734_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066582_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005985_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036219_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020762_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043486_1.semileptonic.dst',
], clear=True)
|
nilq/baby-python
|
python
|
"""
Customized Django model field subclasses
"""
from django.db import models
from django.db.models import fields
from django.db.models.fields.related import ManyToManyField
class CopyFromFieldMixin(fields.Field):
    """
    Mixin that teaches a Django model field about the COPY FROM command.

    Accepts an extra ``source_column`` keyword argument naming the column
    in the source data; when absent, the model's own column is used.
    """

    def __init__(self, *args, **kwargs):
        # Extract our extra kwarg before Django's Field.__init__ sees it.
        source_column = kwargs.pop('source_column', None)
        super().__init__(*args, **kwargs)
        self.source_column = source_column

    @property
    def copy_from_name(self):
        """
        Name of the column to reference in a COPY FROM command.
        """
        if self.source_column:
            return self.source_column
        return self.column
class CopyFromBigIntegerField(fields.BigIntegerField, CopyFromFieldMixin):
    """BigIntegerField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromBooleanField(fields.BooleanField, CopyFromFieldMixin):
    """BooleanField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromCharField(fields.CharField, CopyFromFieldMixin):
    """CharField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromDateField(fields.DateField, CopyFromFieldMixin):
    """DateField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromDateTimeField(fields.DateTimeField, CopyFromFieldMixin):
    """DateTimeField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromDecimalField(fields.DecimalField, CopyFromFieldMixin):
    """DecimalField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromEmailField(fields.EmailField, CopyFromFieldMixin):
    """EmailField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromFloatField(fields.FloatField, CopyFromFieldMixin):
    """FloatField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromForeignKey(models.ForeignKey, CopyFromFieldMixin):
    """ForeignKey with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromIntegerField(fields.IntegerField, CopyFromFieldMixin):
    """IntegerField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromOneToOneField(models.OneToOneField, CopyFromFieldMixin):
    """OneToOneField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromTextField(fields.TextField, CopyFromFieldMixin):
    """TextField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromURLField(fields.URLField, CopyFromFieldMixin):
    """URLField with COPY FROM attributes via CopyFromFieldMixin."""
class CopyFromManyToManyField(ManyToManyField, CopyFromFieldMixin):
    """ManyToManyField with COPY FROM attributes via CopyFromFieldMixin."""
|
nilq/baby-python
|
python
|
# Lesson: repetition structures (for loops) with a control variable.
for c in range(0, 6):
    print('oi')
print('fim')
# range(start, stop, step): the first two arguments give the counting
# range, the third is the step of the count.
for a in range(0, 10, 2):
    print(a)
print('fim')
# Another example: a countdown using a negative step.
for a in range(10, 0, -1):
    print(a)
print('Fim')
# Using input() to drive the loop bound (prompt strings stay as-is).
n = int(input('Numero: '))
for c in range(0, n+1):
    print(c)
print('fim')
s = 0
for c in range(0, 4):
    n = int(input('Digite um numero'))
    s += n  # s += n is shorthand for s = s + n (augmented assignment)
print(s)
|
nilq/baby-python
|
python
|
# SVM demo, part 1: generate two 2-D Gaussian point clouds and plot them.
from __future__ import print_function
from matplotlib import rc
rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
## for Palatino and other serif fonts use:
#rc('font',**{'family':'serif','serif':['Palatino']})
#rc('text', usetex=True)
# generate data
# list of points
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import cdist
np.random.seed(22)  # fixed seed for reproducible samples
means = [[2, 2], [4, 2]]    # per-class means
cov = [[.3, .2], [.2, .3]]  # shared covariance matrix
N = 10                      # samples per class
X0 = np.random.multivariate_normal(means[0], cov, N)  # class +1 points
X1 = np.random.multivariate_normal(means[1], cov, N)  # class -1 points
X = np.concatenate((X0.T, X1.T), axis = 1)  # 2 x 2N data matrix (columns are points)
y = np.concatenate((np.ones((1, N)), -1*np.ones((1, N))), axis = 1)  # 1 x 2N labels
# plot points: blue squares = class +1, red circles = class -1
plt.plot(X0[:, 0], X0[:, 1], 'bs', markersize = 8, alpha = .8)
plt.plot(X1[:, 0], X1[:, 1], 'ro', markersize = 8, alpha = .8)
plt.axis('equal')
# axis limits
plt.ylim(0, 3)
plt.xlim(2, 4)
# hide ticks
cur_axes = plt.gca()
cur_axes.axes.get_xaxis().set_ticks([])
cur_axes.axes.get_yaxis().set_ticks([])
plt.xlabel('$x_1$', fontsize = 20)
plt.ylabel('$x_2$', fontsize = 20)
# pdf.savefig()
plt.show()
# SVM demo, part 2: solve the dual QP with cvxopt, recover w and b,
# then plot the separating hyperplane, margins and support vectors.
from cvxopt import matrix, solvers
# build K = V^T V where the columns of V are y_i * x_i
V = np.concatenate((X0.T, -X1.T), axis = 1)
K = matrix(V.T.dot(V))
p = matrix(-np.ones((2*N, 1)))
# build A, b, G, h: G,h encode lambda >= 0; A,b encode y^T lambda = 0
G = matrix(-np.eye(2*N))
h = matrix(np.zeros((2*N, 1)))
A = matrix(y)
b = matrix(np.zeros((1, 1)))
solvers.options['show_progress'] = False
sol = solvers.qp(K, p, G, h, A, b)
l = np.array(sol['x'])  # Lagrange multipliers, one per sample
print('lambda = \n', l.T)
S = np.where(l > 1e-6)[0]  # support vectors: multipliers above tolerance
VS = V[:, S]
XS = X[:, S]
yS = y[:, S]
lS = l[S]
# calculate w and b
w = VS.dot(lS)                     # w = sum_i lambda_i y_i x_i
b = np.mean(yS.T - w.T.dot(XS))    # averaged over support vectors
print('w = ', w.T)
print('b = ', b)
with PdfPages('svm4.pdf') as pdf:
    # draw
    # plot points
    fig, ax = plt.subplots()
    x1 = np.arange(-10, 10, 0.1)
    # decision boundary w^T x + b = 0 and the two margins (= +/-1),
    # solved for the second coordinate
    y1 = -w[0, 0]/w[1, 0]*x1 - b/w[1, 0]
    y2 = -w[0, 0]/w[1, 0]*x1 - (b-1)/w[1, 0]
    y3 = -w[0, 0]/w[1, 0]*x1 - (b+1)/w[1, 0]
    plt.plot(x1, y1, 'k', linewidth = 3)
    plt.plot(x1, y2, 'k')
    plt.plot(x1, y3, 'k')
    y4 = 10*x1
    plt.plot(x1, y1, 'k')
    # shade the two half-planes on either side of the boundary
    plt.fill_between(x1, y1, color='red', alpha=0.1)
    plt.fill_between(x1, y1, y4, color = 'blue', alpha = 0.1)
    plt.plot(X0[:, 0], X0[:, 1], 'bs', markersize = 8, alpha = .8)
    plt.plot(X1[:, 0], X1[:, 1], 'ro', markersize = 8, alpha = .8)
    plt.axis('equal')
    plt.ylim(0, 3)
    plt.xlim(2, 4)
    # hide ticks
    cur_axes = plt.gca()
    cur_axes.axes.get_xaxis().set_ticks([])
    cur_axes.axes.get_yaxis().set_ticks([])
    # add circles around support vectors
    for m in S:
        circle = plt.Circle((X[0, m], X[1, m] ), 0.1, color='k', fill = False)
        ax.add_artist(circle)
    plt.xlabel('$x_1$', fontsize = 20)
    plt.ylabel('$x_2$', fontsize = 20)
    # plt.savefig('svm4.png', bbox_inches='tight', dpi = 300)
    pdf.savefig()
    plt.show()
|
nilq/baby-python
|
python
|
import sys
import serial
import pprint
import time
import enum
import queue
from queue import Queue
from os.path import join, dirname, abspath
from qtpy.QtCore import Slot, QTimer, QThread, Signal, QObject, Qt, QMutex
class GcodeStates(enum.Enum):
    """States of the G-code send/acknowledge cycle used by the worker threads."""
    WAIT_FOR_TIMEOUT = 1  # a command was sent; waiting for the inter-move delay
    GCODE_SENT = 2        # command written to the port (not referenced in this file's visible code)
    READY_TO_SEND = 3     # the next command may be written
class BipapInitializationThread(QObject):
    """
    Worker that runs the BiPAP initialization sequence: streams the init
    G-code program to the controller, then sweeps the axis position from
    codegen.Dt up to codegen.Dp while recording a pressure reading (taken
    from ``que``) for each position.

    Signals:
        signal:   status strings ("<line> - <reply>", "Endbipapinit", "Stopped").
        ppsignal: the collected [position, pressure] pairs.
    """
    signal = Signal(str)
    ppsignal = Signal(list)

    def __init__(self, serialPort, codegen, que):
        # que: queue fed by the sensor thread with pressure readings.
        self.pressureque = que
        self.serialPort = serialPort
        self.position_pressure_list = []  # accumulated [position, pressure] pairs
        #self.json = JsonSettings("settings.json")
        self.codegen = codegen #GcodeGenerator(int(self.json.dict['vt']), int(self.json.dict['rr']), int(self.json.dict['ie']), int(self.json.dict['fio2']))
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodeinit.splitlines()
        self.flagStop = False
        # Current sweep position; advanced one step per pressure sample.
        self.variableDt = self.codegen.Dt
        self.ustr = ""
        super().__init__()

    def Stop(self):
        """Request the run loop to stop at the next check point."""
        self.flagStop = True

    @Slot()
    def run(self):
        """Send the init program, then sweep positions recording pressures."""
        try:
            lst = []
            # Phase 1: stream each init G-code line and wait for a reply.
            for line in self.codelist:
                if self.flagStop:
                    break
                #self.serialPort.reset_input_buffer()
                self.serialPort.write((str(line) + "\r\n").encode("utf-8"))
                time.sleep(0.5)
                # Block until the controller starts replying.
                in_waiting = self.serialPort.in_waiting
                while in_waiting == 0:
                    time.sleep(1)
                    in_waiting = self.serialPort.in_waiting
                jMessage = ""
                while self.serialPort.in_waiting:
                    #print(self.serialPort.readline().decode('ascii'))
                    lst = self.serialPort.readlines()
                    for itm in lst:
                        try:
                            jMessage += itm.decode('ascii')
                        except:
                            pass  # drop undecodable chunks
                    #jMessage += self.serialPort.readline().decode('ascii')
                if "busy" in jMessage:
                    # Controller busy: wait and move on (line is not resent).
                    time.sleep(1)
                    continue
                self.signal.emit(str(line) + " - " + jMessage)
            # Phase 2: step the position from Dt to Dp, one pressure per step.
            while self.variableDt < self.codegen.Dp:
                if self.flagStop:
                    break
                try:
                    self.ustr = "G01 X"+str(self.variableDt) + " Y"+str(self.variableDt)+"\r\n"
                    self.serialPort.write((self.ustr.encode("utf-8")))
                    # Drain any stale pressure reading before sampling.
                    if self.pressureque.qsize() > 0:
                        self.pressureque.get(False)
                    time.sleep(0.12)
                    in_waiting = self.serialPort.in_waiting
                    #while in_waiting == 0:
                        #time.sleep(0.1)
                        #in_waiting = self.serialPort.in_waiting
                    #self.serialPort.reset_input_buffer()
                    # Busy-wait for the next fresh pressure sample.
                    while self.pressureque.qsize() <= 0:
                        pass
                    if self.pressureque.qsize() > 0:
                        pressure = self.pressureque.get(False)
                        if "\n" in pressure:
                            pass  # malformed sample; skip recording
                        else:
                            self.position_pressure_list.append([self.variableDt, pressure])
                    self.variableDt += 1
                except serial.SerialException as ex:
                    print("Error In SerialException During Bipap Pushing" + str(ex.strerror))
                    self.signal.emit("Endbipapinit")
                except Exception as e:
                    print("Error In Exception During Bipap Pushing")
                    pprint.pprint(e)
                    self.signal.emit("Endbipapinit")
            # Publish results and return the axis to the home position Dt.
            self.ppsignal.emit(self.position_pressure_list)
            self.ustr = "G01 X"+str(self.codegen.Dt) + " Y"+str(self.codegen.Dt)+"\r\n"
            self.serialPort.write((self.ustr.encode("utf-8")))
            pprint.pprint(self.position_pressure_list)
            print("pressure list from thread")
            self.signal.emit("Endbipapinit")
        except serial.SerialException as ex:
            print("Error In SerialException" + str(ex.strerror))
            self.signal.emit("Stopped")
        except Exception as e:
            print('Error From Bipap run..')
            pprint.pprint(e)
            self.signal.emit("Stopped")
class PrimaryThread(QObject):
    """
    Worker that streams the primary G-code program to the controller over
    a serial port, emitting one status string per line via ``signal``.
    """

    # Emits "<gcode line> - <controller reply>" per line, then "StoppedOK"
    # on normal completion or "Stopped" on error.
    signal = Signal(str)

    def __init__(self, serialPort, codegen):
        self.serialPort = serialPort
        #self.json = JsonSettings("settings.json")
        self.codegen = codegen
        # Regenerate the CMV program so gcodeprimary is up to date.
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodeprimary.splitlines()
        self.flagStop = False
        super().__init__()

    def Stop(self):
        """Request the run loop to stop before the next line is sent."""
        self.flagStop = True

    @Slot()
    def run(self):
        """Send each G-code line and wait for the controller's reply."""
        try:
            lst = []
            for line in self.codelist:
                if self.flagStop:
                    break
                self.serialPort.write((str(line) + "\r\n").encode("utf-8"))
                time.sleep(0.5)
                # Block until the controller starts replying.
                in_waiting = self.serialPort.in_waiting
                while in_waiting == 0:
                    time.sleep(1)
                    in_waiting = self.serialPort.in_waiting
                jMessage = ""
                while self.serialPort.in_waiting:
                    lst = self.serialPort.readlines()
                    for itm in lst:
                        try:
                            jMessage += itm.decode('ascii')
                        except UnicodeDecodeError:
                            pass  # drop undecodable chunks
                if "busy" in jMessage:
                    # Controller busy: wait and continue (note the current
                    # line is NOT resent).
                    time.sleep(1)
                    continue
                self.signal.emit(str(line) + " - " + jMessage)
            self.signal.emit("StoppedOK")
        except serial.SerialException as ex:
            # BUGFIX: ex.strerror may be None; str() avoids a TypeError on
            # concatenation (matches the other handlers in this file).
            print("Error In SerialException" + str(ex.strerror))
            self.signal.emit("Stopped")
        except Exception as e:
            pprint.pprint(e)
            self.signal.emit("Stopped")
class BipapThread(QObject):
    """
    Worker that continuously issues incremental G01 moves while not paused,
    throttled by a simple READY/WAIT state machine (~0.15 s between moves).
    """
    signal = Signal(str)

    def __init__(self, serl, codegen, que):
        self.pressureque = que
        self.serl = serl  # serial port handle
        self.codegen = codegen
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagStop = False
        self.pause = True  # start paused; StartMoving()/StartMovingAfter() resume
        self.gcode_exec_state = GcodeStates.READY_TO_SEND
        self.gcode_move_count = 0  # moves issued since last counter reset
        self.presentPosition = (0,0)
        self.Tic = 0  # timestamp of the last send, for the WAIT timeout
        self.Toc = 0
        self.xyIncr = self.codegen.Dt  # current X/Y target, stepped per move
        self.gstr = ""   # last generated G-code command
        self.sremsg = ""
        self.serialmutex = QMutex()  # guards serial writes against other threads
        self.startdelay = -1  # >0 means "unpause after this many seconds"
        super().__init__()

    def gcodestep(self):
        """Build the next G01 command and advance the target up to xmax."""
        self.gstr = "G01 X" + str(self.xyIncr) + " Y" + str(self.xyIncr) + " F1000\r\n"
        if self.xyIncr < self.codegen.xmax:
            self.xyIncr += 1

    def Stop(self):
        """Terminate the run loop."""
        self.flagStop = True

    def updateGcode(self, codegen):
        """Swap in a new generator and refresh the cached program lines."""
        self.codegen = codegen
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodestr.splitlines()

    def StartMoving(self):
        """Resume issuing moves immediately."""
        self.pause = False

    def StartMovingAfter(self, delay):
        """Resume issuing moves after ``delay`` seconds (handled in run())."""
        self.startdelay = delay

    def StopMoving(self):
        """Pause and rewind the target position back to Dt."""
        self.pause = True
        self.xyIncr = self.codegen.Dt

    @Slot()
    def run(self):
        """Main loop: send a move when READY, then wait out the timeout."""
        lst = []
        while 1:
            if self.flagStop:
                break
            try:
                if not self.pause:
                    if self.gcode_exec_state == GcodeStates.READY_TO_SEND:
                        self.gcodestep()
                        self.serialmutex.lock()
                        self.serl.write(self.gstr.encode("utf-8"))
                        self.serialmutex.unlock()
                        self.gcode_move_count += 1
                        if self.gcode_move_count >= 130:
                            #self.pause = True
                            self.gcode_move_count = 0
                        else:
                            # Arm the inter-move timeout.
                            self.gcode_exec_state = GcodeStates.WAIT_FOR_TIMEOUT
                            self.Tic = time.perf_counter()
                    if self.gcode_exec_state == GcodeStates.WAIT_FOR_TIMEOUT:
                        if (time.perf_counter() - self.Tic) >= 0.15:
                            #print("Gcode Executed\r\n")
                            self.gcode_exec_state = GcodeStates.READY_TO_SEND
                elif self.startdelay > 0:
                    # Delayed-start request: sleep once, then unpause.
                    time.sleep(self.startdelay)
                    self.startdelay = -1
                    self.pause = False
            except serial.SerialException as ex:
                print("Error In SerialException" + str(ex.strerror))
class EncoderThread(QObject):
    """
    Worker that polls a serial port for encoder output and re-emits each
    ASCII-decoded line through ``signal_pass_encoder``.
    """

    signal_pass_encoder = Signal(str)

    def __init__(self, serialport):
        self.rec_bytecount = 0
        self.line = []
        self.rec_data = ""
        self.flagStop = False
        self.serialport = serialport
        super().__init__()

    def Stop(self):
        """Ask the read loop to exit."""
        self.flagStop = True

    @Slot()
    def run(self):
        """Poll the port, read all pending lines and emit each one."""
        while True:
            if self.flagStop:
                break
            jMessage = ""
            # Wait until at least one byte is pending.
            in_waiting = self.serialport.in_waiting
            while in_waiting == 0:
                time.sleep(0.05)
                in_waiting = self.serialport.in_waiting
            # BUGFIX: reset lst every cycle. Previously lst was assigned only
            # inside the try; if the first readlines() raised, the bare
            # "except: pass" left lst undefined and the following for-loop
            # crashed with NameError (and on later failures stale lines were
            # re-emitted).
            lst = []
            try:
                lst = self.serialport.readlines()
            except Exception:
                pass  # best-effort: drop this poll cycle on a read error
            for itm in lst:
                try:
                    jMessage = itm.decode('ascii')
                    self.signal_pass_encoder.emit(jMessage)
                except UnicodeDecodeError:
                    pass  # skip undecodable lines
'''
class WorkerThread(QObject):
signal = Signal(str)
def __init__(self, serialPort, codegen, commandque:Queue):
self.serialPort = serialPort
self.codegen = codegen
self.commandque = commandque
self.codelist = self.codegen.gcodestr.splitlines()
self.linecount = len(self.codelist)
self.flagexit = False
self.flagStop = False
super().__init__()
self.respondQue = Queue()
def Stop(self):
self.flagStop = True
def Resume(self):
self.flagStop = False
def updateGcode(self, codegen):
self.codegen = codegen
self.codelist = self.codegen.gcodestr.splitlines()
@Slot()
def run(self):
lst = []
while 1:
if self.flagStop:
time.sleep(1)
if self.respondQue.qsize() <= 0:
self.respondQue.put("stopped")
continue
if self.commandque.qsize() > 0:
if self.commandque.get() == "exit":
self.flagexit = True
break
try:
for line in self.codelist:
self.serialPort.write((str(line)+"\r\n").encode('utf-8'))
time.sleep(0.1)
in_waiting = self.serialPort.in_waiting
while in_waiting == 0:
time.sleep(0.5) #1
in_waiting = self.serialPort.in_waiting
jMessage = ""
while "ok" not in jMessage:
while self.serialPort.in_waiting:
lst = self.serialPort.readlines()
for itm in lst:
jMessage += itm.decode('ascii')
self.signal.emit(str(line) + " - " + jMessage)
except serial.SerialException as ex:
print("Error In SerialException" + str(ex))
'''
class WorkerThread(QObject):
    """
    Worker that repeatedly streams the generated G-code program over the
    serial port, waiting for an "ok" acknowledgement after each line.
    Optionally limited to a fixed number of program cycles (cycleToRun).
    """
    signal = Signal(str)

    def __init__(self, serialPort, codegen, commandque:Queue):
        self.serialport = serialPort
        self.codegen = codegen
        self.commandque = commandque  # control queue; "exit" terminates run()
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagexit = False
        self.flagStop = False
        self.cycleToRun = 0    # 0 means run forever
        self.cycleCount = -1   # completed-cycle counter
        super().__init__()
        self.respondQue = Queue()  # run() posts "stopped" here while paused

    def Stop(self):
        """Pause the program loop (run() idles and reports "stopped")."""
        self.flagStop = True

    def Resume(self):
        """Resume the program loop after Stop()."""
        self.flagStop = False

    def updateGcode(self, codegen, cycleToRun=0):
        """Swap in a new program and (optionally) a cycle limit."""
        self.cycleToRun = cycleToRun
        self.codegen = codegen
        self.codelist = self.codegen.gcodestr.splitlines()

    @Slot()
    def run(self):
        """Stream the program repeatedly until "exit" is queued."""
        jMessage:str = ""
        unit:bytes = b''
        itm:str = ''       # partial line accumulator across reads
        in_waiting:int = 0
        while 1:
            # Enforce the optional cycle limit: once reached, idle.
            if self.cycleToRun > 0:
                #print( self.codegen.gcodestr + ' :: cycleToRun : ' + str(self.cycleToRun))
                if (self.cycleCount >= self.cycleToRun):
                    time.sleep(1)
                    continue
                else:
                    self.cycleCount += 1
                    if self.cycleCount == self.cycleToRun:
                        print( self.codegen.gcodestr + ' :: cycleToRun : ' + str(self.cycleToRun))
            # Paused: idle and report "stopped" once.
            if self.flagStop:
                time.sleep(1)
                if self.respondQue.qsize() <= 0:
                    self.respondQue.put("stopped")
                continue
            # Honor an external "exit" command.
            if self.commandque.qsize() > 0:
                if self.commandque.get() == "exit":
                    self.flagexit = True
                    break
            try:
                for line in self.codelist:
                    self.serialport.write((str(line)+"\r\n").encode('utf-8'))
                    time.sleep(0.5)
                    jMessage = ''
                    # Read reply chunks until a line containing "ok" arrives.
                    while 'ok' not in jMessage:
                        try:
                            in_waiting = self.serialport.in_waiting
                        except Exception as e:
                            print('Ex:0X17 : ' + str(e))
                        '''
                        while in_waiting == 0:
                            time.sleep(0.05)
                            try:
                                in_waiting = self.serialport.in_waiting
                            except Exception as e:
                                print('Ex:0x18 : ' + str(e))
                        '''
                        try:
                            # Wait for pending bytes, then read them all.
                            while in_waiting == 0:
                                time.sleep(0.02)
                                in_waiting = self.serialport.in_waiting
                            unit = self.serialport.read(in_waiting)
                        except Exception as e:
                            print('Ex in sensor Thread readline() 392 : ' + str(e))
                        if len(unit) > 0:
                            try:
                                itm += unit.decode('ascii')
                            except:
                                pass  # drop undecodable chunks
                        #else:
                        # time.sleep(0.1)
                        # A newline completes the accumulated message.
                        if b'\n' in unit:
                            jMessage = itm #.decode('ascii')
                            itm = ''
                            self.signal.emit(str(line) + " - " + jMessage)
                            if 'ok' not in jMessage:
                                pass
                        #time.sleep(0.01)
            except serial.SerialException as ex:
                print("Error In SerialException WorkerThread L- 410 : " + str(ex))
class BipapWorkerThread(QObject):
    """
    Worker that repeatedly streams the BiPAP G-code program over the serial
    port, line by line, waiting for an "ok" acknowledgement for each line.
    """
    signal = Signal(str)

    def __init__(self, serialPort, codegen, commandque:Queue):
        self.serialPort = serialPort
        self.codegen = codegen
        self.commandque = commandque  # control queue; "exit" terminates run()
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagexit = False
        self.flagStop = False
        super().__init__()
        self.respondQue = Queue()  # run() posts "stopped" here while paused

    def Stop(self):
        """Pause the program loop."""
        self.flagStop = True

    def Resume(self):
        """Resume the program loop after Stop()."""
        self.flagStop = False

    def updateGcode(self, codegen):
        """Swap in a new generator and refresh the cached program lines."""
        self.codegen = codegen
        self.codelist = self.codegen.gcodestr.splitlines()

    @Slot()
    def run(self):
        """Stream the program repeatedly until "exit" is queued."""
        lst = []
        while 1:
            # Paused: idle and report "stopped" once.
            if self.flagStop:
                time.sleep(1)
                if self.respondQue.qsize() <= 0:
                    self.respondQue.put("stopped")
                continue
            # Honor an external "exit" command.
            if self.commandque.qsize() > 0:
                if self.commandque.get() == "exit":
                    self.flagexit = True
                    break
            try:
                for line in self.codelist:
                    self.serialPort.write((str(line)+"\r\n").encode('utf-8'))
                    time.sleep(0.1)
                    # Block until the controller starts replying.
                    in_waiting = self.serialPort.in_waiting
                    while in_waiting == 0:
                        time.sleep(0.5) #1
                        in_waiting = self.serialPort.in_waiting
                    jMessage = ""
                    # Accumulate reply lines until one contains "ok".
                    while "ok" not in jMessage:
                        while self.serialPort.in_waiting:
                            lst = self.serialPort.readlines()
                            for itm in lst:
                                try:
                                    jMessage += itm.decode('ascii')
                                except:
                                    pass  # drop undecodable chunks
                    self.signal.emit(str(line) + " - " + jMessage)
            except serial.SerialException as ex:
                print("Error In SerialException" + str(ex))
class SensorThread(QObject):
    """
    Worker that continuously reads comma-separated sensor lines from the
    serial port, emits each full line via ``signal`` and pushes the first
    field (presumably the pressure value — confirm against the consumers)
    into ``pressureque``. Can also transmit a limits string on request.
    """
    signal = Signal(str)
    # NOTE(review): class attribute shared across instances until first
    # assignment in run(); holds the last parsed field list.
    plst = []

    def __init__(self, serialPort, que):
        self.pressureque = que
        self.serialport = serialPort
        self.flagStop = False
        self.jMessage = ""
        self._beep = False
        self.flag_sensorlimit_tx = False  # set by txsensordata(); run() sends once
        self.strdata = ""                 # payload pending transmission
        super().__init__()

    def Stop(self):
        """Terminate the read loop."""
        self.flagStop = True

    def beep(self):
        """Set the beep request flag (not acted on in the visible code)."""
        self._beep = True

    def txsensordata(self, strdata):
        """Queue ``strdata`` to be written to the port by the run loop."""
        self.strdata = strdata
        self.flag_sensorlimit_tx = True

    @Slot()
    def run(self):
        """Read, parse and publish sensor lines until stopped."""
        in_waiting = ''
        jMessage = ""
        unit = ''
        itm = ''  # partial line accumulator across reads
        while 1:
            if self.flagStop:
                break
            try:
                in_waiting = self.serialport.in_waiting
            except Exception as e:
                print('Ex:0X07 : ' + str(e))
            # Wait until at least one byte is pending.
            while in_waiting == 0:
                time.sleep(0.01)
                try:
                    in_waiting = self.serialport.in_waiting
                except Exception as e:
                    print('Ex:0x08 : ' + str(e))
            try:
                unit = self.serialport.read(in_waiting)
            except Exception as e:
                print('Ex in sensor Thread readline() 527 : ' + str(e))
            if len(unit) > 0:
                try:
                    itm += unit.decode('ascii')
                except:
                    pass  # drop undecodable chunks
            # A newline completes the accumulated message.
            if b'\n' in unit:
                jMessage = itm #.decode('ascii')
                itm = ''
                #jMessage += ',' + str(time.perf_counter())
                self.plst = jMessage.split(",")
                self.signal.emit(jMessage)
                # Only publish when the consumer has drained the queue.
                if self.pressureque.qsize() <= 0:
                    self.pressureque.put(self.plst[0])
            # Transmit a pending sensor-limit string, at most once per flag.
            if self.flag_sensorlimit_tx:
                self.flag_sensorlimit_tx = False
                self.serialport.write(self.strdata.encode('utf-8'))
                time.sleep(0.5)
|
nilq/baby-python
|
python
|
from urllib.parse import unquote
from flask import Flask
from flask import Response
from flask import abort
from flask import jsonify
from flask import render_template
from flask import request
from flask import send_file
from werkzeug.exceptions import BadRequest
from bootstrapper.lib import archive_utils
from bootstrapper.lib import bootstrapper_utils
from bootstrapper.lib import cache_utils
from bootstrapper.lib.db import db_session
from bootstrapper.lib.db import init_db
from bootstrapper.lib.exceptions import RequiredParametersError
from bootstrapper.lib.exceptions import TemplateNotFoundError
# Flask application object plus defaults/config loaded once at import time.
app = Flask(__name__)
defaults = bootstrapper_utils.load_defaults()
config = bootstrapper_utils.load_config()
@app.route('/')
def index():
    """
    Default route: render the landing page.

    :return: rendered index.html template
    """
    page_title = 'PanOS Bootstrap Utility'
    return render_template('index.html', title=page_title)
@app.route('/bootstrapper.swagger.json')
def api():
    """
    Serve the static swagger definition for this API.

    :return: the swagger JSON file
    """
    swagger_path = 'templates/bootstrapper.swagger.json'
    return send_file(swagger_path)
@app.route('/get/<key>', methods=['GET'])
def get_object_contents(key):
    """
    Fetch a previously cached object by key, useful to 'chain' actions.

    :return: raw contents stored under the key
    """
    # Guard clause: an empty/missing key is a client error.
    if not key:
        error = jsonify(message="Not all required params are present",
                        success=False, status_code=400)
        error.status_code = 400
        return error
    return Response(cache_utils.get(key))
@app.route('/set', methods=['POST'])
def set_object():
    """
    Store a serializable object in the cache.

    :return: JSON dict with 'key' and 'success' keys
    """
    payload = request.get_json(force=True)
    contents = payload.get('contents', None)
    # Guard clause: nothing to store is a client error.
    if contents is None:
        error = jsonify(message="Not all required keys are present",
                        success=False, status_code=400)
        error.status_code = 400
        return error
    return jsonify(key=cache_utils.set(contents), success=True)
@app.route('/generate_bootstrap_package', methods=['POST'])
def generate_bootstrap_package():
    """
    Main function to build a bootstrap archive. You must post the following params:
    hostname: we cannot build an archive without at least a hostname
    deployment_type: openstack, kvm, vmware, etc.
    archive_type: zip, iso
    You must also supply all the variables required from included templates
    :return: binary package containing variable interpolated templates
    """
    try:
        posted_json = request.get_json(force=True)
        base_config = bootstrapper_utils.build_base_configs(posted_json)
    except (BadRequest, RequiredParametersError):
        abort(400, 'Invalid input parameters')
    except TemplateNotFoundError:
        # FIX: corrected misspelled log message ('tempaltes' -> 'templates')
        print('Could not load templates!')
        abort(500, 'Could not load template!')
    # if desired deployment type is openstack, then add the heat templates and whatnot
    if posted_json.get('deployment_type') == 'openstack':
        try:
            base_config = bootstrapper_utils.build_openstack_heat(base_config, posted_json, archive=True)
        except RequiredParametersError:
            abort(400, 'Could not parse JSON data')
    # hostname is mandatory: it names the archive
    if 'hostname' not in posted_json:
        abort(400, 'No hostname found in posted data')
    # if the user supplies an 'archive_type' parameter we can return either a ZIP or ISO
    archive_type = posted_json.get('archive_type', 'zip')
    if archive_type == 'iso':
        # user has specified they want an ISO built
        archive = archive_utils.create_iso(base_config, posted_json['hostname'])
        mime_type = 'application/iso-image'
    else:
        # no ISO required, just make a zip
        archive = archive_utils.create_archive(base_config, posted_json['hostname'])
        mime_type = 'application/zip'
    print("archive path is: %s" % archive)
    if archive is None:
        abort(500, 'Could not create archive! Check bootstrapper logs for more information')
    return send_file(archive, mimetype=mime_type)
@app.route('/get_bootstrap_variables', methods=['POST'])
def get_bootstrap_variables():
    """
    Compile the set of variables a caller must supply to build a valid
    bootstrap archive, echoing back selected template names.

    :return: JSON with 'success', 'payload' and 'status_code' keys
    """
    print('Compiling variables required in payload to generate a valid bootstrap archive')
    posted_json = request.get_json(force=True)
    required_vars = bootstrapper_utils.get_bootstrap_variables(posted_json)
    payload = dict()
    if posted_json.get('bootstrap_template') is not None:
        print('Using bootstrap %s' % posted_json['bootstrap_template'])
        payload['bootstrap_template'] = posted_json['bootstrap_template']
    else:
        print('No bootstrap file requested')
    if posted_json.get('init_cfg_template') is not None:
        print('Setting init_cfg_name')
        payload['init_cfg_template'] = posted_json['init_cfg_template']
    else:
        print('No init_cfg file requested')
    # 'aframe' format gets jinja-style placeholders, otherwise empty strings
    if posted_json.get('format') == 'aframe':
        for var_name in required_vars:
            payload[var_name] = "{{ %s }}" % var_name
    else:
        for var_name in required_vars:
            payload[var_name] = ""
    return jsonify(success=True, payload=payload, status_code=200)
@app.route('/import_template', methods=['POST'])
def import_template():
    """
    Adds a template location to the configuration
    :return: json with 'success', 'message' and 'status' keys
    """
    posted_json = request.get_json(force=True)
    # Only the subscript lookups below can raise KeyError; optional keys
    # are read with .get() afterwards.
    try:
        name = posted_json['name']
        encoded_template = posted_json['template']
    except KeyError:
        print("Not all required keys are present!")
        r = jsonify(message="Not all required keys for add template are present", success=False, status_code=400)
        r.status_code = 400
        return r
    description = posted_json.get('description', 'Imported Template')
    template_type = posted_json.get('type', 'bootstrap')
    template = unquote(encoded_template)
    print('Importing template with name: %s' % name)
    print('Importing template with description: %s' % description)
    print(template)
    if not bootstrapper_utils.import_template(template, name, description, template_type):
        r = jsonify(success=False, message='Could not import template repository to the configuration',
                    status_code=500)
        r.status_code = 500
        return r
    return jsonify(success=True, message='Imported Template Successfully', status_code=200)
@app.route('/delete_template', methods=['POST'])
def delete_template():
    """
    Removes a template from the configuration.

    Expects a JSON body with a 'template_name' key.
    :return: json with 'success', 'message' and 'status' keys
    """
    posted_json = request.get_json(force=True)
    try:
        name = posted_json['template_name']
    except KeyError:
        print("Not all required keys are present!")
        # FIX: message previously said 'add template' (copy/paste from
        # import_template) in this delete endpoint
        r = jsonify(message="Not all required keys for delete template are present", success=False, status_code=400)
        r.status_code = 400
        return r
    if bootstrapper_utils.delete_template(name):
        return jsonify(success=True, message='Deleted Template Successfully', status_code=200)
    else:
        r = jsonify(success=False, message='Could not delete template', status_code=500)
        r.status_code = 500
        return r
@app.route('/list_templates', methods=['GET'])
def list_templates():
    """Return the list of known bootstrap templates as JSON."""
    templates = bootstrapper_utils.list_bootstrap_templates()
    return jsonify(success=True, templates=templates, status_code=200)
@app.route('/get_template', methods=['POST'])
def get_template():
    """
    Return the raw contents of a single named template.

    Expects a JSON body with a 'template_name' key.
    :return: template contents as text/plain
    """
    posted_json = request.get_json(force=True)
    try:
        name = posted_json['template_name']
    except KeyError:
        print("Not all required keys are present!")
        # FIX: message previously said 'add template' (copy/paste from
        # import_template) in this read endpoint
        r = jsonify(message="Not all required keys for get template are present", success=False, status_code=400)
        r.status_code = 400
        return r
    ts = bootstrapper_utils.get_template(name)
    return Response(ts, mimetype='text/plain')
@app.route('/list_init_cfg_templates', methods=['GET'])
def list_init_cfg_templates():
    """Return the list of known init-cfg templates as JSON."""
    templates = bootstrapper_utils.list_init_cfg_templates()
    return jsonify(success=True, templates=templates, status_code=200)
@app.teardown_appcontext
def shutdown_session(exception=None):
    # Remove the scoped DB session at the end of each app context so
    # connections are returned cleanly.
    db_session.remove()
@app.before_first_request
def init_application():
    # One-time startup work: create DB schema and load bundled templates.
    init_db()
    bootstrapper_utils.import_templates()
if __name__ == '__main__':
    # Development entry point only: debug mode plus binding to all
    # interfaces (0.0.0.0) is unsafe in production — use a WSGI server.
    app.run(debug=True, host='0.0.0.0')
|
nilq/baby-python
|
python
|
# stream.models
# Database models for the Activity Stream Items
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Wed Feb 04 10:24:36 2015 -0500
#
# Copyright (C) 2016 District Data Labs
# For license information, see LICENSE.txt
#
# ID: models.py [70aac9d] benjamin@bengfort.com $
"""
Database models for the Activity Stream items
"""
##########################################################################
## Imports
##########################################################################
from django.db import models
from model_utils import Choices
from django.utils.timesince import timesince
from minent.utils import nullable, notnullable
from stream.managers import StreamItemManager
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils import timezone as datetime
##########################################################################
## Activity Stream models
##########################################################################
class StreamItem(models.Model):
    """
    Contains a relationship between a user and any other content item via
    a Generic relationship. It can then be used to describe an action
    model as follows:
        <actor> <verb> <time>
        <actor> <verb> <target> <time>
        <actor> <verb> <theme> <target> <time>
    For example:
        <bbengfort> <logged in> <1 minute ago>
        <mcox> <asked> <question> <2 hours ago>
        <dperlis> <annotated> <topic> on <question> <a day ago>
    Much of this data type is created automatically (e.g. not interacted
    with by users except through views). A secondary table is used to
    store the activity stream to ensure that it can be quickly loaded,
    even though many of the items in question already have a relationship
    to some user!
    """
    ## Potential actions (verbs) for the activity stream
    ## DB storage is the infinitive, display is past tense
    VERBS = Choices(
        ('join', 'joined'),
        ('view', 'viewed'),
        ('upvote', 'up voted'),
        ('downvote', 'down voted'),
        ('ask', 'asked'),
        ('answer', 'answered'),
    )
    ## Relationship to the user (the actor)
    actor = models.ForeignKey( 'auth.User', related_name='activity_stream' ) # The actor causing the event
    ## Generic relationship to a target
    target_content_type = models.ForeignKey( ContentType, related_name="targets", **nullable )
    target_object_id = models.PositiveIntegerField( **nullable )
    target = GenericForeignKey( 'target_content_type', 'target_object_id' )
    ## Generic relationship to a theme (action object)
    theme_content_type = models.ForeignKey( ContentType, related_name="themes", **nullable )
    theme_object_id = models.PositiveIntegerField( **nullable )
    theme = GenericForeignKey( 'theme_content_type', 'theme_object_id' )
    ## Meta data concerning the activity
    public = models.BooleanField( default=True ) # May appear in public feeds?
    verb = models.CharField( max_length=20, choices=VERBS ) # The "verb" or "action" or "event"
    details = models.TextField( **nullable ) # Additional details about the action
    timestamp = models.DateTimeField( default=datetime.now, db_index=True ) # The timestamp of the action (note no created and modified)
    ## A custom manager for the StreamItem
    objects = StreamItemManager()
    ## Database setup and meta
    class Meta:
        app_label = 'stream'
        db_table = 'activity_stream'
        ordering = ('-timestamp',)
        verbose_name = 'activity stream item'
        verbose_name_plural = 'activity stream items'
    ######################################################################
    ## Methods on the Stream Item
    ######################################################################
    def timesince(self, now=None):
        """
        Returns a string representation of the time since the timestamp.
        """
        # timesince() renders with non-breaking spaces; normalize them to
        # plain spaces via a utf8 bytes round-trip.
        return timesince(self.timestamp, now).encode('utf8').replace(b'\xc2\xa0', b' ').decode('utf8')
    def get_object_url(self, obj):
        """
        Returns the URL of an object by using the `get_absolute_url` method
        otherwise returns None. (Shouldn't raise an error).
        """
        if hasattr(obj, 'get_absolute_url'):
            return obj.get_absolute_url()
        return None
    def get_actor_url(self):
        """Return the URL of the acting user, or None."""
        return self.get_object_url(self.actor)
    def get_target_url(self):
        """Return the URL of the target object, or None."""
        return self.get_object_url(self.target)
    def get_theme_url(self):
        """Return the URL of the theme object, or None.

        FIX: previously called ``self.get_absolute_url(self.theme)``;
        StreamItem defines no ``get_absolute_url``, so this always raised
        AttributeError. It now mirrors get_actor_url/get_target_url.
        """
        return self.get_object_url(self.theme)
    def get_object_repr(self, obj):
        """
        Returns an HTML representation of an object, basically an anchor
        to the object's absolute URL or just the plain string representation.
        """
        # If the object knows how to represent itself ...
        if hasattr(obj, 'get_stream_repr'):
            return obj.get_stream_repr()
        # Otherwise, simply return the string representation
        return str(obj)
    def __str__(self):
        # Render "<actor> <verb> [<theme>] [on <target>] <timesince> ago"
        # depending on which generic relations are populated.
        context = {
            'actor': self.actor.username,
            'verb': self.get_verb_display(),
            'theme': self.get_object_repr(self.theme),
            'target': self.get_object_repr(self.target),
            'timesince': self.timesince(),
        }
        if self.target:
            if self.theme:
                return "{actor} {verb} {theme} on {target} {timesince} ago".format(**context)
            return "{actor} {verb} {target} {timesince} ago".format(**context)
        if self.theme:
            return "{actor} {verb} {theme} {timesince} ago".format(**context)
        return "{actor} {verb} {timesince} ago".format(**context)
|
nilq/baby-python
|
python
|
# files.py — Debexpo files handling functions
#
# This file is part of debexpo -
# https://salsa.debian.org/mentors.debian.net-team/debexpo
#
# Copyright © 2019 Baptiste Beauplat <lyknode@cilg.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from os.path import basename, join, isfile
from os import replace, unlink
import hashlib
from debexpo.keyring.models import Key
from debexpo.tools.gnupg import GnuPG, ExceptionGnuPGNoPubKey
class ExceptionCheckSumedFile(Exception):
    """Base class for checksummed-file errors."""
    pass


class ExceptionCheckSumedFileNoFile(ExceptionCheckSumedFile):
    """Raised when the file to be checksummed is missing."""

    def __init__(self, e):
        self.message = str(e)

    def __str__(self):
        return self.message


class ExceptionCheckSumedFileNoMethod(ExceptionCheckSumedFile):
    """Raised when no supported checksum method was registered."""

    def __init__(self, filename):
        self.filename = filename

    def __str__(self):
        return 'No checksum method available for file {}.'.format(self.filename)


class ExceptionCheckSumedFileFailedSum(ExceptionCheckSumedFile):
    """Raised when the computed checksum does not match the expected one."""

    def __init__(self, filename, expected, computed):
        self.filename = filename
        self.expected = expected
        self.computed = computed

    def __str__(self):
        report = [
            f'Checksum failed for file {basename(self.filename)}.',
            '',
            f'Expected: {self.expected}',
            f'Computed: {self.computed}',
        ]
        return '\n'.join(report)
class GPGSignedFile():
    """A GPG-signed file whose signature can be authenticated against a
    key stored in the keyring database."""
    def __init__(self, filename):
        # filename: path to the signed file to verify
        self.filename = filename
        # Key model instance, populated by authenticate()
        self.key = None
    def authenticate(self):
        """Verify the file's signature using a key from the database.

        Re-raises the original GnuPG no-public-key failure when no
        matching key is registered in the database.
        """
        lookup = self._lookup_fingerprint()
        # NOTE(review): if the initial verification in _lookup_fingerprint
        # succeeds (no exception), `lookup` is None and the attribute
        # access below raises AttributeError — presumably the default
        # keyring is empty so verification always fails there; confirm.
        try:
            if lookup.fingerprint:
                search = lookup.fingerprint
            else:
                search = lookup.long_id
            self.key = Key.objects.get_key_by_fingerprint(search)
        except Key.DoesNotExist:
            # No registered key matches: surface the GnuPG failure as-is.
            raise lookup
        # Import the stored key into a fresh keyring and verify for real.
        self.keyring = GnuPG()
        self.keyring.import_key(self.key.key)
        self.keyring.verify_sig(self.filename)
    def _lookup_fingerprint(self):
        # Deliberately verify against an empty keyring: the expected
        # ExceptionGnuPGNoPubKey carries the signer's fingerprint/long id.
        gpg = GnuPG()
        try:
            gpg.verify_sig(self.filename)
        except ExceptionGnuPGNoPubKey as e:
            return e
    def get_key(self):
        # Key used for verification (None before authenticate()).
        return self.key
class CheckSumedFile():
    """A file on disk together with its expected checksums.

    Checksums are registered with :meth:`add_checksum` and verified
    lazily by :meth:`validate`.
    """

    # Supported digest algorithms, strongest first; validate() uses the
    # first one for which a checksum was registered.
    METHODS = ('sha512', 'sha256')

    def __init__(self, filename):
        self.filename = filename
        self.checksums = {}

    def __eq__(self, other):
        """Two checksummed files are equal when at least one supported
        method is present on both sides with an identical digest.
        No file content is read here."""
        return any(
            method in self.checksums
            and method in other.checksums
            and self.checksums[method] == other.checksums[method]
            for method in self.METHODS
        )

    def add_checksum(self, method, checksum):
        """Register the expected `checksum` for digest `method`."""
        self.checksums[method] = checksum

    def validate(self):
        """Verify the file against the first available checksum.

        Returns True on success. Raises ExceptionCheckSumedFileFailedSum
        on mismatch, ExceptionCheckSumedFileNoFile when the file is gone,
        or ExceptionCheckSumedFileNoMethod when no usable checksum was
        registered.
        """
        for method in self.METHODS:
            expected = self.checksums.get(method)
            if not expected:
                continue
            computed = self._digest(method)
            if computed != expected:
                raise ExceptionCheckSumedFileFailedSum(
                    self.filename, expected, computed
                )
            return True
        raise ExceptionCheckSumedFileNoMethod(self.filename)

    def _digest(self, method):
        """Stream the file through hashlib.<method>; return the hex digest."""
        hasher = getattr(hashlib, method)()
        try:
            stream = open(self.filename, 'rb')
        except FileNotFoundError:
            raise ExceptionCheckSumedFileNoFile(
                f'{basename(self.filename)} is missing from '
                'upload')
        with stream:
            # Read in 10 KiB chunks to keep memory flat for large files.
            for chunk in iter(lambda: stream.read(10240), b''):
                hasher.update(chunk)
        return hasher.hexdigest()

    def __str__(self):
        return basename(self.filename)

    def move(self, destdir):
        """Move the file into `destdir` via os.replace (no-op if missing)."""
        if not isfile(self.filename):
            return
        destination = join(destdir, basename(self.filename))
        replace(self.filename, destination)
        self.filename = destination

    def remove(self):
        """Delete the file from disk if it still exists."""
        if isfile(self.filename):
            unlink(self.filename)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
class BIT:
    """1-indexed Fenwick tree (Binary Indexed Tree) supporting point
    updates and prefix-sum queries in O(log n)."""

    def __init__(self, size: int) -> None:
        self.size = size
        # Slot 0 is unused; tree nodes live at indices 1..size.
        self._bit = [0] * (self.size + 1)

    def add(self, index: int, value: int) -> None:
        """Add `value` at position `index` (1-based)."""
        i = index
        while i <= self.size:
            self._bit[i] += value
            # Jump to the next node covering this position.
            i += i & (-i)

    def sum(self, index) -> int:
        """Return the sum of positions 1..index."""
        total = 0
        i = index
        while i > 0:
            total += self._bit[i]
            # Strip the lowest set bit to move to the parent range.
            i -= i & (-i)
        return total
def main():
    """Count inversions in the input permutation using a Fenwick tree."""
    n = int(input())
    values = list(map(int, input().split()))
    tree = BIT(n)
    inversions = 0
    for position, value in enumerate(values, 1):
        tree.add(value, 1)
        # Elements already seen that are strictly greater than `value`.
        inversions += position - tree.sum(value)
    print(inversions)


if __name__ == "__main__":
    main()
|
nilq/baby-python
|
python
|
"""Mock input data for unit tests."""
from copy import deepcopy
import uuid
# no dependencies
MOCK_BASE_PATH = "a/b/c"
MOCK_DRS_URI = "drs://fakehost.com/SOME_OBJECT"
MOCK_DRS_URI_INVALID = "dr://fakehost.com/SOME_OBJECT"
MOCK_DRS_URI_LONG = (
"drs://aaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa.com/SOME_OBJECT"
)
MOCK_ERROR = {
"msg": "mock_message",
"status_code": "400"
}
MOCK_ERROR_MSG = "SYSTEM HANDLER"
MOCK_ERROR_MSG_CUSTOM_HANDLER = "CUSTOM HANDLER"
MOCK_FILE_URL = "ftp://my.ftp.service/my_path/my_file_01.txt"
MOCK_HOST = "https://fakehost.com"
MOCK_ID = str(uuid.uuid4())
MOCK_PORT = 8080
MOCK_SELF_URI = f"https://fakehost.com/ga4gh/drs/v1/objects/{MOCK_ID}"
MOCK_TOKEN = "MyT0k3n"
# with dependencies
MOCK_ACCESS_URL = {
"url": MOCK_FILE_URL,
"headers": [
"None"
],
}
MOCK_ACCESS_METHODS = [
{
"type": "ftp",
"access_url": MOCK_ACCESS_URL,
},
]
MOCK_CHECKSUMS = [
{
"checksum": "18c2f5517e4ddc02cd57f6c7554b8e88",
"type": "md5",
},
]
MOCK_DRS_URL = f"{MOCK_HOST}:{MOCK_PORT}/ga4gh/drs/v1/objects"
MOCK_OBJECT_POST_INVALID = {
"updated_time": "2019-04-24T05:23:43-06:00",
"version": "1",
"size": 5,
"mime_type": "",
"checksums": MOCK_CHECKSUMS,
"access_methods": MOCK_ACCESS_METHODS,
}
MOCK_OBJECT_GET_INVALID = deepcopy(MOCK_OBJECT_POST_INVALID)
MOCK_OBJECT_GET_INVALID['id'] = MOCK_ID
MOCK_OBJECT_GET_INVALID['self_uri'] = MOCK_SELF_URI
MOCK_OBJECT_GET_INVALID['access_methods'][0]['access_id'] = MOCK_ID
MOCK_OBJECT_POST = deepcopy(MOCK_OBJECT_POST_INVALID)
MOCK_OBJECT_POST['created_time'] = "2019-05-20T00:12:34-07:00"
MOCK_OBJECT_GET = deepcopy(MOCK_OBJECT_GET_INVALID)
MOCK_OBJECT_GET['created_time'] = "2019-05-20T00:12:34-07:00"
|
nilq/baby-python
|
python
|
from sikuli import *
import sys
sys.path.insert(0, '/home/vagrant/Integration-Testing-Framework/sikuli/examples')
from test_helper import TestHelper
import open_flex_from_backup, check_change
# Driver script: restore several FLEx project backups and compare the
# rendered dictionary/word/text views against reference screenshots.
helper = TestHelper("run_tests_from_backups")
# Directory of reference screenshots to compare against.
folder = "/home/vagrant/Integration-Testing-Framework/sikuli/examples/images_for_comparison/"
# Directory holding the .fwbackup project archives.
backups_folder = "/home/vagrant/Integration-Testing-Framework/flex/projects/"
# Open Tagbanwa
open_flex_from_backup.open_backup(backups_folder + "Tagbanwa, Calamian 2015-07-07 1037 for testing purposes.fwbackup", True)
check_change.check_dictionary(folder + "Tagbanwa - dictionary.png")
check_change.check_word("dalik", folder + "Tagbanwa - dalik.png")
check_change.check_word("bugnawan", folder + "Tagbanwa - bugnawan.png")
# Open Kamasau
open_flex_from_backup.open_backup(backups_folder + "Kamasau 2015-07-07 1036 for testing purposes.fwbackup", True)
check_change.check_dictionary(folder + "Kamasau - dictionary.png")
check_change.check_word("chiraq", folder + "Kamasau - chiraq.png")
check_change.check_word("gre", folder + "Kamasau - gre.png")
# Open Ayta Mag-Anchi
open_flex_from_backup.open_backup(backups_folder + "Ayta Mag-Anchi2 2015-07-07 1035 for testing purposes.fwbackup", True)
check_change.check_text("kulot2.ptx", folder + "Ayta - kulot2.ptx.png")
# Restart flex to hello project, closing the 3 windows
# we just opened + whatever was open before
helper.restart_flex()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""File service tests."""
from io import BytesIO
def test_file_flow(
    file_service, location, example_file_record, identity_simple):
    """Test the lifecycle of a file.

    - Initialize file saving
    - Save 1 file
    - Commit the file
    - List files of the record
    - Read file metadata
    - Retrieve a file
    - Delete a file
    - Delete all remaining files
    - List should be empty
    """
    recid = example_file_record['id']
    file_to_initialise = [{
        'key': 'article.txt',
        'checksum': 'md5:c785060c866796cc2a1708c997154c8e',
        'size': 17,  # length in bytes of b'test file content' below
        'metadata': {
            'description': 'Published article PDF.',
        }
    }]
    # Initialize file saving
    result = file_service.init_files(
        recid, identity_simple, file_to_initialise)
    assert result.to_dict()['entries'][0]['key'] == \
        file_to_initialise[0]['key']
    # for to_file in to_files:
    content = BytesIO(b'test file content')
    result = file_service.set_file_content(
        recid, file_to_initialise[0]['key'], identity_simple, content,
        content.getbuffer().nbytes
    )
    # TODO figure response for successfully saved file
    assert result.to_dict()['key'] == file_to_initialise[0]['key']
    result = file_service.commit_file(
        recid, 'article.txt', identity_simple)
    # TODO currently there is no status in the json between the initialisation
    # and the committing.
    assert result.to_dict()['key'] == \
        file_to_initialise[0]['key']
    # List files
    result = file_service.list_files(recid, identity_simple)
    assert result.to_dict()['entries'][0]['key'] == \
        file_to_initialise[0]['key']
    # Read file metadata
    result = file_service.read_file_metadata(
        recid, 'article.txt', identity_simple)
    assert result.to_dict()['key'] == \
        file_to_initialise[0]['key']
    # Retrieve file
    result = file_service.get_file_content(
        recid, 'article.txt', identity_simple)
    assert result.file_id == 'article.txt'
    # Delete file
    result = file_service.delete_file(
        recid, 'article.txt', identity_simple)
    assert result.file_id == 'article.txt'
    # Assert deleted
    result = file_service.list_files(recid, identity_simple)
    # NOTE(review): `result.entries` is truthy even when it yields nothing
    # (presumably a generator); the meaningful check is the length below.
    assert result.entries
    assert len(list(result.entries)) == 0
    # Delete all remaining files
    result = file_service.delete_all_files(recid, identity_simple)
    assert list(result.entries) == []
|
nilq/baby-python
|
python
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from future.builtins import object
from contextlib import contextmanager
import sys
import unittest
from redis import Redis
from limpyd.database import (RedisDatabase, DEFAULT_CONNECTION_SETTINGS)
# Run the suite against redis DB 15 so the flushdb() calls in
# setUp/tearDown can never touch the default database.
TEST_CONNECTION_SETTINGS = DEFAULT_CONNECTION_SETTINGS.copy()
TEST_CONNECTION_SETTINGS['db'] = 15
test_database = RedisDatabase(**TEST_CONNECTION_SETTINGS)
class LimpydBaseTest(unittest.TestCase):
    """Base TestCase for limpyd tests: flushes the dedicated test redis DB
    around every test and provides redis command-counting helpers."""
    # Number of redis commands limpyd issues when taking a lock.
    COUNT_LOCK_COMMANDS = 4
    database = test_database
    @property
    def connection(self):
        # Shortcut to the redis connection of the test database.
        return self.database.connection
    def setUp(self):
        # Ensure that we are on the right DB before flushing
        current_db_id = self.connection.connection_pool.connection_kwargs['db']
        assert current_db_id != DEFAULT_CONNECTION_SETTINGS['db']
        assert current_db_id == TEST_CONNECTION_SETTINGS['db']
        self.connection.flushdb()
    def tearDown(self):
        self.connection.flushdb()
    def count_commands(self):
        """
        Helper method to only count redis commands that work on keys (ie ignore
        commands like info...)
        """
        # NOTE(review): this reads total_commands_processed, which includes
        # the INFO command itself; callers compensate for that.
        return self.connection.info()['total_commands_processed']
    def count_keys(self):
        """
        Helper method to return the number of keys in the test database
        """
        return self.connection.dbsize()
    def assertNumCommands(self, num=None, func=None, *args, **kwargs):
        """
        A context assert, to use with "with":
            with self.assertNumCommands(2):
                obj.field.set(1)
                obj.field.get()
        """
        # NOTE(review): when `func` is given, *args/**kwargs are forwarded
        # both to the context (as min_num/max_num/checkpoints) and to
        # func() itself — confirm callers rely on that double use.
        context = _AssertNumCommandsContext(self, num, *args, **kwargs)
        if func is None:
            return context
        # Basically emulate the `with` statement here.
        context.__enter__()
        try:
            func(*args, **kwargs)
        except:
            context.__exit__(*sys.exc_info())
            raise
        else:
            context.__exit__(*sys.exc_info())
    # Class-body conditional: provide a no-op subTest on interpreters
    # that predate unittest's native support (python < 3.4).
    if not hasattr(unittest.TestCase, 'subTest'):
        @contextmanager
        def subTest(self, msg=None, **params):
            # support for the `subTest` command not available before python 3.4
            # does nothing except running included test
            yield
    def assertSlicingIsCorrect(self, collection, check_data, check_only_length=False, limit=5):
        """Test a wide range of slicing of the given collection, compared to a python list
        Parameters
        ----------
        collection: Collection
            The collection to test. Should not have been sliced yet
        check_data: list
            The python list containing the same values as the limpyd collection.
            The result of slicing the collection will be compared to the result of slicing
            this list
        check_only_length: bool
            Default to ``False``. When ``True``, only the length of the slicing of the collection
            is compared to the slicing of the python list. To be used only when resulting content
            cannot be assured (for unsorted collections)
        limit: int
            Default to ``5``, it's the boundary of the slicing ranges that will be tested.
            ``5`` means will use all values from ``-5`` to ``5`` for each of the three parts
            of the slicing.
        """
        # check we have the correct dataset
        if check_only_length:
            assert len(collection) == len(check_data), 'Wrong dataset for this test'
        else:
            assert sorted(collection) == check_data, 'Wrong dataset for this test'
        # do all the slices
        for start in list(range(-limit, limit+1)) + [None]:
            for stop in list(range(-limit, limit+1)) + [None]:
                for step in range(-limit, limit+1):
                    # step == 0 is invalid for slicing
                    if not step:
                        continue
                    expected = check_data[start:stop:step]
                    # test the collection and a clone of it the same way
                    for test_collection, clone in ((collection, False), (collection.clone(), True)):
                        with self.subTest(Start=start, Stop=stop, step=step, clone=clone):
                            sliced_collection = test_collection[start:stop:step]
                            if not check_only_length:
                                self.assertEqual(
                                    list(sliced_collection),
                                    expected,
                                    'Unexpected result for `%s:%s:%s`' % (
                                        '' if start is None else start,
                                        '' if stop is None else stop,
                                        '' if step is None else step,
                                    )
                                )
                            self.assertEqual(
                                len(sliced_collection),
                                len(expected),
                                'Unexpected length result for `%s:%s:%s`' % (
                                    '' if start is None else start,
                                    '' if stop is None else stop,
                                    '' if step is None else step,
                                )
                            )
class _AssertNumCommandsContext(object):
    """
    A context manager that counts the redis commands executed inside its
    body and asserts the count is exactly `num`, or within
    [`min_num`, `max_num`]. With `checkpoints=True`, extra GET commands
    with distinctive key names are issued so the boundaries show up in
    redis MONITOR output when debugging.
    """
    def __init__(self, test_case, num=None, min_num=None, max_num=None, checkpoints=False):
        # test_case: the LimpydBaseTest providing count_commands()/asserts
        self.test_case = test_case
        # Exactly one of `num` or (`min_num`/`max_num`) must be used.
        if num is None and min_num is None and max_num is None:
            raise ValueError('If `num` is not passed, `min_num` or `max_num` are expected')
        if num is not None and (min_num is not None or max_num is not None):
            raise ValueError('If `num` is passed, `min_num` and `max_num` are not expected')
        self.num = num
        self.min_num = min_num
        self.max_num = max_num
        self.checkpoints = checkpoints
        # Template for the checkpoint key names; %s becomes START/END.
        self.log = 'ASSERT-NUM-COMMANDS-%s'
        if self.num is not None:
            self.log += '---EQ-%d' % self.num
        if self.min_num is not None:
            self.log += '---MIN-%d' % self.min_num
        if self.max_num is not None:
            self.log += '---MAX-%d' % self.max_num
    def __enter__(self):
        self.starting_commands = self.test_case.count_commands()
        if self.checkpoints:
            # Marker command visible in MONITOR output.
            self.test_case.connection.get(self.log % 'START')
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Don't assert anything if the body already failed.
        if exc_type is not None:
            return
        if self.checkpoints:
            self.test_case.connection.get(self.log % 'END')
        # we remove 1 to ignore the "info" called in __enter__
        # (count_commands issues an INFO command that gets counted too)
        final_commands = self.test_case.count_commands() - 1
        # also two for checkpoints
        if self.checkpoints:
            final_commands = final_commands - 2
        executed = final_commands - self.starting_commands
        if self.checkpoints and executed != self.num:
            # Extra marker so the failing count is visible in MONITOR.
            self.test_case.connection.get((self.log % 'END') + '---FAILED-%s' % executed)
        if self.num is not None:
            # Exact-count mode.
            self.test_case.assertEqual(
                executed, self.num, "%d commands executed, %d expected" % (
                    executed, self.num
                )
            )
        elif self.max_num is None:
            # Only a lower bound was given.
            self.test_case.assertTrue(
                executed >= self.min_num, "%d commands executed, at least %d expected" % (
                    executed, self.min_num
                )
            )
        elif self.min_num is None:
            # Only an upper bound was given.
            self.test_case.assertTrue(
                executed <= self.max_num, "%d commands executed, at max %d expected" % (
                    executed, self.max_num
                )
            )
        else:
            # Both bounds given: count must be inside the closed range.
            self.test_case.assertTrue(
                self.min_num <= executed <= self.max_num, "%d commands executed, expected to be at least %d and at max %d" % (
                    executed, self.min_num, self.max_num
                )
            )
|
nilq/baby-python
|
python
|
from .newton_divided_differences import NewtonDifDiv
from .larange import Larange
from .linear_spline import LinearSpline
from .quadratic_spline import QuadraticSpline
from .cubic_spline import CubicSpline
|
nilq/baby-python
|
python
|
# Copyright (c) 2020 Huawei Technologies Co.,Ltd.
#
# openGauss is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#
#     http://license.coscl.org.cn/MulanPSL2
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
# -*- coding: utf-8 -*-
"""
description: system variables or other constant information
"""
import os
import requests
import argparse
# Database and workload locations used across the rewrite benchmark.
# NOTE(review): credentials are hard-coded — move them to environment
# variables or a secrets store before using outside a test bench.
CONFIG = {
    'url': 'jdbc:postgresql://166.111.121.62:5432/',
    'host': '166.111.121.62',
    'port': 5432,
    'driver': 'org.postgresql.Driver',
    'username': 'postgres',
    'password': 'postgres',
    'schema': 'tpch1x',
    'sqldir': 'tpch',
    'logdir': 'rewrite_results'
}
class model_parameters():
    """Container for the default model training hyper-parameters.

    Every knob is exposed as an instance attribute so callers can tweak
    individual values after construction.
    """

    # name -> default value for every tunable parameter
    _DEFAULTS = {
        'cuda': False,          # run on GPU when True
        'fastmode': False,      # skip validation during training when True
        'seed': 42,             # RNG seed for reproducibility
        'epochs': 100,          # number of training epochs
        'lr': 0.01,             # learning rate
        'weight_decay': 5e-4,   # L2 regularisation strength
        'hidden': 16,           # hidden-layer width
        'dropout': 0.5,         # dropout probability
    }

    def __init__(self):
        for attr_name, default in self._DEFAULTS.items():
            setattr(self, attr_name, default)
def parse_cmd_args(argv=None):
    """Parse the benchmark command-line arguments.

    Args:
        argv: Optional list of argument strings. Defaults to ``None``,
            in which case argparse falls back to ``sys.argv[1:]``.
            Accepting an explicit list keeps the function testable and
            reusable without touching the process arguments.

    Returns:
        dict: mapping of argument names to their parsed values.
    """
    parser = argparse.ArgumentParser()
    # benchmark
    parser.add_argument('--iteration_num', type=int, default=230, help='')
    parser.add_argument('--workload_num', type=int, default=3184, help='The number of queries')
    parser.add_argument('--feature_num', type=int, default=2, help='The number of vertex features')
    parser.add_argument('--node_dim', type=int, default=30, help='The size of intermediate network layers')
    args = parser.parse_args(argv)
    argus = vars(args)
    return argus
|
nilq/baby-python
|
python
|
from setuptools import setup, find_packages
# Packaging metadata for the "arranger" CLI tool: installs the `arrange`
# script plus the `engine` package (sourced from src/engine).
setup(
    name='arranger',
    version='1.1.2',
    description="moves each file to its appropriate directory based on the file's extension.",
    author='j0eTheRipper',
    author_email='j0eTheRipper0010@gmail.com',
    url='https://github.com/j0eTheRipper/arranger',
    scripts=['src/arrange'],
    packages=['engine', 'engine.Extensions', 'engine.File', 'engine.DIR'],
    package_dir={'engine': 'src/engine'},
)
|
nilq/baby-python
|
python
|
"""
Copyright 2018 Duo Security
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
"""
import sys
import unittest
from unittest.mock import patch
from io import StringIO
from contextlib import contextmanager
from cloudtracker import (get_role_allowed_actions,
get_role_iam,
make_list,
normalize_api_call,
print_actor_diff,
print_diff,
Privileges,
read_aws_api_list)
@contextmanager
def capture(command, *args, **kwargs):
    """Run *command* and yield everything it printed to stdout as a string."""
    saved_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        command(*args, **kwargs)
        # Rewind the in-memory buffer and hand its contents to the caller.
        sys.stdout.seek(0)
        yield sys.stdout.read()
    finally:
        # Always restore the real stdout, even if *command* raised.
        sys.stdout = saved_stdout
class TestCloudtracker(unittest.TestCase):
    """Test class for cloudtracker.

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    has been replaced with ``assertEqual`` throughout.
    """
    aws_api_list = None
    def __init__(self, *args, **kwargs):
        super(TestCloudtracker, self).__init__(*args, **kwargs)
        self.aws_api_list = read_aws_api_list()
    def test_make_list(self):
        """Test make_list"""
        self.assertEqual(["hello"], make_list("hello"))
    def test_get_actions_from_statement(self):
        """Test get_actions_from_statement"""
        privileges = Privileges(self.aws_api_list)
        stmt = {"Action": ["s3:PutObject"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:putobject': True})
        stmt = {"Action": ["s3:PutObject*"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:putobject': True, 's3:putobjectacl': True, 's3:putobjecttagging': True})
        stmt = {"Action": ["s3:*ObjectT*"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:deleteobjecttagging': True,
                          's3:getobjecttagging': True,
                          's3:getobjecttorrent': True,
                          's3:putobjecttagging': True})
    def test_policy(self):
        """Test having multiple statements, some allowed, some denied"""
        privileges = Privileges(self.aws_api_list)
        # Create a privilege object with some allowed and denied
        stmt = {"Action": ["s3:*ObjectT*"], "Resource": "*", "Effect": "Allow"}
        privileges.add_stmt(stmt)
        stmt = {'Action': ['s3:GetObjectTagging', 's3:GetObjectTorrent'],
                "Resource": "*",
                "Effect": "Deny"}
        privileges.add_stmt(stmt)
        self.assertEqual(sorted(privileges.determine_allowed()),
                         sorted(['s3:putobjecttagging', 's3:deleteobjecttagging']))
    def test_get_actions_from_statement_with_resources(self):
        """
        Test that even when we are denied access to one resource,
        the actions are still marked as allowed.
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Action": "s3:*",
                "Effect": "Allow",
                "Resource": "*"
            },
            {
                "Action": "s3:CreateBucket",
                "Effect": "Deny",
                "Resource": "*"
            },
            {
                "Action": "s3:*",
                "Effect": "Deny",
                "Resource": [
                    "arn:aws:s3:::super-sensitive-bucket",
                    "arn:aws:s3:::super-sensitive-bucket/*"
                ]
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('s3:deletebucket' in privileges.determine_allowed())
        self.assertTrue('s3:createbucket' not in privileges.determine_allowed())
    def test_get_actions_from_statement_with_array_of_resources(self):
        """
        Test array of resources
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Action": "s3:*",
                "Effect": "Allow",
                "Resource": "*"
            },
            {
                "Action": "s3:CreateBucket",
                "Effect": "Deny",
                "Resource": ["arn:aws:s3:::super-sensitive-bucket", "*"]
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('s3:deletebucket' in privileges.determine_allowed())
        self.assertTrue('s3:createbucket' not in privileges.determine_allowed())
    def test_get_actions_from_statement_with_conditions(self):
        """
        Test that even when we are denied access based on a condition,
        the actions are still marked as allowed.
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Sid": "AllowAllActionsForEC2",
                "Effect": "Allow",
                "Action": "ec2:*",
                "Resource": "*"
            },
            {
                "Sid": "DenyStopAndTerminateWhenMFAIsNotPresent",
                "Effect": "Deny",
                "Action": [
                    "ec2:StopInstances",
                    "ec2:TerminateInstances"
                ],
                "Resource": "*",
                "Condition": {"BoolIfExists": {"aws:MultiFactorAuthPresent": False}}
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('ec2:startinstances' in privileges.determine_allowed())
        self.assertTrue('ec2:stopinstances' in privileges.determine_allowed())
    def test_normalize_api_call(self):
        """Test normalize_api_call"""
        # Ensure the numbers at the end are removed
        self.assertEqual(normalize_api_call('lambda', 'ListTags20170331'), 'lambda:listtags')
        # Ensure service renaming occurs
        self.assertEqual(normalize_api_call('monitoring', 'DescribeAlarms'), 'cloudwatch:describealarms')
    def test_print_actor_diff(self):
        """Test print_actor_diff"""
        with capture(print_actor_diff, [], [], False) as output:
            self.assertEqual('', output)
        # Test output when you have 3 configured users, but only two actually did anything
        with capture(print_actor_diff, ['alice', 'bob'], ['alice', 'bob', 'charlie'], False) as output:
            self.assertEqual('  alice\n  bob\n- charlie\n', output)
    def test_print_diff(self):
        """Test print_diff"""
        with capture(print_diff, [], [], {}, False) as output:
            self.assertEqual('', output)
        def mocked_is_recorded_by_cloudtrail(action):
            """Instead of reading the whole file, just cherry pick this one action used in the tests"""
            if action == 's3:putobject':
                return False
            return True
        # One action allowed, and performed, and should be shown
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket'],  # performed
                         ['s3:createbucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n', output)
        # 3 actions allowed, one is used, one is unused, and one is unknown; show all
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n- s3:deletebucket\n? s3:putobject\n', output)
        # Same as above, but only show the used one
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': True, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n', output)
        # Hide the unknown
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': False}, False) as output:
                self.assertEqual('  s3:createbucket\n- s3:deletebucket\n', output)
    # Role IAM policy to be used in different tests
    role_iam = {
        "AssumeRolePolicyDocument": {},
        "RoleId": "AROA00000000000000000",
        "CreateDate": "2017-01-01T00:00:00Z",
        "InstanceProfileList": [],
        "RoleName": "test_role",
        "Path": "/",
        "AttachedManagedPolicies": [],
        "RolePolicyList": [
            {
                "PolicyName": "KmsDecryptSecrets",
                "PolicyDocument": {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Action": [
                                "kms:DescribeKey",
                                "kms:Decrypt"
                            ],
                            "Resource": "*",
                            "Effect": "Allow",
                            "Sid": ""
                        }
                    ]
                }
            },
            {
                "PolicyName": "S3PutObject",
                "PolicyDocument": {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Action": [
                                "s3:PutObject",
                                "s3:PutObjectAcl",
                                "s3:ListBucket"
                            ],
                            "Resource": "*",
                            "Effect": "Allow"
                        }
                    ]
                }
            }
        ],
        "Arn": "arn:aws:iam::111111111111:role/test_role"
    }
    def test_get_role_iam(self):
        """Test get_role_iam"""
        account_iam = {
            "RoleDetailList": [self.role_iam],
            "UserDetailList": [],
            "GroupDetailList": [],
            "Policies": []
        }
        self.assertEqual(self.role_iam, get_role_iam("test_role", account_iam))
    def test_get_role_allowed_actions(self):
        """Test get_role_allowed_actions"""
        account_iam = {
            "RoleDetailList": [self.role_iam],
            "UserDetailList": [],
            "GroupDetailList": [],
            "Policies": []
        }
        aws_api_list = read_aws_api_list()
        self.assertEqual(sorted(['s3:putobject', 'kms:describekey', 'kms:decrypt', 's3:putobjectacl']),
                         sorted(get_role_allowed_actions(aws_api_list, self.role_iam, account_iam)))
|
nilq/baby-python
|
python
|
from django.apps import AppConfig
class ProntuariomedicoConfig(AppConfig):
    """Django application configuration for the prontuarioMedico app."""

    # Must match the app's package name so Django can locate it.
    name = 'prontuarioMedico'
|
nilq/baby-python
|
python
|
"""
Enables the user to add an "Image" plugin that displays an image
using the HTML <img> tag.
"""
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from cms.models import CMSPlugin
from cms.models.fields import PageField
from djangocms_attributes_field.fields import AttributesField
from easy_thumbnails.files import get_thumbnailer
from filer.fields.image import FilerImageField
from filer.models import ThumbnailOption
# add setting for picture alignment, renders a class or inline styles
# depending on your template setup
def get_alignment():
    """Return the picture alignment choices.

    Projects can override the defaults through the
    ``DJANGOCMS_PICTURE_ALIGN`` setting.
    """
    default_choices = (
        ('left', _('Align left')),
        ('right', _('Align right')),
        ('center', _('Align center')),
    )
    return getattr(settings, 'DJANGOCMS_PICTURE_ALIGN', default_choices)
# Add additional choices through the ``settings.py``.
def get_templates():
    """Return the template choices: the built-in default plus any extras
    declared in the ``DJANGOCMS_PICTURE_TEMPLATES`` setting."""
    extra_choices = getattr(settings, 'DJANGOCMS_PICTURE_TEMPLATES', [])
    return [('default', _('Default'))] + list(extra_choices)
# use golden ratio as default (https://en.wikipedia.org/wiki/Golden_ratio)
PICTURE_RATIO = getattr(settings, 'DJANGOCMS_PICTURE_RATIO', 1.6180)
# required for backwards compatibility
PICTURE_ALIGNMENT = get_alignment()
# choices for the HTML anchor "target" attribute on linked pictures
LINK_TARGET = (
    ('_blank', _('Open in new window')),
    ('_self', _('Open in same window')),
    ('_parent', _('Delegate to parent')),
    ('_top', _('Delegate to top')),
)
# per-instance responsive-image behaviour; 'inherit' defers to the
# DJANGOCMS_PICTURE_RESPONSIVE_IMAGES project setting
RESPONSIVE_IMAGE_CHOICES = (
    ('inherit', _('Let settings.DJANGOCMS_PICTURE_RESPONSIVE_IMAGES decide')),
    ('yes', _('Yes')),
    ('no', _('No')),
)
class AbstractPicture(CMSPlugin):
    """
    Renders an image with the option of adding a link.

    Abstract CMS plugin model: holds either an uploaded (filer) image or an
    external image URL, optional link settings, and a set of mutually
    exclusive cropping/scaling options validated in ``clean()``.
    """
    template = models.CharField(
        verbose_name=_('Template'),
        choices=get_templates(),
        default=get_templates()[0][0],
        max_length=255,
    )
    picture = FilerImageField(
        verbose_name=_('Image'),
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name='+',
    )
    external_picture = models.URLField(
        verbose_name=_('External image'),
        blank=True,
        null=True,
        max_length=255,
        help_text=_(
            'If provided, overrides the embedded image. '
            'Certain options such as cropping are not applicable to external images.'
        )
    )
    width = models.PositiveIntegerField(
        verbose_name=_('Width'),
        blank=True,
        null=True,
        help_text=_(
            'The image width as number in pixels. '
            'Example: "720" and not "720px".'
        ),
    )
    height = models.PositiveIntegerField(
        verbose_name=_('Height'),
        blank=True,
        null=True,
        help_text=_(
            'The image height as number in pixels. '
            'Example: "720" and not "720px".'
        ),
    )
    alignment = models.CharField(
        verbose_name=_('Alignment'),
        choices=get_alignment(),
        blank=True,
        max_length=255,
        help_text=_('Aligns the image according to the selected option.'),
    )
    caption_text = models.TextField(
        verbose_name=_('Caption text'),
        blank=True,
        null=True,
        help_text=_('Provide a description, attribution, copyright or other information.')
    )
    attributes = AttributesField(
        verbose_name=_('Attributes'),
        blank=True,
        excluded_keys=['src', 'width', 'height'],
    )
    # link models
    link_url = models.URLField(
        verbose_name=_('External URL'),
        blank=True,
        null=True,
        max_length=2040,
        help_text=_('Wraps the image in a link to an external URL.'),
    )
    link_page = PageField(
        verbose_name=_('Internal URL'),
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        help_text=_('Wraps the image in a link to an internal (page) URL.'),
    )
    link_target = models.CharField(
        verbose_name=_('Link target'),
        choices=LINK_TARGET,
        blank=True,
        max_length=255,
    )
    link_attributes = AttributesField(
        verbose_name=_('Link attributes'),
        blank=True,
        excluded_keys=['href', 'target'],
    )
    # cropping models
    # active per default
    use_automatic_scaling = models.BooleanField(
        verbose_name=_('Automatic scaling'),
        blank=True,
        default=True,
        help_text=_('Uses the placeholder dimensions to automatically calculate the size.'),
    )
    # ignores all other cropping options
    # throws validation error if other cropping options are selected
    use_no_cropping = models.BooleanField(
        verbose_name=_('Use original image'),
        blank=True,
        default=False,
        help_text=_('Outputs the raw image without cropping.'),
    )
    # upscale and crop work together
    # throws validation error if other cropping options are selected
    use_crop = models.BooleanField(
        verbose_name=_('Crop image'),
        blank=True,
        default=False,
        help_text=_('Crops the image according to the thumbnail settings provided in the template.'),
    )
    use_upscale = models.BooleanField(
        verbose_name=_('Upscale image'),
        blank=True,
        default=False,
        help_text=_('Upscales the image to the size of the thumbnail settings in the template.')
    )
    use_responsive_image = models.CharField(
        verbose_name=_('Use responsive image'),
        max_length=7,
        choices=RESPONSIVE_IMAGE_CHOICES,
        default=RESPONSIVE_IMAGE_CHOICES[0][0],
        help_text=_(
            'Uses responsive image technique to choose better image to display based upon screen viewport. '
            'This configuration only applies to uploaded images (external pictures will not be affected). '
        )
    )
    # overrides all other options
    # throws validation error if other cropping options are selected
    thumbnail_options = models.ForeignKey(
        ThumbnailOption,
        verbose_name=_('Thumbnail options'),
        blank=True,
        null=True,
        help_text=_('Overrides width, height, and crop; scales up to the provided preset dimensions.'),
        on_delete=models.CASCADE,
    )
    # Add an app namespace to related_name to avoid field name clashes
    # with any other plugins that have a field with the same name as the
    # lowercase of the class name of this model.
    # https://github.com/divio/django-cms/issues/5030
    cmsplugin_ptr = models.OneToOneField(
        CMSPlugin,
        related_name='%(app_label)s_%(class)s',
        parent_link=True,
        on_delete=models.CASCADE,
    )
    class Meta:
        abstract = True
    def __str__(self):
        # Prefer the filer image label; fall back to the primary key.
        if self.picture and self.picture.label:
            return self.picture.label
        return str(self.pk)
    def get_short_description(self):
        """Return a short label for the admin/structure view."""
        if self.external_picture:
            return self.external_picture
        if self.picture and self.picture.label:
            return self.picture.label
        return gettext('<file is missing>')
    def copy_relations(self, oldinstance):
        """Carry the filer-image reference over when the plugin is copied."""
        # Because we have a ForeignKey, it's required to copy over
        # the reference from the instance to the new plugin.
        self.picture = oldinstance.picture
    def get_size(self, width=None, height=None):
        """Resolve the thumbnail options (size/crop/upscale) for rendering.

        Precedence: explicit thumbnail_options preset, then the manual
        width/height fields (when automatic scaling is off), then the
        width/height passed in by the caller.
        """
        crop = self.use_crop
        upscale = self.use_upscale
        # use field thumbnail settings
        if self.thumbnail_options:
            width = self.thumbnail_options.width
            height = self.thumbnail_options.height
            crop = self.thumbnail_options.crop
            upscale = self.thumbnail_options.upscale
        elif not self.use_automatic_scaling:
            width = self.width
            height = self.height
        # calculate height when not given according to the
        # golden ratio or fallback to the picture size
        if not height and width:
            height = int(width / PICTURE_RATIO)
        elif not width and height:
            width = int(height * PICTURE_RATIO)
        elif not width and not height and self.picture:
            width = self.picture.width
            height = self.picture.height
        options = {
            'size': (width, height),
            'crop': crop,
            'upscale': upscale,
        }
        return options
    def get_link(self):
        """Return the URL the picture links to, or False when unlinked."""
        if self.link_url:
            return self.link_url
        elif self.link_page_id:
            return self.link_page.get_absolute_url(language=self.language)
        elif self.external_picture:
            return self.external_picture
        return False
    def clean(self):
        """Validate link exclusivity, image presence, and cropping options."""
        # there can be only one link type
        if self.link_url and self.link_page_id:
            raise ValidationError(
                gettext(
                    'You have given both external and internal links. '
                    'Only one option is allowed.'
                )
            )
        # you shall only set one image kind
        if not self.picture and not self.external_picture:
            raise ValidationError(
                gettext(
                    'You need to add either an image, '
                    'or a URL linking to an external image.'
                )
            )
        # certain cropping options do not work together, the following
        # list defines the disallowed options used in the ``clean`` method
        invalid_option_pairs = [
            ('use_automatic_scaling', 'use_no_cropping'),
            ('use_automatic_scaling', 'thumbnail_options'),
            ('use_no_cropping', 'use_crop'),
            ('use_no_cropping', 'use_upscale'),
            ('use_no_cropping', 'thumbnail_options'),
            ('thumbnail_options', 'use_crop'),
            ('thumbnail_options', 'use_upscale'),
        ]
        # invalid_option_pairs
        invalid_option_pair = None
        for pair in invalid_option_pairs:
            if getattr(self, pair[0]) and getattr(self, pair[1]):
                invalid_option_pair = pair
                break
        if invalid_option_pair:
            message = gettext(
                'Invalid cropping settings. '
                'You cannot combine "{field_a}" with "{field_b}".'
            )
            message = message.format(
                field_a=self._meta.get_field(invalid_option_pair[0]).verbose_name,
                field_b=self._meta.get_field(invalid_option_pair[1]).verbose_name,
            )
            raise ValidationError(message)
    @property
    def is_responsive_image(self):
        """Whether responsive-image markup should be rendered for this plugin."""
        if self.external_picture:
            return False
        if self.use_responsive_image == 'inherit':
            return getattr(settings, 'DJANGOCMS_PICTURE_RESPONSIVE_IMAGES', False)
        return self.use_responsive_image == 'yes'
    @property
    def img_srcset_data(self):
        """Return (viewport width, thumbnail) pairs for srcset, or None."""
        if not (self.picture and self.is_responsive_image):
            return None
        srcset = []
        thumbnailer = get_thumbnailer(self.picture)
        picture_options = self.get_size(self.width, self.height)
        picture_width = picture_options['size'][0]
        thumbnail_options = {'crop': picture_options['crop']}
        breakpoints = getattr(
            settings,
            'DJANGOCMS_PICTURE_RESPONSIVE_IMAGES_VIEWPORT_BREAKPOINTS',
            [576, 768, 992],
        )
        # only generate thumbnails smaller than the rendered picture itself
        for size in filter(lambda x: x < picture_width, breakpoints):
            thumbnail_options['size'] = (size, size)
            srcset.append((int(size), thumbnailer.get_thumbnail(thumbnail_options)))
        return srcset
    @property
    def img_src(self):
        """Return the URL to render in the <img> "src" attribute."""
        # we want the external picture to take priority by design
        # please open a ticket if you disagree for an open discussion
        if self.external_picture:
            return self.external_picture
        # picture can be empty, for example when the image is removed from filer
        # in this case we want to return an empty string to avoid #69
        elif not self.picture:
            return ''
        # return the original, unmodified picture
        elif self.use_no_cropping:
            return self.picture.url
        picture_options = self.get_size(
            width=self.width or 0,
            height=self.height or 0,
        )
        thumbnail_options = {
            'size': picture_options['size'],
            'crop': picture_options['crop'],
            'upscale': picture_options['upscale'],
            'subject_location': self.picture.subject_location,
        }
        thumbnailer = get_thumbnailer(self.picture)
        return thumbnailer.get_thumbnail(thumbnail_options).url
class Picture(AbstractPicture):
    """Concrete picture plugin model backing the default database table."""

    class Meta:
        abstract = False
|
nilq/baby-python
|
python
|
import lark
import copy
import torch
class LogicParser:
    """ This class defines the grammar of the STL according to
    the EBNF syntax and builds the AST accordingly.
    """
    # Lark EBNF grammar: atoms are comparisons between a variable and a
    # constant/variable; propositions compose via !, & and |; F/G are the
    # temporal "finally"/"globally" operators.
    _grammar = """
    start: prop
    prop: VAR CMP (CONST | VAR) -> atom
        | _NOT "(" prop ")" -> op_not
        | (prop _OR)+ prop -> op_or
        | (prop _AND)+ prop -> op_and
        | ltl_op "(" prop ")" -> operator
    ltl_op: letter
    letter: LTL_OPERATOR
    _NOT: "!"
    _AND: "&"
    _OR: "|"
    LTL_OPERATOR : ("F" | "G")
    CMP: ("<=" | "<" | ">=" | ">" | "!=" | "==")
    VAR: /[a-z_]+/
    CONST: SIGNED_NUMBER
    %import common.INT
    %import common.DECIMAL
    %import common.SIGNED_NUMBER
    %import common.WORD
    %import common.WS
    %ignore WS
    """
    def __init__(self, formula):
        # Parse eagerly so grammar errors surface at construction time.
        parser = lark.Lark(self._grammar)
        self._tree = parser.parse(formula)
    @property
    def parse_tree(self):
        # Return a deep copy so callers cannot mutate the cached AST.
        return copy.deepcopy(self._tree)
    def __str__(self):
        return self._tree.pretty()
class Functions:
    """Whitelisted operations callable from the code generated off the AST.

    Each static method implements one STL connective under the
    quantitative (robustness) semantics using torch primitives.
    """

    @staticmethod
    def and_(a, b):
        """Conjunction: element-wise minimum of the two robustness values."""
        return torch.min(a, b)

    @staticmethod
    def or_(a, b):
        """Disjunction: element-wise maximum of the two robustness values."""
        return torch.max(a, b)

    @staticmethod
    def not_(x):
        """Negation: sign flip of the robustness value."""
        return -x

    @staticmethod
    def finally_(f):
        """Temporal 'eventually' (F): maximum over the signal."""
        return torch.max(f)

    @staticmethod
    def globally_(f):
        """Temporal 'always' (G): minimum over the signal."""
        return torch.min(f)
@lark.v_args(inline=True)
class _CodeBuilder(lark.Transformer):
    """ Set of rules to traverse the AST and build a customized formula.
    Basically it rewrites a formula starting from the AST to have
    fine control on the operations that will be carried out by the
    specific semantic.
    """
    def atom(self, *args):
        """Translate a comparison atom into a robustness expression string."""
        operand_a, operator, operand_b = args
        if operator == '>=':
            return f'{operand_a} - {operand_b}'
        elif operator == '>':
            raise NotImplementedError
        elif operator == '<=':
            return f'{operand_b} - {operand_a}'
        elif operator == '<':
            raise NotImplementedError
        elif operator in ('==', '!='):
            # BUG FIX: was `operator == '==' or '!='`, which is always truthy
            # because the non-empty string '!=' is true on its own; use a
            # membership test instead.
            raise NotImplementedError
    def op_not(self, preposition):
        """Wrap a proposition in the negation helper."""
        return 'fn.not_(' + preposition + ')'
    def op_and(self, preposition_a, preposition_b):
        """Combine two propositions with the conjunction helper."""
        args = [preposition_a, preposition_b]
        return 'fn.and_(' + ', '.join(args) + ')'
    def op_or(self, preposition_a, preposition_b):
        """Combine two propositions with the disjunction helper."""
        args = [preposition_a, preposition_b]
        return 'fn.or_(' + ', '.join(args) + ')'
    def ltl_op(self, *parameters):
        """Collect the temporal-operator letters as plain strings."""
        return list(map(lambda x: str(x.children[0]), parameters))
    def operator(self, params, preposition):
        """Translate an F/G temporal operator applied to a proposition."""
        if len(params) > 1:
            # bounded/multi-letter operators are not supported
            raise NotImplementedError
        else:
            letter = params[0]
            operator_args = [preposition]
            # Grammar restricts LTL_OPERATOR to "F" | "G", so one branch matches.
            if letter == 'F':
                function = 'fn.finally_'
            elif letter == 'G':
                function = 'fn.globally_'
            return function + '(' + ', '.join(operator_args) + ')'
    def start(self, preposition):
        """Return the fully rewritten formula as a string."""
        return str(preposition)
class DiffQuantitativeSemantic:
    """API used to build an STL formula and evaluate it on arbitrary
    signals according to the quantitative semantics."""

    def __init__(self, logic_formula):
        """Accept a formula string or a pre-built LogicParser, then compile."""
        self.logic_parser = (
            LogicParser(logic_formula)
            if isinstance(logic_formula, str)
            else logic_formula
        )
        self._code = self._build()

    def _build(self):
        """Rewrite the parse tree into an evaluable expression string."""
        return _CodeBuilder().transform(self.logic_parser.parse_tree)

    def compute(self, **signals):
        """Evaluate the compiled formula against the given named signals.

        NOTE(review): relies on eval() of generated code — only safe while
        formulas come from trusted sources.
        """
        scope = dict(signals)
        scope['fn'] = Functions
        return eval(self._code, scope)

    def __str__(self):
        return self._code
|
nilq/baby-python
|
python
|
from dash import Input, Output, callback
from dash import dcc
import dash.html as html
import dash_bootstrap_components as dbc
from pages.constants import TITLE_STYLE, PARAGRAPH_STYLE, IMG_STYLE
from utils.topic_crud import TopicCRUD
import plotly.express as px
df = px.data.iris() # iris is a pandas DataFrame
fig = px.scatter(df, x="sepal_width", y="sepal_length")
topic_plotter = TopicCRUD()
item_1 = [
"This is the content of the first section",
dcc.Graph(figure=fig)
]
topic_2_accordion = [
'This is the topics for the accordion',
html.Div([
dcc.Slider(min=1, max=25, step=1,
id='second-topic-slider',
value=10,
tooltip={"placement": "bottom", "always_visible": True}
)]),
dcc.Graph(id='second-topic-figure')
]
topics_by_city_accordion = [
'This is the topics for the accordion',
dcc.Graph(id='topic-cities-figure')
]
topic_presence_accordion = [
'This is the topics for the accordion',
dcc.Graph(id='topic-presence-figure')
]
topic_word_relevance = [
'This is the topics for the accordion',
dcc.Input(id='word-presence-input'),
dcc.Graph(id='topic-words-figure')
]
topic_speech_topics = [
'This is the topics for the accordion',
dcc.Dropdown(),
dcc.Graph(id='topic-speeches-figure')
]
accordion = dbc.Accordion(
[
dbc.AccordionItem(
item_1, title="Topic - Key words"
),
dbc.AccordionItem(
topic_2_accordion, title="Secondary topic"
),
dbc.AccordionItem(
topics_by_city_accordion, title="Topic location"
),
dbc.AccordionItem(
topic_word_relevance, title="Word relevance"
),
dbc.AccordionItem(
topic_speech_topics, title="Important topics by speach"
),
],
start_collapsed=True,
always_open=True,
flush=True
)
body = dbc.Container([
# Title page
dbc.Row(
[
html.H1(
'Conclusions', style=TITLE_STYLE)
],
justify='center',
align='center',
),
# Image of obama,
dbc.Row(
[
dbc.Col(
[
html.Img(
src='assets/obama-farewell.jpg',
style=IMG_STYLE
),
dcc.Markdown('''_President Obama Caps Long Goodbye With Farewell Speech
Copyright: Copyright 2017 The Associated Press. All rights reserved._
''', style=TITLE_STYLE)
],
align='center',
)
],
align='center'
),
# Contains
dbc.Row(
[
dcc.Markdown('''
What are the variables of Obama\'s speeches? Our answers based on the analyses are:
- Obama tends to be more negative when he talks about foreign conflict and terrorism, gun violence, the economy, immigration, and civil rights.
- Obama tends to be more positive when he talks about elections, education, faith and family.
- The overall mean for the sentiment is more positive (0.1).
- All topics have a positive sentiment, the "more negative" topics are below the overall average, but their mean sentiment score is positive.
- Obama's job (dis)approval tracker does not have an impact on the sentiment of his speeches. But more dynamics in the tracker coincides with his 2nd presidential term (2013-2017).
- Gun deaths by assault has a negative effect on the sentiment of his speeches. More deaths associate with more negative sentiments in the speeches.
- The positively improving people's perception of the financial situation and job market in the US coincides with Obama's increasingly positive speeches related to economy.
'''),
],
justify='center',
align='center',
),
# dbc.Row(
# accordion
# )
], style={'height': '100%'})
layout = html.Div([
body
])
|
nilq/baby-python
|
python
|
#!/usr/bin/python
class race:
    """Minimal record type holding a race title."""

    def __init__(self, t):
        # Store the supplied title verbatim.
        self.title = t

    # ACCESSORS
    def titleReturn(self):
        """Return the stored title."""
        return self.title
|
nilq/baby-python
|
python
|
import glob
import setuptools
# Read the long description and pinned requirements from the repo files.
with open("README.md", "r") as fh:
    long_description = fh.read()
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
# Packaging metadata for the mldiag framework, including bundled resources,
# example data files and the two console entry points.
setuptools.setup(
    name="mldiag",
    version="0.0.1",
    author="Aymen SHABOU",
    author_email="aymen.shabou@gmail.com",
    description="A framework to diagnose ML models",
    long_description=long_description,
    long_description_content_type="text/markdown",
    include_package_data=True,
    url="https://github.com/AI-MEN/MLDiag/blob/master/mldiag",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.7',
    keywords=["diagnose", "machine learning", "deep learning", "augmenter", "tensorflow", "pytorch", "scikit-learn"],
    install_requires=requirements,
    data_files=[('resources', ['resources/ml-diag.css', 'resources/ml-diag.jpg']),
                ('examples/text_classification', glob.glob('examples/text_classification/*', ))
                ],
    entry_points={
        'console_scripts': ['mldiag_test=examples.tf_text_classification_diag:main',
                            'mldiag=mldiag.cli:diag'
                            ],
    }
)
'''
'''
# twine upload --repository-url https://upload.pypi.org/legacy/ dist/*
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import roslib
import rospy
import tf
from geometry_msgs.msg import TransformStamped
from posedetection_msgs.msg import ObjectDetection
def handle_pose(msg):
    """Re-broadcast the first detected object's pose as a TF transform.

    Publishes "checker_marker_frame" relative to
    "camera_color_optical_frame", stamped with the detection's own time.
    """
    # NOTE(review): a new TransformBroadcaster is constructed on every
    # callback; hoisting it to module scope would avoid repeated setup —
    # confirm before changing, since that alters import-time behaviour.
    br = tf.TransformBroadcaster()
    # Nothing detected in this message: nothing to broadcast.
    if len(msg.objects)==0:
        return
    p = msg.objects[0].pose
    br.sendTransform((p.position.x, p.position.y, p.position.z),
                     (p.orientation.x, p.orientation.y, p.orientation.z, p.orientation.w),
                     msg.header.stamp,
                     "checker_marker_frame",
                     "camera_color_optical_frame")
if __name__ == '__main__':
    # Node entry point: subscribe to the checkerboard detector and forward
    # each detection to handle_pose until shutdown.
    rospy.init_node('marker_tf_broadcaster')
    rospy.Subscriber('/checkerdetector/ObjectDetection',
                     ObjectDetection,
                     handle_pose)
    rospy.spin()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
#
# Author: Jeremy Compostella <jeremy.compostella@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
from select import select
import Pyro5.api
from sensor import Sensor
from tools import NameServer, Settings, debug, init, log_exception
DEFAULT_SETTINGS = {'max_loop_duration': 5}
MODULE_NAME = 'monitor'
class Monitor(Sensor):
    """Pyro-exposed sensor that tracks named boolean states."""
    def __init__(self):
        # name -> latest boolean state reported through track()
        self._states = {}
    @Pyro5.api.expose
    def track(self, name, state):
        '''Update or start tracking "name" with current value "state"'''
        if not isinstance(state, bool):
            raise TypeError('state must be a boolean')
        self._states[name] = state
    @Pyro5.api.expose
    def read(self, **kwargs):
        """Return the mapping of tracked names to their current states."""
        return self._states
    @Pyro5.api.expose
    def units(self, **kwargs):
        """Report every tracked value's unit as 'binary'."""
        # Only the keys are needed, so iterate the dict directly instead of
        # unpacking unused values from .items().
        return {key: 'binary' for key in self._states}
class MonitorProxy:
    '''Helper class for monitor service users.

    This class is a wrapper with exception handler of the monitor service. It
    provides convenience for modules using the monitor by suppressing the
    burden of locating the monitor and handling the various remote object
    related errors.
    '''

    def __init__(self, max_attempt=2):
        # Lazily-located remote monitor proxy; reset to None on failure so the
        # next call re-resolves it through the nameserver.
        self._monitor = None
        self.max_attempt = max_attempt

    def track(self, *args):
        """Forward track(*args) to the monitor, retrying up to max_attempt
        times; errors are only logged on the final attempt."""
        for attempt in range(self.max_attempt):
            if not self._monitor:
                try:
                    self._monitor = NameServer().locate_service(MODULE_NAME)
                except Pyro5.errors.NamingError:
                    if attempt == self.max_attempt - 1:
                        log_exception('Failed to locate the monitor',
                                      *sys.exc_info())
                except Pyro5.errors.CommunicationError:
                    if attempt == self.max_attempt - 1:
                        log_exception('Cannot communicate with the nameserver',
                                      *sys.exc_info())
            if self._monitor:
                try:
                    self._monitor.track(*args)
                    # Bug fix: stop once the remote call succeeded. The
                    # original fell through and re-sent the same update on
                    # every remaining attempt.
                    return
                except Pyro5.errors.PyroError:
                    if attempt == self.max_attempt - 1:
                        log_exception('Communication failed with the monitor',
                                      *sys.exc_info())
                    self._monitor = None
def main():
    """Entry point: expose a Monitor object over Pyro5 and keep it
    registered with the nameserver while serving requests."""
    # pylint: disable=too-many-locals
    base = os.path.splitext(__file__)[0]
    init(base + '.log')
    settings = Settings(base + '.ini', DEFAULT_SETTINGS)
    Pyro5.config.MAX_RETRIES = 3
    daemon = Pyro5.api.Daemon()
    nameserver = NameServer()
    uri = daemon.register(Monitor())
    nameserver.register_sensor(MODULE_NAME, uri)
    nameserver.register_service(MODULE_NAME, uri)
    debug("... is now ready to run")
    while True:
        try:
            # Re-register on every turn so the service reappears if the
            # nameserver was restarted in the meantime.
            nameserver.register_sensor(MODULE_NAME, uri)
            nameserver.register_service(MODULE_NAME, uri)
        except RuntimeError:
            # Bug fix: the message said "watchdog" (copy-paste from another
            # module); this is the monitor service.
            log_exception('Failed to register the monitor service',
                          *sys.exc_info())
        # Wait for incoming Pyro requests, at most max_loop_duration seconds.
        sockets, _, _ = select(daemon.sockets, [], [],
                               # pylint: disable=maybe-no-member
                               settings.max_loop_duration)
        if sockets:
            daemon.events(sockets)
if __name__ == "__main__":
    main()  # run the monitor service when executed as a script
|
nilq/baby-python
|
python
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def addTwoNumbers(self, l1, l2):
        """
        Add two non-negative integers stored as reversed-digit linked lists
        and return the sum in the same representation.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        carry = 0
        # Dummy node; the answer starts at result.next.
        result = head = ListNode(0)
        while l1 or l2 or carry:
            # Use None (not 0) as the exhausted-list sentinel, and tuples
            # instead of throwaway lists for the unpacking.
            l1, v1 = (l1.next, l1.val) if l1 else (None, 0)
            l2, v2 = (l2.next, l2.val) if l2 else (None, 0)
            carry, digit = divmod(v1 + v2 + carry, 10)
            head.next = ListNode(digit)
            head = head.next
        return result.next
|
nilq/baby-python
|
python
|
"""
Merged String Checker
http://www.codewars.com/kata/54c9fcad28ec4c6e680011aa/train/python
"""
def is_merge(s, part1, part2):
    """Return True if s can be formed by interleaving part1 and part2.

    A valid merge uses every character of both parts exactly once and
    preserves the relative character order within each part.

    Bug fix: the previous implementation only checked that each part was a
    subsequence of s and that the character multisets matched, which accepts
    impossible merges (e.g. s='cabc', part1='ac', part2='bc' has no valid
    interleaving but passed both checks). This version does a dynamic
    programming sweep over s, tracking every reachable split (i, j) meaning
    "s[:i+j] is a merge of part1[:i] and part2[:j]".
    """
    if len(s) != len(part1) + len(part2):
        return False
    states = {(0, 0)}
    for ch in s:
        nxt = set()
        for i, j in states:
            if i < len(part1) and part1[i] == ch:
                nxt.add((i + 1, j))
            if j < len(part2) and part2[j] == ch:
                nxt.add((i, j + 1))
        if not nxt:
            return False
        states = nxt
    return True
|
nilq/baby-python
|
python
|
import os
import docker.errors
import pandas as pd
import pytest
from ebonite.build.docker import create_docker_client, is_docker_running
from ebonite.core.objects.core import Model
from sklearn.linear_model import LinearRegression
from tests.client.test_func import func
def has_docker():
    """Whether docker-based tests should run: not explicitly skipped via the
    SKIP_DOCKER_TESTS environment variable, and the docker daemon is up."""
    skip_requested = os.environ.get('SKIP_DOCKER_TESTS', None) == 'true'
    return False if skip_requested else is_docker_running()
def has_local_image(img_name: str) -> bool:
    """Return True if docker is usable and the image is already present
    locally (no pull is attempted)."""
    if not has_docker():
        return False
    with create_docker_client() as client:
        try:
            client.images.get(img_name)
        except docker.errors.ImageNotFound:
            return False
        else:
            return True
def rm_container(container_name: str, host: str = ''):
    """Force-remove the named container on the given docker host if it is
    currently listed; do nothing otherwise."""
    with create_docker_client(host) as client:
        listed_names = {c.name for c in client.containers.list()}
        if container_name in listed_names:
            client.containers.get(container_name).remove(force=True)
def rm_image(image_tag: str, host: str = ''):
    """Force-remove the image on the given docker host if any local image
    carries image_tag; do nothing otherwise."""
    with create_docker_client(host) as client:
        known_tags = {tag for image in client.images.list() for tag in image.tags}
        if image_tag in known_tags:
            client.images.remove(image_tag, force=True)
def train_model():
    """Fit a trivial linear regression on a two-row frame.

    Returns the fitted regressor together with the training DataFrame.
    """
    frame = pd.DataFrame([[1, 1], [2, 1]], columns=['a', 'b'])
    regressor = LinearRegression()
    regressor.fit(frame, [1, 0])
    return regressor, frame
@pytest.fixture
def model():
    """Pytest fixture: a Model wrapping the shared test function."""
    return Model.create(func, "kek", "Test Model")
|
nilq/baby-python
|
python
|
from ..value_set import ValueSet
class BmiRatio(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent a body mass index (BMI) ratio.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with or related to BMI ratio.
    **Exclusion Criteria:** No exclusions.
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113883.3.600.1.1490'
    VALUE_SET_NAME = 'BMI Ratio'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # Single LOINC code in this expansion (body mass index ratio).
    LOINC = {
        '39156-5'
    }
class CabgPciProcedure(ValueSet):
    """
    **Clinical Focus:** CABG and PCI procedures
    **Data Element Scope:** CABG and PCI procedures
    **Inclusion Criteria:** Codes from 2018_Registry_SingleSource_v2.2
    **Exclusion Criteria:** None
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113762.1.4.1138.566'
    VALUE_SET_NAME = 'CABG, PCI Procedure'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes in this expansion.
    CPT = {
        '33510',
        '33511',
        '33512',
        '33513',
        '33514',
        '33516',
        '33517',
        '33518',
        '33519',
        '33521',
        '33522',
        '33523',
        '33533',
        '33534',
        '33535',
        '33536',
        '92920',
        '92924',
        '92928',
        '92933',
        '92937',
        '92941',
        '92943'
    }
    # HCPCS Level II codes in this expansion.
    HCPCSLEVELII = {
        'S2205',
        'S2206',
        'S2207',
        'S2208',
        'S2209'
    }
class CabgSurgeries(ValueSet):
"""
**Clinical Focus:** This value set grouping contains concepts that represent coronary artery bypass (CABG) surgical procedures.
**Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure. The intent of this data element is to identify patients who have a CABG surgical procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with CABG surgical procedures. This is a grouping of SNOMED CT, ICD-9-CM, and ICD-10-CM codes.
**Exclusion Criteria:** Excludes codes that represent a CABG performed using a scope.
"""
OID = '2.16.840.1.113883.3.666.5.694'
VALUE_SET_NAME = 'CABG Surgeries'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
ICD10PCS = {
'0210083',
'0210088',
'0210089',
'021008C',
'021008F',
'021008W',
'0210093',
'0210098',
'0210099',
'021009C',
'021009F',
'021009W',
'02100A3',
'02100A8',
'02100A9',
'02100AC',
'02100AF',
'02100AW',
'02100J3',
'02100J8',
'02100J9',
'02100JC',
'02100JF',
'02100JW',
'02100K3',
'02100K8',
'02100K9',
'02100KC',
'02100KF',
'02100KW',
'02100Z3',
'02100Z8',
'02100Z9',
'02100ZC',
'02100ZF',
'0210483',
'0210488',
'0210489',
'021048C',
'021048F',
'021048W',
'0210493',
'0210498',
'0210499',
'021049C',
'021049F',
'021049W',
'02104A3',
'02104A8',
'02104A9',
'02104AC',
'02104AF',
'02104AW',
'02104J3',
'02104J8',
'02104J9',
'02104JC',
'02104JF',
'02104JW',
'02104K3',
'02104K8',
'02104K9',
'02104KC',
'02104KF',
'02104KW',
'02104Z3',
'02104Z8',
'02104Z9',
'02104ZC',
'02104ZF',
'0211083',
'0211088',
'0211089',
'021108C',
'021108F',
'021108W',
'0211093',
'0211098',
'0211099',
'021109C',
'021109F',
'021109W',
'02110A3',
'02110A8',
'02110A9',
'02110AC',
'02110AF',
'02110AW',
'02110J3',
'02110J8',
'02110J9',
'02110JC',
'02110JF',
'02110JW',
'02110K3',
'02110K8',
'02110K9',
'02110KC',
'02110KF',
'02110KW',
'02110Z3',
'02110Z8',
'02110Z9',
'02110ZC',
'02110ZF',
'0211483',
'0211488',
'0211489',
'021148C',
'021148F',
'021148W',
'0211493',
'0211498',
'0211499',
'021149C',
'021149F',
'021149W',
'02114A3',
'02114A8',
'02114A9',
'02114AC',
'02114AF',
'02114AW',
'02114J3',
'02114J8',
'02114J9',
'02114JC',
'02114JF',
'02114JW',
'02114K3',
'02114K8',
'02114K9',
'02114KC',
'02114KF',
'02114KW',
'02114Z3',
'02114Z8',
'02114Z9',
'02114ZC',
'02114ZF',
'0212083',
'0212088',
'0212089',
'021208C',
'021208F',
'021208W',
'0212093',
'0212098',
'0212099',
'021209C',
'021209F',
'021209W',
'02120A3',
'02120A8',
'02120A9',
'02120AC',
'02120AF',
'02120AW',
'02120J3',
'02120J8',
'02120J9',
'02120JC',
'02120JF',
'02120JW',
'02120K3',
'02120K8',
'02120K9',
'02120KC',
'02120KF',
'02120KW',
'02120Z3',
'02120Z8',
'02120Z9',
'02120ZC',
'02120ZF',
'0212488',
'0212489',
'021248C',
'021248F',
'021248W',
'0212493',
'0212498',
'0212499',
'021249C',
'021249F',
'021249W',
'02124A3',
'02124A8',
'02124A9',
'02124AC',
'02124AF',
'02124AW',
'02124J3',
'02124J8',
'02124J9',
'02124JC',
'02124JF',
'02124JW',
'02124K3',
'02124K8',
'02124K9',
'02124KC',
'02124KF',
'02124KW',
'02124Z3',
'02124Z8',
'02124Z9',
'02124ZC',
'02124ZF',
'0213083',
'0213088',
'0213089',
'021308C',
'021308F',
'021308W',
'0213093',
'0213098',
'0213099',
'021309C',
'021309F',
'021309W',
'02130A3',
'02130A8',
'02130A9',
'02130AC',
'02130AF',
'02130AW',
'02130J3',
'02130J8',
'02130J9',
'02130JC',
'02130JF',
'02130JW',
'02130K3',
'02130K8',
'02130K9',
'02130KC',
'02130KF',
'02130KW',
'02130Z3',
'02130Z8',
'02130Z9',
'02130ZC',
'02130ZF',
'0213483',
'0213488',
'0213489',
'021348C',
'021348F',
'021348W',
'0213493',
'0213498',
'0213499',
'021349C',
'021349F',
'021349W',
'02134A3',
'02134A8',
'02134A9',
'02134AC',
'02134AF',
'02134AW',
'02134J3',
'02134J8',
'02134J9',
'02134JC',
'02134JF',
'02134JW',
'02134K3',
'02134K8',
'02134K9',
'02134KC',
'02134KF',
'02134KW',
'02134Z3',
'02134Z8',
'02134Z9',
'02134ZC',
'02134ZF'
}
ICD9CM = {
'3610',
'3611',
'3612',
'3613',
'3614',
'3615',
'3616',
'3617',
'3619'
}
SNOMEDCT = {
'10190003',
'10326007',
'119564002',
'119565001',
'14323007',
'17073005',
'175021005',
'175029007',
'175036008',
'175037004',
'175038009',
'175039001',
'175040004',
'175066001',
'232717009',
'232719007',
'232720001',
'232721002',
'232722009',
'232723004',
'232724005',
'252427007',
'29819009',
'309814006',
'3546002',
'359597003',
'359601003',
'39202005',
'39724006',
'405598005',
'405599002',
'414088005',
'418551006',
'419132001',
'438530000',
'440332008',
'450506009',
'67166004',
'736970002',
'736971003',
'736972005',
'736973000',
'74371005',
'82247006',
'8876004',
'90487008'
}
class CardiacSurgery(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent cardiac surgery.
**Data Element Scope:** This value set may use Quality Data Model (QDM) category related to Procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with cardiac surgery.
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.526.3.371'
VALUE_SET_NAME = 'Cardiac Surgery'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
CPT = {
'33140',
'33510',
'33511',
'33512',
'33513',
'33514',
'33516',
'33533',
'33534',
'33535',
'33536',
'92920',
'92924',
'92928',
'92933',
'92937',
'92941',
'92943',
'92980',
'92981',
'92982',
'92984',
'92995',
'92996'
}
SNOMEDCT = {
'10326007',
'119564002',
'119565001',
'15256002',
'174911007',
'175007008',
'175008003',
'175009006',
'175011002',
'175021005',
'175022003',
'175024002',
'175025001',
'175026000',
'175036008',
'175037004',
'175038009',
'175039001',
'175040004',
'175041000',
'175045009',
'175047001',
'175048006',
'175050003',
'232717009',
'232719007',
'232720001',
'232721002',
'232722009',
'232723004',
'232724005',
'265481001',
'275215001',
'275216000',
'275227003',
'275252001',
'275253006',
'287277008',
'30670000',
'309814006',
'3546002',
'359597003',
'359601003',
'39202005',
'39724006',
'414088005',
'418551006',
'418824004',
'419132001',
'48431000',
'736966005',
'736967001',
'736968006',
'736969003',
'736970002',
'736971003',
'736972005',
'736973000',
'74371005',
'81266008',
'82247006',
'90205004'
}
class CarotidIntervention(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent carotid intervention surgical procedures.
**Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with representing carotid intervention surgical procedures.
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.117.1.7.1.204'
VALUE_SET_NAME = 'Carotid Intervention'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
ICD10PCS = {
'031H09G',
'031H09J',
'031H09K',
'031H09Y',
'031H0AG',
'031H0AJ',
'031H0AK',
'031H0AY',
'031H0JG',
'031H0JJ',
'031H0JK',
'031H0JY',
'031H0KG',
'031H0KJ',
'031H0KK',
'031H0KY',
'031H0ZG',
'031H0ZJ',
'031H0ZK',
'031H0ZY',
'031J09G',
'031J09J',
'031J09K',
'031J09Y',
'031J0AG',
'031J0AJ',
'031J0AK',
'031J0AY',
'031J0JG',
'031J0JJ',
'031J0JK',
'031J0JY',
'031J0KG',
'031J0KJ',
'031J0KK',
'031J0KY',
'031J0ZG',
'031J0ZJ',
'031J0ZK',
'031J0ZY',
'031K09J',
'031K09K',
'031K0AJ',
'031K0AK',
'031K0JJ',
'031K0JK',
'031K0KJ',
'031K0KK',
'031K0ZJ',
'031K0ZK',
'031L09J',
'031L09K',
'031L0AJ',
'031L0AK',
'031L0JJ',
'031L0JK',
'031L0KJ',
'031L0KK',
'031L0ZJ',
'031L0ZK',
'031M09J',
'031M09K',
'031M0AJ',
'031M0AK',
'031M0JJ',
'031M0JK',
'031M0KJ',
'031M0KK',
'031M0ZJ',
'031M0ZK',
'031N09J',
'031N09K',
'031N0AJ',
'031N0AK',
'031N0JJ',
'031N0JK',
'031N0KJ',
'031N0KK',
'031N0ZJ',
'031N0ZK',
'035H0ZZ',
'035H3ZZ',
'035H4ZZ',
'035J0ZZ',
'035J3ZZ',
'035J4ZZ',
'035K0ZZ',
'035K3ZZ',
'035K4ZZ',
'035L0ZZ',
'035L3ZZ',
'035L4ZZ',
'035M0ZZ',
'035M3ZZ',
'035M4ZZ',
'035N0ZZ',
'035N3ZZ',
'035N4ZZ',
'037H046',
'037H04Z',
'037H056',
'037H05Z',
'037H066',
'037H06Z',
'037H076',
'037H07Z',
'037H0D6',
'037H0DZ',
'037H0E6',
'037H0EZ',
'037H0F6',
'037H0FZ',
'037H0G6',
'037H0GZ',
'037H0Z6',
'037H0ZZ',
'037H346',
'037H34Z',
'037H356',
'037H35Z',
'037H366',
'037H36Z',
'037H376',
'037H37Z',
'037H3D6',
'037H3DZ',
'037H3E6',
'037H3EZ',
'037H3F6',
'037H3FZ',
'037H3G6',
'037H3GZ',
'037H3Z6',
'037H3ZZ',
'037H446',
'037H44Z',
'037H456',
'037H45Z',
'037H466',
'037H46Z',
'037H476',
'037H47Z',
'037H4D6',
'037H4DZ',
'037H4E6',
'037H4EZ',
'037H4F6',
'037H4FZ',
'037H4G6',
'037H4GZ',
'037H4Z6',
'037H4ZZ',
'037J046',
'037J04Z',
'037J056',
'037J05Z',
'037J066',
'037J06Z',
'037J076',
'037J07Z',
'037J0D6',
'037J0DZ',
'037J0E6',
'037J0EZ',
'037J0F6',
'037J0FZ',
'037J0G6',
'037J0GZ',
'037J0Z6',
'037J0ZZ',
'037J346',
'037J34Z',
'037J356',
'037J35Z',
'037J366',
'037J36Z',
'037J376',
'037J37Z',
'037J3D6',
'037J3DZ',
'037J3E6',
'037J3EZ',
'037J3F6',
'037J3FZ',
'037J3G6',
'037J3GZ',
'037J3Z6',
'037J3ZZ',
'037J446',
'037J44Z',
'037J456',
'037J45Z',
'037J466',
'037J46Z',
'037J476',
'037J47Z',
'037J4D6',
'037J4DZ',
'037J4E6',
'037J4EZ',
'037J4F6',
'037J4FZ',
'037J4G6',
'037J4GZ',
'037J4Z6',
'037J4ZZ',
'037K046',
'037K04Z',
'037K056',
'037K05Z',
'037K066',
'037K06Z',
'037K076',
'037K07Z',
'037K0D6',
'037K0DZ',
'037K0E6',
'037K0EZ',
'037K0F6',
'037K0FZ',
'037K0G6',
'037K0GZ',
'037K0Z6',
'037K0ZZ',
'037K346',
'037K34Z',
'037K356',
'037K35Z',
'037K366',
'037K36Z',
'037K376',
'037K37Z',
'037K3D6',
'037K3DZ',
'037K3E6',
'037K3EZ',
'037K3F6',
'037K3FZ',
'037K3G6',
'037K3GZ',
'037K3Z6',
'037K3ZZ',
'037K446',
'037K44Z',
'037K456',
'037K45Z',
'037K466',
'037K46Z',
'037K476',
'037K47Z',
'037K4D6',
'037K4DZ',
'037K4E6',
'037K4EZ',
'037K4F6',
'037K4FZ',
'037K4G6',
'037K4GZ',
'037K4Z6',
'037K4ZZ',
'037L046',
'037L04Z',
'037L056',
'037L05Z',
'037L066',
'037L06Z',
'037L076',
'037L07Z',
'037L0D6',
'037L0DZ',
'037L0E6',
'037L0EZ',
'037L0F6',
'037L0FZ',
'037L0G6',
'037L0GZ',
'037L0Z6',
'037L0ZZ',
'037L346',
'037L34Z',
'037L356',
'037L35Z',
'037L366',
'037L36Z',
'037L376',
'037L37Z',
'037L3D6',
'037L3DZ',
'037L3E6',
'037L3EZ',
'037L3F6',
'037L3FZ',
'037L3G6',
'037L3GZ',
'037L3Z6',
'037L3ZZ',
'037L446',
'037L44Z',
'037L456',
'037L45Z',
'037L466',
'037L46Z',
'037L476',
'037L47Z',
'037L4D6',
'037L4DZ',
'037L4E6',
'037L4EZ',
'037L4F6',
'037L4FZ',
'037L4G6',
'037L4GZ',
'037L4Z6',
'037L4ZZ',
'037M046',
'037M04Z',
'037M056',
'037M05Z',
'037M066',
'037M06Z',
'037M076',
'037M07Z',
'037M0D6',
'037M0DZ',
'037M0E6',
'037M0EZ',
'037M0F6',
'037M0FZ',
'037M0G6',
'037M0GZ',
'037M0Z6',
'037M0ZZ',
'037M346',
'037M34Z',
'037M356',
'037M35Z',
'037M366',
'037M36Z',
'037M376',
'037M37Z',
'037M3D6',
'037M3DZ',
'037M3E6',
'037M3EZ',
'037M3F6',
'037M3FZ',
'037M3G6',
'037M3GZ',
'037M3Z6',
'037M3ZZ',
'037M446',
'037M44Z',
'037M456',
'037M45Z',
'037M466',
'037M46Z',
'037M476',
'037M47Z',
'037M4D6',
'037M4DZ',
'037M4E6',
'037M4EZ',
'037M4F6',
'037M4FZ',
'037M4G6',
'037M4GZ',
'037M4Z6',
'037M4ZZ',
'037N046',
'037N04Z',
'037N056',
'037N05Z',
'037N066',
'037N06Z',
'037N076',
'037N07Z',
'037N0D6',
'037N0DZ',
'037N0E6',
'037N0EZ',
'037N0F6',
'037N0FZ',
'037N0G6',
'037N0GZ',
'037N0Z6',
'037N0ZZ',
'037N346',
'037N34Z',
'037N356',
'037N35Z',
'037N366',
'037N36Z',
'037N376',
'037N37Z',
'037N3D6',
'037N3DZ',
'037N3E6',
'037N3EZ',
'037N3F6',
'037N3FZ',
'037N3G6',
'037N3GZ',
'037N3Z6',
'037N3ZZ',
'037N446',
'037N44Z',
'037N456',
'037N45Z',
'037N466',
'037N46Z',
'037N476',
'037N47Z',
'037N4D6',
'037N4DZ',
'037N4E6',
'037N4EZ',
'037N4F6',
'037N4FZ',
'037N4G6',
'037N4GZ',
'037N4Z6',
'037N4ZZ',
'039H00Z',
'039H0ZX',
'039H0ZZ',
'039H30Z',
'039H3ZX',
'039H3ZZ',
'039H40Z',
'039H4ZX',
'039H4ZZ',
'039J00Z',
'039J0ZX',
'039J0ZZ',
'039J30Z',
'039J3ZX',
'039J3ZZ',
'039J40Z',
'039J4ZX',
'039J4ZZ',
'039K00Z',
'039K0ZX',
'039K0ZZ',
'039K30Z',
'039K3ZX',
'039K3ZZ',
'039K40Z',
'039K4ZX',
'039K4ZZ',
'039L00Z',
'039L0ZX',
'039L0ZZ',
'039L30Z',
'039L3ZX',
'039L3ZZ',
'039L40Z',
'039L4ZX',
'039L4ZZ',
'039M00Z',
'039M0ZX',
'039M0ZZ',
'039M30Z',
'039M3ZX',
'039M3ZZ',
'039M40Z',
'039M4ZX',
'039M4ZZ',
'039N00Z',
'039N0ZX',
'039N0ZZ',
'039N30Z',
'039N3ZX',
'039N3ZZ',
'039N40Z',
'039N4ZX',
'039N4ZZ',
'03BH0ZX',
'03BH0ZZ',
'03BH3ZX',
'03BH3ZZ',
'03BH4ZX',
'03BH4ZZ',
'03BJ0ZX',
'03BJ0ZZ',
'03BJ3ZX',
'03BJ3ZZ',
'03BJ4ZX',
'03BJ4ZZ',
'03BK0ZX',
'03BK0ZZ',
'03BK3ZX',
'03BK3ZZ',
'03BK4ZX',
'03BK4ZZ',
'03BL0ZX',
'03BL0ZZ',
'03BL3ZX',
'03BL3ZZ',
'03BL4ZX',
'03BL4ZZ',
'03BM0ZX',
'03BM0ZZ',
'03BM3ZX',
'03BM3ZZ',
'03BM4ZX',
'03BM4ZZ',
'03BN0ZX',
'03BN0ZZ',
'03BN3ZX',
'03BN3ZZ',
'03BN4ZX',
'03BN4ZZ',
'03CH0Z6',
'03CH0ZZ',
'03CH3Z6',
'03CH3Z7',
'03CH3ZZ',
'03CH4Z6',
'03CH4ZZ',
'03CJ0Z6',
'03CJ0ZZ',
'03CJ3Z6',
'03CJ3Z7',
'03CJ3ZZ',
'03CJ4Z6',
'03CJ4ZZ',
'03CK0Z6',
'03CK0ZZ',
'03CK3Z6',
'03CK3Z7',
'03CK3ZZ',
'03CK4Z6',
'03CK4ZZ',
'03CL0Z6',
'03CL0ZZ',
'03CL3Z6',
'03CL3Z7',
'03CL3ZZ',
'03CL4Z6',
'03CL4ZZ',
'03CM0Z6',
'03CM0ZZ',
'03CM3Z6',
'03CM3Z7',
'03CM3ZZ',
'03CM4Z6',
'03CM4ZZ',
'03CN0Z6',
'03CN0ZZ',
'03CN3Z6',
'03CN3Z7',
'03CN3ZZ',
'03CN4Z6',
'03CN4ZZ',
'03HH03Z',
'03HH0DZ',
'03HH33Z',
'03HH3DZ',
'03HH43Z',
'03HH4DZ',
'03HJ03Z',
'03HJ0DZ',
'03HJ33Z',
'03HJ3DZ',
'03HJ43Z',
'03HJ4DZ',
'03HK03Z',
'03HK0DZ',
'03HK0MZ',
'03HK33Z',
'03HK3DZ',
'03HK3MZ',
'03HK43Z',
'03HK4DZ',
'03HK4MZ',
'03HL03Z',
'03HL0DZ',
'03HL0MZ',
'03HL33Z',
'03HL3DZ',
'03HL3MZ',
'03HL43Z',
'03HL4DZ',
'03HL4MZ',
'03HM03Z',
'03HM0DZ',
'03HM33Z',
'03HM3DZ',
'03HM43Z',
'03HM4DZ',
'03HN03Z',
'03HN0DZ',
'03HN33Z',
'03HN3DZ',
'03HN43Z',
'03HN4DZ',
'03LH0BZ',
'03LH0CZ',
'03LH0DZ',
'03LH0ZZ',
'03LH3BZ',
'03LH3CZ',
'03LH3DZ',
'03LH3ZZ',
'03LH4BZ',
'03LH4CZ',
'03LH4DZ',
'03LH4ZZ',
'03LJ0BZ',
'03LJ0CZ',
'03LJ0DZ',
'03LJ0ZZ',
'03LJ3BZ',
'03LJ3CZ',
'03LJ3DZ',
'03LJ3ZZ',
'03LJ4BZ',
'03LJ4CZ',
'03LJ4DZ',
'03LJ4ZZ',
'03LK0BZ',
'03LK0CZ',
'03LK0DZ',
'03LK0ZZ',
'03LK3BZ',
'03LK3CZ',
'03LK3DZ',
'03LK3ZZ',
'03LK4BZ',
'03LK4CZ',
'03LK4DZ',
'03LK4ZZ',
'03LL0BZ',
'03LL0CZ',
'03LL0DZ',
'03LL0ZZ',
'03LL3BZ',
'03LL3CZ',
'03LL3DZ',
'03LL3ZZ',
'03LL4BZ',
'03LL4CZ',
'03LL4DZ',
'03LL4ZZ',
'03LM0BZ',
'03LM0CZ',
'03LM0DZ',
'03LM0ZZ',
'03LM3BZ',
'03LM3CZ',
'03LM3DZ',
'03LM3ZZ',
'03LM4BZ',
'03LM4CZ',
'03LM4DZ',
'03LM4ZZ',
'03LN0BZ',
'03LN0CZ',
'03LN0DZ',
'03LN0ZZ',
'03LN3BZ',
'03LN3CZ',
'03LN3DZ',
'03LN3ZZ',
'03LN4BZ',
'03LN4CZ',
'03LN4DZ',
'03LN4ZZ',
'03NH0ZZ',
'03NH3ZZ',
'03NH4ZZ',
'03NJ0ZZ',
'03NJ3ZZ',
'03NJ4ZZ',
'03NK0ZZ',
'03NK3ZZ',
'03NK4ZZ',
'03NL0ZZ',
'03NL3ZZ',
'03NL4ZZ',
'03NM0ZZ',
'03NM3ZZ',
'03NM4ZZ',
'03NN0ZZ',
'03NN3ZZ',
'03NN4ZZ',
'03QH0ZZ',
'03QH3ZZ',
'03QH4ZZ',
'03QJ0ZZ',
'03QJ3ZZ',
'03QJ4ZZ',
'03QK0ZZ',
'03QK3ZZ',
'03QK4ZZ',
'03QL0ZZ',
'03QL3ZZ',
'03QL4ZZ',
'03QM0ZZ',
'03QM3ZZ',
'03QM4ZZ',
'03QN0ZZ',
'03QN3ZZ',
'03QN4ZZ',
'03RH07Z',
'03RH0JZ',
'03RH0KZ',
'03RH47Z',
'03RH4JZ',
'03RH4KZ',
'03RJ07Z',
'03RJ0JZ',
'03RJ0KZ',
'03RJ47Z',
'03RJ4JZ',
'03RJ4KZ',
'03RK07Z',
'03RK0JZ',
'03RK0KZ',
'03RK47Z',
'03RK4JZ',
'03RK4KZ',
'03RL07Z',
'03RL0JZ',
'03RL0KZ',
'03RL47Z',
'03RL4JZ',
'03RL4KZ',
'03RM07Z',
'03RM0JZ',
'03RM0KZ',
'03RM47Z',
'03RM4JZ',
'03RM4KZ',
'03RN07Z',
'03RN0JZ',
'03RN0KZ',
'03RN47Z',
'03RN4JZ',
'03RN4KZ',
'03SH0ZZ',
'03SH3ZZ',
'03SH4ZZ',
'03SJ0ZZ',
'03SJ3ZZ',
'03SJ4ZZ',
'03SK0ZZ',
'03SK3ZZ',
'03SK4ZZ',
'03SL0ZZ',
'03SL3ZZ',
'03SL4ZZ',
'03SM0ZZ',
'03SM3ZZ',
'03SM4ZZ',
'03SN0ZZ',
'03SN3ZZ',
'03SN4ZZ',
'03UH07Z',
'03UH0JZ',
'03UH0KZ',
'03UH37Z',
'03UH3JZ',
'03UH3KZ',
'03UH47Z',
'03UH4JZ',
'03UH4KZ',
'03UJ07Z',
'03UJ0JZ',
'03UJ0KZ',
'03UJ37Z',
'03UJ3JZ',
'03UJ3KZ',
'03UJ47Z',
'03UJ4JZ',
'03UJ4KZ',
'03UK07Z',
'03UK0JZ',
'03UK0KZ',
'03UK37Z',
'03UK3JZ',
'03UK3KZ',
'03UK47Z',
'03UK4JZ',
'03UK4KZ',
'03UL07Z',
'03UL0JZ',
'03UL0KZ',
'03UL37Z',
'03UL3JZ',
'03UL3KZ',
'03UL47Z',
'03UL4JZ',
'03UL4KZ',
'03UM07Z',
'03UM0JZ',
'03UM0KZ',
'03UM37Z',
'03UM3JZ',
'03UM3KZ',
'03UM47Z',
'03UM4JZ',
'03UM4KZ',
'03UN07Z',
'03UN0JZ',
'03UN0KZ',
'03UN37Z',
'03UN3JZ',
'03UN3KZ',
'03UN47Z',
'03UN4JZ',
'03UN4KZ',
'03VH0BZ',
'03VH0CZ',
'03VH0DZ',
'03VH0ZZ',
'03VH3BZ',
'03VH3CZ',
'03VH3DZ',
'03VH3ZZ',
'03VH4BZ',
'03VH4CZ',
'03VH4DZ',
'03VH4ZZ',
'03VJ0BZ',
'03VJ0CZ',
'03VJ0DZ',
'03VJ0ZZ',
'03VJ3BZ',
'03VJ3CZ',
'03VJ3DZ',
'03VJ3ZZ',
'03VJ4BZ',
'03VJ4CZ',
'03VJ4DZ',
'03VJ4ZZ',
'03VK0BZ',
'03VK0CZ',
'03VK0DZ',
'03VK0ZZ',
'03VK3BZ',
'03VK3CZ',
'03VK3DZ',
'03VK3ZZ',
'03VK4BZ',
'03VK4CZ',
'03VK4DZ',
'03VK4ZZ',
'03VL0BZ',
'03VL0CZ',
'03VL0DZ',
'03VL0ZZ',
'03VL3BZ',
'03VL3CZ',
'03VL3DZ',
'03VL3ZZ',
'03VL4BZ',
'03VL4CZ',
'03VL4DZ',
'03VL4ZZ',
'03VM0BZ',
'03VM0CZ',
'03VM0DZ',
'03VM0ZZ',
'03VM3BZ',
'03VM3CZ',
'03VM3DZ',
'03VM3ZZ',
'03VM4BZ',
'03VM4CZ',
'03VM4DZ',
'03VM4ZZ',
'03VN0BZ',
'03VN0CZ',
'03VN0DZ',
'03VN0ZZ',
'03VN3BZ',
'03VN3CZ',
'03VN3DZ',
'03VN3ZZ',
'03VN4BZ',
'03VN4CZ',
'03VN4DZ',
'03VN4ZZ',
'0G560ZZ',
'0G563ZZ',
'0G564ZZ',
'0G570ZZ',
'0G573ZZ',
'0G574ZZ',
'0G580ZZ',
'0G583ZZ',
'0G584ZZ',
'0G9600Z',
'0G960ZX',
'0G960ZZ',
'0G9630Z',
'0G963ZX',
'0G963ZZ',
'0G9640Z',
'0G964ZX',
'0G964ZZ',
'0G9700Z',
'0G970ZX',
'0G970ZZ',
'0G9730Z',
'0G973ZX',
'0G973ZZ',
'0G9740Z',
'0G974ZX',
'0G974ZZ',
'0G9800Z',
'0G980ZX',
'0G980ZZ',
'0G9830Z',
'0G983ZX',
'0G983ZZ',
'0G9840Z',
'0G984ZX',
'0G984ZZ',
'0GB60ZX',
'0GB60ZZ',
'0GB63ZX',
'0GB63ZZ',
'0GB64ZX',
'0GB64ZZ',
'0GB70ZX',
'0GB70ZZ',
'0GB73ZX',
'0GB73ZZ',
'0GB74ZX',
'0GB74ZZ',
'0GB80ZX',
'0GB80ZZ',
'0GB83ZX',
'0GB83ZZ',
'0GB84ZX',
'0GB84ZZ',
'0GC60ZZ',
'0GC63ZZ',
'0GC64ZZ',
'0GC70ZZ',
'0GC73ZZ',
'0GC74ZZ',
'0GC80ZZ',
'0GC83ZZ',
'0GC84ZZ',
'0GN60ZZ',
'0GN63ZZ',
'0GN64ZZ',
'0GN70ZZ',
'0GN73ZZ',
'0GN74ZZ',
'0GN80ZZ',
'0GN83ZZ',
'0GN84ZZ',
'0GQ60ZZ',
'0GQ63ZZ',
'0GQ64ZZ',
'0GQ70ZZ',
'0GQ73ZZ',
'0GQ74ZZ',
'0GQ80ZZ',
'0GQ83ZZ',
'0GQ84ZZ',
'0GT60ZZ',
'0GT64ZZ',
'0GT70ZZ',
'0GT74ZZ',
'0GT80ZZ',
'0GT84ZZ',
'B3060ZZ',
'B3061ZZ',
'B306YZZ',
'B3070ZZ',
'B3071ZZ',
'B307YZZ',
'B3080ZZ',
'B3081ZZ',
'B308YZZ',
'B3160ZZ',
'B3161ZZ',
'B316YZZ',
'B3170ZZ',
'B3171ZZ',
'B317YZZ',
'B3180ZZ',
'B3181ZZ',
'B318YZZ'
}
ICD9CM = {
'0061',
'0062',
'0063',
'0064',
'0065',
'3802',
'3812',
'3822',
'3830',
'3831',
'3832',
'3842',
'3922',
'3928',
'8841'
}
SNOMEDCT = {
'112823003',
'15023006',
'175363002',
'175364008',
'175365009',
'175367001',
'175373000',
'175374006',
'175376008',
'175379001',
'175380003',
'175398004',
'18674003',
'22928005',
'233259003',
'233260008',
'233296007',
'233297003',
'233298008',
'233405004',
'241219006',
'276949008',
'276950008',
'276951007',
'287606009',
'302053004',
'303161001',
'31573003',
'34214004',
'39887009',
'405326004',
'405379009',
'405407008',
'405408003',
'405409006',
'405411002',
'405412009',
'405415006',
'417884003',
'418405008',
'418838006',
'419014003',
'420026003',
'420046008',
'420171008',
'425611003',
'427486009',
'428802000',
'429287007',
'431515004',
'431519005',
'431535003',
'431659001',
'432039002',
'432785007',
'433056003',
'433061001',
'433591001',
'433683001',
'433690006',
'433711000',
'433734009',
'434159001',
'434378006',
'434433007',
'43628009',
'438615003',
'440221006',
'440453000',
'440518005',
'449242004',
'46912008',
'51382002',
'53412000',
'59012002',
'59109003',
'66951008',
'74720005',
'79507006',
'80102005',
'80104006',
'87314005',
'90931006',
'9339002'
}
class CataractSurgery(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent cataract surgical procedures.
**Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with cataract surgery.
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.526.3.1411'
VALUE_SET_NAME = 'Cataract Surgery'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
CPT = {
'66840',
'66850',
'66852',
'66920',
'66930',
'66940',
'66982',
'66983',
'66984'
}
SNOMEDCT = {
'10178000',
'110473004',
'112963003',
'112964009',
'12163000',
'231744001',
'308694002',
'308695001',
'313999004',
'31705006',
'335636001',
'336651000',
'35717002',
'361191005',
'385468004',
'39243005',
'397544007',
'404628003',
'415089008',
'417493007',
'418430006',
'419767009',
'420260004',
'420526005',
'424945000',
'446548003',
'46309001',
'46426006',
'46562009',
'50538003',
'5130002',
'51839008',
'54885007',
'65812008',
'67760003',
'69360005',
'74490003',
'75814005',
'79611007',
'82155009',
'84149000',
'85622008',
'88282000',
'89153001',
'9137006'
}
class ChemotherapyAdministration(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent chemotherapy administration.
**Data Element Scope:** This value set may use Quality Data Model (QDM) category related to Procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with chemotherapy administration.
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.526.3.1027'
VALUE_SET_NAME = 'Chemotherapy Administration'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
CPT = {
'51720',
'96401',
'96405',
'96406',
'96409',
'96413',
'96416',
'96420',
'96422',
'96425',
'96440',
'96446',
'96450',
'96521',
'96522',
'96523',
'96542',
'96549'
}
SNOMEDCT = {
'169396008',
'24977001',
'265760000',
'265761001',
'265762008',
'266719004',
'268500004',
'315601005',
'31652009',
'367336001',
'38216008',
'394894008',
'394895009',
'394935005',
'4114003',
'51534007',
'6872008',
'716872004',
'77738002'
}
class CognitiveAssessment(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent assessments performed for the evaluation of cognition.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with general concepts for assessments used to evaluate cognition.
    **Exclusion Criteria:** Excludes concepts which explicitly reference specific standardized tools used to evaluate cognition.
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113883.3.526.3.1332'
    VALUE_SET_NAME = 'Cognitive Assessment'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes in this expansion.
    SNOMEDCT = {
        '113024001',
        '4719001'
    }
class CounselingForNutrition(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent nutrition counseling.
**Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
**Inclusion Criteria:** Includes only relevant concepts associated with identifying counseling for nutrition, including codes for medical nutrition therapy, dietetics services, education about diet or different types of diets (e.g., low fat diet, high fiber diet
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.464.1003.195.12.1003'
VALUE_SET_NAME = 'Counseling for Nutrition'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
CPT = {
'97802',
'97803',
'97804'
}
SNOMEDCT = {
'11816003',
'183059007',
'183060002',
'183061003',
'183062005',
'183063000',
'183065007',
'183066008',
'183067004',
'183070000',
'183071001',
'226067002',
'266724001',
'275919002',
'281085002',
'284352003',
'305849009',
'305850009',
'305851008',
'306163007',
'306164001',
'306165000',
'306626002',
'306627006',
'306628001',
'313210009',
'370847001',
'386464006',
'404923009',
'408910007',
'410171007',
'410177006',
'410200000',
'428461000124101',
'428691000124107',
'429095004',
'431482008',
'441041000124100',
'441201000124108',
'441231000124100',
'441241000124105',
'441251000124107',
'441261000124109',
'441271000124102',
'441281000124104',
'441291000124101',
'441301000124100',
'441311000124102',
'441321000124105',
'441331000124108',
'441341000124103',
'441351000124101',
'443288003',
'445291000124103',
'445301000124102',
'445331000124105',
'445641000124105',
'609104008',
'61310001',
'698471002',
'699827002',
'699829004',
'699830009',
'699849008',
'700154005',
'700258004',
'705060005',
'710881000'
}
class CounselingForPhysicalActivity(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent physical activity counseling.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with identifying counseling or referrals related to physical activity, including codes related to weight management services.
    **Exclusion Criteria:** No exclusions.
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113883.3.464.1003.118.12.1035'
    VALUE_SET_NAME = 'Counseling for Physical Activity'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes in this expansion.
    SNOMEDCT = {
        '103736005',
        '183073003',
        '281090004',
        '304507003',
        '304549008',
        '304558001',
        '310882002',
        '386291006',
        '386292004',
        '386463000',
        '390864007',
        '390893007',
        '398636004',
        '398752005',
        '408289007',
        '410200000',
        '410289001',
        '410335001',
        '429778002',
        '435551000124105',
        '710849009'
    }
class CtColonography(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent identify a computed tomographic (CT) colonography.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with patients that have had a CT colonography. This is a grouping of CPT and SNOMED CT codes.
    **Exclusion Criteria:** No exclusions.
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113883.3.464.1003.108.12.1038'
    VALUE_SET_NAME = 'CT Colonography'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # NOTE(review): the docstring says this is a grouping of CPT and
    # SNOMED CT codes, but this expansion carries only LOINC codes —
    # verify against the VSAC source.
    LOINC = {
        '60515-4',
        '72531-7',
        '79069-1',
        '79071-7',
        '79101-2',
        '82688-3'
    }
class DialysisEducation(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent patients received dialysis education.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) datatype related to Intervention, Performed.
    **Inclusion Criteria:** Includes only relevant concepts associated with patients who had dialysis education. This includes only relevant concepts associated with education at home.
    **Exclusion Criteria:** No exclusions.
    """
    # VSAC object identifier for this value set.
    OID = '2.16.840.1.113883.3.464.1003.109.12.1016'
    VALUE_SET_NAME = 'Dialysis Education'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes in this expansion.
    SNOMEDCT = {
        '28812006',
        '385972005',
        '59596005',
        '66402002'
    }
class DialysisServices(ValueSet):
"""
**Clinical Focus:** This value set contains concepts that represent dialysis services.
**Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
**Inclusion Criteria:** Includes only relevant concepts associated with patients who had dialysis services.
**Exclusion Criteria:** No exclusions.
"""
OID = '2.16.840.1.113883.3.464.1003.109.12.1013'
VALUE_SET_NAME = 'Dialysis Services'
EXPANSION_VERSION = 'eCQM Update 2020-05-07'
CPT = {
'1019320',
'90935',
'90937',
'90940',
'90945',
'90947',
'90957',
'90958',
'90959'
}
HCPCSLEVELII = {
'G0257'
}
SNOMEDCT = {
'108241001',
'10848006',
'11932001',
'14684005',
'180273006',
'225230008',
'225231007',
'233575001',
'233576000',
'233577009',
'233578004',
'233579007',
'233580005',
'233581009',
'233582002',
'233583007',
'233584001',
'233585000',
'233586004',
'233587008',
'233588003',
'233589006',
'233590002',
'238316008',
'238317004',
'238318009',
'238319001',
'238321006',
'238322004',
'238323009',
'265764009',
'288182009',
'302497006',
'34897002',
'427053002',
'428648006',
'439278006',
'439976001',
'57274006',
'676002',
'67970008',
'68341005',
'71192002',
'714749008'
}
class DietaryRecommendations(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent dietary management.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with dietary management and nutritional education.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1515'
    VALUE_SET_NAME = 'Dietary Recommendations'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'S9452',
        'S9470'
    }
    # ICD-10-CM diagnosis codes
    ICD10CM = {
        'Z713'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '103699006',
        '11816003',
        '182922004',
        '182954008',
        '182955009',
        '182956005',
        '182960008',
        '183061003',
        '183065007',
        '183070000',
        '183071001',
        '281085002',
        '284071006',
        '284352003',
        '289176001',
        '289177005',
        '304491008',
        '306163007',
        '361231003',
        '370847001',
        '386464006',
        '410114009',
        '410171007',
        '410177006',
        '410270001',
        '413315001',
        '418995006',
        '424753004',
        '437211000124103',
        '437231000124109',
        '437391000124102',
        '437421000124105',
        '438961000124108',
        '443288003',
        '61310001'
    }
class FollowUpForAboveNormalBmi(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent interventions relevant for a follow up for a BMI above normal measurement.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with interventions relevant for a follow-up when BMI is above normal measurement.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1.1525'
    VALUE_SET_NAME = 'Follow Up for Above Normal BMI'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '43644',
        '43645',
        '43659',
        '43770',
        '43771',
        '43772',
        '43773',
        '43774',
        '43842',
        '43843',
        '43845',
        '43846',
        '43847',
        '43848',
        '43886',
        '43888',
        '97802',
        '97803',
        '97804',
        '98960',
        '99078',
        '99401',
        '99402'
    }
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'G0270',
        'G0271',
        'G0447',
        'G0473',
        'S9449',
        'S9451',
        'S9452',
        'S9470'
    }
    # ICD-10-CM diagnosis codes
    ICD10CM = {
        'Z713',
        'Z7182'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '304549008',
        '307818003',
        '361231003',
        '370847001',
        '386291006',
        '386292004',
        '386373004',
        '386463000',
        '386464006',
        '410177006',
        '413315001',
        '418995006',
        '424753004',
        '443288003'
    }
class FollowUpForAdolescentDepression(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent follow-up plans used to document a plan is in place for the treatment of depression that specifically pertains to the adolescent population.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with emotional and coping support as well as mental health management in an attempt to follow up on previously evaluated and diagnosed depression or depressive disorder in the adolescent population.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1569'
    VALUE_SET_NAME = 'Follow Up for Adolescent Depression'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '108313002',
        '1555005',
        '15558000',
        '18512000',
        '229065009',
        '28868002',
        '304891004',
        '372067001',
        '385721005',
        '385724002',
        '385725001',
        '385726000',
        '385727009',
        '385887004',
        '385889001',
        '385890005',
        '386472008',
        '401277000',
        '405780009',
        '410223002',
        '410224008',
        '410225009',
        '410226005',
        '410227001',
        '410228006',
        '410229003',
        '410230008',
        '410231007',
        '410232000',
        '410233005',
        '410234004',
        '425604002',
        '439141002',
        '5694008',
        '75516001',
        '76168009',
        '76740001',
        '81294000',
        '88848003',
        '91310009'
    }
class FollowUpForAdultDepression(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent follow-up plans used to document a plan is in place for the treatment of depression specifically pertaining to the adult population.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with emotional and coping support as well as mental health management in an attempt to follow up on previously evaluated and diagnosed depression or depressive disorder in the adult population.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1568'
    VALUE_SET_NAME = 'Follow Up for Adult Depression'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes (same expansion as the adolescent variant above)
    SNOMEDCT = {
        '108313002',
        '1555005',
        '15558000',
        '18512000',
        '229065009',
        '28868002',
        '304891004',
        '372067001',
        '385721005',
        '385724002',
        '385725001',
        '385726000',
        '385727009',
        '385887004',
        '385889001',
        '385890005',
        '386472008',
        '401277000',
        '405780009',
        '410223002',
        '410224008',
        '410225009',
        '410226005',
        '410227001',
        '410228006',
        '410229003',
        '410230008',
        '410231007',
        '410232000',
        '410233005',
        '410234004',
        '425604002',
        '439141002',
        '5694008',
        '75516001',
        '76168009',
        '76740001',
        '81294000',
        '88848003',
        '91310009'
    }
class FollowUpForBelowNormalBmi(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent a follow-up with a BMI below normal measurement.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category or attribute related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with a follow-up when BMI is below normal measurement.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1.1528'
    VALUE_SET_NAME = 'Follow Up for Below Normal BMI'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '97802',
        '97803',
        '97804',
        '98960',
        '99078',
        '99401',
        '99402'
    }
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'G0270',
        'G0271',
        'S9449',
        'S9452',
        'S9470'
    }
    # ICD-10-CM diagnosis codes
    ICD10CM = {
        'Z713'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '386464006',
        '410177006',
        '413315001',
        '418995006',
        '424753004',
        '429095004',
        '443288003'
    }
class Hemodialysis(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent the administration of hemodialysis.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with the administration of hemodialysis.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1083'
    VALUE_SET_NAME = 'Hemodialysis'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '90951',
        '90952',
        '90953',
        '90954',
        '90955',
        '90956',
        '90957',
        '90958',
        '90959',
        '90960',
        '90961',
        '90962',
        '90963',
        '90964',
        '90965',
        '90966',
        '90967',
        '90968',
        '90969',
        '90970',
        '99512'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '302497006'
    }
class HospiceCareAmbulatory(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent patients receiving hospice care outside of a hospital or long term care facility.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) datatype related to Procedure, Order or Intervention, Order. The intent of this value set is to identify all patients receiving hospice care outside of a hospital or long term care facility.
    **Inclusion Criteria:** Includes only relevant concepts associated with hospice care concepts.
    **Exclusion Criteria:** Excludes concepts for palliative care or comfort measures.
    """
    OID = '2.16.840.1.113762.1.4.1108.15'
    VALUE_SET_NAME = 'Hospice care ambulatory'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '385763009',
        '385765002'
    }
class HospiceCareAmbulatory_1584(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent patients receiving hospice care outside of a hospital or long term care facility.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) datatype related to Procedure, Order or Intervention, Order. The intent of this value set is to identify all patients receiving hospice care outside of a hospital or long term care facility.
    **Inclusion Criteria:** Includes only relevant concepts associated with hospice care concepts.
    **Exclusion Criteria:** Excludes concepts for palliative care or comfort measures.
    """
    # Same expansion as HospiceCareAmbulatory above but published under a
    # different OID (the .1584 steward), hence the separate class.
    OID = '2.16.840.1.113883.3.526.3.1584'
    VALUE_SET_NAME = 'Hospice Care Ambulatory'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '385763009',
        '385765002'
    }
class HospitalServicesForUrologyCare(ValueSet):
    """
    **Clinical Focus:** This set of values focuses on hospital care, specifically for urology care.
    **Data Element Scope:** The intent of this data element is to define hospital CPT services used for urology care.
    **Inclusion Criteria:** Included CPT codes
    **Exclusion Criteria:** None
    """
    OID = '2.16.840.1.113762.1.4.1164.64'
    VALUE_SET_NAME = 'Hospital Services for urology care'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT evaluation & management codes
    CPT = {
        '99217',
        '99218',
        '99219',
        '99220',
        '99221',
        '99222',
        '99223',
        '99231',
        '99232',
        '99233',
        '99234',
        '99235',
        '99236',
        '99238',
        '99239',
        '99251',
        '99281',
        '99282',
        '99283',
        '99284'
    }
class InfluenzaVaccination(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent influenza vaccinations.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with influenza vaccinations that are SNOMED CT, CPT, and HCPCS codes.
    **Exclusion Criteria:** Excludes CVX vaccine codes.
    """
    OID = '2.16.840.1.113883.3.526.3.402'
    VALUE_SET_NAME = 'Influenza Vaccination'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '90630',
        '90653',
        '90654',
        '90655',
        '90656',
        '90657',
        '90658',
        '90661',
        '90662',
        '90666',
        '90667',
        '90668',
        '90673',
        '90674',
        '90682',
        '90685',
        '90686',
        '90687',
        '90688',
        '90689',
        '90694',
        '90756'
    }
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'G0008',
        'Q2034',
        'Q2035',
        'Q2036',
        'Q2037',
        'Q2038',
        'Q2039'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '86198006'
    }
class KidneyTransplant(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent patients who have undergone kidney transplant.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with kidney transplants.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.464.1003.109.12.1012'
    VALUE_SET_NAME = 'Kidney Transplant'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '50300',
        '50320',
        '50340',
        '50360',
        '50365',
        '50370',
        '50380'
    }
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'S2065'
    }
    # ICD-10-PCS procedure codes
    ICD10PCS = {
        '0TY00Z0',
        '0TY00Z1',
        '0TY00Z2',
        '0TY10Z0',
        '0TY10Z1',
        '0TY10Z2'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '122531000119108',
        '128631000119109',
        '197747000',
        '213150003',
        '236436003',
        '236569000',
        '236570004',
        '236571000',
        '236572007',
        '236573002',
        '236574008',
        '236575009',
        '236576005',
        '236577001',
        '236578006',
        '236579003',
        '236580000',
        '236581001',
        '236582008',
        '236583003',
        '236584009',
        '236587002',
        '236588007',
        '236589004',
        '236614007',
        '277010001',
        '277011002',
        '426136000',
        '428575007',
        '429451003',
        '473195006',
        '58797008',
        '703048006',
        '707148007',
        '713825007'
    }
class LaboratoryTestsForHypertension(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent laboratory tests that are commonly used with patients diagnosed with hypertension.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with laboratory testing for patients diagnosed with hypertension.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1482'
    VALUE_SET_NAME = 'Laboratory Tests for Hypertension'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # LOINC observation codes
    LOINC = {
        '24320-4',
        '24321-2',
        '24323-8',
        '24356-8',
        '24357-6',
        '24362-6',
        '2888-6',
        '57021-8',
        '57782-5',
        '58410-2'
    }
class LifestyleRecommendation(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent the type of interventions relevant to lifestyle needs.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure or Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with the type of lifestyle education, particularly that related to hypertension.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1581'
    VALUE_SET_NAME = 'Lifestyle Recommendation'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '313204009',
        '39155009',
        '443402002'
    }
class OtherServicesRelatedToDialysis(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent services related to dialysis.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) datatype related to Intervention, Performed.
    **Inclusion Criteria:** Includes only relevant concepts associated with other services related to dialysis.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.464.1003.109.12.1015'
    VALUE_SET_NAME = 'Other Services Related to Dialysis'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '233591003',
        '251000124108',
        '311000124103',
        '3257008',
        '385970002',
        '385971003',
        '385973000',
        '406168002',
        '717738008',
        '718019005',
        '718308002',
        '718330001',
        '718331002',
        '73257006'
    }
class PalliativeOrHospiceCare(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent identifying patients receiving palliative, comfort or hospice care.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with identifying patients receiving palliative, comfort or hospice care.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1.1579'
    VALUE_SET_NAME = 'Palliative or Hospice Care'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '103735009',
        '133918004',
        '182964004',
        '305284002',
        '305381007',
        '305981001',
        '306237005',
        '306288008',
        '385736008',
        '385763009'
    }
class PeritonealDialysis(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent the administration of peritoneal dialysis.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with the administration of peritoneal dialysis.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1084'
    VALUE_SET_NAME = 'Peritoneal Dialysis'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '90945',
        '90947',
        '90951',
        '90952',
        '90953',
        '90954',
        '90955',
        '90956',
        '90957',
        '90958',
        '90959',
        '90960',
        '90961',
        '90962',
        '90963',
        '90964',
        '90965',
        '90966',
        '90967',
        '90968',
        '90969',
        '90970'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '14684005',
        '225230008',
        '238318009',
        '238319001',
        '238321006',
        '238322004',
        '238323009',
        '428648006',
        '676002',
        '71192002'
    }
class ProstateCancerTreatment(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent prostate cancer treatments.
    **Data Element Scope:** This value set may use Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with interstitial prostate brachytherapy, external beam radiotherapy to the prostate and radical prostatectomy.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.398'
    VALUE_SET_NAME = 'Prostate Cancer Treatment'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '55810',
        '55812',
        '55815',
        '55840',
        '55842',
        '55845',
        '55866',
        '55875',
        '77427',
        '77435',
        '77772',
        '77778',
        '77799'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '10492003',
        '113120007',
        '116244007',
        '118161009',
        '118162002',
        '118163007',
        '14473006',
        '168922004',
        '169327006',
        '169328001',
        '169329009',
        '169340001',
        '169349000',
        '169359004',
        '176106009',
        '176258007',
        '176260009',
        '176261008',
        '176262001',
        '176263006',
        '176267007',
        '176288003',
        '19149007',
        '21190008',
        '21372000',
        '228677009',
        '228684001',
        '228688003',
        '228690002',
        '228692005',
        '228693000',
        '228694006',
        '228695007',
        '228697004',
        '228698009',
        '228699001',
        '228701001',
        '228702008',
        '236252003',
        '24242005',
        '26294005',
        '271291003',
        '27877006',
        '28579000',
        '30426000',
        '312235007',
        '314202001',
        '359922007',
        '359926005',
        '36253005',
        '37851009',
        '384691004',
        '384692006',
        '38915000',
        '394902000',
        '394918006',
        '399124002',
        '399180008',
        '399315003',
        '41371003',
        '41416003',
        '427541000119103',
        '427985002',
        '433224001',
        '440093006',
        '440094000',
        '57525009',
        '62867004',
        '65381004',
        '65551008',
        '67598001',
        '68986004',
        '72388004',
        '764675000',
        '77613002',
        '81232004',
        '83154001',
        '84755001',
        '85768003',
        '87795007',
        '8782006',
        '90199006',
        '90470006',
        '91531008'
    }
class RadiationTreatmentManagement(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent radiation treatment management.
    **Data Element Scope:** This value set may use Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with radiation treatment management.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1026'
    VALUE_SET_NAME = 'Radiation Treatment Management'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '77427',
        '77431',
        '77432',
        '77435'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '84755001'
    }
class RecommendationToIncreasePhysicalActivity(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent exercise, education and nutrition.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with promoting exercise and nutrition regimens.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1518'
    VALUE_SET_NAME = 'Recommendation to Increase Physical Activity'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'S9451'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '281090004',
        '304507003',
        '304549008',
        '386291006',
        '386292004',
        '386373004',
        '386463000',
        '410289001'
    }
class Referral(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent a referral for a patient to a practitioner for evaluation, treatment or co-management of a patient's condition.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with referrals and consultations.
    **Exclusion Criteria:** Excludes self referrals.
    """
    OID = '2.16.840.1.113883.3.464.1003.101.12.1046'
    VALUE_SET_NAME = 'Referral'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '103696004',
        '103697008',
        '103698003',
        '103699006',
        '103704003',
        '183515008',
        '183517000',
        '183528001',
        '183529009',
        '183530004',
        '183541002',
        '183555005',
        '183557002',
        '183561008',
        '183567007',
        '183569005',
        '183583007',
        '183591003',
        '183878008',
        '183879000',
        '183880002',
        '183881003',
        '183882005',
        '183884006',
        '183885007',
        '183886008',
        '183887004',
        '183888009',
        '183889001',
        '183890005',
        '183891009',
        '183892002',
        '183893007',
        '183894001',
        '183895000',
        '183896004',
        '183897008',
        '183899006',
        '183900001',
        '183901002',
        '183902009',
        '183903004',
        '183904005',
        '183905006',
        '183906007',
        '183907003',
        '183908008',
        '183909000',
        '183910005',
        '183911009',
        '183913007',
        '183914001',
        '183915000',
        '183916004',
        '266747000',
        '274410002',
        '306241009',
        '306242002',
        '306243007',
        '306245000',
        '306247008',
        '306250006',
        '306252003',
        '306253008',
        '306254002',
        '306255001',
        '306256000',
        '306257009',
        '306258004',
        '306259007',
        '306260002',
        '306261003',
        '306262005',
        '306263000',
        '306264006',
        '306265007',
        '306266008',
        '306267004',
        '306268009',
        '306269001',
        '306270000',
        '306271001',
        '306272008',
        '306273003',
        '306275005',
        '306276006',
        '306277002',
        '306278007',
        '306279004',
        '306280001',
        '306281002',
        '306282009',
        '306284005',
        '306285006',
        '306286007',
        '306287003',
        '306288008',
        '306289000',
        '306290009',
        '306291008',
        '306293006',
        '306294000',
        '306295004',
        '306296003',
        '306297007',
        '306298002',
        '306299005',
        '306300002',
        '306301003',
        '306302005',
        '306303000',
        '306304006',
        '306305007',
        '306306008',
        '306307004',
        '306308009',
        '306309001',
        '306310006',
        '306311005',
        '306312003',
        '306313008',
        '306314002',
        '306315001',
        '306316000',
        '306317009',
        '306318004',
        '306320001',
        '306338003',
        '306341007',
        '306342000',
        '306343005',
        '306351008',
        '306352001',
        '306353006',
        '306354000',
        '306355004',
        '306356003',
        '306357007',
        '306358002',
        '306359005',
        '306360000',
        '306361001',
        '306736002',
        '307063001',
        '307777008',
        '308439003',
        '308447003',
        '308449000',
        '308450000',
        '308451001',
        '308452008',
        '308453003',
        '308454009',
        '308455005',
        '308456006',
        '308459004',
        '308465004',
        '308469005',
        '308470006',
        '308471005',
        '308472003',
        '308473008',
        '308474002',
        '308475001',
        '308476000',
        '308477009',
        '308478004',
        '308479007',
        '308480005',
        '308481009',
        '308482002',
        '308483007',
        '308484001',
        '308485000',
        '309046007',
        '309623006',
        '309626003',
        '309627007',
        '309629005',
        '310515004',
        '312487009',
        '312488004',
        '390866009',
        '401266006',
        '406158007',
        '406159004',
        '408285001',
        '415277000',
        '416116000',
        '416999007',
        '425971006',
        '428441000124100',
        '428451000124103',
        '428461000124101',
        '428471000124108',
        '428481000124106',
        '428491000124109',
        '428541000124104',
        '429365000',
        '433151006',
        '448761000124106',
        '448771000124104',
        '54395008',
        '698563003',
        '698599008',
        '703974003',
        '703975002',
        '703976001',
        '716634006'
    }
class ReferralForAdolescentDepression(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent appropriate referrals specific to the child and adolescent age group for depression management.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with appropriate referrals as specific to the child and adolescent age group for depression management.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1570'
    VALUE_SET_NAME = 'Referral for Adolescent Depression'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '183524004',
        '183583007',
        '183851006',
        '183866009',
        '306136006',
        '306137002',
        '306226009',
        '306227000',
        '306252003',
        '306291008',
        '306294000',
        '308459004',
        '308477009',
        '309627007',
        '390866009',
        '703978000',
        '710914003',
        '711281004'
    }
class ReferralForAdultDepression(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent appropriate referrals specific to the adult age group for depression management.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with appropriate referrals as specific to the adult age group for depression management.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1571'
    VALUE_SET_NAME = 'Referral for Adult Depression'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '183524004',
        '183528001',
        '183583007',
        '183866009',
        '305922005',
        '306136006',
        '306137002',
        '306138007',
        '306204008',
        '306226009',
        '306227000',
        '306252003',
        '306294000',
        '308459004',
        '308477009',
        '390866009',
        '703978000',
        '710914003',
        '711281004'
    }
class ReferralOrCounselingForAlcoholConsumption(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent the type of interventions relevant to alcohol use.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Procedure or Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with indicating the type of education provided, referral to community service or rehabilitation center for alcohol use.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1583'
    VALUE_SET_NAME = 'Referral or Counseling for Alcohol Consumption'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '24165007',
        '38670004',
        '390857005',
        '408947007',
        '413473000',
        '417096006',
        '431260004'
    }
class ReferralToPrimaryCareOrAlternateProvider(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent referrals to an alternate or primary care provider.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with the different types of services and providers.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.1580'
    VALUE_SET_NAME = 'Referral to Primary Care or Alternate Provider'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '134403003',
        '183516009',
        '183561008',
        '183856001',
        '306206005',
        '306253008',
        '308470006'
    }
class ReferralsWhereWeightAssessmentMayOccur(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent multiple types of providers and settings for weight assessment.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with multiple providers in different settings performing weight assessments.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1.1527'
    VALUE_SET_NAME = 'Referrals Where Weight Assessment May Occur'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # SNOMED CT concept codes
    SNOMEDCT = {
        '183515008',
        '183524004',
        '183583007',
        '306136006',
        '306163007',
        '306164001',
        '306165000',
        '306166004',
        '306167008',
        '306168003',
        '306226009',
        '306227000',
        '306252003',
        '306344004',
        '306353006',
        '306354000',
        '308459004',
        '308470006',
        '308477009',
        '390864007',
        '390866009',
        '390893007',
        '408289007',
        '416790000'
    }
class SalvageTherapy(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent salvage therapy procedures.
    **Data Element Scope:** This value set may use Quality Data Model (QDM) category related to Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with salvage therapy procedures.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.399'
    VALUE_SET_NAME = 'Salvage Therapy'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '51597',
        '55860',
        '55862',
        '55865'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '236209003',
        '236211007'
    }
class TobaccoUseCessationCounseling(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent various tobacco cessation counseling interventions.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention.
    **Inclusion Criteria:** Includes only relevant concepts associated with various cessation interventions which may include referral to tobacco-related services or providers, education about the benefits of stopping tobacco use, education about the negative side effects of using tobacco, and monitoring for tobacco cessation.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.526.3.509'
    VALUE_SET_NAME = 'Tobacco Use Cessation Counseling'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # CPT procedure codes
    CPT = {
        '99406',
        '99407'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '171055003',
        '185795007',
        '185796008',
        '225323000',
        '225324006',
        '310429001',
        '315232003',
        '384742004',
        '395700008',
        '449841000124108',
        '449851000124105',
        '449861000124107',
        '449871000124100',
        '702388001',
        '710081004',
        '711028002',
        '713700008'
    }
class WeightReductionRecommended(ValueSet):
    """
    **Clinical Focus:** This value set contains concepts that represent management and maintenance of weight.
    **Data Element Scope:** This value set may use the Quality Data Model (QDM) category related to Intervention or Procedure.
    **Inclusion Criteria:** Includes only relevant concepts associated with interventions addressing healthy eating, goal setting, weight management and maintenance.
    **Exclusion Criteria:** No exclusions.
    """
    OID = '2.16.840.1.113883.3.600.1510'
    VALUE_SET_NAME = 'Weight Reduction Recommended'
    EXPANSION_VERSION = 'eCQM Update 2020-05-07'
    # HCPCS Level II codes
    HCPCSLEVELII = {
        'S9449'
    }
    # SNOMED CT concept codes
    SNOMEDCT = {
        '170795002',
        '266724001',
        '268523001',
        '408289007',
        '410200000'
    }
|
nilq/baby-python
|
python
|
import os
import json
import tempfile
from model import VepException, VepResult
from bgcore import tsv
from bgcore.request import Request
def _ctype(value):
return value.split(",")
class VepService(object):
    """Client for the Ensembl VEP (Variant Effect Predictor) REST service.

    Fetches variant consequences over HTTP, stores them in a temporary
    TSV file (see :meth:`run`) and re-parses them as ``VepResult``
    objects (see :meth:`results`). Call :meth:`close` to remove the
    temporary file.
    """

    HOST = "beta.rest.ensembl.org"

    # Normalizes caller strand notation ("+"/"-" or "1"/"-1") to the
    # "1"/"-1" form used in the REST URL.
    VEP_STRAND = {"+": "1", "-": "-1", "1": "1", "-1": "-1"}

    def __init__(self, cache_path, max_retries=3, max_freq=3):
        """
        :param cache_path: path intended for caching. NOTE(review): stored
            but never read in this class as written — confirm intended use.
        :param max_retries: maximum HTTP retries per request.
        :param max_freq: request-frequency limit handed to ``Request``.
        """
        self.cache_path = cache_path
        self.results_path = None
        self.__restful = Request(max_retries=max_retries, max_freq=max_freq)

    def __parse_response(self, var_id, chr, start, end, strand, alt, response):
        """Parse a VEP JSON response into a list of ``VepResult``.

        Only the first allele seen for each (variant, transcript) pair is
        kept; later duplicates are skipped.

        :raises Exception: if the service body is not a JSON object.
        """
        root = json.load(response)
        if not isinstance(root, dict):
            raise Exception("Unexpected result from VEP web service:\n{0}".format(json.dumps(root)))

        results = []
        found = set()
        # Variant tag; combined with the transcript id below to de-duplicate
        # entries the service reports more than once.
        tag = ":".join([chr, str(start), str(end), strand, alt])
        for data in root["data"]:
            for trans in data["transcripts"]:
                gene = trans.get("gene_id")
                transcript = trans.get("transcript_id")
                tstart = trans.get("translation_start")
                tend = trans.get("translation_end")
                # Collapse the translation interval to "start-end", a single
                # position, or None when absent.
                if tstart is not None and tend is not None and tstart != tend:
                    protein_pos = "{0}-{1}".format(tstart, tend)
                elif tstart is not None:
                    protein_pos = tstart
                elif tend is not None:
                    protein_pos = tend
                else:
                    protein_pos = None
                protein = trans.get("translation_stable_id")
                for allele in trans.get("alleles", []):
                    consequences = allele.get("consequence_terms")
                    aa_change = allele.get("pep_allele_string")
                    sift_score = allele.get("sift_score")
                    polyphen_score = allele.get("polyphen_score")
                    key = "{0}|{1}".format(tag, transcript)
                    if key not in found:
                        found.add(key)
                        # NOTE(review): 'allele' is the allele JSON dict here,
                        # not an allele string — confirm VepResult expects it.
                        results += [VepResult(
                            var_id=var_id, chr=chr, start=start, allele=allele,
                            gene=gene, transcript=transcript, consequences=consequences,
                            protein_pos=protein_pos, aa_change=aa_change, protein=protein,
                            sift=sift_score, polyphen=polyphen_score)]
        return results

    def get(self, chr, start, end, strand, alt, var_id=None):
        """Query VEP for one variant.

        :return: list of ``VepResult`` or None when the request failed.
        """
        strand = self.VEP_STRAND[strand]
        url = "http://{0}/vep/human/{1}:{2}-{3}:{4}/{5}/consequences".format(
            self.HOST, chr, start, end, strand, alt)
        response = self.__restful.get(url, headers={"Content-type": "application/json"})
        if response is None:
            return None
        return self.__parse_response(var_id, chr, start, end, strand, alt, response)

    def run(self, variants_path):
        """
        Run the VEP service for every variant and save results in a temporary file.

        Variants that fail to resolve are skipped silently. Returns None;
        retrieve the parsed output through :meth:`results`.

        :param variants_path: File with variants. In BED format. http://www.ensembl.org/info/docs/variation/vep/vep_script.html#custom_formats
        """
        if self.results_path is None:
            # mkstemp() hands back an *open* OS-level descriptor; close it
            # immediately so it is not leaked (the path is re-opened below).
            fd, self.results_path = tempfile.mkstemp()
            os.close(fd)
        with open(self.results_path, "w") as rf:
            with open(variants_path, "r") as vf:
                column_types = (str, int, int, str, str, int)
                for fields in tsv.lines(vf, column_types):
                    chr, start, end, allele, strand, var_id = fields
                    # allele is "ref/alt"; keep only the alternate part.
                    alt = allele[allele.find("/") + 1:]
                    results = self.get(chr, start, end, strand, alt, var_id)
                    if results is None:
                        continue
                    for r in results:
                        rf.write(tsv.line_text(
                            var_id, chr, start, allele,
                            r.gene, r.transcript, ",".join(sorted(r.consequences)),
                            r.protein_pos, r.aa_change, r.protein,
                            r.sift, r.polyphen, null_value="-"))

    def results(self):
        """
        Iterator that parses the results temporary file and yields VepResult's.
        """
        with open(self.results_path, "r") as f:
            column_types = (int, str, int, str, str, str, _ctype, str, str, str, float, float)
            for fields in tsv.lines(f, column_types, null_value="-"):
                var_id, chr, start, allele, gene, transcript, consequences, protein_pos, aa_change, protein, sift, polyphen = fields
                yield VepResult(var_id=var_id, chr=chr, start=start, allele=allele,
                                gene=gene, transcript=transcript, consequences=consequences,
                                protein_pos=protein_pos, aa_change=aa_change, protein=protein,
                                sift=sift, polyphen=polyphen)

    def close(self):
        """
        Removes temporary files.
        """
        if self.results_path is not None:
            os.remove(self.results_path)
            self.results_path = None
|
nilq/baby-python
|
python
|
from ._PlaceBox import *
from ._RemoveBox import *
|
nilq/baby-python
|
python
|
"""Implements logic to render and validate web forms for login and user registration."""
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, SelectField
from wtforms.validators import DataRequired
class LoginForm(FlaskForm):
    """Form for existing users to log in to the site."""
    # Both credentials are mandatory; DataRequired rejects empty submissions.
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    # Optional checkbox; presumably controls session persistence across
    # browser restarts — confirm against the login view that consumes it.
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
# For disabled RegistrationForm
# ------------------------------
# from wtforms.fields.html5 import EmailField
# from wtforms.validators import ValidationError, Email, EqualTo, Regexp, Length
# from cinescout.models import User
# Disabled as of v1.1.0
# class RegistrationForm(FlaskForm):
# """Form for new users to register with site."""
# # Class data
# username = StringField('Username',
# validators=[DataRequired(),
# Regexp('^\w+$',
# message="Username must contain only alphanumeric or underscore characters.")
# ])
# email = EmailField('Email', validators=[ DataRequired(), Email()])
# password = PasswordField('Password', validators=[DataRequired(),
# EqualTo('password2', message="Passwords do not match."),
# Length(min=8,
# message="Password must be at least 8 characters long.")
# ])
# password2 = PasswordField('Re-enter Password',
# validators=[DataRequired()])
# submit = SubmitField('Register')
# def validate_username(self, username):
# """Checks that username has not already been used.
# Args:
# username: String representing username of user.
# Raises:
# ValidationError: if username already in use.
# """
# user = User.query.filter_by(username=username.data).first()
# if user:
# raise ValidationError('Username already taken. Please use another.')
# def validate_email(self, email):
# """Checks that user is not creating multiple accounts with same
# email.
# Args:
# email: String representing user's email.
# Raises:
# ValidationError: if email already in use.
# """
# user = User.query.filter_by(email=email.data).first()
# if user:
# raise ValidationError('An account already exists with this email address. Please use another.')
|
nilq/baby-python
|
python
|
# Mapping of internal (English) statistic column names to their Spanish
# display names.
# Bug fix: the offensive/defensive rebound translations were swapped
# ("offensive_rebounds" mapped to "rebotes_defensivos" and vice versa),
# for both the plain and the "_volume" variants.
spanish_columns = {
    "oer": "oer",
    "games": "partidos",
    "points_made": "puntos_a_favor",
    "total_possessions": "posesiones_totales",
    "minutes": "minutos",
    "assists": "asistencias",
    "steals": "robos",
    "turnovers": "perdidas",
    "2_point_percentage": "porcentaje_2_puntos",
    "2_point_made": "2_puntos_metidos",
    "2_point_attempted": "2_puntos_intentados",
    "3_point_percentage": "porcentaje_3_puntos",
    "3_point_made": "3_puntos_metidos",
    "3_point_attempted": "3_puntos_intentados",
    "field_goal_percentage": "porcentaje_tiros_campo",
    "field_goal_made": "tiros_campo_metidos",
    "field_goal_attempted": "tiros_campo_intentados",
    "free_throw_percentage": "porcentaje_tiros_libres",
    "free_throw_made": "tiros_libres_metidos",
    "free_throw_attempted": "tiros_libres_intentados",
    "offensive_rebounds": "rebotes_ofensivos",
    "defensive_rebounds": "rebotes_defensivos",
    "total_rebounds": "rebotes_totales",
    "fouls_made": "faltas_cometidas",
    "fouls_received": "faltas_recibidas",
    "blocks_made": "tapones_favor",
    "blocks_received": "tapones_contra",
    "dunks": "mates",
    "ranking": "valoracion",
    "point_balance": "+/-",
    "team": "equipo",
    "der": "der",
    "points_received": "puntos_contra",
    "mode": "modo",
    "points_made_volume": "volumen_puntos_favor",
    "total_possessions_volume": "volumen_posesiones_totales",
    "2_point_made_volume": "volumen_2_puntos_metidos",
    "2_point_attempted_volume": "volumen_2_puntos_intentados",
    "3_point_made_volume": "volumen_3_puntos_metidos",
    "3_point_attempted_volume": "volumen_3_puntos_intentados",
    "field_goal_made_volume": "volumen_tiros_campo_metidos",
    "field_goal_attempted_volume": "volumen_tiros_campo_intentados",
    "free_throw_made_volume": "volumen_tiros_libres_metidos",
    "free_throw_attempted_volume": "volumen_tiros_libres_intentados",
    "offensive_rebounds_volume": "volumen_rebotes_ofensivos",
    "defensive_rebounds_volume": "volumen_rebotes_defensivos",
    "total_rebounds_volume": "volumen_rebotes_totales",
    "oer_40_min": "oer_por_40_minutos",
}
|
nilq/baby-python
|
python
|
import decimal
import numbers
import itertools
# Public API of this module.
__all__ = [
    'TRUNCATE',
    'ROUND',
    'DECIMAL_PLACES',
    'SIGNIFICANT_DIGITS',
    'NO_PADDING',
    'PAD_WITH_ZERO',
    'decimal_to_precision',
]
# rounding mode: cut off extra digits vs. round half up
TRUNCATE = 0
ROUND = 1
# digits counting mode: interpret `precision` as decimal places
# or as significant digits
DECIMAL_PLACES = 2
SIGNIFICANT_DIGITS = 3
# padding mode: whether to right-pad the result with zeros
NO_PADDING = 4
PAD_WITH_ZERO = 5
def decimal_to_precision(n, rounding_mode=ROUND, precision=None, counting_mode=DECIMAL_PLACES, padding_mode=NO_PADDING):
    """Format number *n* as a string with the requested precision.

    :param n: number or numeric string to format.
    :param rounding_mode: TRUNCATE or ROUND.
    :param precision: required; number of decimal places or significant
        digits, depending on *counting_mode*.
    :param counting_mode: DECIMAL_PLACES or SIGNIFICANT_DIGITS.
    :param padding_mode: NO_PADDING or PAD_WITH_ZERO.
    :return: formatted string.
    """
    assert precision is not None and isinstance(precision, numbers.Integral)
    assert rounding_mode in [TRUNCATE, ROUND]
    assert counting_mode in [DECIMAL_PLACES, SIGNIFICANT_DIGITS]
    assert padding_mode in [NO_PADDING, PAD_WITH_ZERO]
    # NOTE(review): this mutates the process-wide decimal context (traps and
    # rounding) and never restores it — callers share the side effect.
    context = decimal.getcontext()
    # Keep the working precision safely below the context precision.
    precision = min(context.prec - 2, precision)
    # all default except decimal.Underflow (raised when a number is rounded to zero)
    context.traps[decimal.Underflow] = True
    context.rounding = decimal.ROUND_HALF_UP  # rounds 0.5 away from zero
    dec = decimal.Decimal(n)
    string = str(dec)
    precise = None

    def power_of_10(x):
        # 10 ** -x as an exact Decimal, used as a quantization step.
        return decimal.Decimal('10') ** (-x)

    if rounding_mode == ROUND:
        if counting_mode == DECIMAL_PLACES:
            # quantize uses the ROUND_HALF_UP rounding set on the context above
            # (the old comment claiming ROUND_HALF_EVEN was stale).
            precise = str(dec.quantize(power_of_10(precision)))
        elif counting_mode == SIGNIFICANT_DIGITS:
            q = precision - dec.adjusted() - 1
            sigfig = power_of_10(q)
            if q < 0:
                string_to_precision = string[:precision]
                # string_to_precision is '' when we have zero precision
                below = sigfig * decimal.Decimal(string_to_precision if string_to_precision else '0')
                above = below + sigfig
                # pick whichever multiple of sigfig is nearest to the input
                precise = str(min((below, above), key=lambda x: abs(x - dec)))
            else:
                precise = str(dec.quantize(sigfig))
    elif rounding_mode == TRUNCATE:
        # Slice a string
        if counting_mode == DECIMAL_PLACES:
            before, after = string.split('.') if '.' in string else (string, '')
            precise = before + '.' + after[:precision]
        elif counting_mode == SIGNIFICANT_DIGITS:
            if precision == 0:
                return '0'
            dot = string.index('.') if '.' in string else 0
            start = dot - dec.adjusted()
            end = start + precision
            # need to clarify these conditionals
            if dot >= end:
                end -= 1
            precise = string[:end].ljust(dot, '0')
        precise = precise.rstrip('.')
    if padding_mode == NO_PADDING:
        return precise.rstrip('0').rstrip('.') if '.' in precise else precise
    elif padding_mode == PAD_WITH_ZERO:
        if '.' in precise:
            if counting_mode == DECIMAL_PLACES:
                before, after = precise.split('.')
                return before + '.' + after.ljust(precision, '0')
            elif counting_mode == SIGNIFICANT_DIGITS:
                # count leading zeros / dot that do not count as significant
                fsfg = len(list(itertools.takewhile(lambda x: x == '.' or x == '0', precise)))
                if '.' in precise[fsfg:]:
                    precision += 1
                return precise[:fsfg] + precise[fsfg:].rstrip('0').ljust(precision, '0')
        else:
            if counting_mode == SIGNIFICANT_DIGITS:
                if precision > len(precise):
                    return precise + '.' + (precision - len(precise)) * '0'
            elif counting_mode == DECIMAL_PLACES:
                if precision > 0:
                    return precise + '.' + precision * '0'
            return precise
|
nilq/baby-python
|
python
|
import os
from setuptools import setup
README = """
See the README on `GitHub
<https://github.com/uw-it-aca/uw-restclients-coda>`_.
"""
version_path = 'uw_coda/VERSION'
# Read the package version; a context manager closes the handle promptly
# (the previous bare open() leaked the file object).
with open(os.path.join(os.path.dirname(__file__), version_path)) as _version_file:
    VERSION = _version_file.read().replace("\n", "")
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
url = "https://github.com/uw-it-aca/uw-restclients-coda"
setup(
    name='UW-RestClients-CoDa',
    version=VERSION,
    packages=['uw_coda'],
    author="UW-IT AXDD",
    author_email="aca-it@uw.edu",
    include_package_data=True,
    install_requires=['UW-RestClients-Core'],
    license='Apache License, Version 2.0',
    # Bug fix: the adjacent string literals previously concatenated to
    # "...Dashboardsapplication" (missing space between them).
    description=('A restclient for accessing the Instructor Course Dashboards '
                 'application at the University of Washington'),
    long_description=README,
    url=url,
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
    ],
)
|
nilq/baby-python
|
python
|
#!/usr/bin/env pytest
# -*- coding: utf-8 -*-
################################################################################
# Project: OGR NextGIS Web Driver
# Purpose: Tests OGR NGW Driver capabilities
# Author: Dmitry Baryshnikov, polimax@mail.ru
# Language: Python
################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2018-2019, NextGIS <info@nextgis.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
################################################################################
import sys
sys.path.append('../pymod')
import gdaltest
from osgeo import gdal
from osgeo import ogr
from osgeo import osr
import time
import json
import pytest
import random
from datetime import datetime
def check_availability(url):
    """Return True if the NGW sandbox at *url* is usable for tests.

    Returns False during the sandbox maintenance window (Monday ~01:00-03:00
    UTC), when the version endpoint is unreachable, or when fewer than 10
    resources remain in the server quota.
    """
    # Sandbox cleans at 1:05 on monday (UTC)
    now = datetime.utcnow()
    if now.weekday() == 0 and 1 <= now.hour < 3:
        return False
    version_url = url + '/api/component/pyramid/pkg_version'
    if gdaltest.gdalurlopen(version_url) is None:
        return False
    # Check quota
    quota_url = url + '/api/resource/quota'
    quota_conn = gdaltest.gdalurlopen(quota_url)
    if quota_conn is None:
        # Previously an AttributeError on None was silently swallowed by a
        # bare except; make the failure explicit.
        return False
    try:
        quota_json = json.loads(quota_conn.read())
        quota_conn.close()
        if quota_json is None:
            return False
        limit = quota_json['limit']
        count = quota_json['count']
        if limit is None or count is None:
            return True
        # Require headroom so the tests can create resources.
        return limit - count > 10
    except Exception:
        # Narrowed from a bare except: malformed or missing quota info means
        # the sandbox is unusable; never mask KeyboardInterrupt/SystemExit.
        return False
def get_new_name():
    """Return a (practically) unique test resource-group name."""
    timestamp = int(time.time())
    suffix = random.randint(10, 99)
    return 'gdaltest_group_{}_{}'.format(timestamp, suffix)
###############################################################################
# Check driver existence.
def test_ogr_ngw_1():
    """Check NGW driver existence and test-server availability."""
    gdaltest.ngw_ds = None
    gdaltest.ngw_drv = None
    gdaltest.ngw_drv = gdal.GetDriverByName('NGW')
    if gdaltest.ngw_drv is None:
        pytest.skip()
    gdaltest.ngw_test_server = 'https://sandbox.nextgis.com'  # 'http://dev.nextgis.com/sandbox'
    # PEP 8 E712: use truthiness instead of comparing to False.
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
###############################################################################
# Check create datasource.
def test_ogr_ngw_2():
    """Check create datasource."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    create_url = 'NGW:' + gdaltest.ngw_test_server + '/resource/0/' + get_new_name()
    gdal.PushErrorHandler()
    gdaltest.ngw_ds = gdaltest.ngw_drv.Create(create_url, 0, 0, 0, gdal.GDT_Unknown,
                                              options=['DESCRIPTION=GDAL Test group'])
    gdal.PopErrorHandler()
    assert gdaltest.ngw_ds is not None, 'Create datasource failed.'
    assert gdaltest.ngw_ds.GetMetadataItem('description', '') == 'GDAL Test group', \
        'Did not get expected datasource description.'
    assert int(gdaltest.ngw_ds.GetMetadataItem('id', '')) > 0, \
        'Did not get expected datasource identifier.'
    # Remember the created group id so later tests (and cleanup) can use it.
    gdaltest.group_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
###############################################################################
# Check rename datasource.
def test_ogr_ngw_3():
    """Check rename datasource."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    new_name = get_new_name() + '_2'
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    rename_url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    assert gdaltest.ngw_drv.Rename(new_name, rename_url) == gdal.CE_None, \
        'Rename datasource failed.'
###############################################################################
# Check datasource metadata.
def test_ogr_ngw_4():
    """Check datasource metadata: set items, reopen, read them back."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    gdaltest.ngw_ds.SetMetadataItem('test_int.d', '777', 'NGW')
    gdaltest.ngw_ds.SetMetadataItem('test_float.f', '777.555', 'NGW')
    gdaltest.ngw_ds.SetMetadataItem('test_string', 'metadata test', 'NGW')
    # Close and reopen to be sure the values round-trip through the server.
    gdaltest.ngw_ds = None
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    gdaltest.ngw_ds = gdal.OpenEx(url, gdal.OF_UPDATE)  # gdaltest.ngw_drv.Open(url, update=1)
    assert gdaltest.ngw_ds is not None, \
        'Open datasource failed.'
    md_item = gdaltest.ngw_ds.GetMetadataItem('test_int.d', 'NGW')
    assert md_item == '777', \
        'Did not get expected datasource metadata item. test_int.d is equal {}, but should {}.'.format(md_item, '777')
    md_item = gdaltest.ngw_ds.GetMetadataItem('test_float.f', 'NGW')
    assert float(md_item) == pytest.approx(777.555, abs=0.00001), \
        'Did not get expected datasource metadata item. test_float.f is equal {}, but should {}.'.format(md_item, '777.555')
    md_item = gdaltest.ngw_ds.GetMetadataItem('test_string', 'NGW')
    assert md_item == 'metadata test', \
        'Did not get expected datasource metadata item. test_string is equal {}, but should {}.'.format(md_item, 'metadata test')
    resource_type = gdaltest.ngw_ds.GetMetadataItem('resource_type', '')
    # Message typo fixed: "Resourse" -> "Resource".
    assert resource_type is not None, 'Did not get expected datasource metadata item. Resource type should be present.'
def create_fields(lyr):
    """Create the standard set of test fields on *lyr*, each with an alias.

    Field order matters: aliases are keyed by field index.
    """
    field_specs = (
        ('STRFIELD', ogr.OFTString, 'String field test'),
        ('DECFIELD', ogr.OFTInteger, 'Integer field test'),
        ('BIGDECFIELD', ogr.OFTInteger64, 'Integer64 field test'),
        ('REALFIELD', ogr.OFTReal, 'Real field test'),
        ('DATEFIELD', ogr.OFTDate, 'Date field test'),
        ('TIMEFIELD', ogr.OFTTime, 'Time field test'),
        ('DATETIMEFLD', ogr.OFTDateTime, 'Date & time field test'),
    )
    for index, (field_name, field_type, alias) in enumerate(field_specs):
        lyr.CreateField(ogr.FieldDefn(field_name, field_type))
        lyr.SetMetadataItem('FIELD_{}_ALIAS'.format(index), alias)
def fill_fields(f):
    """Populate feature *f* with the first set of test attribute values."""
    for field_name, field_value in (
            ('STRFIELD', 'fo_o'),
            ('DECFIELD', 123),
            ('BIGDECFIELD', 12345678901234),
            ('REALFIELD', 1.23),
            ('DATETIMEFLD', '2014/12/04 12:34:56')):
        f.SetField(field_name, field_value)
def fill_fields2(f):
    """Populate feature *f* with the second (alternative) set of test values."""
    for field_name, field_value in (
            ('STRFIELD', 'русский'),
            ('DECFIELD', 321),
            ('BIGDECFIELD', 32145678901234),
            ('REALFIELD', 21.32),
            ('DATETIMEFLD', '2019/12/31 21:43:56')):
        f.SetField(field_name, field_value)
def add_metadata(lyr):
    """Attach the standard test metadata items to *lyr* in the NGW domain.

    Bug fix: the float value previously used a comma ('777,555'), which is
    inconsistent with test_ogr_ngw_4 and cannot be parsed by the
    ``float(md_item)`` read-back assertion in test_ogr_ngw_5; use a decimal
    point.
    """
    lyr.SetMetadataItem('test_int.d', '777', 'NGW')
    lyr.SetMetadataItem('test_float.f', '777.555', 'NGW')
    lyr.SetMetadataItem('test_string', 'metadata test', 'NGW')
###############################################################################
# Check create vector layers.
def test_ogr_ngw_5():
    """Check create vector layers, overwrite behaviour and layer metadata."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(3857)
    lyr = gdaltest.ngw_ds.CreateLayer('test_pt_layer', srs=sr, geom_type=ogr.wkbMultiPoint, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    # Test duplicated names.
    fld_defn = ogr.FieldDefn('STRFIELD', ogr.OFTString)
    assert lyr.CreateField(fld_defn) != 0, 'Expected not to create duplicated field'
    # Test forbidden field names.
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    fld_defn = ogr.FieldDefn('id', ogr.OFTInteger)
    lyr.CreateField(fld_defn)
    gdal.PopErrorHandler()
    assert gdal.GetLastErrorMsg() != '', 'Expecting a warning'
    add_metadata(lyr)
    lyr = gdaltest.ngw_ds.CreateLayer('test_ln_layer', srs=sr, geom_type=ogr.wkbMultiLineString, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    add_metadata(lyr)
    lyr = gdaltest.ngw_ds.CreateLayer('test_pl_layer', srs=sr, geom_type=ogr.wkbMultiPolygon, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    add_metadata(lyr)
    # Test overwrite
    lyr = gdaltest.ngw_ds.CreateLayer('test_pt_layer', srs=sr, geom_type=ogr.wkbPoint, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    add_metadata(lyr)
    lyr = gdaltest.ngw_ds.CreateLayer('test_ln_layer', srs=sr, geom_type=ogr.wkbLineString, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    add_metadata(lyr)
    lyr = gdaltest.ngw_ds.CreateLayer('test_pl_layer', srs=sr, geom_type=ogr.wkbPolygon, options=['OVERWRITE=YES', 'DESCRIPTION=Test point layer'])
    assert lyr is not None, 'Create layer failed.'
    create_fields(lyr)
    add_metadata(lyr)
    # Test without overwrite
    lyr = gdaltest.ngw_ds.CreateLayer('test_pl_layer', srs=sr, geom_type=ogr.wkbMultiPolygon, options=['OVERWRITE=NO', 'DESCRIPTION=Test point layer 1'])
    assert lyr is None, 'Create layer without overwrite should fail.'
    lyr = gdaltest.ngw_ds.CreateLayer('test_pl_layer', srs=sr, geom_type=ogr.wkbMultiPolygon, options=['DESCRIPTION=Test point layer 1'])
    assert lyr is None, 'Create layer without overwrite should fail.'
    # Reopen and verify layers and their metadata round-tripped.
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    gdaltest.ngw_ds = None
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    gdaltest.ngw_ds = gdal.OpenEx(url, gdal.OF_UPDATE)  # gdaltest.ngw_drv.Open(url, update=1)
    assert gdaltest.ngw_ds is not None, 'Open datasource failed.'
    for layer_name in ['test_pt_layer', 'test_ln_layer', 'test_pl_layer']:
        lyr = gdaltest.ngw_ds.GetLayerByName(layer_name)
        assert lyr is not None, 'Get layer {} failed.'.format(layer_name)
        md_item = lyr.GetMetadataItem('test_int.d', 'NGW')
        assert md_item == '777', \
            'Did not get expected layer metadata item. test_int.d is equal {}, but should {}.'.format(md_item, '777')
        md_item = lyr.GetMetadataItem('test_float.f', 'NGW')
        assert float(md_item) == pytest.approx(777.555, abs=0.00001), \
            'Did not get expected layer metadata item. test_float.f is equal {}, but should {}.'.format(md_item, '777.555')
        md_item = lyr.GetMetadataItem('test_string', 'NGW')
        assert md_item == 'metadata test', \
            'Did not get expected layer metadata item. test_string is equal {}, but should {}.'.format(md_item, 'metadata test')
        resource_type = lyr.GetMetadataItem('resource_type', '')
        # Message typo fixed: "Resourse" -> "Resource".
        assert resource_type is not None, 'Did not get expected layer metadata item. Resource type should be present.'
###############################################################################
# Check open single vector layer.
def test_ogr_ngw_6():
    """Check open single vector layer."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    lyr_resource_id = lyr.GetMetadataItem('id', '')
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + lyr_resource_id
    ds = gdal.OpenEx(url)
    assert ds is not None and ds.GetLayerCount() == 1, \
        'Failed to open single vector layer.'
###############################################################################
# Check insert, update and delete features.
def test_ogr_ngw_7():
    """Check insert, update and delete features."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    f = ogr.Feature(lyr.GetLayerDefn())
    fill_fields(f)
    f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (1 2)'))
    ret = lyr.CreateFeature(f)
    assert ret == 0 and f.GetFID() >= 0, \
        'Create feature failed. Expected FID greater or equal 0, got {}.'.format(f.GetFID())
    fill_fields2(f)
    f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (3 4)'))
    ret = lyr.SetFeature(f)
    assert ret == 0, 'Failed to update feature #{}.'.format(f.GetFID())
    # Keep the FID before f is rebound below, so the failure message can
    # still report it (previously the message read the FID off the
    # re-fetched feature object).
    fid = f.GetFID()
    lyr.DeleteFeature(fid)
    # Expected fail to get feature
    gdal.PushErrorHandler()
    f = lyr.GetFeature(fid)
    gdal.PopErrorHandler()
    assert f is None, 'Failed to delete feature #{}.'.format(fid)
###############################################################################
# Check insert, update features in batch mode.
def test_ogr_ngw_8():
    """Check insert and update of features in batch mode (BATCH_SIZE=2)."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    gdaltest.ngw_ds = None
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    gdaltest.ngw_ds = gdal.OpenEx(url, gdal.OF_UPDATE, open_options=['BATCH_SIZE=2'])
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    f1 = ogr.Feature(lyr.GetLayerDefn())
    fill_fields(f1)
    f1.SetGeometry(ogr.CreateGeometryFromWkt('POINT (1 2)'))
    ret = lyr.CreateFeature(f1)
    # Negative FID: feature presumably still queued client-side — confirm
    # against the NGW driver's batch-mode behaviour.
    assert ret == 0 and f1.GetFID() < 0
    f2 = ogr.Feature(lyr.GetLayerDefn())
    fill_fields2(f2)
    f2.SetGeometry(ogr.CreateGeometryFromWkt('POINT (2 3)'))
    ret = lyr.CreateFeature(f2)
    assert ret == 0 and f2.GetFID() < 0
    f3 = ogr.Feature(lyr.GetLayerDefn())
    fill_fields(f3)
    f3.SetGeometry(ogr.CreateGeometryFromWkt('POINT (3 4)'))
    ret = lyr.CreateFeature(f3)
    assert ret == 0
    ret = lyr.SyncToDisk()
    assert ret == 0
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    counter = 0
    while feat is not None:
        counter += 1
        assert feat.GetFID() >= 0, 'Expected FID greater or equal 0, got {}.'.format(feat.GetFID())
        feat = lyr.GetNextFeature()
    assert counter >= 3, 'Expected 3 or greater feature count, got {}.'.format(counter)
###############################################################################
# Check paging while GetNextFeature.
def test_ogr_ngw_9():
    """Check paging while GetNextFeature (PAGE_SIZE=2)."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    gdaltest.ngw_ds = None
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    gdaltest.ngw_ds = gdal.OpenEx(url, gdal.OF_UPDATE, open_options=['PAGE_SIZE=2'])
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    counter = 0
    while feat is not None:
        counter += 1
        assert feat.GetFID() >= 0, 'Expected FID greater or equal 0, got {}.'.format(feat.GetFID())
        feat = lyr.GetNextFeature()
    assert counter >= 3, 'Expected 3 or greater feature count, got {}.'.format(counter)
###############################################################################
# Check native data.
def test_ogr_ngw_10():
    """Check native data round-trip (NATIVE_DATA=YES)."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    ds_resource_id = gdaltest.ngw_ds.GetMetadataItem('id', '')
    gdaltest.ngw_ds = None
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + ds_resource_id
    gdaltest.ngw_ds = gdal.OpenEx(url, gdal.OF_UPDATE, open_options=['NATIVE_DATA=YES'])
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    feature_id = feat.GetFID()
    native_data = feat.GetNativeData()
    assert native_data is not None, 'Feature #{} native data should not be empty'.format(feature_id)
    # {"description":null,"attachment":null}
    assert feat.GetNativeMediaType() == 'application/json', 'Unsupported native media type'
    # Set description
    feat.SetNativeData('{"description":"Test feature description"}')
    ret = lyr.SetFeature(feat)
    assert ret == 0, 'Failed to update feature #{}.'.format(feature_id)
    feat = lyr.GetFeature(feature_id)
    native_data = feat.GetNativeData()
    assert native_data is not None and native_data.find('Test feature description') != -1, 'Expected feature description text, got {}'.format(native_data)
###############################################################################
# Check ignored fields works ok
def test_ogr_ngw_11():
    """Check ignored fields work ok."""
    if gdaltest.ngw_drv is None or gdaltest.ngw_ds is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    lyr.SetIgnoredFields(['STRFIELD'])
    feat = lyr.GetNextFeature()
    assert not feat.IsFieldSet('STRFIELD'), 'got STRFIELD despite request to ignore it.'
    assert feat.GetFieldAsInteger('DECFIELD') == 123, 'missing or wrong DECFIELD'
    fd = lyr.GetLayerDefn()
    fld = fd.GetFieldDefn(0)  # STRFIELD
    assert fld.IsIgnored(), 'STRFIELD unexpectedly not marked as ignored.'
    fld = fd.GetFieldDefn(1)  # DECFIELD
    assert not fld.IsIgnored(), 'DECFIELD unexpectedly marked as ignored.'
    assert not fd.IsGeometryIgnored(), 'geometry unexpectedly ignored.'
    assert not fd.IsStyleIgnored(), 'style unexpectedly ignored.'
    # Drop references before other tests reuse the layer.
    feat = None
    lyr = None
###############################################################################
# Check attribute filter.
def test_ogr_ngw_12():
    """Check attribute filter (both OGR SQL and native NGW syntax)."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    lyr.SetAttributeFilter("STRFIELD = 'русский'")
    fc = lyr.GetFeatureCount()
    assert fc == 1, 'Expected feature count is 1, got {}.'.format(fc)
    lyr.SetAttributeFilter("STRFIELD = 'fo_o' AND DECFIELD = 321")
    fc = lyr.GetFeatureCount()
    assert fc == 0, 'Expected feature count is 0, got {}.'.format(fc)
    # Native NGW filter syntax is passed through with the "NGW:" prefix.
    lyr.SetAttributeFilter('NGW:fld_STRFIELD=fo_o&fld_DECFIELD=123')
    fc = lyr.GetFeatureCount()
    assert fc == 2, 'Expected feature count is 2, got {}.'.format(fc)
###############################################################################
# Check spatial filter.
def test_ogr_ngw_13():
    """Check spatial filter."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pt_layer')
    # Reset any attribute filters
    lyr.SetAttributeFilter(None)
    # Check intersecting POINT(3 4)
    lyr.SetSpatialFilter(ogr.CreateGeometryFromWkt('POLYGON ((2.5 3.5,2.5 6,6 6,6 3.5,2.5 3.5))'))
    fc = lyr.GetFeatureCount()
    assert fc == 1, 'Expected feature count is 1, got {}.'.format(fc)
###############################################################################
# Check ExecuteSQL.
def test_ogr_ngw_14():
    """Check ExecuteSQL: DELLAYER, DELETE FROM, ALTER TABLE RENAME, SELECT."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    gdaltest.ngw_ds.ExecuteSQL('DELLAYER:test_ln_layer')
    lyr = gdaltest.ngw_ds.GetLayerByName('test_ln_layer')
    assert lyr is None, 'Expected fail to get layer test_ln_layer.'
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pl_layer')
    f = ogr.Feature(lyr.GetLayerDefn())
    fill_fields(f)
    f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((0 0,0 1,1 0,0 0))'))
    ret = lyr.CreateFeature(f)
    assert ret == 0, 'Failed to create feature in test_pl_layer.'
    assert lyr.GetFeatureCount() == 1, 'Expected feature count is 1, got {}.'.format(lyr.GetFeatureCount())
    gdaltest.ngw_ds.ExecuteSQL('DELETE FROM test_pl_layer')
    assert lyr.GetFeatureCount() == 0, 'Expected feature count is 0, got {}.'.format(lyr.GetFeatureCount())
    gdaltest.ngw_ds.ExecuteSQL('ALTER TABLE test_pl_layer RENAME TO test_pl_layer777')
    lyr = gdaltest.ngw_ds.GetLayerByName('test_pl_layer777')
    assert lyr is not None, 'Get layer test_pl_layer777 failed.'
    # Create 2 new features
    f = ogr.Feature(lyr.GetLayerDefn())
    fill_fields(f)
    f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((0 0,0 1,1 0,0 0))'))
    ret = lyr.CreateFeature(f)
    assert ret == 0, 'Failed to create feature in test_pl_layer777.'
    f = ogr.Feature(lyr.GetLayerDefn())
    fill_fields2(f)
    f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((1 1,1 2,2 1,1 1))'))
    ret = lyr.CreateFeature(f)
    assert ret == 0, 'Failed to create feature in test_pl_layer777.'
    lyr = gdaltest.ngw_ds.ExecuteSQL("SELECT STRFIELD,DECFIELD FROM test_pl_layer777 WHERE STRFIELD = 'fo_o'")
    assert lyr is not None, 'ExecuteSQL: SELECT STRFIELD,DECFIELD FROM test_pl_layer777 WHERE STRFIELD = "fo_o"; failed.'
    assert lyr.GetFeatureCount() == 2, 'Expected feature count is 2, got {}.'.format(lyr.GetFeatureCount())
    # Result sets from ExecuteSQL must be released explicitly.
    gdaltest.ngw_ds.ReleaseResultSet(lyr)
###############################################################################
# Run test_ogrsf
def test_ogr_ngw_test_ogrsf():
    """Run the external test_ogrsf utility against the test datasource."""
    if gdaltest.ngw_drv is None or gdal.GetConfigOption('SKIP_SLOW') is not None:
        pytest.skip()
    if not check_availability(gdaltest.ngw_test_server):
        gdaltest.ngw_drv = None
        pytest.skip()
    if gdaltest.skip_on_travis():
        pytest.skip()
    if gdaltest.ngw_ds is None:
        pytest.skip()
    url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + gdaltest.group_id
    import test_cli_utilities
    if test_cli_utilities.get_test_ogrsf_path() is None:
        pytest.skip()
    # Exercise the driver with the default options and with explicit
    # paging / batching open options.
    ret = gdaltest.runexternal(test_cli_utilities.get_test_ogrsf_path() + ' ' + url)
    assert ret.find('INFO') != -1 and ret.find('ERROR') == -1
    ret = gdaltest.runexternal(test_cli_utilities.get_test_ogrsf_path() + ' ' + url + ' -oo PAGE_SIZE=100')
    assert ret.find('INFO') != -1 and ret.find('ERROR') == -1
    ret = gdaltest.runexternal(test_cli_utilities.get_test_ogrsf_path() + ' ' + url + ' -oo BATCH_SIZE=5')
    assert ret.find('INFO') != -1 and ret.find('ERROR') == -1
    ret = gdaltest.runexternal(test_cli_utilities.get_test_ogrsf_path() + ' ' + url + ' -oo BATCH_SIZE=5 -oo PAGE_SIZE=100')
    assert ret.find('INFO') != -1 and ret.find('ERROR') == -1
###############################################################################
# Cleanup
def test_ogr_ngw_cleanup():
    """Delete the resource group created by the earlier tests."""
    if gdaltest.ngw_drv is None:
        pytest.skip()
    # NOTE(review): assumes gdaltest.group_id was set by test_ogr_ngw_2;
    # if that test skipped, this attribute access may fail — confirm.
    if gdaltest.group_id is not None:
        delete_url = 'NGW:' + gdaltest.ngw_test_server + '/resource/' + gdaltest.group_id
        # Release references before deleting the remote resource.
        gdaltest.ngw_layer = None
        gdaltest.ngw_ds = None
        assert gdaltest.ngw_drv.Delete(delete_url) == gdal.CE_None, \
            'Failed to delete datasource ' + delete_url + '.'
    gdaltest.ngw_ds = None
|
nilq/baby-python
|
python
|
# Copyright (c) 2016 Rackspace Hosting Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.api import extensions
from neutron import manager
from neutron import wsgi
from oslo_log import log as logging
import quark.utils as utils
RESOURCE_NAME = 'job'
RESOURCE_COLLECTION = RESOURCE_NAME + "s"
# Attribute map returned by Jobs.get_extended_resources() for API v2.0.
EXTENDED_ATTRIBUTES_2_0 = {
    RESOURCE_COLLECTION: {
        "completed": {"allow_post": False, "is_visible": True,
                      "default": False}}
}
# Also allow POST/PUT of a single "job" entry under the collection.
attr_dict = EXTENDED_ATTRIBUTES_2_0[RESOURCE_COLLECTION]
attr_dict[RESOURCE_NAME] = {'allow_post': True,
                            'allow_put': True,
                            'is_visible': True}
LOG = logging.getLogger(__name__)
class JobsController(wsgi.Controller):
    """WSGI controller delegating job CRUD operations to the plugin."""

    def __init__(self, plugin):
        self._resource_name = RESOURCE_NAME
        self._plugin = plugin

    @utils.exc_wrapper
    def index(self, request):
        """List jobs; query-string parameters pass through as filters."""
        context = request.context
        return {"jobs": self._plugin.get_jobs(context, **request.GET)}

    @utils.exc_wrapper
    def show(self, request, id):
        """Return a single job by id."""
        context = request.context
        return {"job": self._plugin.get_job(context, id)}

    @utils.exc_wrapper
    def create(self, request, body=None):
        """Create a job from the deserialized request body."""
        context = request.context
        # The body argument is ignored; the raw request body is used instead.
        body = self._deserialize(request.body, request.get_content_type())
        return {"job": self._plugin.create_job(context, body)}

    @utils.exc_wrapper
    def update(self, request, id, body=None):
        """Update an existing job from the deserialized request body."""
        context = request.context
        body = self._deserialize(request.body, request.get_content_type())
        return {"job": self._plugin.update_job(context, id, body)}

    @utils.exc_wrapper
    def delete(self, request, id):
        """Delete a job by id."""
        context = request.context
        return self._plugin.delete_job(context, id)
class Jobs(extensions.ExtensionDescriptor):
    """Jobs support."""

    @classmethod
    def get_name(cls):
        # typo fixed: "Asyncronous" -> "Asynchronous"
        return "Asynchronous jobs for a tenant"

    @classmethod
    def get_alias(cls):
        return RESOURCE_COLLECTION

    @classmethod
    def get_description(cls):
        # typo fixed: "asyncronous" -> "asynchronous"
        return "Provide a way to track asynchronous jobs"

    @classmethod
    def get_namespace(cls):
        # NOTE(review): this namespace still points at the "ip_addresses"
        # extension path -- looks copy-pasted; confirm the intended URL.
        return ("http://docs.openstack.org/network/ext/"
                "ip_addresses/api/v2.0")

    @classmethod
    def get_updated(cls):
        return "2016-05-15T10:00:00-00:00"

    def get_extended_resources(self, version):
        """Return the jobs attribute map for API v2.0, else empty."""
        if version == "2.0":
            return EXTENDED_ATTRIBUTES_2_0
        else:
            return {}

    @classmethod
    def get_resources(cls):
        """Returns Ext Resources."""
        job_controller = JobsController(
            manager.NeutronManager.get_plugin())
        resources = []
        resources.append(extensions.ResourceExtension(
            Jobs.get_alias(),
            job_controller))
        return resources
|
nilq/baby-python
|
python
|
# coding: utf-8
# pylint: disable=W0201,C0111
from __future__ import division, unicode_literals, print_function
# standard library
import sys
import os.path
import time
import datetime
import traceback
from copy import deepcopy
from collections import OrderedDict
from itertools import cycle
from math import ceil
import cgi # html lib
from six import string_types, iteritems, itervalues
from six.moves import range
import numpy as np
from pyNastran.gui.qt_version import qt_version
from qtpy import QtCore, QtGui #, API
from qtpy.QtWidgets import (
QMessageBox, QWidget,
QMainWindow, QDockWidget, QFrame, QHBoxLayout, QAction)
from qtpy.compat import getsavefilename, getopenfilename
#from pyNastran.gui.gui_utils.vtk_utils import numpy_to_vtk_points, get_numpy_idtype_for_vtk
import vtk
from pyNastran.gui.qt_files.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
import pyNastran
from pyNastran.bdf.utils import write_patran_syntax_dict
from pyNastran.utils.log import SimpleLogger
from pyNastran.utils import print_bad_path, integer_types, object_methods
from pyNastran.utils.numpy_utils import loadtxt_nice
from pyNastran.gui.gui_utils.write_gif import (
setup_animation, update_animation_inputs, write_gif)
from pyNastran.gui.qt_files.gui_qt_common import GuiCommon
from pyNastran.gui.qt_files.scalar_bar import ScalarBar
from pyNastran.gui.qt_files.alt_geometry_storage import AltGeometry
from pyNastran.gui.qt_files.coord_properties import CoordProperties
from pyNastran.gui.gui_interface.legend.interface import set_legend_menu
from pyNastran.gui.gui_interface.clipping.interface import set_clipping_menu
from pyNastran.gui.gui_interface.camera.interface import set_camera_menu
from pyNastran.gui.gui_interface.preferences.interface import set_preferences_menu
from pyNastran.gui.gui_interface.groups_modify.interface import on_set_modify_groups
from pyNastran.gui.gui_interface.groups_modify.groups_modify import Group
from gui_utils.menus.add_sidebar import Sidebar
from pyNastran.gui.menus.application_log import PythonConsoleWidget, ApplicationLogWidget
from pyNastran.gui.menus.manage_actors import EditGeometryProperties
from pyNastran.gui.styles.area_pick_style import AreaPickStyle
from pyNastran.gui.styles.zoom_style import ZoomStyle
from pyNastran.gui.styles.probe_style import ProbeResultStyle
from pyNastran.gui.styles.rotation_center_style import RotationCenterStyle
#from pyNastran.gui.menus.multidialog import MultiFileDialog
from pyNastran.gui.gui_utils.utils import load_csv, load_deflection_csv, load_user_geom
#---------------------------------
from gui_utils.menus.wing_menu import WingWindow
import gui_utils.vsp_g as vsp
class Interactor(vtk.vtkGenericRenderWindowInteractor):
    """Generic render-window interactor that reports prop highlighting."""

    def __init__(self):
        # the vtk base class handles its own initialization; nothing
        # extra is needed here
        #vtk.vtkGenericRenderWindowInteractor()
        pass

    def HighlightProp(self):
        """Called by VTK when a prop is highlighted."""
        print('highlight')
class PyNastranRenderWindowInteractor(QVTKRenderWindowInteractor):
    """Qt/VTK render-window widget wired to the custom ``Interactor``.

    Builds its own ``vtkRenderWindow``/``Interactor`` pair and passes
    them to the Qt widget base class.
    """
    def __init__(self, parent=None):
        render_window = vtk.vtkRenderWindow()
        iren = Interactor()
        iren.SetRenderWindow(render_window)
        # the original built a ``kwargs`` dict here that was never used;
        # the keyword arguments are passed explicitly below
        QVTKRenderWindowInteractor.__init__(self, parent=parent,
                                            iren=iren, rw=render_window)
        #self.Highlight
# http://pyqt.sourceforge.net/Docs/PyQt5/multiinheritance.html
class GuiCommon2(QMainWindow, GuiCommon):
def __init__(self, **kwds):
    """
    fmt_order, html_logging, inputs, parent=None,
    """
    # this will reset the background color/label color if things break
    #super(QMainWindow, self).__init__(self)
    # Qt4/Qt5/pyside differ in how cooperative multiple inheritance of
    # QMainWindow + GuiCommon must be initialized.
    if qt_version == 4:
        QMainWindow.__init__(self)
        GuiCommon.__init__(self, **kwds)
    elif qt_version == 5:
        super(GuiCommon2, self).__init__(**kwds)
    elif qt_version == 'pyside':
        #super(GuiCommon2, self).__init__(**kwds) # fails
        # fails
        #QMainWindow.__init__(self)
        #GuiCommon.__init__(self, **kwds)
        #super(GuiCommon2, self).__init__(**kwds)
        #super(GuiCommon2, self).__init__(**kwds)
        #super(GuiCommon2, self).__init__(**kwds)
        QMainWindow.__init__(self)
        GuiCommon.__init__(self, **kwds)
    else:
        raise NotImplementedError(qt_version)
    fmt_order = kwds['fmt_order']
    inputs = kwds['inputs']
    # OpenVSP stdout/error-manager handles
    self.stdout = vsp.cvar.cstdout
    self.errorMgr = vsp.ErrorMgrSingleton_getInstance()
    #self.app = inputs['app']
    #del inputs['app']
    # an externally supplied logger disables the HTML log widget
    if inputs['log'] is not None:
        html_logging = False
    else:
        html_logging = kwds['html_logging']
        del kwds['html_logging']
    #if qt_version == 4:  # TODO: remove this???
        #QMainWindow.__init__(self)
    #-----------------------------------------------------------------------
    self._active_background_image = None
    self.reset_settings = False
    self.fmts = fmt_order
    self.base_window_title = "pyNastran v%s" % pyNastran.__version__
    #defaults
    self.wildcard_delimited = 'Delimited Text (*.txt; *.dat; *.csv)'
    # initializes tools/checkables
    self.set_tools()
    self.html_logging = html_logging
    self.execute_python = True
    self.scalar_bar = ScalarBar(self.is_horizontal_scalar_bar)
    # all-black transfer function used when edges are drawn in black
    self.color_function_black = vtk.vtkColorTransferFunction()
    self.color_function_black.AddRGBPoint(0.0, 0.0, 0.0, 0.0)
    self.color_function_black.AddRGBPoint(1.0, 0.0, 0.0, 0.0)
    # in,lb,s
    self.input_units = ['', '', '']  # '' means not set
    self.display_units = ['', '', '']
    self.recent_files = []
#def dragEnterEvent(self, e):
#print(e)
#print('drag event')
#if e.mimeData().hasFormat('text/plain'):
#e.accept()
#else:
#e.ignore()
#def dropEvent(self, e):
#print(e)
#print('drop event')
def Render(self):
    """Force a re-render of the VTK render window."""
    render_window = self.vtk_interactor.GetRenderWindow()
    render_window.Render()
@property
def legend_shown(self):
    """determines if the legend is shown"""
    return self.scalar_bar.is_shown
@property
def scalarBar(self):
    """The underlying vtkScalarBarActor of the legend."""
    return self.scalar_bar.scalar_bar
def hide_legend(self):
    """hides the legend"""
    #self.scalar_bar.is_shown = False
    self.scalarBar.VisibilityOff()
def show_legend(self):
    """shows the legend"""
    #self.scalar_bar.is_shown = True
    self.scalarBar.VisibilityOn()
@property
def color_function(self):
    """The legend's vtkColorTransferFunction."""
    return self.scalar_bar.color_function
#def get_color_function(self):
    #return self.scalar_bar.color_function
@property
def window_title(self):
    """Current window-title string.

    Qt exposes the title as ``QWidget.windowTitle()``; the original
    ``self.getWindowTitle()`` is not a Qt method and would raise
    AttributeError when the property is read (unless a base class
    defined it, which is not visible here).
    """
    return self.windowTitle()
@window_title.setter
def window_title(self, msg):
    """Set the window title to *msg* (the base title is not prepended)."""
    #msg2 = "%s - " % self.base_window_title
    #msg2 += msg
    self.setWindowTitle(msg)
@property
def logo(self):
    """Gets the pyNastran icon path, which can be overwritten"""
    return self._logo
@logo.setter
def logo(self, logo):
    """Sets the pyNastran icon path, which can be overwritten"""
    self._logo = logo
def init_ui(self):
    """
    Initialize user iterface

    +--------------+
    | Window Title |
    +--------------+----------------+
    | Menubar |
    +-------------------------------+
    | Toolbar |
    +---------------------+---------+
    | | |
    | | |
    | | Results |
    | VTK Frame | Dock |
    | | |
    | | |
    +---------------------+---------+
    | |
    | HTML Logging Dock |
    | |
    +-------------------------------+
    """
    #self.resize(1100, 700)
    self.statusBar().showMessage('Ready')
    # windows title and aplication icon
    self.setWindowTitle('Statusbar')
    if self._logo is not None:
        self.setWindowIcon(QtGui.QIcon(self._logo))
    self.window_title = self.base_window_title
    #=========== Results widget ===================
    self.res_dock = QDockWidget("Components", self)
    self.res_dock.setObjectName("results_obj")
    #self.res_widget = QtGui.QTextEdit()
    #self.res_widget.setReadOnly(True)
    #self.res_dock.setWidget(self.res_widget)
    self.res_widget = Sidebar(self)
    #self.res_widget.update_results(data)
    #self.res_widget.setWidget(sidebar)
    self.res_dock.setWidget(self.res_widget)
    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.res_dock)
    self.create_log_python_docks()
    #===============================================
    self.run_vtk = True
    if self.run_vtk:
        self._create_vtk_objects()
    # the menubar references the VTK actions, so build it after the
    # VTK objects exist
    self._build_menubar()
    #self._hide_menubar()
    if self.run_vtk:
        self.build_vtk_frame()
    #compassRepresentation = vtk.vtkCompassRepresentation()
    #compassWidget = vtk.vtkCompassWidget()
    #compassWidget.SetInteractor(self.iren)
    #compassWidget.SetRepresentation(compassRepresentation)
    #compassWidget.EnabledOn()
def create_log_python_docks(self):
    """
    Creates the
    - HTML Log dock
    - Python Console dock
    """
    #=========== Logging widget ===================
    if self.html_logging is True:
        self.log_dock_widget = ApplicationLogWidget(self)
        self.log_widget = self.log_dock_widget.log_widget
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log_dock_widget)
    else:
        # fall back to the plain logger when HTML logging is disabled
        self.log_widget = self.log
    if self.execute_python:
        self.python_dock_widget = PythonConsoleWidget(self)
        self.python_dock_widget.setObjectName("python_console")
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.python_dock_widget)
def _on_execute_python_button(self, clear=False):
    """Execute the text in the docked python console.

    Parameters
    ----------
    clear : bool; default=False
        clear the console input box after a successful run
    """
    txt = str(self.python_dock_widget.enter_data.toPlainText()).rstrip()
    if not txt:
        return
    self.log_command(txt)
    try:
        # SECURITY NOTE: exec of arbitrary console text is intentional
        # (this is a developer console); never feed it untrusted input.
        exec(txt)
    except Exception as e:
        # single handler replaces the former duplicated
        # TypeError/Exception branches (the TypeError branch only added
        # a log of type(txt), which is always str here)
        self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.log_error(str(e))
        self.log_error(str(txt))
        return
    if clear:
        self.python_dock_widget.enter_data.clear()
def load_batch_inputs(self, inputs):
    """Load geometry/results files specified on the command line.

    Runs the optional geometry script, validates that every input and
    results file exists, loads each geometry, then loads results and
    runs the optional post script.
    """
    geom_script = inputs['geomscript']
    if geom_script is not None:
        self.on_run_script(geom_script)
    if not inputs['format']:
        return
    form = inputs['format'].lower()
    input_filenames = inputs['input']
    results_filename = inputs['output']
    # plotting is deferred until results are loaded
    plot = True
    if results_filename:
        plot = False
    #print('input_filename =', input_filename)
    if input_filenames is not None:
        for input_filename in input_filenames:
            if not os.path.exists(input_filename):
                msg = '%s does not exist\n%s' % (
                    input_filename, print_bad_path(input_filename))
                self.log.error(msg)
                if self.html_logging:
                    print(msg)
                return
        # NOTE(review): this assumes results_filename is iterable; if
        # inputs['output'] can be None this loop raises -- confirm the
        # caller always supplies a list.
        for results_filenamei in results_filename:
            #print('results_filenamei =', results_filenamei)
            if results_filenamei is not None:
                if not os.path.exists(results_filenamei):
                    msg = '%s does not exist\n%s' % (
                        results_filenamei, print_bad_path(results_filenamei))
                    self.log.error(msg)
                    if self.html_logging:
                        print(msg)
                    return
    #is_geom_results = input_filename == results_filename and len(input_filenames) == 1
    is_geom_results = False
    for i, input_filename in enumerate(input_filenames):
        # the first model is the "main" model; extra models are keyed
        # by their filename
        if i == 0:
            name = 'main'
        else:
            name = input_filename
        #form = inputs['format'].lower()
        #if is_geom_results:
        #    is_failed = self.on_load_geometry_and_results(
        #        infile_name=input_filename, name=name, geometry_format=form,
        #        plot=plot, raise_error=True)
        #else:
        is_failed = self.on_load_geometry(
            infile_name=input_filename, name=name, geometry_format=form,
            plot=plot, raise_error=True)
    self.name = 'main'
    #print('keys =', self.nid_maps.keys())
    if is_failed:
        return
    if results_filename:  # and not is_geom_results
        self.on_load_results(results_filename)
    post_script = inputs['postscript']
    if post_script is not None:
        self.on_run_script(post_script)
    self.on_reset_camera()
    self.vtk_interactor.Modified()
def set_tools(self, tools=None, checkables=None):
    """Creates the GUI tools.

    Each tool is a 6-tuple:
    (name, menu_text, icon_filename, shortcut, status_tip, callback);
    ``checkables`` maps tool name -> initial checked state.
    """
    if checkables is None:
        checkables = {
            # name, is_checked
            'show_info' : True,
            'show_debug' : True,
            'show_gui' : True,
            'show_command' : True,
            'anti_alias_0' : True,
            'anti_alias_1' : False,
            'anti_alias_2' : False,
            'anti_alias_4' : False,
            'anti_alias_8' : False,
            'rotation_center' : False,
            'measure_distance' : False,
            'probe_result' : False,
            'area_pick' : False,
            'zoom' : False,
        }
    if tools is None:
        file_tools = [
            ('exit', '&Exit', 'texit.png', 'Ctrl+Q', 'Exit application', self.closeEvent),  # QtGui.qApp.quit
            ('load_geometry', 'Load &Geometry...', 'load_geometry.png', 'Ctrl+O', 'Loads a geometry input file', self.on_load_geometry),
            ('load_results', 'Load &Results...', 'load_results.png', 'Ctrl+R', 'Loads a results file', self.on_load_results),
            ('load_csv_user_geom', 'Load CSV User Geometry...', '', None, 'Loads custom geometry file', self.on_load_user_geom),
            ('load_csv_user_points', 'Load CSV User Points...', 'user_points.png', None, 'Loads CSV points', self.on_load_csv_points),
            ('load_custom_result', 'Load Custom Results...', '', None, 'Loads a custom results file', self.on_load_custom_results),
            ('script', 'Run Python Script...', 'python48.png', None, 'Runs pyNastranGUI in batch mode', self.on_run_script),
        ]
        tools = file_tools + [
            ('log_clear', 'Clear Application Log', '', None, 'Clear Application Log', self.clear_application_log),
            ('label_clear', 'Clear Current Labels', '', None, 'Clear current labels', self.clear_labels),
            ('label_reset', 'Clear All Labels', '', None, 'Clear all labels', self.reset_labels),
            ('legend', 'Modify Legend...', 'legend.png', None, 'Set Legend', self.set_legend),
            ('clipping', 'Set Clipping...', '', None, 'Set Clipping', self.set_clipping),
            #('axis', 'Show/Hide Axis', 'axis.png', None, 'Show/Hide Global Axis', self.on_show_hide_axes),
            ('wireframe', 'Wireframe Model', 'twireframe.png', 'w', 'Show Model as a Wireframe Model', self.on_wireframe),
            ('surface', 'Surface Model', 'tsolid.png', 's', 'Show Model as a Surface Model', self.on_surface),
            ('geo_properties', 'Edit Geometry Properties...', '', None, 'Change Model Color/Opacity/Line Width', self.edit_geometry_properties),
            ('modify_groups', 'Modify Groups...', '', None, 'Create/Edit/Delete Groups', self.on_set_modify_groups),
            ('create_groups_by_visible_result', 'Create Groups By Visible Result', '', None, 'Create Groups', self.create_groups_by_visible_result),
            ('create_groups_by_property_id', 'Create Groups By Property ID', '', None, 'Create Groups', self.create_groups_by_property_id),
            #('create_list', 'Create Lists through Booleans', '', None, 'Create List', self.create_list),
            ('show_info', 'Show INFO', 'show_info.png', None, 'Show "INFO" messages', self.on_show_info),
            ('show_debug', 'Show DEBUG', 'show_debug.png', None, 'Show "DEBUG" messages', self.on_show_debug),
            ('show_gui', 'Show GUI', 'show_gui.png', None, 'Show "GUI" messages', self.on_show_gui),
            ('show_command', 'Show COMMAND', 'show_command.png', None, 'Show "COMMAND" messages', self.on_show_command),
            ('magnify', 'Magnify', 'plus_zoom.png', 'M', 'Increase Magnfication', self.on_increase_magnification),
            ('shrink', 'Shrink', 'minus_zoom.png', 'm', 'Decrease Magnfication', self.on_decrease_magnification),
            #('cell_pick', 'Cell Pick', '', 'c', 'Centroidal Picking', self.on_cell_picker),
            #('node_pick', 'Node Pick', '', 'n', 'Nodal Picking', self.on_node_picker),
            ('rotate_clockwise', 'Rotate Clockwise', 'tclock.png', 'o', 'Rotate Clockwise', self.on_rotate_clockwise),
            ('rotate_cclockwise', 'Rotate Counter-Clockwise', 'tcclock.png', 'O', 'Rotate Counter-Clockwise', self.on_rotate_cclockwise),
            ('screenshot', 'Take a Screenshot...', 'tcamera.png', 'CTRL+I', 'Take a Screenshot of current view', self.on_take_screenshot),
            ('about', 'About pyNastran GUI...', 'tabout.png', 'CTRL+H', 'About pyNastran GUI and help on shortcuts', self.about_dialog),
            ('view', 'Camera View', 'view.png', None, 'Load the camera menu', self.view_camera),
            ('camera_reset', 'Reset Camera View', 'trefresh.png', 'r', 'Reset the camera view to default', self.on_reset_camera),
            ('wing_menu', 'Load the Wing...', 'wing.png', None, 'Load the Wing Menu', self.on_wing_window),
            #('reload', 'Reload Model...', 'treload.png', 'r', 'Remove the model and reload the same geometry file', self.on_reload),
            #('cycle_results', 'Cycle Results', 'cycle_results.png', 'CTRL+L', 'Changes the result case', self.on_cycle_results),
            #('rcycle_results', 'Cycle Results', 'rcycle_results.png', 'CTRL+K', 'Changes the result case', self.on_rcycle_results),
            ('back_view', 'Back View', 'back.png', 'x', 'Flips to +X Axis', lambda: self.update_camera('+x')),
            ('right_view', 'Right View', 'right.png', 'y', 'Flips to +Y Axis', lambda: self.update_camera('+y')),
            ('top_view', 'Top View', 'top.png', 'z', 'Flips to +Z Axis', lambda: self.update_camera('+z')),
            ('front_view', 'Front View', 'front.png', 'X', 'Flips to -X Axis', lambda: self.update_camera('-x')),
            ('left_view', 'Left View', 'left.png', 'Y', 'Flips to -Y Axis', lambda: self.update_camera('-y')),
            ('bottom_view', 'Bottom View', 'bottom.png', 'Z', 'Flips to -Z Axis', lambda: self.update_camera('-z')),
            ('edges', 'Show/Hide Edges', 'tedges.png', 'e', 'Show/Hide Model Edges', self.on_flip_edges),
            ('edges_black', 'Color Edges', '', 'b', 'Set Edge Color to Color/Black', self.on_set_edge_visibility),
            ('anti_alias_0', 'Off', '', None, 'Disable Anti-Aliasing', lambda: self.on_set_anti_aliasing(0)),
            ('anti_alias_1', '1x', '', None, 'Set Anti-Aliasing to 1x', lambda: self.on_set_anti_aliasing(1)),
            ('anti_alias_2', '2x', '', None, 'Set Anti-Aliasing to 2x', lambda: self.on_set_anti_aliasing(2)),
            ('anti_alias_4', '4x', '', None, 'Set Anti-Aliasing to 4x', lambda: self.on_set_anti_aliasing(4)),
            ('anti_alias_8', '8x', '', None, 'Set Anti-Aliasing to 8x', lambda: self.on_set_anti_aliasing(8)),
            # new
            ('rotation_center', 'Set the rotation center', 'trotation_center.png', 'f', 'Pick a node for the rotation center', self.on_rotation_center),
            ('measure_distance', 'Measure Distance', 'measure_distance.png', None, 'Measure the distance between two nodes', self.on_measure_distance),
            ('probe_result', 'Probe', 'tprobe.png', None, 'Probe the displayed result', self.on_probe_result),
            ('quick_probe_result', 'Quick Probe', '', 'p', 'Probe the displayed result', self.on_quick_probe_result),
            ('zoom', 'Zoom', 'zoom.png', None, 'Zoom In', self.on_zoom),
            ('text_size_increase', 'Increase Text Size', 'text_up.png', 'Ctrl+Plus', 'Increase Text Size', self.on_increase_text_size),
            ('text_size_decrease', 'Decrease Text Size', 'text_down.png', 'Ctrl+Minus', 'Decrease Text Size', self.on_decrease_text_size),
            ('set_preferences', 'Preferences...', 'preferences.png', None, 'Set Text Size', self.set_preferences_menu),
            # picking
            ('area_pick', 'Area Pick', 'tarea_pick.png', None, 'Get a list of nodes/elements', self.on_area_pick),
        ]
    # Nastran-specific toggles only exist when the nastran format is enabled
    if 'nastran' in self.fmts:
        tools += [
            ('caero', 'Show/Hide CAERO Panels', '', None, 'Show/Hide CAERO Panel Outlines', self.toggle_caero_panels),
            ('caero_subpanels', 'Toggle CAERO Subpanels', '', None, 'Show/Hide CAERO Subanel Outlines', self.toggle_caero_sub_panels),
            ('conm2', 'Toggle CONM2s', '', None, 'Show/Hide CONM2s', self.toggle_conms),
        ]
    self.tools = tools
    self.checkables = checkables
def on_increase_text_size(self):
    """used by the hidden_tools for Ctrl +"""
    bigger = self.font_size + 1
    self.on_set_font_size(bigger)
def on_decrease_text_size(self):
    """used by the hidden_tools for Ctrl -"""
    smaller = self.font_size - 1
    self.on_set_font_size(smaller)
def on_set_font_size(self, font_size, show_command=True):
    """changes the font size

    Parameters
    ----------
    font_size : int
        requested point size; clamped to a minimum of 6
    show_command : bool; default=True
        unused here -- TODO confirm it should gate log_command

    Returns
    -------
    is_failed : bool
        True if font_size was not an int; False otherwise
    """
    is_failed = True
    if not isinstance(font_size, int):
        self.log_error('font_size=%r must be an integer; type=%s' % (
            font_size, type(font_size)))
        return is_failed
    if font_size < 6:
        font_size = 6
    # no-op when the size is unchanged
    if self.font_size == font_size:
        return False
    self.font_size = font_size
    font = QtGui.QFont()
    font.setPointSize(self.font_size)
    self.setFont(font)
    #self.toolbar.setFont(font)
    self.menu_file.setFont(font)
    self.menu_view.setFont(font)
    self.menu_window.setFont(font)
    self.menu_help.setFont(font)
    # propagate the new size to any open child windows
    if self._legend_window_shown:
        self._legend_window.set_font_size(font_size)
    if self._clipping_window_shown:
        self._clipping_window.set_font_size(font_size)
    if self._edit_geometry_properties_window_shown:
        self._edit_geometry_properties.set_font_size(font_size)
    if self._modify_groups_window_shown:
        self._modify_groups_window.set_font_size(font_size)
    if self._preferences_window_shown:
        self._preferences_window.set_font_size(font_size)
    #self.menu_scripts.setFont(font)
    self.log_command('settings.on_set_font_size(%s)' % font_size)
    return False
def _create_menu_items(self, actions=None, create_menu_bar=True):
    """Build the (menu, tool-name-list) pairs used to populate the menubar.

    Returns
    -------
    menu_items : list of (menu, items)
        empty when create_menu_bar is False
    """
    if actions is None:
        actions = self.actions
    if create_menu_bar:
        self.menu_file = self.menubar.addMenu('&File')
        self.menu_view = self.menubar.addMenu('&View')
        self.menu_window = self.menubar.addMenu('&Window')
        self.menu_help = self.menubar.addMenu('&Help')
        # hidden menu holds shortcut-only actions
        self.menu_hidden = self.menubar.addMenu('&Hidden')
        self.menu_hidden.menuAction().setVisible(False)
    if self._script_path is not None and os.path.exists(self._script_path):
        scripts = [script for script in os.listdir(self._script_path) if '.py' in script]
    else:
        scripts = []
    scripts = tuple(scripts)
    #if 0:
        #print('script_path =', script_path)
        #print('scripts =', scripts)
        #self.menu_scripts = self.menubar.addMenu('&Scripts')
        #for script in scripts:
            #fname = os.path.join(script_path, script)
            #tool = (script, script, 'python48.png', None, '',
                    #lambda: self.on_run_script(fname) )
            #tools.append(tool)
    #else:
    self.menu_scripts = None
    menu_window = ['toolbar', 'reswidget']
    menu_view = [
        'screenshot', '', 'wireframe', 'surface', 'camera_reset', '',
        'set_preferences', '',
        'log_clear', 'label_clear', 'label_reset', '',
        'legend', 'geo_properties',
        #['Anti-Aliasing', 'anti_alias_0', 'anti_alias_1', 'anti_alias_2',
        #'anti_alias_4', 'anti_alias_8',],
    ]
    if self.is_groups:
        menu_view += ['modify_groups', 'create_groups_by_property_id',
                      'create_groups_by_visible_result']
    menu_view += [
        '', 'clipping',  #'axis',
        'edges', 'edges_black',]
    # the dock-toggle actions only exist when their docks were created
    if self.html_logging:
        self.actions['log_dock_widget'] = self.log_dock_widget.toggleViewAction()
        self.actions['log_dock_widget'].setStatusTip("Show/Hide application log")
        menu_view += ['', 'show_info', 'show_debug', 'show_gui', 'show_command']
        menu_window += ['log_dock_widget']
    if self.execute_python:
        self.actions['python_dock_widget'] = self.python_dock_widget.toggleViewAction()
        self.actions['python_dock_widget'].setStatusTip("Show/Hide Python Console")
        menu_window += ['python_dock_widget']
    menu_file = [
        'load_geometry', '',  #'load_results', '',
        #'load_custom_result', '',
        'load_csv_user_points', 'load_csv_user_geom', 'script', '', 'exit']
    toolbar_tools = [
        'wing_menu',
        #'reload',
        #'load_geometry', 'load_results',
        'front_view', 'back_view', 'top_view', 'bottom_view', 'left_view', 'right_view',
        'magnify', 'shrink', 'zoom',
        'rotate_clockwise', 'rotate_cclockwise',
        'rotation_center', 'measure_distance', 'probe_result', 'area_pick',
        'wireframe', 'surface', 'edges'
    ]
    toolbar_tools += ['camera_reset', 'view', 'screenshot', '', 'exit']
    hidden_tools = (#'cycle_results', 'rcycle_results',
        'text_size_increase', 'text_size_decrease')
    menu_items = []
    if create_menu_bar:
        menu_items = [
            (self.menu_file, menu_file),
            (self.menu_view, menu_view),
            (self.menu_window, menu_window),
            (self.menu_help, ('about',)),
            (self.menu_scripts, scripts),
            (self.toolbar, toolbar_tools),
            (self.menu_hidden, hidden_tools),
            # (self.menu_scripts, ()),
            #(self._dummy_toolbar, ('cell_pick', 'node_pick'))
        ]
    return menu_items
def _hide_menubar(self):
    # despite the name, only the toolbar is hidden; the menubar line
    # below was deliberately disabled
    self.toolbar.setVisible(False)
    #self.menuBar.setVisible(False)
def _build_menubar(self):
    """Create the toolbar/menubar, prepare all actions, and populate them."""
    ## toolbar
    self.toolbar = self.addToolBar('Show toolbar')
    self.toolbar.setObjectName('main_toolbar')
    # the dummy toolbar stores actions but doesn't get shown
    # in other words, it can set shortcuts
    #self._dummy_toolbar = self.addToolBar('Dummy toolbar')
    #self._dummy_toolbar.setObjectName('dummy_toolbar')
    self.menubar = self.menuBar()
    actions = self._prepare_actions(self._icon_path, self.tools, self.checkables)
    menu_items = self._create_menu_items(actions)
    self._populate_menu(menu_items)
def _populate_menu(self, menu_items):
    """populate menus and toolbar

    ``menu_items`` is a list of (menu, items) pairs; an item is a tool
    name, '' for a separator, or a [sub_menu_name, tool, ...] list.
    """
    for menu, items in menu_items:
        if menu is None:
            continue
        for i in items:
            if not i:
                menu.addSeparator()
            else:
                if isinstance(i, list):
                    sub_menu_name = i[0]
                    sub_menu = menu.addMenu(sub_menu_name)
                    for ii_count, ii in enumerate(i[1:]):
                        if not isinstance(ii, string_types):
                            raise RuntimeError('what is this...action ii() = %r' % ii())
                        action = self.actions[ii]
                        # only the first entry of a sub-menu stays checked
                        if ii_count > 0:
                            action.setChecked(False)
                        sub_menu.addAction(action)
                    continue
                elif not isinstance(i, string_types):
                    raise RuntimeError('what is this...action i() = %r' % i())
                try:
                    action = self.actions[i]  #if isinstance(i, string_types) else i()
                except KeyError:
                    # narrowed from a bare except; dump the known action
                    # names to ease debugging, then re-raise
                    print(self.actions.keys())
                    raise
                menu.addAction(action)
    #self._create_plane_from_points(None)
def _update_menu(self, menu_items):
    """Clear every menu in *menu_items* and rebuild it."""
    for menu, _items in menu_items:
        menu.clear()
    self._populate_menu(menu_items)
#def _create_plane_from_points(self, points):
#origin, vx, vy, vz, x_limits, y_limits = self._fit_plane(points)
## We create a 100 by 100 point plane to sample
#splane = vtk.vtkPlaneSource()
#plane = splane.GetOutput()
#dx = max(x_limits) - min(x_limits)
#dy = max(y_limits) - min(y_limits)
##dx = 1.
##dy = 3.
## we need to offset the origin of the plane because the "origin"
## is at the lower left corner of the plane and not the centroid
#offset = (dx * vx + dy * vy) / 2.
#origin -= offset
#splane.SetCenter(origin)
#splane.SetNormal(vz)
## Point 1 defines the x-axis and the x-size
## Point 2 defines the y-axis and the y-size
#splane.SetPoint1(origin + dx * vx)
#splane.SetPoint2(origin + dy * vy)
#actor = vtk.vtkLODActor()
#mapper = vtk.vtkPolyDataMapper()
##mapper.InterpolateScalarsBeforeMappingOn()
##mapper.UseLookupTableScalarRangeOn()
#if self.vtk_version <= 5:
#mapper.SetInputData(plane)
#else:
#mapper.SetInput(plane)
#actor.GetProperty().SetColor(1., 0., 0.)
#actor.SetMapper(mapper)
#self.rend.AddActor(actor)
#splane.Update()
#def _fit_plane(self, points):
#origin = np.array([34.60272856552356, 16.92028913186242, 37.805958003209184])
#vx = np.array([1., 0., 0.])
#vy = np.array([0., 1., 0.])
#vz = np.array([0., 0., 1.])
#x_limits = [-1., 2.]
#y_limits = [0., 1.]
#return origin, vx, vy, vz, x_limits, y_limits
def _prepare_actions(self, icon_path, tools, checkables=None):
    """
    Prepare actions that will be used in application in a way
    that's independent of the menus & toolbar

    Parameters
    ----------
    icon_path : str
        directory holding the icon .png files
    tools : list of 6-tuples
        (name, text, icon, shortcut, tip, func)
    checkables : dict / None
        tool name -> initial checked state

    Returns
    -------
    self.actions : dict
        name -> QAction (also includes 'toolbar'/'reswidget' toggles)
    """
    if checkables is None:
        checkables = []
    #print('---------------------------')
    for tool in tools:
        (name, txt, icon, shortcut, tip, func) = tool
        if name in self.actions:
            self.log_error('trying to create a duplicate action %r' % name)
            continue
        #print("name=%s txt=%s icon=%s shortcut=%s tip=%s func=%s"
              #% (name, txt, icon, shortcut, tip, func))
        #if icon is None:
            #print("missing_icon = %r!!!" % name)
            #icon = os.path.join(icon_path, 'no.png')
        if icon is None:
            print("missing_icon = %r!!!" % name)
            ico = None
            #print(print_bad_path(icon))
        #elif not "/" in icon:
            #ico = QtGui.QIcon.fromTheme(icon)
        else:
            ico = QtGui.QIcon()
            pth = os.path.join(icon_path, icon)
            ico.addPixmap(QtGui.QPixmap(pth), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        if name in checkables:
            is_checked = checkables[name]
            self.actions[name] = QAction(ico, txt, self, checkable=True)
            self.actions[name].setChecked(is_checked)
        else:
            self.actions[name] = QAction(ico, txt, self)
        if shortcut:
            self.actions[name].setShortcut(shortcut)
            #actions[name].setShortcutContext(QtCore.Qt.WidgetShortcut)
        if tip:
            self.actions[name].setStatusTip(tip)
        if func:
            self.actions[name].triggered.connect(func)
    # dock/toolbar visibility toggles come from Qt, not from ``tools``
    self.actions['toolbar'] = self.toolbar.toggleViewAction()
    self.actions['toolbar'].setStatusTip("Show/Hide application toolbar")
    self.actions['reswidget'] = self.res_dock.toggleViewAction()
    self.actions['reswidget'].setStatusTip("Show/Hide results selection")
    return self.actions
def _logg_msg(self, typ, msg):
    """
    Add message to log widget trying to choose right color for it.

    Parameters
    ----------
    typ : str
        {DEBUG, INFO, GUI ERROR, COMMAND, WARNING}
    msg : str
        message to be displayed
    """
    if not self.html_logging:
        print(typ, msg)
        return
    # honor the per-category show/hide toggles
    if typ == 'DEBUG' and not self.show_debug:
        return
    elif typ == 'INFO' and not self.show_info:
        return
    elif typ == 'GUI' and not self.show_gui:
        return
    elif typ == 'COMMAND' and not self.show_command:
        return
    _fr = sys._getframe(4)  # jump to get out of the logger code
    n = _fr.f_lineno
    filename = os.path.basename(_fr.f_globals['__file__'])
    #if typ in ['GUI', 'COMMAND']:
    msg = ' fname=%-25s:%-4s %s\n' % (filename, n, msg)
    tim = datetime.datetime.now().strftime('[%Y-%m-%d %H:%M:%S]')
    # cgi.escape was deprecated in Python 3.2 and removed in 3.8;
    # html.escape(quote=False) escapes the same characters (&, <, >)
    import html
    msg = html.escape(msg, quote=False)
    #message colors
    dark_orange = '#EB9100'
    colors = {
        "GUI" : "blue",
        "COMMAND" : "green",
        "GUI ERROR" : "Crimson",
        "DEBUG" : dark_orange,
        'WARNING' : "purple",
        # INFO - black
    }
    msg = msg.rstrip().replace('\n', '<br>')
    # NOTE(review): the conditional binds to the whole expression, so a
    # falsy ``typ`` leaves msg without a timestamp -- presumed
    # intentional since typ is always one of the documented values
    msg = tim + ' ' + (typ + ': ' + msg) if typ else msg
    if typ in colors:
        msg = '<font color="%s"> %s </font>' % (colors[typ], msg)
    self.log_mutex.lockForWrite()
    text_cursor = self.log_widget.textCursor()
    end = text_cursor.End
    #print("end", end)
    text_cursor.movePosition(end)
    #print(dir(text_cursor))
    text_cursor.insertHtml(msg + r"<br />")
    self.log_widget.ensureCursorVisible()  # new message will be visible
    self.log_mutex.unlock()
def log_info(self, msg):
    """ Helper funtion: log a message msg with a 'INFO:' prefix """
    if msg is None:
        # a None message is a programming error; report it as an ERROR
        return self.log.simple_msg('msg is None; must be a string', 'ERROR')
    self.log.simple_msg(msg, 'INFO')
def log_debug(self, msg):
    """ Helper funtion: log a message msg with a 'DEBUG:' prefix """
    if msg is None:
        # a None message is a programming error; report it as an ERROR
        return self.log.simple_msg('msg is None; must be a string', 'ERROR')
    self.log.simple_msg(msg, 'DEBUG')
def log_command(self, msg):
    """ Helper funtion: log a message msg with a 'COMMAND:' prefix """
    if msg is None:
        # a None message is a programming error; report it as an ERROR
        return self.log.simple_msg('msg is None; must be a string', 'ERROR')
    self.log.simple_msg(msg, 'COMMAND')
def log_error(self, msg):
    """ Helper funtion: log a message msg with a 'GUI ERROR:' prefix """
    if msg is None:
        # a None message is a programming error; report it as an ERROR
        return self.log.simple_msg('msg is None; must be a string', 'ERROR')
    self.log.simple_msg(msg, 'GUI ERROR')
def log_warning(self, msg):
    """ Helper funtion: log a message msg with a 'WARNING:' prefix """
    if msg is None:
        # a None message is a programming error; report it as an ERROR
        return self.log.simple_msg('msg is None; must be a string', 'ERROR')
    self.log.simple_msg(msg, 'WARNING')
def create_coordinate_system(self, dim_max, label='', origin=None, matrix_3x3=None,
                             Type='xyz'):
    """
    Creates a coordinate system

    Parameters
    ----------
    dim_max : float
        the max model dimension; 10% of the max will be used for the coord length
    label : str
        the coord id or other unique label (default is empty to indicate the global frame)
    origin : (3, ) ndarray/list/tuple
        the origin
    matrix_3x3 : (3, 3) ndarray
        a standard Nastran-style coordinate system
    Type : str
        a string of 'xyz', 'Rtz', 'Rtp' (xyz, cylindrical, spherical)
        that changes the axis names

    Returns
    -------
    self.coord_id : int
        the (post-incremented) coordinate system counter

    .. todo:: Type is not supported ('xyz' ONLY)
    .. todo:: Can only set one coordinate system
    .. seealso::
        http://en.wikipedia.org/wiki/Homogeneous_coordinates
        http://www3.cs.stonybrook.edu/~qin/courses/graphics/camera-coordinate-system.pdf
        http://www.vtk.org/doc/nightly/html/classvtkTransform.html#ad58b847446d791391e32441b98eff151
    """
    coord_id = self.coord_id
    self.settings.dim_max = dim_max
    # NOTE(review): the docstring says 10% of dim_max, but 5% is used
    scale = 0.05 * dim_max
    transform = vtk.vtkTransform()
    if origin is None and matrix_3x3 is None:
        pass
    elif origin is not None and matrix_3x3 is None:
        #print('origin%s = %s' % (label, str(origin)))
        transform.Translate(*origin)
    elif matrix_3x3 is not None:  # origin can be None
        # build a 4x4 homogeneous transform from rotation + translation
        m = np.eye(4, dtype='float32')
        m[:3, :3] = matrix_3x3
        if origin is not None:
            m[:3, 3] = origin
        transform.SetMatrix(m.ravel())
    else:
        raise RuntimeError('unexpected coordinate system')
    axes = vtk.vtkAxesActor()
    axes.DragableOff()
    axes.PickableOff()
    #axes.GetLength() # pi
    #axes.GetNormalizedShaftLength() # (0.8, 0.8, 0.8)
    #axes.GetNormalizedTipLength() # (0.2, 0.2, 0.2)
    #axes.GetOrigin() # (0., 0., 0.)
    #axes.GetScale() # (1., 1., 1.)
    #axes.GetShaftType() # 1
    #axes.GetTotalLength() # (1., 1., 1.)
    axes.SetUserTransform(transform)
    axes.SetTotalLength(scale, scale, scale)
    if Type == 'xyz':
        # the global frame (no label) keeps the default axis labels
        if label:
            xlabel = u'x%s' % label
            ylabel = u'y%s' % label
            zlabel = u'z%s' % label
            axes.SetXAxisLabelText(xlabel)
            axes.SetYAxisLabelText(ylabel)
            axes.SetZAxisLabelText(zlabel)
    else:
        if Type == 'Rtz':  # cylindrical
            #x = u'R'
            #y = u'θ'
            #z = u'z'
            x = 'R'
            y = 't'
            z = 'z'
        elif Type == 'Rtp':  # spherical
            # NOTE(review): this xlabel is dead -- overwritten below
            xlabel = u'R'
            #ylabel = u'θ'
            #z = u'Φ'
            x = 'R'
            y = 't'
            z = 'p'
        else:
            raise RuntimeError('invalid axis type; Type=%r' % Type)
        xlabel = '%s%s' % (x, label)
        ylabel = '%s%s' % (y, label)
        zlabel = '%s%s' % (z, label)
        axes.SetXAxisLabelText(xlabel)
        axes.SetYAxisLabelText(ylabel)
        axes.SetZAxisLabelText(zlabel)
    self.transform[coord_id] = transform
    self.axes[coord_id] = axes
    # only the global frame is visible by default
    is_visible = False
    if label == '':
        label = 'Global XYZ'
        is_visible = True
    else:
        label = 'Coord %s' % label
    self.geometry_properties[label] = CoordProperties(label, Type, is_visible, scale)
    self.geometry_actors[label] = axes
    self.coord_id += 1
    self.rend.AddActor(axes)
    return self.coord_id
def create_global_axes(self, dim_max):
    """creates the global XYZ coordinate system"""
    self.create_coordinate_system(
        dim_max, label='', origin=None, matrix_3x3=None, Type='xyz')
def create_corner_axis(self):
    """creates the axes that sits in the corner"""
    if not self.run_vtk:
        return
    corner_axes = vtk.vtkAxesActor()
    marker = vtk.vtkOrientationMarkerWidget()
    marker.SetOrientationMarker(corner_axes)
    marker.SetInteractor(self.vtk_interactor)
    marker.SetEnabled(1)
    marker.InteractiveOff()
    self.corner_axis = marker
#def on_show_hide_axes(self):
#"""
#show/hide axes
#"""
#if not self.run_vtk:
#return
## this method should handle all the coords when
## there are more then one
#if self._is_axes_shown:
#for axis in itervalues(self.axes):
#axis.VisibilityOff()
#else:
#for axis in itervalues(self.axes):
#axis.VisibilityOn()
#self._is_axes_shown = not self._is_axes_shown
def create_vtk_actors(self):
    """creates the renderer, the main grid, and the edge actor/mapper"""
    self.rend = vtk.vtkRenderer()

    # the main unstructured grid that holds the model
    self.grid = vtk.vtkUnstructuredGrid()

    # edge actor/mapper; wired to the grid later in get_edges()
    self.edge_actor = vtk.vtkLODActor()
    self.edge_actor.DragableOff()
    self.edge_mapper = vtk.vtkPolyDataMapper()

    self.create_cell_picker()
def create_alternate_vtk_grid(self, name, color=None, line_width=5, opacity=1.0, point_size=1,
                              bar_scale=0.0, representation=None, is_visible=True,
                              follower_nodes=None, is_pickable=False):
    """
    Creates an AltGeometry object (an empty vtkUnstructuredGrid plus its
    display properties), registered under ``name``.

    Parameters
    ----------
    name : str
        the key used in self.alt_grids / self.geometry_properties
    color : [int, int, int]
        the RGB colors
    line_width : int
        the width of the line for 'surface' and 'main'
    opacity : float
        0.0 -> transparent
        1.0 -> solid/opaque
        NOTE(review): the previous docstring said the opposite
        (0.0 solid / 1.0 transparent); VTK's opacity convention is
        1.0 = opaque -- confirm against AltGeometry before relying on this
    point_size : int
        the point size for 'point'
    bar_scale : float
        the scale for the CBAR / CBEAM elements
    representation : str
        main - change with main mesh
        wire - always wireframe
        point - always points
        surface - always surface
        bar - can use bar scale
    is_visible : bool; default=True
        is this actor currently visable
    is_pickable : bool; default=False
        can you pick a node/cell on this actor
    follower_nodes : List[int]
        the nodes that are brought along with a deflection
    """
    # an empty grid; the caller fills it in later
    self.alt_grids[name] = vtk.vtkUnstructuredGrid()
    self.geometry_properties[name] = AltGeometry(
        self, name, color=color,
        line_width=line_width, opacity=opacity,
        point_size=point_size, bar_scale=bar_scale,
        representation=representation, is_visible=is_visible, is_pickable=is_pickable)
    if follower_nodes is not None:
        self.follower_nodes[name] = follower_nodes
def duplicate_alternate_vtk_grid(self, name, name_duplicate_from, color=None, line_width=5,
                                 opacity=1.0, point_size=1, bar_scale=0.0, is_visible=True,
                                 follower_nodes=None, is_pickable=False):
    """
    Copies the VTK actor (deep-copies the source grid into a new
    alternate grid named ``name``).

    Parameters
    ----------
    name : str
        the key for the new grid in self.alt_grids / self.geometry_properties
    name_duplicate_from : str
        the existing grid to copy; 'main' copies the primary model grid
    color : [int, int, int]
        the RGB colors
    line_width : int
        the width of the line for 'surface' and 'main'
    opacity : float
        0.0 -> transparent
        1.0 -> solid/opaque
        NOTE(review): the previous docstring said the opposite
        (0.0 solid / 1.0 transparent); VTK's opacity convention is
        1.0 = opaque -- confirm against AltGeometry before relying on this
    point_size : int
        the point size for 'point'
    bar_scale : float
        the scale for the CBAR / CBEAM elements
    is_visible : bool; default=True
        is this actor currently visable
    is_pickable : bool; default=False
        can you pick a node/cell on this actor
    follower_nodes : List[int]
        the nodes that are brought along with a deflection
    """
    self.alt_grids[name] = vtk.vtkUnstructuredGrid()
    if name_duplicate_from == 'main':
        # copying the main model; 'toggle' follows the main representation
        grid_copy_from = self.grid
        representation = 'toggle'
    else:
        # copying another alternate grid; inherit its representation
        grid_copy_from = self.alt_grids[name_duplicate_from]
        props = self.geometry_properties[name_duplicate_from]
        representation = props.representation
    self.alt_grids[name].DeepCopy(grid_copy_from)

    # representation : str
    #   main - change with main mesh
    #   wire - always wireframe
    #   point - always points
    #   surface - always surface
    #   bar - can use bar scale
    self.geometry_properties[name] = AltGeometry(
        self, name, color=color, line_width=line_width,
        opacity=opacity, point_size=point_size,
        bar_scale=bar_scale, representation=representation,
        is_visible=is_visible, is_pickable=is_pickable)
    if follower_nodes is not None:
        self.follower_nodes[name] = follower_nodes
def _create_vtk_objects(self):
    """creates some of the vtk objects"""
    # the Qt frame that VTK renders into
    self.vtk_frame = QFrame()

    # the Qt <-> VTK interactor widget
    self.vtk_interactor = QVTKRenderWindowInteractor(parent=self.vtk_frame)
    self.iren = self.vtk_interactor

    # start in the default (trackball camera) mouse mode
    self._camera_mode = 'default'
    self.setup_mouse_buttons(mode='default')
def setup_mouse_buttons(self, mode=None, revert=False,
                        left_button_down=None, left_button_up=None,
                        right_button_down=None,
                        end_pick=None,
                        style=None, force=False):
    """
    Remaps the mouse buttons temporarily

    Parameters
    ----------
    mode : str
        lets you know what kind of mapping this is
        ('default', 'measure_distance', 'probe_result', 'zoom',
        'cell_pick', 'node_pick', 'style')
    revert : bool; default=False
        does the button revert when it's finished
    left_button_down : function (default=None)
        the callback function (None -> depends on the mode)
    left_button_up : function (default=None)
        the callback function (None -> depends on the mode)
    right_button_down : function (default=None)
        the callback function (None -> depends on the mode)
    end_pick : function (default=None)
        NOTE(review): accepted but never used in this method
    style : vtkInteractorStyle (default=None)
        a custom vtkInteractorStyle
        None -> keep the same style, but overwrite the left mouse button
    force : bool; default=False
        override the mode=camera_mode check
    """
    assert isinstance(mode, string_types), mode
    assert revert in [True, False], revert
    #print('setup_mouse_buttons mode=%r _camera_mode=%r' % (mode, self._camera_mode))
    # no-op when the mode is already active (unless forced)
    if mode == self._camera_mode and not force:
        #print('auto return from set mouse mode')
        return
    self._camera_mode = mode
    # NOTE(review): this branch is unreachable -- the isinstance assert
    # above already rejects mode=None
    if mode is None:
        # same as default
        #print('auto return 2 from set mouse mode')
        return
    elif mode == 'default':
        #print('set mouse mode as default')
        # standard rotation
        # Disable default left mouse click function (Rotate)
        self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        self.vtk_interactor.RemoveObservers('EndPickEvent')
        self.vtk_interactor.AddObserver('EndPickEvent', self._probe_picker)
        # there should be a cleaner way to revert the trackball Rotate command
        # it apparently requires an (obj, event) argument instead of a void...
        self.set_style_as_trackball()
        # the more correct-ish way to reset the 'LeftButtonPressEvent' to Rotate
        # that doesn't work...
        #
        # Re-assign left mouse click event to custom function (Point Picker)
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent', self.style.Rotate)
    elif mode == 'measure_distance':  # 'rotation_center',
        # hackish b/c the default setting is so bad
        # the same callback handles both the press and the end-pick
        self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        self.vtk_interactor.AddObserver('LeftButtonPressEvent', left_button_down)

        self.vtk_interactor.RemoveObservers('EndPickEvent')
        self.vtk_interactor.AddObserver('EndPickEvent', left_button_down)
    elif mode == 'probe_result':
        # hackish b/c the default setting is so bad
        self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        self.vtk_interactor.AddObserver('LeftButtonPressEvent', left_button_down)

        self.vtk_interactor.RemoveObservers('EndPickEvent')
        self.vtk_interactor.AddObserver('EndPickEvent', left_button_down)
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent', func, 1) # on press down
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent', func, -1) # on button up
    elif mode == 'zoom':
        # requires a custom style (e.g. a rubber-band zoom style)
        assert style is not None, style
        self.vtk_interactor.SetInteractorStyle(style)

        # on press down
        self.vtk_interactor.AddObserver('LeftButtonPressEvent', left_button_down)

        # on button up
        self.vtk_interactor.AddObserver('LeftButtonReleaseEvent', left_button_up, -1)
        if right_button_down:
            self.vtk_interactor.AddObserver('RightButtonPressEvent', right_button_down)

    #elif mode == 'node_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent', self.on_node_pick_event)
    #elif mode == 'cell_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent', self.on_cell_pick_event)
    elif mode == 'cell_pick':
        #print('set mouse mode as cell_pick')
        self.vtk_interactor.SetPicker(self.cell_picker)
    elif mode == 'node_pick':
        #print('set mouse mode as node_pick')
        self.vtk_interactor.SetPicker(self.node_picker)
    elif mode == 'style':
        # fully replace the interactor style with a custom one
        self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        self.vtk_interactor.RemoveObservers('RightButtonPressEvent')
        self.vtk_interactor.SetInteractorStyle(style)

    #elif mode == 'area_cell_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent',
                                        #self.on_area_cell_pick_event)
    #elif mode == 'area_node_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent',
                                        #self.on_area_cell_pick_event)
    #elif mode == 'polygon_cell_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent',
                                        #self.on_polygon_cell_pick_event)
    #elif mode == 'polygon_node_pick':
        #self.vtk_interactor.RemoveObservers('LeftButtonPressEvent')
        #self.vtk_interactor.AddObserver('LeftButtonPressEvent',
                                        #self.on_polygon_cell_pick_event)
    #elif mode == 'pan':
        #pass
    else:
        raise NotImplementedError('camera_mode = %r' % self._camera_mode)
    # remember whether the mode should revert to default when finished
    self.revert = revert
def on_measure_distance(self):
    """toggles the measure-distance pick mode"""
    self.revert_pressed('measure_distance')
    button = self.actions['measure_distance']
    # either way we start from an empty pair of picked points
    self._measure_distance_pick_points = []
    if not button.isChecked():
        # revert on_measure_distance
        self.setup_mouse_buttons(mode='default')
        return
    self.setup_mouse_buttons('measure_distance',
                             left_button_down=self._measure_distance_picker)
def _measure_distance_picker(self, obj, event):
picker = self.cell_picker
pixel_x, pixel_y = self.vtk_interactor.GetEventPosition()
picker.Pick(pixel_x, pixel_y, 0, self.rend)
cell_id = picker.GetCellId()
#print('_measure_distance_picker', cell_id)
if cell_id < 0:
#self.picker_textActor.VisibilityOff()
pass
else:
world_position = picker.GetPickPosition()
closest_point = self._get_closest_node_xyz(cell_id, world_position)
if len(self._measure_distance_pick_points) == 0:
self._measure_distance_pick_points.append(closest_point)
self.log_info('point1 = %s' % str(closest_point))
else:
self.log_info('point2 = %s' % str(closest_point))
p1 = self._measure_distance_pick_points[0]
dxyz = closest_point - p1
mag = np.linalg.norm(dxyz)
self._measure_distance_pick_points = []
self.log_info('dxyz=%s mag=%s' % (str(dxyz), str(mag)))
measure_distance_button = self.actions['measure_distance']
measure_distance_button.setChecked(False)
self.setup_mouse_buttons(mode='default')
def on_escape_null(self):
    """The default state for the Escape key: do nothing."""
    pass
def on_escape(self):
    """
    Escape key should cancel:
     - on_rotation_center

    TODO: not done...
    """
    pass
def on_rotation_center(self):
    """
    Sets the camera rotation center from the next pick.

    http://osdir.com/ml/lib.vtk.user/2002-09/msg00079.html
    """
    self.revert_pressed('rotation_center')
    if not self.actions['rotation_center'].isChecked():
        # revert on_rotation_center
        self.setup_mouse_buttons(mode='default')
        return
    rotation_style = RotationCenterStyle(parent=self)
    self.setup_mouse_buttons('style', revert=True, style=rotation_style)
def set_focal_point(self, focal_point):
    """
    Sets the camera focal point and re-renders.

    Parameters
    ----------
    focal_point : (3, ) float ndarray
        The focal point
        [ 188.25109863 -7. -32.07858658]
    """
    camera = self.rend.GetActiveCamera()
    self.log_command("set_focal_point(focal_point=%s)" % str(focal_point))

    # now we can actually modify the camera
    camera.SetFocalPoint(focal_point[0], focal_point[1], focal_point[2])
    camera.OrthogonalizeViewUp()
    self.vtk_interactor.Render()
def revert_pressed(self, active_name):
    """
    Unchecks the first currently-checked pick-mode button other than
    ``active_name`` and reverts the mouse mode to default.

    Parameters
    ----------
    active_name : str
        the action that was just activated; its own button is skipped

    Notes
    -----
    At most one other mode can be active, so we stop after reverting the
    first checked button (this preserves the original early-return
    behavior of the four copy-pasted branches this replaces).
    """
    # (button name, attribute of accumulated pick state to clear, or None)
    modes = (
        ('probe_result', None),
        ('rotation_center', None),
        ('measure_distance', '_measure_distance_pick_points'),
        ('zoom', '_zoom'),
    )
    for name, state_attr in modes:
        if active_name == name:
            continue
        button = self.actions[name]
        if button.isChecked():
            # revert this mode
            button.setChecked(False)
            if state_attr is not None:
                setattr(self, state_attr, [])
            self.setup_mouse_buttons(mode='default')
            return
def on_probe_result(self):
    """toggles the probe-result pick mode"""
    self.revert_pressed('probe_result')
    if not self.actions['probe_result'].isChecked():
        # revert probe_result
        self.setup_mouse_buttons(mode='default')
        return
    self.setup_mouse_buttons('probe_result', left_button_down=self._probe_picker)
def on_quick_probe_result(self):
    """
    Performs a one-shot probe pick; the mouse mode reverts to default
    after the pick completes (revert=True).
    """
    self.revert_pressed('probe_result')
    # the original read self.actions['probe_result'].isChecked() into an
    # unused local; that dead read was removed
    self.setup_mouse_buttons('probe_result', left_button_down=self._probe_picker,
                             revert=True)
def on_area_pick_callback(self, eids, nids):
    """prints the message when area_pick succeeds"""
    parts = []
    if eids is not None and len(eids):
        parts.append(write_patran_syntax_dict({'Elem' : eids}))
    if nids is not None and len(nids):
        parts.append(write_patran_syntax_dict({'Node' : nids}))
    if parts:
        self.log_info('\n%s' % '\n'.join(parts).lstrip())
def on_area_pick(self, is_eids=True, is_nids=True, callback=None, force=False):
    """creates a Rubber Band Zoom"""
    self.revert_pressed('area_pick')
    is_checked = self.actions['area_pick'].isChecked()
    if not is_checked:
        # revert area_pick
        self.setup_mouse_buttons(mode='default')
        if not force:
            return

    self.log_info('on_area_pick')
    self._picker_points = []
    if callback is None:
        callback = self.on_area_pick_callback
    pick_style = AreaPickStyle(parent=self, is_eids=is_eids, is_nids=is_nids,
                               callback=callback)
    self.setup_mouse_buttons(mode='style', revert=True, style=pick_style)
def on_area_pick_not_square(self):
    """area pick that follows a mouse-drawn polygon (not a square)"""
    self.revert_pressed('area_pick')
    if not self.actions['area_pick'].isChecked():
        # revert area_pick
        self.setup_mouse_buttons(mode='default')
        return

    self.log_info('on_area_pick')
    self.vtk_interactor.SetPicker(self.area_picker)

    def _area_picker_up(*args):
        pass
    polygon_style = vtk.vtkInteractorStyleDrawPolygon()
    self.setup_mouse_buttons('area_pick',
                             left_button_up=_area_picker_up,
                             style=polygon_style)
    # other candidate pickers/styles:
    #self.area_picker = vtk.vtkAreaPicker()  # vtkRenderedAreaPicker?
    #self.rubber_band_style = vtk.vtkInteractorStyleRubberBandPick()
    #vtk.vtkInteractorStyleRubberBand2D
    #vtk.vtkInteractorStyleRubberBand3D
    #vtk.vtkInteractorStyleRubberBandZoom
    #vtk.vtkInteractorStyleAreaSelectHover
    #vtk.vtkInteractorStyleDrawPolygon
def on_zoom(self):
    """creates a Rubber Band Zoom"""
    if not self.actions['zoom'].isChecked():
        # revert zoom
        self.setup_mouse_buttons(mode='default')
        return
    zoom_style = ZoomStyle(parent=self)
    self.setup_mouse_buttons(mode='style', revert=True, style=zoom_style)
def _probe_picker(self, obj, event):
    """pick a point and apply the label based on the current displayed result"""
    picker = self.cell_picker
    pixel_x, pixel_y = self.vtk_interactor.GetEventPosition()
    picker.Pick(pixel_x, pixel_y, 0, self.rend)
    cell_id = picker.GetCellId()
    #print('_probe_picker', cell_id)
    if cell_id < 0:
        # nothing under the cursor; fall through to the revert check
        pass
    else:
        world_position = picker.GetPickPosition()
        # NOTE(review): dead experimental branch (set-focal-point-on-pick);
        # kept disabled as in the original
        if 0:
            camera = self.rend.GetActiveCamera()
            #focal_point = world_position
            out = self.get_result_by_xyz_cell_id(world_position, cell_id)
            _result_name, result_value, node_id, node_xyz = out
            focal_point = node_xyz
            self.log_info('focal_point = %s' % str(focal_point))
            self.setup_mouse_buttons(mode='default')

            # now we can actually modify the camera
            camera.SetFocalPoint(focal_point[0], focal_point[1], focal_point[2])
            camera.OrthogonalizeViewUp()
            probe_result_button = self.actions['probe_result']
            probe_result_button.setChecked(False)

        # re-fetch in the live path (duplicates the fetch above so the
        # dead branch can be removed cleanly)
        world_position = picker.GetPickPosition()
        cell_id = picker.GetCellId()
        #ds = picker.GetDataSet()
        #select_point = picker.GetSelectionPoint()
        self.log_command("annotate_cell_picker()")
        self.log_info("XYZ Global = %s" % str(world_position))
        #self.log_info("cell_id = %s" % cell_id)
        #self.log_info("data_set = %s" % ds)
        #self.log_info("selPt = %s" % str(select_point))

        #method = 'get_result_by_cell_id()' # self.model_type
        #print('pick_state =', self.pick_state)

        # pick handling depends on whether the active result is
        # centroidal or nodal
        icase = self.icase
        key = self.case_keys[icase]
        location = self.get_case_location(key)

        if location == 'centroid':
            out = self._cell_centroid_pick(cell_id, world_position)
        elif location == 'node':
            out = self._cell_node_pick(cell_id, world_position)
        else:
            raise RuntimeError('invalid pick location=%r' % location)

        return_flag, duplicate_key, result_value, result_name, xyz = out
        if return_flag is True:
            return

        # prevent duplicate labels with the same value on the same cell
        if duplicate_key is not None and duplicate_key in self.label_ids[icase]:
            return
        self.label_ids[icase].add(duplicate_key)

        #if 0:
            #result_value2, xyz2 = self.convert_units(case_key, result_value, xyz)
            #result_value = result_value2
            #xyz2 = xyz
        #x, y, z = world_position
        x, y, z = xyz
        # NOTE(review): the formatted coordinate text is dead -- it is
        # immediately overwritten by str(result_value) on the next line
        text = '(%.3g, %.3g, %.3g); %s' % (x, y, z, result_value)
        text = str(result_value)
        assert icase in self.label_actors, icase
        self._create_annotation(text, self.label_actors[icase], x, y, z)
        self.vtk_interactor.Render()
    # one-shot modes (e.g. on_quick_probe_result) revert to default here
    if self.revert:
        self.setup_mouse_buttons(mode='default')
#def remove_picker(self):
#self.vtk_interactor.
def set_node_picker(self):
    """makes the node picker the active VTK picker"""
    self.vtk_interactor.SetPicker(self.node_picker)
def set_cell_picker(self):
    """makes the cell picker the active VTK picker"""
    self.vtk_interactor.SetPicker(self.cell_picker)
@property
def render_window(self):
    """the vtkRenderWindow owned by the interactor"""
    return self.vtk_interactor.GetRenderWindow()
def set_background_image(self, image_filename='GeologicalExfoliationOfGraniteRock.jpg'):
    """
    adds a background image

    Picks a vtk image reader from the file extension, renders the image
    on a dedicated background layer (layer 0) behind the model renderer
    (layer 1), and sizes the background camera to fill the viewport.
    Silently returns if the file is missing or unreadable; raises
    NotImplementedError for an unsupported extension.
    """
    if not os.path.exists(image_filename):
        return
    fmt = os.path.splitext(image_filename)[1].lower()
    if fmt not in ['.jpg', '.jpeg', '.png', '.tif', '.tiff', '.bmp']:
        msg = 'invalid image type=%r; filename=%r' % (fmt, image_filename)
        raise NotImplementedError(msg)

    #image_reader = vtk.vtkJPEGReader()
    #image_reader = vtk.vtkPNGReader()
    #image_reader = vtk.vtkTIFFReader()
    #image_reader = vtk.vtkBMPReader()
    #image_reader = vtk.vtkPostScriptReader()  # doesn't exist?

    # an existing background means we reuse the existing image actor below
    has_background_image = self._active_background_image is not None
    self._active_background_image = image_filename
    #if has_background_image:
        #self.image_reader.Delete()

    if fmt in ['.jpg', '.jpeg']:
        self.image_reader = vtk.vtkJPEGReader()
    elif fmt == '.png':
        self.image_reader = vtk.vtkPNGReader()
    elif fmt in ['.tif', '.tiff']:
        self.image_reader = vtk.vtkTIFFReader()
    elif fmt == '.bmp':
        self.image_reader = vtk.vtkBMPReader()
    #elif fmt == '.ps': # doesn't exist?
        #self.image_reader = vtk.vtkPostScriptReader()
    else:
        msg = 'invalid image type=%r; filename=%r' % (fmt, image_filename)
        raise NotImplementedError(msg)

    if not self.image_reader.CanReadFile(image_filename):
        print("Error reading file %s" % image_filename)
        return

    self.image_reader.SetFileName(image_filename)
    self.image_reader.Update()
    image_data = self.image_reader.GetOutput()

    if has_background_image:
        # background plumbing already exists; just swap the image data
        if vtk.VTK_MAJOR_VERSION <= 5:
            self.image_actor.SetInput(image_data)
        else:
            self.image_actor.SetInputData(image_data)
        self.Render()
        return

    # Create an image actor to display the image
    self.image_actor = vtk.vtkImageActor()

    if vtk.VTK_MAJOR_VERSION <= 5:
        self.image_actor.SetInput(image_data)
    else:
        self.image_actor.SetInputData(image_data)

    # the image renders on layer 0, behind the model on layer 1
    self.background_rend = vtk.vtkRenderer()
    self.background_rend.SetLayer(0)
    self.background_rend.InteractiveOff()
    self.background_rend.AddActor(self.image_actor)

    self.rend.SetLayer(1)
    render_window = self.vtk_interactor.GetRenderWindow()
    render_window.SetNumberOfLayers(2)

    render_window.AddRenderer(self.background_rend)

    # Set up the background camera to fill the renderer with the image
    origin = image_data.GetOrigin()
    spacing = image_data.GetSpacing()
    extent = image_data.GetExtent()

    camera = self.background_rend.GetActiveCamera()
    camera.ParallelProjectionOn()

    # center of the image in world coordinates
    xc = origin[0] + 0.5*(extent[0] + extent[1]) * spacing[0]
    yc = origin[1] + 0.5*(extent[2] + extent[3]) * spacing[1]
    # xd = (extent[1] - extent[0] + 1) * spacing[0]
    yd = (extent[3] - extent[2] + 1) * spacing[1]
    d = camera.GetDistance()
    # parallel scale = half the image height, so the image fills the view
    camera.SetParallelScale(0.5 * yd)
    camera.SetFocalPoint(xc, yc, 0.0)
    camera.SetPosition(xc, yc, d)
def build_vtk_frame(self):
    """
    Builds the central Qt widget holding the VTK interactor, wires up the
    renderer(s), the main geometry actor, the corner text actors, and the
    edge actor.  ``nframes`` is a development switch for a side-by-side
    two-viewport layout; it is hard-coded to 1.
    """
    vtk_hbox = QHBoxLayout()
    vtk_hbox.setContentsMargins(2, 2, 2, 2)

    vtk_hbox.addWidget(self.vtk_interactor)
    self.vtk_frame.setLayout(vtk_hbox)
    self.vtk_frame.setFrameStyle(QFrame.NoFrame | QFrame.Plain)
    # this is our main, 'central' widget
    self.setCentralWidget(self.vtk_frame)

    #=============================================================
    # +-----+-----+
    # |     |     |
    # |  A  |  B  |
    # |     |     |
    # +-----+-----+
    # xmin, xmax, ymin, ymax
    nframes = 1
    #nframes = 2
    if nframes == 2:
        # xmin, ymin, xmax, ymax
        frame1 = [0., 0., 0.5, 1.0]
        frame2 = [0.5, 0., 1., 1.0]
        #frames = [frame1, frame2]
        self.rend.SetViewport(*frame1)
    self.vtk_interactor.GetRenderWindow().AddRenderer(self.rend)

    if nframes == 2:
        rend = vtk.vtkRenderer()
        rend.SetViewport(*frame2)
        self.vtk_interactor.GetRenderWindow().AddRenderer(rend)

    self.set_background_image()
    self.vtk_interactor.GetRenderWindow().Render()
    #self.load_nastran_geometry(None, None)

    #for cid, axes in iteritems(self.axes):
        #self.rend.AddActor(axes)
    self.add_geometry()
    if nframes == 2:
        rend.AddActor(self.geom_actor)

    # initialize geometry_actors
    self.geometry_actors['main'] = self.geom_actor

    # bar scale set so you can't edit the bar scale
    white = (255, 255, 255)
    geom_props = AltGeometry(
        self, 'main', color=white, line_width=1, opacity=1.0, point_size=1,
        bar_scale=0.0, representation='main', is_visible=True)
    self.geometry_properties['main'] = geom_props
    #self.addAltGeometry()

    self.rend.GetActiveCamera().ParallelProjectionOn()
    self.rend.SetBackground(*self.background_color)
    self.rend.ResetCamera()
    self.set_style_as_trackball()
    self.build_lookup_table()

    # corner text actors (indices matter to downstream update code)
    text_size = 14
    self.create_text([5, 50], 'Max  ', text_size)  # text actor 0
    self.create_text([5, 35], 'Min  ', text_size)  # text actor 1
    self.create_text([5, 20], 'Word1', text_size)  # text actor 2
    self.create_text([5, 5], 'Word2', text_size)  # text actor 3

    self.get_edges()
    if self.is_edges:
        prop = self.edge_actor.GetProperty()
        prop.EdgeVisibilityOn()
    else:
        prop = self.edge_actor.GetProperty()
        prop.EdgeVisibilityOff()
#def _script_helper(self, python_file=False):
#if python_file in [None, False]:
#self.on_run_script(python_file)
def set_style_as_trackball(self):
    """sets the default rotation style"""
    #self._simulate_key_press('t') # change mouse style to trackball
    trackball_style = vtk.vtkInteractorStyleTrackballCamera()
    self.style = trackball_style
    self.vtk_interactor.SetInteractorStyle(trackball_style)
def on_run_script(self, python_file=False):
    """
    Pulldown for running a python script.

    Parameters
    ----------
    python_file : str; default=False
        the path of the script to run; None/False pops a file dialog

    Returns
    -------
    is_failed : bool
        True if the script was cancelled, missing, or raised;
        False on success
    """
    is_failed = True
    if python_file in [None, False]:
        title = 'Choose a Python Script to Run'
        wildcard = "Python (*.py)"
        infile_name = self._create_load_file_dialog(
            wildcard, title, self._default_python_file)[1]
        if not infile_name:
            return is_failed  # user clicked cancel

        #python_file = os.path.join(script_path, infile_name)
        python_file = os.path.join(infile_name)

    if not os.path.exists(python_file):
        msg = 'python_file = %r does not exist' % python_file
        self.log_error(msg)
        return is_failed

    # read with a context manager so the handle is always closed
    # (the original leaked the file object from open(...).read())
    with open(python_file, 'r') as python_file_obj:
        lines = python_file_obj.read()

    try:
        # SECURITY: exec runs arbitrary code; only run trusted scripts
        exec(lines)
    except Exception as e:
        #self.log_error(traceback.print_stack(f))
        self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.log_error(str(e))
        return is_failed
    is_failed = False
    self._default_python_file = python_file
    self.log_command('self.on_run_script(%r)' % python_file)
    return is_failed
def on_show_info(self):
    """sets a flag for showing/hiding INFO messages"""
    is_shown = self.show_info
    self.show_info = not is_shown
def on_show_debug(self):
    """sets a flag for showing/hiding DEBUG messages"""
    is_shown = self.show_debug
    self.show_debug = not is_shown
def on_show_gui(self):
    """sets a flag for showing/hiding GUI messages"""
    is_shown = self.show_gui
    self.show_gui = not is_shown
def on_show_command(self):
    """sets a flag for showing/hiding COMMAND messages"""
    is_shown = self.show_command
    self.show_command = not is_shown
def on_reset_camera(self):
    """resets the camera (simulates the 'r' key press) and re-renders"""
    self.log_command('on_reset_camera()')
    self._simulate_key_press('r')
    self.vtk_interactor.Render()
def on_surface(self):
    """switches the 'main'-following geometry actors to surface representation"""
    if not self.is_wireframe:
        # already showing surfaces; nothing to do
        return
    self.log_command('on_surface()')
    for name, actor in iteritems(self.geometry_actors):
        representation = self.geometry_properties[name].representation
        # only actors that track the main mesh change representation
        if name == 'main' or representation in ['main', 'toggle']:
            actor.GetProperty().SetRepresentationToSurface()
    self.is_wireframe = False
    self.vtk_interactor.Render()
def on_wireframe(self):
    """switches the 'main'-following geometry actors to wireframe representation"""
    if self.is_wireframe:
        # already showing wireframe; nothing to do
        return
    self.log_command('on_wireframe()')
    for name, actor in iteritems(self.geometry_actors):
        representation = self.geometry_properties[name].representation
        # only actors that track the main mesh change representation
        if name == 'main' or representation in ['main', 'toggle']:
            actor.GetProperty().SetRepresentationToWireframe()
    self.vtk_interactor.Render()
    self.is_wireframe = True
def _update_camera(self, camera=None):
if camera is None:
camera = self.GetCamera()
camera.Modified()
self.vtk_interactor.Render()
def zoom(self, value):
    """zooms the camera by *value* (>1 zooms in) and logs the command"""
    cam = self.GetCamera()
    cam.Zoom(value)
    cam.Modified()
    self.vtk_interactor.Render()
    self.log_command('zoom(%s)' % value)
def rotate(self, rotate_deg):
    """rolls the camera by -rotate_deg degrees and logs the command"""
    cam = self.GetCamera()
    cam.Roll(-rotate_deg)
    cam.Modified()
    self.vtk_interactor.Render()
    self.log_command('rotate(%s)' % rotate_deg)
def on_rotate_clockwise(self):
    """rotates the view clockwise by 15 degrees"""
    self.rotate(15.0)
def on_rotate_cclockwise(self):
    """rotates the view counter-clockwise by 15 degrees"""
    self.rotate(-15.0)
def on_increase_magnification(self):
    """zooms in by 10%"""
    self.zoom(1.1)
def on_decrease_magnification(self):
    """zooms out by the inverse of a 10% zoom-in"""
    self.zoom(1.0 / 1.1)
def on_flip_edges(self):
    """turn edges on/off"""
    show_edges = not self.is_edges
    self.is_edges = show_edges
    self.edge_actor.SetVisibility(show_edges)
    #self.edge_actor.GetProperty().SetColor(0, 0, 0)  # cart3d edge color isn't black...
    self.edge_actor.Modified()
    self.Render()
    self.log_command('on_flip_edges()')
def on_set_edge_visibility(self):
    """toggles between black edges and result-colored edges"""
    self.is_edges_black = not self.is_edges_black
    prop = self.edge_actor.GetProperty()
    if self.is_edges_black:
        prop.EdgeVisibilityOn()
        lookup_table = self.color_function_black
    else:
        prop.EdgeVisibilityOff()
        lookup_table = self.color_function
    self.edge_mapper.SetLookupTable(lookup_table)

    self.edge_actor.Modified()
    prop.Modified()
    self.vtk_interactor.Render()
    self.log_command('on_set_edge_visibility()')
def get_edges(self):
    """Create the edge actor"""
    extract_edges = vtk.vtkExtractEdges()
    mapper = self.edge_mapper
    actor = self.edge_actor

    # VTK 6 renamed the input-connection API
    if self.vtk_version[0] >= 6:
        extract_edges.SetInputData(self.grid_selected)
        mapper.SetInputConnection(extract_edges.GetOutputPort())
    else:
        extract_edges.SetInput(self.grid_selected)
        mapper.SetInput(extract_edges.GetOutput())

    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(0., 0., 0.)
    mapper.SetLookupTable(self.color_function)
    mapper.SetResolveCoincidentTopologyToPolygonOffset()

    prop = actor.GetProperty()
    prop.SetColor(0., 0., 0.)
    actor.SetVisibility(self.is_edges)
    self.rend.AddActor(actor)
def post_group_by_name(self, name):
    """posts a group with a specific name"""
    self.post_group(self.groups[name])
    self.group_active = name
def post_group(self, group):
    """posts a group object (shows only its element ids)"""
    self.show_eids(group.element_ids)
def get_all_eids(self):
    """returns all the element IDs in the model"""
    return self.element_ids
def show_eids(self, eids):
    """shows the specified element IDs"""
    all_eids = self.get_all_eids()

    # drop requested ids that don't exist in the model
    visible_eids = np.intersect1d(all_eids, eids)

    # convert element ids -> 0-based indices into the sorted id array
    ishow = np.searchsorted(all_eids, visible_eids)
    self.show_ids_mask(ishow)
def hide_eids(self, eids):
    """hides the specified element IDs"""
    all_eids = self.get_all_eids()

    # drop ids that don't exist, then keep the complement (A - B)
    hidden = np.intersect1d(all_eids, eids)
    remaining = np.setdiff1d(all_eids, hidden)

    # convert element ids -> 0-based indices into the sorted id array
    ishow = np.searchsorted(all_eids, remaining)
    self.show_ids_mask(ishow)
def create_groups_by_visible_result(self, nlimit=50):
    """
    Creates group by the active result

    This should really only be called for integer results < 50-ish.
    """
    case_key = self.case_keys[self.icase]  # int for object
    result_name = self.result_name
    obj, (i, res_name) = self.result_cases[case_key]
    default_title = obj.get_default_title(i, res_name)
    location = obj.get_location(i, res_name)

    # only small integer, centroidal results make sensible groups
    if obj.data_format != '%i':
        self.log.error('not creating result=%r; must be an integer result' % result_name)
        return 0
    if location != 'centroid':
        self.log.error('not creating result=%r; must be a centroidal result' % result_name)
        return 0

    ngroups = self._create_groups_by_name(default_title, default_title, nlimit=nlimit)
    self.log_command('create_groups_by_visible_result()'
                     ' # created %i groups for result_name=%r' % (ngroups, result_name))
def create_groups_by_property_id(self):
    """
    Creates a group for each Property ID.

    As this is somewhat Nastran specific, create_groups_by_visible_result
    exists as well.
    """
    self._create_groups_by_name('PropertyID', 'property')
    self.log_command('create_groups_by_property_id()')
def _create_groups_by_name(self, name, prefix, nlimit=50):
    """
    Helper method for `create_groups_by_visible_result` and
    `create_groups_by_property_id`

    Parameters
    ----------
    name : str
        the name of the result to group by (e.g. 'PropertyID')
    prefix : str
        the prefix used for each created group's name
    nlimit : int; default=50
        the maximum number of groups that may be created

    Returns
    -------
    ngroups : int
        the number of unique values / created groups
        (0 if the nlimit cap was exceeded)
    """
    #eids = self.find_result_by_name('ElementID')
    #elements_pound = eids.max()
    eids = self.groups['main'].element_ids
    elements_pound = self.groups['main'].elements_pound

    result = self.find_result_by_name(name)
    ures = np.unique(result)
    ngroups = len(ures)
    if ngroups > nlimit:
        self.log.error('not creating result; %i new groups would be created; '
                       'increase nlimit=%i if you really want to' % (ngroups, nlimit))
        return 0

    for uresi in ures:
        ids = np.where(uresi == result)[0]

        # the original shadowed the ``name`` parameter here; renamed so
        # the loop can't silently reuse the per-group name
        group_name = '%s %s' % (prefix, uresi)
        element_str = ''
        group = Group(
            group_name, element_str, elements_pound,
            editable=True)
        group.element_ids = eids[ids]
        self.log_info('creating group=%r' % group_name)
        self.groups[group_name] = group
    return ngroups
def create_group_with_name(self, name, eids):
    """creates a single editable group from a list of element ids"""
    elements_pound = self.groups['main'].elements_pound
    new_group = Group(
        name, '', elements_pound,
        editable=True)
    # TODO: make sure all the eids exist
    new_group.element_ids = eids
    self.log_command('create_group_with_name(%r, %r)' % (name, eids))
    self.groups[name] = new_group
def find_result_by_name(self, desired_name):
    """returns the result data of the first case named *desired_name*"""
    for icase in range(self.ncases):
        case_name, case_result = self.get_name_result_data(icase)
        if case_name == desired_name:
            return case_result
    raise RuntimeError('cannot find name=%r' % desired_name)
def show_ids_mask(self, ids_to_show):
    """masks the specific 0-based element ids"""
    #print('ids_to_show = ', ids_to_show)
    prop = self.geom_actor.GetProperty()
    if len(ids_to_show) == self.nelements:
        # everything is visible; leave culling alone
        #prop.BackfaceCullingOn()
        pass
    else:
        prop.BackfaceCullingOff()

    # NOTE(review): the branches below are alternate implementations left in
    # during development; only the first `elif 1` branch is reachable
    if 0: # pragma: no cover
        self._show_ids_mask(ids_to_show)
    elif 1:
        # doesn't work for the BWB_saero.bdf
        # NOTE(review): flip_flag is computed but unused in this branch
        flip_flag = True is self._show_flag
        assert self._show_flag is True, self._show_flag
        self._update_ids_mask_show(ids_to_show)
        self._show_flag = True
    elif 1: # pragma: no cover
        # works
        flip_flag = True is self._show_flag
        assert self._show_flag is True, self._show_flag
        self._update_ids_mask_show_true(ids_to_show, flip_flag, render=False)
        self._update_ids_mask_show_true(ids_to_show, False, render=True)
        self._show_flag = True
    else: # pragma: no cover
        # old; works; slow
        flip_flag = True is self._show_flag
        self._update_ids_mask(ids_to_show, flip_flag, show_flag=True, render=False)
        self._update_ids_mask(ids_to_show, False, show_flag=True, render=True)
        self._show_flag = True
def hide_ids_mask(self, ids_to_hide):
    """masks (hides) the specific 0-based element ids"""
    #print('hide_ids_mask = ', hide_ids_mask)
    prop = self.geom_actor.GetProperty()
    # BUGFIX: previously tested ``self.ids_to_hide`` (an attribute this class
    # never assigns) instead of the ``ids_to_hide`` argument
    if len(ids_to_hide) == 0:
        prop.BackfaceCullingOn()
    else:
        prop.BackfaceCullingOff()

    #if 0:  # pragma: no cover
        #self._hide_ids_mask(ids_to_hide)
    #else:
    # old; works; slow
    flip_flag = self._show_flag is False
    self._update_ids_mask(ids_to_hide, flip_flag, show_flag=False, render=False)
    self._update_ids_mask(ids_to_hide, False, show_flag=False, render=True)
    self._show_flag = False
def _show_ids_mask(self, ids_to_show):
"""
helper method for ``show_ids_mask``
.. todo:: doesn't work
"""
all_i = np.arange(self.nelements, dtype='int32')
ids_to_hide = np.setdiff1d(all_i, ids_to_show)
self._hide_ids_mask(ids_to_hide)
def _hide_ids_mask(self, ids_to_hide):
    """
    helper method for ``hide_ids_mask``

    .. todo:: doesn't work
    """
    #print('_hide_ids_mask = ', ids_to_hide)
    ids = self.numpy_to_vtk_idtype(ids_to_hide)

    #self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)

    if 1:
        # sane; doesn't work
        # reuse the existing selection node and mark the pipeline dirty
        self.selection_node.SetSelectionList(ids)
        ids.Modified()
        self.selection_node.Modified()
        self.selection.Modified()
        self.grid_selected.Modified()
        self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
        self.update_all(render=True)
    else: # pragma: no cover
        # doesn't work
        # rebuild the selection node from scratch instead of reusing it
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        #self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        self.selection.AddNode(self.selection_node)
        self.selection_node.SetSelectionList(ids)

        #self.selection.RemoveAllNodes()
        #self.selection.AddNode(self.selection_node)
        self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
        self.selection_node.SetSelectionList(ids)
        self.update_all(render=True)
def numpy_to_vtk_idtype(self, ids):
    """converts an array of ids into a vtkIdTypeArray"""
    #self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
    from pyNastran.gui.gui_utils.vtk_utils import numpy_to_vtkIdTypeArray
    # match vtk's compiled id size (32/64-bit) before converting
    dtype = get_numpy_idtype_for_vtk()
    ids = np.asarray(ids, dtype=dtype)
    # deep=0 presumably shares memory with the numpy array -- TODO confirm
    # the array's lifetime is tied to the returned vtk object
    vtk_ids = numpy_to_vtkIdTypeArray(ids, deep=0)
    return vtk_ids
def _update_ids_mask_show_false(self, ids_to_show, flip_flag=True, render=True):
    """updates the selection with an INVERSE node for the given ids"""
    ids = self.numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        # rebuild the selection node from scratch with INVERSE set
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)

        self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        self.selection.AddNode(self.selection_node)
    else:
        # node already configured; just swap in the new id list
        self.selection_node.SetSelectionList(ids)

    # dumb; works
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=render)
def _update_ids_mask_show(self, ids_to_show):
    """helper method for ``show_ids_mask``"""
    ids = self.numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    # always rebuild the selection node (no reuse path here)
    self.selection.RemoveAllNodes()
    self.selection_node = vtk.vtkSelectionNode()
    self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
    self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
    self.selection_node.SetSelectionList(ids)
    self.selection_node.Modified()
    self.selection.Modified()
    self.selection.AddNode(self.selection_node)

    # seems to also work
    self.extract_selection.Update()

    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=True)
    #if 0:
        #self.grid_selected.Modified()
        #self.vtk_interactor.Render()
        #render_window = self.vtk_interactor.GetRenderWindow()
        #render_window.Render()
def _update_ids_mask_show_true(self, ids_to_show,
                               flip_flag=True, render=True): # pragma: no cover
    """alternate (unused) helper for ``show_ids_mask``; no INVERSE property"""
    ids = self.numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        # rebuild the selection node from scratch
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)
        self.selection.AddNode(self.selection_node)
    else:
        # reuse the node; just replace the id list
        self.selection_node.SetSelectionList(ids)

    # dumb; works
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=render)
def _update_ids_mask(self, ids_to_show, flip_flag=True, show_flag=True, render=True):
    """old (slow) selection-update path shared by show/hide masking"""
    print('flip_flag=%s show_flag=%s' % (flip_flag, show_flag))

    ids = self.numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        # rebuild the selection node from scratch
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)

        if not show_flag:
            # INVERSE -> the listed ids are hidden rather than shown
            self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        self.selection.AddNode(self.selection_node)
    else:
        # node already configured; just replace the id list
        self.selection_node.SetSelectionList(ids)

    #self.grid_selected.Update() # not in vtk 6
    #ids.Update()
    #self.shown_ids.Modified()

    if 0: # pragma: no cover
        # doesn't work...
        if vtk.VTK_MAJOR_VERSION <= 5:
            self.extract_selection.SetInput(0, self.grid)
            self.extract_selection.SetInput(1, self.selection)
        else:
            self.extract_selection.SetInputData(0, self.grid)
            self.extract_selection.SetInputData(1, self.selection)
    else:
        # dumb; works
        self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())

    #if 0:
        #self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        #self.extract_selection.Update()
    self.update_all(render=render)
def update_all_2(self, render=True): # pragma: no cover
    """alternate (unused) variant of ``update_all``; dirties the whole
    selection/extraction/mapper pipeline and optionally re-renders"""
    self.grid_selected.Modified()

    self.selection_node.Modified()
    self.selection.Modified()
    self.extract_selection.Update()
    self.extract_selection.Modified()

    self.grid_selected.Modified()
    self.grid_mapper.Update()
    self.grid_mapper.Modified()

    self.iren.Modified()
    self.rend.Render()
    self.rend.Modified()

    self.geom_actor.Modified()

    if render:
        self.vtk_interactor.Render()
        render_window = self.vtk_interactor.GetRenderWindow()
        render_window.Render()
def update_all(self, render=True):
    """dirties the whole selection/extraction/mapper pipeline so vtk
    re-executes it; optionally triggers an interactor render"""
    self.grid_selected.Modified()

    #selection_node.Update()
    self.selection_node.Modified()
    #selection.Update()
    self.selection.Modified()
    self.extract_selection.Update()
    self.extract_selection.Modified()

    #grid_selected.Update()
    self.grid_selected.Modified()
    self.grid_mapper.Update()
    self.grid_mapper.Modified()
    #selected_actor.Update()
    #selected_actor.Modified()

    #right_renderer.Modified()
    #right_renderer.Update()

    self.iren.Modified()
    #interactor.Update()
    #-----------------
    self.rend.Render()
    #interactor.Start()

    self.rend.Modified()

    self.geom_actor.Modified()

    if render:
        self.vtk_interactor.Render()
        render_window = self.vtk_interactor.GetRenderWindow()
        render_window.Render()
def _setup_element_mask(self, create_grid_selected=True):
    """
    starts the masking

    self.grid feeds in the geometry
    """
    ids = vtk.vtkIdTypeArray()
    ids.SetNumberOfComponents(1)

    # the "selection_node" is really a "selection_element_ids"
    # furthermore, it's an inverse model, so adding elements
    # hides more elements
    self.selection_node = vtk.vtkSelectionNode()
    self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
    self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
    self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)  # added
    self.selection_node.SetSelectionList(ids)

    self.selection = vtk.vtkSelection()
    self.selection.AddNode(self.selection_node)

    self.extract_selection = vtk.vtkExtractSelection()
    # vtk <= 5 uses the old SetInput API
    if vtk.VTK_MAJOR_VERSION <= 5:
        self.extract_selection.SetInput(0, self.grid)
        self.extract_selection.SetInput(1, self.selection)
    else:
        self.extract_selection.SetInputData(0, self.grid)
        self.extract_selection.SetInputData(1, self.selection)
    self.extract_selection.Update()

    # In selection
    if create_grid_selected:
        self.grid_selected = vtk.vtkUnstructuredGrid()
        self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())

    #if 0:
    self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
    self.extract_selection.Update()
def create_text(self, position, label, text_size=18):
    """creates the lower left text actors

    Parameters
    ----------
    position : iterable
        display position passed to SetDisplayPosition
    label : str
        the text to show
    text_size : int; default=18
        the font size
    """
    text_actor = vtk.vtkTextActor()
    text_actor.SetInput(label)
    text_prop = text_actor.GetTextProperty()
    #text_prop.SetFontFamilyToArial()
    text_prop.SetFontSize(int(text_size))
    text_prop.SetColor(self.text_color)
    text_actor.SetDisplayPosition(*position)

    # created hidden; turn_text_on makes the actors visible
    text_actor.VisibilityOff()

    # assign actor to the renderer
    self.rend.AddActor(text_actor)
    self.text_actors[self.itext] = text_actor
    self.itext += 1
def turn_text_off(self):
    """hides every lower-left text actor"""
    for text_actor in itervalues(self.text_actors):
        text_actor.VisibilityOff()
def turn_text_on(self):
    """shows every lower-left text actor"""
    for text_actor in itervalues(self.text_actors):
        text_actor.VisibilityOn()
def build_lookup_table(self):
    """applies the selected grid's scalar range and the color function
    to the main mapper, then adds the scalar bar to the renderer"""
    mapper = self.grid_mapper
    mapper.SetScalarRange(self.grid_selected.GetScalarRange())
    mapper.SetLookupTable(self.color_function)
    self.rend.AddActor(self.scalarBar)
def _create_load_file_dialog(self, qt_wildcard, title, default_filename=None):
    """pops open a Qt file dialog; returns (wildcard_level, fname)"""
    if default_filename is None:
        default_filename = self.last_dir
    fname, wildcard_level = getopenfilename(
        parent=self, caption=title,
        basedir=default_filename, filters=qt_wildcard,
        selectedfilter='', options=None)
    # note the flipped ordering relative to getopenfilename's return value
    return wildcard_level, fname
#def _create_load_file_dialog2(self, qt_wildcard, title):
## getOpenFileName return QString and we want Python string
##title = 'Load a Tecplot Geometry/Results File'
#last_dir = ''
##qt_wildcard = ['Tecplot Hex Binary (*.tec; *.dat)']
#dialog = MultiFileDialog()
#dialog.setWindowTitle(title)
#dialog.setDirectory(self.last_dir)
#dialog.setFilters(qt_wildcard.split(';;'))
#if dialog.exec_() == QtGui.QDialog.Accepted:
#outfiles = dialog.selectedFiles()
#wildcard_level = dialog.selectedFilter()
#return str(wildcard_level), str(fname)
#return None, None
def start_logging(self):
    """creates self.log; routes messages to the html log window when
    html_logging is enabled, otherwise logs to the console"""
    if self.html_logging is True:
        log = SimpleLogger('debug', 'utf-8', lambda x, y: self._logg_msg(x, y))
        # logging needs synchronizing, so the messages from different
        # threads would not be interleave
        self.log_mutex = QtCore.QReadWriteLock()
    else:
        log = SimpleLogger(
            level='debug', encoding='utf-8',
            #log_func=lambda x, y: print(x, y) # no colorama
        )
    self.log = log
def build_fmts(self, fmt_order, stop_on_failure=False):
    """
    Populates self.fmts / self.supported_formats from the
    ``get_<fmt>_wildcard_geometry_results_functions`` loader methods
    defined on this class.

    Parameters
    ----------
    fmt_order : List[str]
        the formats to probe, in priority order
    stop_on_failure : bool; default=False
        raise if a format in fmt_order has no loader method

    Raises
    ------
    RuntimeError
        if no format loaders were found
    TypeError
        if a loader returns something other than a 5-tuple or a list of them
    """
    fmts = []
    for fmt in fmt_order:
        geom_results_name = 'get_%s_wildcard_geometry_results_functions' % fmt
        if hasattr(self, geom_results_name):
            data = getattr(self, geom_results_name)()
            msg = 'macro_name, geo_fmt, geo_func, res_fmt, res_func = data\n'
            msg += 'data = %s'
            if isinstance(data, tuple):
                assert len(data) == 5, msg % str(data)
                macro_name, geo_fmt, geo_func, res_fmt, res_func = data
                fmts.append((fmt, macro_name, geo_fmt, geo_func, res_fmt, res_func))
            elif isinstance(data, list):
                for datai in data:
                    assert len(datai) == 5, msg % str(datai)
                    macro_name, geo_fmt, geo_func, res_fmt, res_func = datai
                    fmts.append((fmt, macro_name, geo_fmt, geo_func, res_fmt, res_func))
            else:
                raise TypeError(data)
        elif stop_on_failure:
            raise RuntimeError(
                'get_%s_wildcard_geometry_results_functions does not exist' % fmt)

    if len(fmts) == 0:
        # BUGFIX: this RuntimeError was previously constructed but never
        # raised (the code fell through to a duplicate check further down)
        raise RuntimeError('No formats...expected=%s' % fmt_order)
    self.fmts = fmts

    self.supported_formats = [fmt[0] for fmt in fmts]
    print('supported_formats = %s' % self.supported_formats)
def on_load_geometry_button(self, infile_name=None, geometry_format=None, name='main',
                            plot=True, raise_error=False):
    """action version of ``on_load_geometry``; parameters mirror that method"""
    # BUGFIX: previously hard-coded plot=True, silently ignoring the
    # caller's ``plot`` argument
    self.on_load_geometry(infile_name=infile_name, geometry_format=geometry_format,
                          name=name, plot=plot, raise_error=raise_error)
def _load_geometry_filename(self, geometry_format, infile_name):
    """gets the filename and format

    Returns
    -------
    (is_failed, out) where out is None on failure, otherwise
    (infile_name, load_function, filter_index, formats)
    """
    wildcard = ''
    is_failed = False

    if geometry_format and geometry_format.lower() not in self.supported_formats:
        is_failed = True
        msg = 'The import for the %r module failed.\n' % geometry_format
        self.log_error(msg)
        return is_failed, None

    if infile_name:
        # NOTE(review): if infile_name is given but geometry_format is None,
        # this .lower() raises AttributeError -- confirm callers always pass
        # both together
        geometry_format = geometry_format.lower()
        print("geometry_format = %r" % geometry_format)
        # find the loader registered for this format
        for fmt in self.fmts:
            fmt_name, _major_name, _geom_wildcard, geom_func, res_wildcard, _resfunc = fmt
            if geometry_format == fmt_name:
                load_function = geom_func
                if res_wildcard is None:
                    has_results = False
                else:
                    has_results = True
                break
        else:
            self.log_error('---invalid format=%r' % geometry_format)
            is_failed = True
            return is_failed, None
        formats = [geometry_format]
        filter_index = 0
    else:
        # load a pyqt window
        formats = []
        load_functions = []
        has_results_list = []
        wildcard_list = []

        # setup the selectable formats
        for fmt in self.fmts:
            fmt_name, _major_name, geom_wildcard, geom_func, res_wildcard, _res_func = fmt
            formats.append(_major_name)
            wildcard_list.append(geom_wildcard)
            load_functions.append(geom_func)
            if res_wildcard is None:
                has_results_list.append(False)
            else:
                has_results_list.append(True)

        # the list of formats that will be selectable in some odd syntax
        # that pyqt uses
        wildcard = ';;'.join(wildcard_list)

        # get the filter index and filename
        if infile_name is not None and geometry_format is not None:
            filter_index = formats.index(geometry_format)
        else:
            title = 'Choose a Geometry File to Load'
            wildcard_index, infile_name = self._create_load_file_dialog(wildcard, title)
            if not infile_name:
                # user clicked cancel
                is_failed = True
                return is_failed, None
            filter_index = wildcard_list.index(wildcard_index)

        geometry_format = formats[filter_index]
        load_function = load_functions[filter_index]
        has_results = has_results_list[filter_index]
    return is_failed, (infile_name, load_function, filter_index, formats)
def on_load_geometry(self, infile_name=None, geometry_format=None, name='main',
                     plot=True, raise_error=True):
    """
    Loads a baseline geometry

    Parameters
    ----------
    infile_name : str; default=None -> popup
        path to the filename
    geometry_format : str; default=None
        the geometry format for programmatic loading
    name : str; default='main'
        the name of the actor; don't use this
    plot : bool; default=True
        Should the baseline geometry have results created and plotted/rendered?
        If you're calling the on_load_results method immediately after, set it to False
    raise_error : bool; default=True
        stop the code if True
    """
    is_failed, out = self._load_geometry_filename(
        geometry_format, infile_name)
    if is_failed:
        return

    infile_name, load_function, filter_index, formats = out
    if load_function is not None:
        self.last_dir = os.path.split(infile_name)[0]

        if self.name == '':
            name = 'main'
        else:
            print('name = %r' % name)

        if name != self.name:
            # a secondary model is being loaded; rebuild grids for it
            #scalar_range = self.grid_selected.GetScalarRange()
            #self.grid_mapper.SetScalarRange(scalar_range)
            self.grid_mapper.ScalarVisibilityOff()
            #self.grid_mapper.SetLookupTable(self.color_function)
            self.name = str(name)
            self._reset_model(name)

        # reset alt grids
        names = self.alt_grids.keys()
        for name in names:
            self.alt_grids[name].Reset()
            self.alt_grids[name].Modified()

        if not os.path.exists(infile_name) and geometry_format:
            msg = 'input file=%r does not exist' % infile_name
            self.log_error(msg)
            self.log_error(print_bad_path(infile_name))
            return

        # clear out old data
        if self.model_type is not None:
            clear_name = 'clear_' + self.model_type
            try:
                dy_method = getattr(self, clear_name)  # 'self.clear_nastran()'
                dy_method()
            except:
                # NOTE(review): bare except hides real errors in the
                # clear_* method, not just a missing attribute
                print("method %r does not exist" % clear_name)
        self.log_info("reading %s file %r" % (geometry_format, infile_name))

        try:
            time0 = time.time()
            # the loader returns whether a results file can follow
            has_results = load_function(infile_name, name=name, plot=plot) # self.last_dir,
            dt = time.time() - time0
            print('dt_load = %.2f sec = %.2f min' % (dt, dt / 60.))
            #else:
                #name = load_function.__name__
                #self.log_error(str(args))
                #self.log_error("'plot' needs to be added to %r; "
                               #"args[-1]=%r" % (name, args[-1]))
                #has_results = load_function(infile_name) # , self.last_dir
                #form, cases = load_function(infile_name) # , self.last_dir
        except Exception as e:
            msg = traceback.format_exc()
            self.log_error(msg)
            if raise_error or self.dev:
                raise
            #return
        #self.vtk_panel.Update()
        self.rend.ResetCamera()

    # the model has been loaded, so we enable load_results
    if filter_index >= 0:
        self.format = formats[filter_index].lower()
        enable = has_results
        #self.load_results.Enable(enable)
    else: # no file specified
        return
    #print("on_load_geometry(infile_name=%r, geometry_format=None)" % infile_name)
    self.infile_name = infile_name
    self.out_filename = None
    #if self.out_filename is not None:
        #msg = '%s - %s - %s' % (self.format, self.infile_name, self.out_filename)
    #else:

    if name == 'main':
        msg = '%s - %s' % (self.format, self.infile_name)
        self.window_title = msg
        self.update_menu_bar()
        main_str = ''
    else:
        main_str = ', name=%r' % name

    self.log_command("on_load_geometry(infile_name=%r, geometry_format=%r%s)" % (
        infile_name, self.format, main_str))
def _reset_model(self, name):
    """resets the grids; sets up alt_grids"""
    if hasattr(self, 'main_grids') and name not in self.main_grids:
        # first time this named model is seen; build a fresh
        # grid/mapper/actor trio and register it with the renderer
        grid = vtk.vtkUnstructuredGrid()
        grid_mapper = vtk.vtkDataSetMapper()
        # vtk <= 5 uses the old producer-port API
        if self.vtk_version[0] <= 5:
            grid_mapper.SetInputConnection(grid.GetProducerPort())
        else:
            grid_mapper.SetInputData(grid)

        geom_actor = vtk.vtkLODActor()
        geom_actor.DragableOff()
        geom_actor.SetMapper(grid_mapper)
        self.rend.AddActor(geom_actor)

        self.grid = grid
        self.grid_mapper = grid_mapper
        self.geom_actor = geom_actor
        self.grid.Modified()

        # link the current "main" to the scalar bar
        scalar_range = self.grid_selected.GetScalarRange()
        self.grid_mapper.ScalarVisibilityOn()
        self.grid_mapper.SetScalarRange(scalar_range)
        self.grid_mapper.SetLookupTable(self.color_function)

        self.edge_actor = vtk.vtkLODActor()
        self.edge_actor.DragableOff()
        self.edge_mapper = vtk.vtkPolyDataMapper()

        # create the edges
        self.get_edges()
    else:
        # known model; just clear its geometry
        self.grid.Reset()
        self.grid.Modified()

    # reset alt grids
    alt_names = self.alt_grids.keys()
    for alt_name in alt_names:
        self.alt_grids[alt_name].Reset()
        self.alt_grids[alt_name].Modified()
def _update_menu_bar_to_format(self, fmt, method):
self.menu_bar_format = fmt
tools, menu_items = getattr(self, method)()
actions = self._prepare_actions(self._icon_path, tools, self.checkables)
self._update_menu(menu_items)
def update_menu_bar(self):
    """rebuilds the format-specific tools/menu items when the format changes"""
    # the format we're switching to
    method_new = '_create_%s_tools_and_menu_items' % self.format
    method_cleanup = '_cleanup_%s_tools_and_menu_items' % self.menu_bar_format

    # the current state of the format
    #method_new = '_create_%s_tools_and_menu_items' % self.menu_bar_format
    # NOTE(review): this hard-coded assignment makes the `is None` branch
    # below unreachable; it looks like leftover debug code -- confirm intent
    self.menu_bar_format = 'cwo'
    if self.menu_bar_format is None:
        self._update_menu_bar_to_format(self.format, method_new)
    else:
        print('need to add %r' % method_new)
        if self.menu_bar_format != self.format:
            if hasattr(self, method_cleanup):
            #if hasattr(self, method_old):
                self.menu_bar_format = None
                getattr(self, method_cleanup)()

            if hasattr(self, method_new):
                self._update_menu_bar_to_format(self.format, method_new)
                #self._update_menu_bar_to_format(self.format)
                #actions = self._prepare_actions(self._icon_path, self.tools, self.checkables)
                #menu_items = self._create_menu_items(actions)
                #menu_items = self._create_menu_items()
                #self._populate_menu(menu_items)
def on_load_custom_results(self, out_filename=None, restype=None):
    """will be a more generalized results reader

    Parameters
    ----------
    out_filename : str; default=None -> popup
        path to the results file
    restype : str; default=None
        one of 'node', 'element', 'deflection', 'patran_nod'
        (only used when out_filename is given)

    Returns
    -------
    is_failed : bool
        True on any failure/cancel, False on success
    """
    is_failed = True
    geometry_format = self.format
    if self.format is None:
        msg = 'on_load_results failed: You need to load a file first...'
        self.log_error(msg)
        return is_failed

    if out_filename in [None, False]:
        title = 'Select a Custom Results File for %s' % (self.format)

        #print('wildcard_level =', wildcard_level)
        #self.wildcard_delimited = 'Delimited Text (*.txt; *.dat; *.csv)'
        fmts = [
            'Node - Delimited Text (*.txt; *.dat; *.csv)',
            'Element - Delimited Text (*.txt; *.dat; *.csv)',
            'Nodal Deflection - Delimited Text (*.txt; *.dat; *.csv)',
            'Patran nod (*.nod)',
        ]
        fmt = ';;'.join(fmts)
        wildcard_level, out_filename = self._create_load_file_dialog(fmt, title)
        if not out_filename:
            return is_failed # user clicked cancel
        iwildcard = fmts.index(wildcard_level)
    else:
        # programmatic path: map restype to the same indices as the dialog
        fmts = [
            'node', 'element', 'deflection', 'patran_nod',
        ]
        iwildcard = fmts.index(restype.lower())
    if out_filename == '':
        return is_failed
    if not os.path.exists(out_filename):
        msg = 'result file=%r does not exist' % out_filename
        self.log_error(msg)
        return is_failed

    try:
        if iwildcard == 0:
            self._on_load_nodal_elemental_results('Nodal', out_filename)
            restype = 'Node'
        elif iwildcard == 1:
            self._on_load_nodal_elemental_results('Elemental', out_filename)
            restype = 'Element'
        elif iwildcard == 2:
            self._load_deflection(out_filename)
            restype = 'Deflection'
        elif iwildcard == 3:
            self._load_patran_nod(out_filename)
            restype = 'Patran_nod'
        else:
            # NOTE(review): on the programmatic path wildcard_level is never
            # assigned, so this message would raise NameError -- unreachable
            # in practice since iwildcard comes from fmts.index
            raise NotImplementedError('wildcard_level = %s' % wildcard_level)
    except Exception as e:
        msg = traceback.format_exc()
        self.log_error(msg)
        return is_failed
    self.log_command("on_load_custom_results(%r, restype=%r)" % (out_filename, restype))
    is_failed = False
    return is_failed
def _on_load_nodal_elemental_results(self, result_type, out_filename=None):
"""
Loads a CSV/TXT results file. Must have called on_load_geometry first.
Parameters
----------
result_type : str
'Nodal', 'Elemental'
out_filename : str / None
the path to the results file
"""
try:
self._load_csv(result_type, out_filename)
except Exception as e:
msg = traceback.format_exc()
self.log_error(msg)
#return
raise
#if 0:
#self.out_filename = out_filename
#msg = '%s - %s - %s' % (self.format, self.infile_name, out_filename)
#self.window_title = msg
#self.out_filename = out_filename
#def _load_force(self, out_filename):
#"""loads a deflection file"""
#self._load_deflection_force(out_filename, is_deflection=True, is_force=False)
def _load_deflection(self, out_filename):
    """loads a force file"""
    # NOTE(review): despite this method's name, it passes
    # is_deflection=False, is_force=True (and the commented-out _load_force
    # above does the opposite) -- the name/flag pairing looks swapped;
    # confirm against callers before changing
    self._load_deflection_force(out_filename, is_deflection=False, is_force=True)
def _load_deflection_force(self, out_filename, is_deflection=False, is_force=False):
    """
    Loads a deflection/force CSV file and adds the cases to the results form.

    Parameters
    ----------
    out_filename : str
        the path to the results file
    is_deflection : bool; default=False
        treat the result as a deflection
    is_force : bool; default=False
        treat the result as a force
    """
    out_filename_short = os.path.basename(out_filename)
    A, fmt_dict, headers = load_deflection_csv(out_filename)
    #nrows, ncols, fmts
    header0 = headers[0]
    result0 = A[header0]
    nrows = result0.shape[0]

    # the file must describe every node exactly once
    assert nrows == self.nnodes, 'nrows=%s nnodes=%s' % (nrows, self.nnodes)
    result_type = 'node'
    # BUGFIX: is_force was previously passed is_deflection's value
    # (``is_force=is_deflection``), so the force flag never propagated
    self._add_cases_to_form(A, fmt_dict, headers, result_type,
                            out_filename_short, update=True, is_scalar=False,
                            is_deflection=is_deflection, is_force=is_force)
def _load_csv(self, result_type, out_filename):
    """
    common method between:
      - on_add_nodal_results(filename)
      - on_add_elemental_results(filename)

    Parameters
    ----------
    result_type : str
        'Nodal' or 'Elemental'
    out_filename : str
        the CSV filename to load
    """
    out_filename_short = os.path.relpath(out_filename)
    A, fmt_dict, headers = load_csv(out_filename)

    # the row count must match the node/element count for the chosen type
    nrows = A[headers[0]].size
    if result_type == 'Nodal':
        assert nrows == self.nnodes, 'nrows=%s nnodes=%s' % (nrows, self.nnodes)
        result_type2 = 'node'
    elif result_type == 'Elemental':
        assert nrows == self.nelements, 'nrows=%s nelements=%s' % (nrows, self.nelements)
        result_type2 = 'centroid'
    else:
        raise NotImplementedError('result_type=%r' % result_type)

    self._add_cases_to_form(A, fmt_dict, headers, result_type2,
                            out_filename_short, update=True, is_scalar=True)
def on_load_results(self, out_filename=None):
    """
    Loads a results file. Must have called on_load_geometry first.

    Parameters
    ----------
    out_filename : str / None
        the path to the results file
    """
    geometry_format = self.format
    if self.format is None:
        msg = 'on_load_results failed: You need to load a file first...'
        self.log_error(msg)
        raise RuntimeError(msg)

    if out_filename in [None, False]:
        title = 'Select a Results File for %s' % self.format
        wildcard = None
        load_function = None

        # find the results wildcard/loader registered for this format
        for fmt in self.fmts:
            fmt_name, _major_name, _geowild, _geofunc, _reswild, _resfunc = fmt
            if geometry_format == fmt_name:
                wildcard = _reswild
                load_function = _resfunc
                break
        else:
            msg = 'format=%r is not supported' % geometry_format
            self.log_error(msg)
            raise RuntimeError(msg)

        if wildcard is None:
            msg = 'format=%r has no method to load results' % geometry_format
            self.log_error(msg)
            return
        out_filename = self._create_load_file_dialog(wildcard, title)[1]
    else:
        # programmatic path: only the loader is needed
        for fmt in self.fmts:
            fmt_name, _major_name, _geowild, _geofunc, _reswild, _resfunc = fmt
            print('fmt_name=%r geometry_format=%r' % (fmt_name, geometry_format))
            if fmt_name == geometry_format:
                load_function = _resfunc
                break
        else:
            msg = ('format=%r is not supported.  '
                   'Did you load a geometry model?' % geometry_format)
            self.log_error(msg)
            raise RuntimeError(msg)

    if out_filename == '':
        return
    if isinstance(out_filename, string_types):
        out_filename = [out_filename]
    for out_filenamei in out_filename:
        if not os.path.exists(out_filenamei):
            msg = 'result file=%r does not exist' % out_filenamei
            self.log_error(msg)
            return
            #raise IOError(msg)
        self.last_dir = os.path.split(out_filenamei)[0]
        try:
            load_function(out_filenamei)
        except Exception: # as e
            msg = traceback.format_exc()
            self.log_error(msg)
            #return
            raise

        self.out_filename = out_filenamei
        msg = '%s - %s - %s' % (self.format, self.infile_name, out_filenamei)
        self.window_title = msg
        print("on_load_results(%r)" % out_filenamei)
        self.out_filename = out_filenamei
        self.log_command("on_load_results(%r)" % out_filenamei)
def setup_gui(self):
    """
    Setup the gui

    1. starts the logging
    2. reapplies the settings
    3. create pickers
    4. create main vtk actors
    5. shows the Qt window
    """
    assert self.fmts != [], 'supported_formats=%s' % self.supported_formats
    self.start_logging()
    settings = QtCore.QSettings()
    self.create_vtk_actors()

    # build GUI and restore saved application state
    #nice_blue = (0.1, 0.2, 0.4)
    qpos_default = self.pos()
    # NOTE(review): pos_default and main_window_state (below) are computed
    # but never used in this method
    pos_default = qpos_default.x(), qpos_default.y()

    self.reset_settings = False
    #if self.reset_settings or qt_version in [5, 'pyside']:
        #self.settings.reset_settings()
    #else:
    self.settings.load(settings)

    self.init_ui()
    if self.reset_settings:
        self.res_dock.toggleViewAction()
    self.init_cell_picker()

    main_window_state = settings.value("mainWindowState")
    self.create_corner_axis()
    #-------------
    # loading
    self.show()
def setup_post(self, inputs):
    """interface for user defined post-scripts

    Parameters
    ----------
    inputs : dict
        batch inputs; keys used here: 'shots', 'user_points', 'user_geom'
    """
    self.load_batch_inputs(inputs)

    shots = inputs['shots']
    if shots is None:
        shots = []
    if shots:
        # screenshot mode: take the shot and exit the process
        #for shot in shots:
        self.on_take_screenshot(shots)
        sys.exit('took screenshot %r' % shots)

    # rgb color cycle used for user points/geometry overlays
    self.color_order = [
        (1.0, 0.145098039216, 1.0),
        (0.0823529411765, 0.0823529411765, 1.0),
        (0.0901960784314, 1.0, 0.941176470588),
        (0.501960784314, 1.0, 0.0941176470588),
        (1.0, 1.0, 0.117647058824),
        (1.0, 0.662745098039, 0.113725490196)
    ]
    if inputs['user_points'] is not None:
        for fname in inputs['user_points']:
            self.on_load_user_points(fname)

    if inputs['user_geom'] is not None:
        for fname in inputs['user_geom']:
            self.on_load_user_geom(fname)
    #self.set_anti_aliasing(16)
def on_load_user_geom(self, csv_filename=None, name=None, color=None):
    """
    Loads a User Geometry CSV File of the form:

    # id  x   y   z
    GRID, 1, 0.2, 0.3, 0.3
    GRID, 2, 1.2, 0.3, 0.3
    GRID, 3, 2.2, 0.3, 0.3
    GRID, 4, 5.2, 0.3, 0.3
    grid, 5, 5.2, 1.3, 2.3  # case insensitive

    #    ID, nodes
    BAR,  1, 1, 2
    TRI,  2, 1, 2, 3
    # this is a comment

    QUAD, 3, 1, 5, 3, 4
    QUAD, 4, 1, 2, 3, 4  # this is after a blank line

    #RESULT,4,CENTROID,AREA(%f),PROPERTY_ID(%i)
    # in element id sorted order: value1, value2
    #1.0, 2.0 # bar
    #1.0, 2.0 # tri
    #1.0, 2.0 # quad
    #1.0, 2.0 # quad

    #RESULT,NODE,NODEX(%f),NODEY(%f),NODEZ(%f)
    # same difference

    #RESULT,VECTOR3,GEOM,DXYZ
    # 3xN

    Parameters
    ----------
    csv_filename : str (default=None -> load a dialog)
        the path to the user geometry CSV file
    name : str (default=None -> extract from fname)
        the name for the user points
    color : (float, float, float)
        RGB values as 0.0 <= rgb <= 1.0
    """
    if csv_filename in [None, False]:
        title = 'Load User Geometry'
        csv_filename = self._create_load_file_dialog(self.wildcard_delimited, title)[1]
        if not csv_filename:
            return
    if color is None:
        # we mod the num_user_points so we don't go outside the range
        icolor = self.num_user_points % len(self.color_order)
        color = self.color_order[icolor]
    if name is None:
        # derive the actor name from the filename (strip the extension)
        name = os.path.basename(csv_filename).rsplit('.', 1)[0]

    self._add_user_geometry(csv_filename, name, color)
    self.log_command('on_load_user_geom(%r, %r, %s)' % (
        csv_filename, name, str(color)))
def _add_user_geometry(self, csv_filename, name, color):
    """helper method for ``on_load_user_geom``

    Builds two alternate vtk grids from the CSV file: one point grid
    (*name*_point) and, when elements exist, one geometry grid (*name*_geom).
    """
    if name in self.geometry_actors:
        msg = 'Name: %s is already in geometry_actors\nChoose a different name.' % name
        raise ValueError(msg)
    if len(name) == 0:
        msg = 'Invalid Name: name=%r' % name
        raise ValueError(msg)

    point_name = name + '_point'
    geom_name = name + '_geom'

    grid_ids, xyz, bars, tris, quads = load_user_geom(csv_filename)

    nbars = len(bars)
    ntris = len(tris)
    nquads = len(quads)
    nelements = nbars + ntris + nquads
    self.create_alternate_vtk_grid(point_name, color=color, opacity=1.0,
                                   point_size=5, representation='point')

    if nelements > 0:
        # map user node ids -> 0-based vtk point indices
        nid_map = {}
        i = 0
        for nid in grid_ids:
            nid_map[nid] = i
            i += 1
        self.create_alternate_vtk_grid(geom_name, color=color, opacity=1.0,
                                       line_width=5, representation='toggle')

    # allocate
    nnodes = len(grid_ids)
    #self.alt_grids[point_name].Allocate(npoints, 1000)
    #if nelements > 0:
        #self.alt_grids[geom_name].Allocate(npoints, 1000)

    # set points
    points = numpy_to_vtk_points(xyz, dtype='<f')

    if nelements > 0:
        geom_grid = self.alt_grids[geom_name]
        # every node gets a vertex cell in both grids
        for i in range(nnodes):
            elem = vtk.vtkVertex()
            elem.GetPointIds().SetId(0, i)
            self.alt_grids[point_name].InsertNextCell(elem.GetCellType(), elem.GetPointIds())
            geom_grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())
    else:
        for i in range(nnodes):
            elem = vtk.vtkVertex()
            elem.GetPointIds().SetId(0, i)
            self.alt_grids[point_name].InsertNextCell(elem.GetCellType(), elem.GetPointIds())

    if nbars:
        for i, bar in enumerate(bars[:, 1:]):
            g1 = nid_map[bar[0]]
            g2 = nid_map[bar[1]]
            elem = vtk.vtkLine()
            elem.GetPointIds().SetId(0, g1)
            elem.GetPointIds().SetId(1, g2)
            geom_grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())

    if ntris:
        for i, tri in enumerate(tris[:, 1:]):
            g1 = nid_map[tri[0]]
            g2 = nid_map[tri[1]]
            g3 = nid_map[tri[2]]
            elem = vtk.vtkTriangle()
            elem.GetPointIds().SetId(0, g1)
            elem.GetPointIds().SetId(1, g2)
            elem.GetPointIds().SetId(2, g3)
            # 5 = VTK_TRIANGLE cell type
            geom_grid.InsertNextCell(5, elem.GetPointIds())

    if nquads:
        for i, quad in enumerate(quads[:, 1:]):
            g1 = nid_map[quad[0]]
            g2 = nid_map[quad[1]]
            g3 = nid_map[quad[2]]
            g4 = nid_map[quad[3]]
            elem = vtk.vtkQuad()
            point_ids = elem.GetPointIds()
            point_ids.SetId(0, g1)
            point_ids.SetId(1, g2)
            point_ids.SetId(2, g3)
            point_ids.SetId(3, g4)
            # 9 = VTK_QUAD cell type
            geom_grid.InsertNextCell(9, elem.GetPointIds())

    self.alt_grids[point_name].SetPoints(points)
    if nelements > 0:
        self.alt_grids[geom_name].SetPoints(points)

    # create actor/mapper
    self._add_alt_geometry(self.alt_grids[point_name], point_name)
    if nelements > 0:
        self._add_alt_geometry(self.alt_grids[geom_name], geom_name)

    # set representation to points
    #self.geometry_properties[point_name].representation = 'point'
    #self.geometry_properties[geom_name].representation = 'toggle'
    #actor = self.geometry_actors[name]
    #prop = actor.GetProperty()
    #prop.SetRepresentationToPoints()
    #prop.SetPointSize(4)
def on_load_csv_points(self, csv_filename=None, name=None, color=None):
    """
    Loads a User Points CSV File of the form:

        1.0, 2.0, 3.0
        1.5, 2.5, 3.5

    Parameters
    ----------
    csv_filename : str (default=None -> load a dialog)
        the path to the user points CSV file
    name : str (default=None -> extract from fname)
        the name for the user points
    color : (float, float, float)
        RGB values as 0.0 <= rgb <= 1.0

    Returns
    -------
    is_failed : bool / None
        did the load fail; None if the file dialog was cancelled

    .. note:: no header line is required
    .. note:: nodes are in the global frame

    .. todo:: support changing the name
    .. todo:: support changing the color
    .. todo:: support overwriting points
    """
    if csv_filename in [None, False]:
        title = 'Load User Points'
        csv_filename = self._create_load_file_dialog(self.wildcard_delimited, title)[1]
        if not csv_filename:
            # user cancelled the dialog
            return
    if color is None:
        # we mod the num_user_points so we don't go outside the range
        icolor = self.num_user_points % len(self.color_order)
        color = self.color_order[icolor]
    if name is None:
        sline = os.path.basename(csv_filename).rsplit('.', 1)
        name = sline[0]

    is_failed = self._add_user_points_from_csv(csv_filename, name, color)
    if not is_failed:
        self.num_user_points += 1
        self.log_command('on_load_csv_points(%r, %r, %s)' % (
            csv_filename, name, str(color)))
    return is_failed
def create_cell_picker(self):
    """creates the vtk picker objects (cell, node, area) and the rubber-band style"""
    self.cell_picker = vtk.vtkCellPicker()
    self.node_picker = vtk.vtkPointPicker()

    self.area_picker = vtk.vtkAreaPicker()  # vtkRenderedAreaPicker?
    self.rubber_band_style = vtk.vtkInteractorStyleRubberBandPick()
    # other vtk classes that may be useful here:
    #vtk.vtkInteractorStyleRubberBand2D
    #vtk.vtkInteractorStyleRubberBand3D
    #vtk.vtkInteractorStyleRubberBandZoom
    #vtk.vtkInteractorStyleAreaSelectHover
    #vtk.vtkInteractorStyleDrawPolygon

    #vtk.vtkAngleWidget
    #vtk.vtkAngleRepresentation2D
    #vtk.vtkAngleRepresentation3D
    #vtk.vtkAnnotation
    #vtk.vtkArrowSource
    #vtk.vtkGlyph2D
    #vtk.vtkGlyph3D
    #vtk.vtkHedgeHog
    #vtk.vtkLegendBoxActor
    #vtk.vtkLegendScaleActor
    #vtk.vtkLabelPlacer

    # picking tolerance
    self.cell_picker.SetTolerance(0.001)
    self.node_picker.SetTolerance(0.001)
def mark_elements_by_different_case(self, eids, icase_result, icase_to_apply):
    """
    Marks a series of elements with custom text labels

    Parameters
    ----------
    eids : int, List[int]
        the elements to apply a message to
    icase_result : int
        the case to draw the result from
    icase_to_apply : int
        the key in label_actors to slot the result into

    TODO: fix the following
    correct : applies to the icase_to_apply
    incorrect : applies to the icase_result

    Examples
    --------
    .. code-block::

        eids = [16563, 16564, 8916703, 16499, 16500, 8916699,
                16565, 16566, 8916706, 16502, 16503, 8916701]
        icase_result = 22
        icase_to_apply = 25
        self.mark_elements_by_different_case(eids, icase_result, icase_to_apply)
    """
    # validate both cases before doing any work
    if icase_result not in self.label_actors:
        msg = 'icase_result=%r not in label_actors=[%s]' % (
            icase_result, ', '.join(self.label_actors))
        self.log_error(msg)
        return
    if icase_to_apply not in self.label_actors:
        msg = 'icase_to_apply=%r not in label_actors=[%s]' % (
            icase_to_apply, ', '.join(self.label_actors))
        self.log_error(msg)
        return

    eids = np.unique(eids)
    neids = len(eids)
    #centroids = np.zeros((neids, 3), dtype='float32')
    # map element ids to cell indices (assumes self.element_ids is sorted)
    ieids = np.searchsorted(self.element_ids, eids)
    #print('ieids = ', ieids)

    for cell_id in ieids:
        # pull the result value for the element and label its centroid
        centroid = self.cell_centroid(cell_id)
        result_name, result_values, xyz = self.get_result_by_cell_id(
            cell_id, centroid, icase_result)
        texti = '%s' % result_values
        xi, yi, zi = centroid
        self._create_annotation(texti, self.label_actors[icase_to_apply], xi, yi, zi)
    self.log_command('mark_elements_by_different_case(%s, %s, %s)' % (
        eids, icase_result, icase_to_apply))
    self.vtk_interactor.Render()
def mark_nodes(self, nids, icase, text):
    """
    Marks a series of nodes with custom text labels

    Parameters
    ----------
    nids : int, List[int]
        the nodes to apply a message to
    icase : int
        the key in label_actors to slot the result into
        (0 corresponds to the NodeID result)
    text : str, List[str]
        the text to display; a single string is applied to every node

    Examples
    --------
    .. code-block::

        self.mark_nodes(1, 0, 'max')
        self.mark_nodes(6, 0, 'min')
        self.mark_nodes([1, 6], 0, 'max')
        self.mark_nodes([1, 6], 0, ['max', 'min'])
    """
    if icase not in self.label_actors:
        self.log_error('icase=%r not in label_actors=[%s]' % (
            icase, ', '.join(self.label_actors)))
        return

    # map node ids to row indices (assumes self.node_ids is sorted)
    inids = np.searchsorted(self.node_ids, nids)
    if isinstance(text, string_types):
        labels = [text] * len(inids)
    else:
        assert len(text) == len(inids)
        labels = text

    slot = self.label_actors[icase]
    for position, label in zip(self.xyz_cid0[inids, :], labels):
        xi, yi, zi = position
        self._create_annotation(label, slot, xi, yi, zi)
    self.vtk_interactor.Render()
def __mark_nodes_by_result(self, nids, icases):
    """
    Marks nodes with the values of one or more result cases.

    Parameters
    ----------
    nids : int, List[int]
        the node ids to label
    icases : int, List[int]
        the result case(s) to pull the values from

    .. code-block::

        # mark the node 1 with the NodeID (0) result
        self.mark_nodes_by_result_case(1, 0)

        # mark the nodes 1 and 2 with the NodeID (0) result
        self.mark_nodes_by_result_case([1, 2], 0)

        # mark the nodes with the NodeID (0) and ElementID (1) result
        self.mark_nodes_by_result_case([1, 2], [0, 1])
    """
    # map node ids to row indices (assumes self.node_ids is sorted)
    inids = np.searchsorted(self.node_ids, nids)
    if isinstance(icases, int):
        icases = [icases]

    for icase in icases:
        if icase not in self.label_actors:
            msg = 'icase=%r not in label_actors=[%s]' % (
                icase, ', '.join(self.label_actors))
            self.log_error(msg)
            continue

        for node_id in inids:
            # bugfix: ``world_position``, ``texti`` and ``xi/yi/zi`` were
            # previously undefined names (NameError at runtime); use the
            # node's location as the pick position and annotate with the
            # result value at the returned node position
            world_position = self.xyz_cid0[node_id, :]
            out = self.get_result_by_xyz_node_id(world_position, node_id)
            _result_name, result_value, node_id, node_xyz = out
            texti = '%s' % result_value
            xi, yi, zi = node_xyz
            self._create_annotation(texti, self.label_actors[icase], xi, yi, zi)
    self.vtk_interactor.Render()
def _cell_centroid_pick(self, cell_id, world_position):
    """
    Gets the centroidal result for a picked cell.

    Parameters
    ----------
    cell_id : int
        the vtk cell id of the picked cell
    world_position : (3, ) float
        the xyz pick location

    Returns
    -------
    return_flag : bool
        True : the pick failed / was already handled; other values are None
    duplicate_key : int / None
        the cell id (used to detect repeated picks)
    result_value : varies
        the picked result value
    result_name : str
        the name of the picked result
    xyz : varies
        passed through from the result lookup
    """
    duplicate_key = None
    icase = self.icase
    if self.pick_state == 'node/centroid':
        return_flag = False
        duplicate_key = cell_id
        result_name, result_value, xyz = self.get_result_by_cell_id(cell_id, world_position)
        assert icase in self.label_actors, icase
    else:
        #cell = self.grid.GetCell(cell_id)
        # get_nastran_centroidal_pick_state_nodal_by_xyz_cell_id()
        method = 'get_centroidal_%s_result_pick_state_%s_by_xyz_cell_id' % (
            self.format, self.pick_state)
        if hasattr(self, method):
            methodi = getattr(self, method)
            return_flag, value = methodi(world_position, cell_id)
            if return_flag is True:
                return return_flag, None, None, None, None
            # NOTE(review): if return_flag is False here, result_name /
            # result_value / xyz are unbound below — confirm the intended
            # unpacking of ``value``
        else:
            msg = "pick_state is set to 'nodal', but the result is 'centroidal'\n"
            msg += ' cannot find: self.%s(xyz, cell_id)' % method
            self.log_error(msg)
            # bugfix: previously returned ``return_flag`` (unbound here,
            # NameError) in a 4-tuple; callers unpack 5 values
            return True, None, None, None, None

    self.log_info("%s = %s" % (result_name, result_value))
    return return_flag, duplicate_key, result_value, result_name, xyz
def _get_closest_node_xyz(self, cell_id, world_position):
    """
    Gets the xyz location of the node on ``cell_id`` closest to the pick.

    Parameters
    ----------
    cell_id : int
        the vtk cell id of the picked cell
    world_position : (3, ) float
        the xyz pick location

    Returns
    -------
    xyz : (3, ) float
        the location of the closest node
    """
    # removed an unused ``duplicate_key`` local
    (result_name, result_value, node_id, xyz) = self.get_result_by_xyz_cell_id(
        world_position, cell_id)
    assert self.icase in self.label_actors, result_name
    assert not isinstance(xyz, int), xyz
    return xyz
def _cell_node_pick(self, cell_id, world_position):
    """
    Gets the nodal result for a picked cell.

    Parameters
    ----------
    cell_id : int
        the vtk cell id of the picked cell
    world_position : (3, ) float
        the xyz pick location

    Returns
    -------
    return_flag : bool
        True : the pick failed / was already handled; other values are None
    duplicate_key : int / None
        the node id (used to detect repeated picks)
    result_value : varies
        the picked result value
    result_name : str
        the name of the picked result
    xyz : (3, ) float
        the location of the picked node
    """
    duplicate_key = None
    icase = self.icase
    if self.pick_state == 'node/centroid':
        return_flag = False
        (result_name, result_value, node_id, xyz) = self.get_result_by_xyz_cell_id(
            world_position, cell_id)
        assert icase in self.label_actors, result_name
        assert not isinstance(xyz, int), xyz
        duplicate_key = node_id
    else:
        method = 'get_nodal_%s_result_pick_state_%s_by_xyz_cell_id' % (
            self.format, self.pick_state)
        if hasattr(self, method):
            methodi = getattr(self, method)
            return_flag, value = methodi(world_position, cell_id)
            if return_flag is True:
                return return_flag, None, None, None, None
            # NOTE(review): if return_flag is False here, result_name /
            # result_value / xyz are unbound below — confirm the intended
            # unpacking of ``value``
        else:
            msg = "pick_state is set to 'centroidal', but the result is 'nodal'\n"
            msg += ' cannot find: self.%s(xyz, cell_id)' % method
            self.log_error(msg)
            # bugfix: previously returned ``return_flag`` (unbound here,
            # NameError) in a 4-tuple; callers unpack 5 values
            return True, None, None, None, None

    msg = "%s = %s" % (result_name, result_value)
    if self.result_name in ['Node_ID', 'Node ID', 'NodeID']:
        x1, y1, z1 = xyz
        x2, y2, z2 = world_position
        msg += '; xyz=(%s, %s, %s); pierce_xyz=(%s, %s, %s)' % (x1, y1, z1,
                                                                x2, y2, z2)
    self.log_info(msg)
    return return_flag, duplicate_key, result_value, result_name, xyz
def init_cell_picker(self):
    """initializes the vtk pickers and default mouse bindings"""
    self.is_pick = False
    if not self.run_vtk:
        return
    # NOTE(review): the second SetPicker call replaces the first, so only
    # the cell picker ends up active — confirm the node-picker call is
    # intentional
    self.vtk_interactor.SetPicker(self.node_picker)
    self.vtk_interactor.SetPicker(self.cell_picker)
    self.setup_mouse_buttons(mode='probe_result')
    self.setup_mouse_buttons(mode='default')
def convert_units(self, result_name, result_value, xyz):
    """
    Converts a result from input units to display units.

    Currently a passthrough; the value and position are returned unchanged.
    """
    #self.input_units
    #self.display_units
    out = (result_value, xyz)
    return out
def _create_annotation(self, text, slot, x, y, z):
    """
    Creates a 3d text annotation and appends it to ``slot``

    Parameters
    ----------
    text : str
        the text to display
    slot : List[vtkBillboardTextActor3D]
        where to place the annotation
        (e.g. ``self.label_actors[icase]``)
    x, y, z : float
        the position of the label

    Raises
    ------
    TypeError
        if ``slot`` is not a list
    """
    if not isinstance(slot, list):
        msg = 'slot=%r type=%s' % (slot, type(slot))
        raise TypeError(msg)
    # http://nullege.com/codes/show/src%40p%40y%40pymatgen-2.9.6%40pymatgen%40vis%40structure_vtk.py/395/vtk.vtkVectorText/python
    #self.convert_units(icase, result_value, x, y, z)

    # a billboard actor always faces the camera
    annotation = vtk.vtkBillboardTextActor3D()
    annotation.SetPosition(x, y, z)
    annotation.SetInput(text)
    annotation.PickableOff()
    annotation.DragableOff()
    #annotation.SetPickable(False)
    #annotation.SetPosition(actor.GetPosition())

    text_prop = annotation.GetTextProperty()
    text_prop.SetFontSize(self.annotation_size)
    text_prop.SetFontFamilyToArial()
    text_prop.BoldOn()
    text_prop.ShadowOn()
    text_prop.SetColor(self.annotation_color)
    text_prop.SetJustificationToCentered()

    # finish adding the actor
    self.rend.AddActor(annotation)
    slot.append(annotation)

    #self.picker_textMapper.SetInput("(%.6f, %.6f, %.6f)"% pickPos)
    #camera.GetPosition()
    #camera.GetClippingRange()
    #camera.GetFocalPoint()
def _on_multi_pick(self, a):
    """
    multi-pick callback; not implemented yet

    see:
      - vtkFrustumExtractor
      - vtkAreaPicker
    """
    pass
def _on_cell_picker(self, a):
    """logs the pick location / cell id for the current cell-picker selection"""
    self.vtk_interactor.SetPicker(self.cell_picker)
    picker = self.cell_picker
    xyz = picker.GetPickPosition()
    cell_id = picker.GetCellId()
    # x, y pixel coordinate of the pick
    pixel_xy = picker.GetSelectionPoint()

    self.log_info("world_position = %s" % str(xyz))
    self.log_info("cell_id = %s" % cell_id)
    self.log_info("select_point = %s" % str(pixel_xy))
def _on_node_picker(self, a):
    """logs the pick location / node id for the current node-picker selection"""
    self.vtk_interactor.SetPicker(self.node_picker)
    picker = self.node_picker
    xyz = picker.GetPickPosition()
    node_id = picker.GetPointId()
    # x, y pixel coordinate of the pick
    pixel_xy = picker.GetSelectionPoint()

    self.log_info("world_position = %s" % str(xyz))
    self.log_info("node_id = %s" % node_id)
    self.log_info("select_point = %s" % str(pixel_xy))
#def on_cell_picker(self):
#self.log_command("on_cell_picker()")
#picker = self.cell_picker
#world_position = picker.GetPickPosition()
#cell_id = picker.GetCellId()
##ds = picker.GetDataSet()
#select_point = picker.GetSelectionPoint() # get x,y pixel coordinate
#self.log_info("world_position = %s" % str(world_position))
#self.log_info("cell_id = %s" % cell_id)
#self.log_info("select_point = %s" % str(select_point))
#self.log_info("data_set = %s" % ds)
#def get_2d_point(self, point3d, view_matrix,
#projection_matrix,
#width, height):
#view_projection_matrix = projection_matrix * view_matrix
## transform world to clipping coordinates
#point3d = view_projection_matrix.multiply(point3d)
#win_x = math.round(((point3d.getX() + 1) / 2.0) * width)
## we calculate -point3D.getY() because the screen Y axis is
## oriented top->down
#win_y = math.round(((1 - point3d.getY()) / 2.0) * height)
#return Point2D(win_x, win_y)
#def get_3d_point(self, point2D, width, height, view_matrix, projection_matrix):
#x = 2.0 * win_x / client_width - 1
#y = -2.0 * win_y / client_height + 1
#view_projection_inverse = inverse(projection_matrix * view_vatrix)
#point3d = Point3D(x, y, 0)
#return view_projection_inverse.multiply(point3d)
def show_only(self, names):
    """
    Show these actors only

    Parameters
    ----------
    names : str, List[str]
        names to show
        If they're hidden, show them.
        If they're shown and shouldn't be, hide them.

    ..todo :: update the GeometryProperties
    """
    raise NotImplementedError('show_only')
def hide_actors(self, except_names=None):
    """
    Hide all the actors

    Parameters
    ----------
    except_names : str, List[str], None
        list of names to exclude
        None : hide all

    ..note :: If an actor is hidden and in the except_names, it will still be hidden.
    ..todo :: update the GeometryProperties
    """
    if except_names is None:
        except_names = []
    elif isinstance(except_names, string_types):
        except_names = [except_names]

    # hide everything but the excluded actors
    for name, actor in iteritems(self.geometry_actors):
        if name not in except_names:
            actor.VisibilityOff()

    self.hide_axes()
    self.hide_legend()
    #self.settings.set_background_color_to_white()
def hide_axes(self, cids=None):
    """
    Hides the coordinate-system axes and the corner axis widget

    ..todo :: support cids
    ..todo :: fix the coords
    """
    # bugfix: dict.itervalues() is Python 2 only (AttributeError on
    # Python 3); .values() behaves identically here on both
    for axis in self.axes.values():
        axis.VisibilityOff()
    self.corner_axis.EnabledOff()
def show_axes(self, cids=None):
    """
    Shows the coordinate-system axes and the corner axis widget

    ..todo :: support cids
    ..todo :: fix the coords
    """
    # bugfix: dict.itervalues() is Python 2 only (AttributeError on
    # Python 3); .values() behaves identically here on both
    for axis in self.axes.values():
        axis.VisibilityOn()
    self.corner_axis.EnabledOn()
def on_take_screenshot(self, fname=None, magnify=None, show_msg=True):
    """
    Take a screenshot of a current view and save as a file

    Parameters
    ----------
    fname : str; default=None
        None : pop open a window
        str : bypass the popup window
    magnify : int; default=None
        None : use self.magnify
        int : resolution increase factor
    show_msg : bool; default=True
        log the command
    """
    if fname is None or fname is False:
        # build a default filename and ask the user where to save
        filt = ''
        default_filename = ''

        title = ''
        if self.title is not None:
            title = self.title

        if self.out_filename is None:
            default_filename = ''
            if self.infile_name is not None:
                base, ext = os.path.splitext(os.path.basename(self.infile_name))
                default_filename = self.infile_name
                default_filename = base + '.png'
        else:
            base, ext = os.path.splitext(os.path.basename(self.out_filename))
            default_filename = title + '_' + base + '.png'

        file_types = (
            'PNG Image *.png (*.png);; '
            'JPEG Image *.jpg *.jpeg (*.jpg, *.jpeg);; '
            'TIFF Image *.tif *.tiff (*.tif, *.tiff);; '
            'BMP Image *.bmp (*.bmp);; '
            'PostScript Document *.ps (*.ps)')

        title = 'Choose a filename and type'
        fname, flt = getsavefilename(parent=self, caption=title, basedir='',
                                     filters=file_types, selectedfilter=filt,
                                     options=None)
        if fname in [None, '']:
            # user cancelled
            return
        #print("fname=%r" % fname)
        #print("flt=%r" % flt)
    else:
        base, ext = os.path.splitext(os.path.basename(fname))
        # NOTE(review): splitext keeps the leading dot (e.g. '.png'), so
        # this membership test never matches and flt is always 'png' —
        # confirm whether ['.png', ...] was intended
        if ext.lower() in ['png', 'jpg', 'jpeg', 'tif', 'tiff', 'bmp', 'ps']:
            flt = ext.lower()
        else:
            flt = 'png'

    if fname:
        render_large = vtk.vtkRenderLargeImage()
        # NOTE(review): both vtk-version branches are identical — confirm
        # whether the <6 path should differ
        if self.vtk_version[0] >= 6:
            render_large.SetInput(self.rend)
        else:
            render_large.SetInput(self.rend)

        # magnify the render, remember the original line widths/point sizes
        line_widths0, point_sizes0, axes_actor = self._screenshot_setup(magnify, render_large)

        nam, ext = os.path.splitext(fname)
        ext = ext.lower()
        # pick the writer from the selected filter; fall back to PNG
        for nam, exts, obj in (('PostScript', ['.ps'], vtk.vtkPostScriptWriter),
                               ("BMP", ['.bmp'], vtk.vtkBMPWriter),
                               ('JPG', ['.jpg', '.jpeg'], vtk.vtkJPEGWriter),
                               ("TIFF", ['.tif', '.tiff'], vtk.vtkTIFFWriter)):
            if flt == nam:
                fname = fname if ext in exts else fname + exts[0]
                writer = obj()
                break
        else:
            fname = fname if ext == '.png' else fname + '.png'
            writer = vtk.vtkPNGWriter()

        # NOTE(review): both vtk-version branches are identical — confirm
        if self.vtk_version[0] >= 6:
            writer.SetInputConnection(render_large.GetOutputPort())
        else:
            writer.SetInputConnection(render_large.GetOutputPort())
        writer.SetFileName(fname)
        writer.Write()

        #self.log_info("Saved screenshot: " + fname)
        if show_msg:
            self.log_command('on_take_screenshot(%r, magnify=%s)' % (fname, magnify))
        # restore the original line widths/point sizes
        self._screenshot_teardown(line_widths0, point_sizes0, axes_actor)
def _screenshot_setup(self, magnify, render_large):
    """
    Scales the render/text/line-widths for a screenshot and hides the
    corner axis.

    Parameters
    ----------
    magnify : int / None
        None : use self.magnify (clamped to at least 1)
        int : resolution increase factor
    render_large : vtkRenderLargeImage
        the magnified render source

    Returns
    -------
    line_widths0 : Dict[str, float]
        the original per-actor line widths (for teardown)
    point_sizes0 : Dict[str, float]
        the original per-actor point sizes (for teardown)
    axes_actor : vtk actor
        the (now hidden) corner-axis orientation marker

    Raises
    ------
    TypeError
        if the resolved magnify is not an integer
    """
    if magnify is None:
        magnify_min = 1
        magnify = self.magnify if self.magnify > magnify_min else magnify_min
    # (removed a no-op "else: magnify = magnify" branch)
    if not isinstance(magnify, integer_types):
        msg = 'magnify=%r type=%s' % (magnify, type(magnify))
        raise TypeError(msg)
    self.settings.update_text_size(magnify=magnify)
    render_large.SetMagnification(magnify)

    # multiply linewidth by magnify
    line_widths0 = {}
    point_sizes0 = {}
    for key, geom_actor in iteritems(self.geometry_actors):
        if isinstance(geom_actor, vtk.vtkActor):
            prop = geom_actor.GetProperty()
            line_width0 = prop.GetLineWidth()
            point_size0 = prop.GetPointSize()
            line_widths0[key] = line_width0
            point_sizes0[key] = point_size0
            line_width = line_width0 * magnify
            point_size = point_size0 * magnify
            prop.SetLineWidth(line_width)
            prop.SetPointSize(point_size)
            prop.Modified()
        elif isinstance(geom_actor, vtk.vtkAxesActor):
            pass
        else:
            raise NotImplementedError(geom_actor)

    # hide corner axis
    axes_actor = self.corner_axis.GetOrientationMarker()
    axes_actor.SetVisibility(False)
    return line_widths0, point_sizes0, axes_actor
def _screenshot_teardown(self, line_widths0, point_sizes0, axes_actor):
    """restores the text size, corner axis and actor line widths / point sizes
    that ``_screenshot_setup`` changed"""
    self.settings.update_text_size(magnify=1.0)

    # show the corner axes again
    axes_actor.SetVisibility(True)

    # restore the original line widths / point sizes
    for name, geom_actor in iteritems(self.geometry_actors):
        if isinstance(geom_actor, vtk.vtkActor):
            prop = geom_actor.GetProperty()
            prop.SetLineWidth(line_widths0[name])
            prop.SetPointSize(point_sizes0[name])
            prop.Modified()
        elif isinstance(geom_actor, vtk.vtkAxesActor):
            pass
        else:
            raise NotImplementedError(geom_actor)
def make_gif(self, gif_filename, scale, istep=None,
             min_value=None, max_value=None,
             animate_scale=True, animate_phase=False, animate_time=False,
             icase=None, icase_start=None, icase_end=None, icase_delta=None,
             time=2.0, animation_profile='0 to scale',
             nrepeat=0, fps=30, magnify=1,
             make_images=True, delete_images=False, make_gif=True, stop_animation=False,
             animate_in_gui=True):
    """
    Makes an animated gif

    Parameters
    ----------
    gif_filename : str
        path to the output gif & png folder
    scale : float
        the deflection scale factor; true scale
    istep : int; default=None
        the png file number (let's you pick a subset of images)
        useful for when you press ``Step``
    stop_animation : bool; default=False
        stops the animation; don't make any images/gif
    animate_in_gui : bool; default=True
        animates the model; don't make any images/gif
        stop_animation overrides animate_in_gui
        animate_in_gui overrides make_gif

    Pick One
    --------
    animate_scale : bool; default=True
        does a deflection plot (single subcase)
    animate_phase : bool; default=False
        does a complex deflection plot (single subcase)
    animate_time : bool; default=False
        does a deflection plot (multiple subcases)

    time : float; default=2.0
        the runtime of the gif (seconds)
    fps : int; default=30
        the frames/second

    Case Selection
    --------------
    icase : int; default=None
        None : unused
        int : the result case to plot the deflection for
              active if animate_scale=True or animate_phase=True
    icase_start : int; default=None
        starting case id
        None : unused
        int : active if animate_time=True
    icase_end : int; default=None
        starting case id
        None : unused
        int : active if animate_time=True
    icase_delta : int; default=None
        step size
        None : unused
        int : active if animate_time=True

    Time Plot Options
    -----------------
    max_value : float; default=None
        the max value on the plot
    min_value : float; default=None
        the min value on the plot

    Options
    -------
    animation_profile : str; default='0 to scale'
        animation profile to follow
            '0 to Scale',
            '0 to Scale to 0',
            #'0 to Scale to -Scale to 0',
            '-Scale to Scale',
            '-scale to scale to -scale',
    nrepeat : int; default=0
        0 : loop infinitely
        1 : loop 1 time
        2 : loop 2 times

    Final Control Options
    ---------------------
    make_images : bool; default=True
        make the images
    delete_images : bool; default=False
        cleanup the png files at the end
    make_gif : bool; default=True
        actually make the gif at the end

    Other local variables
    ---------------------
    duration : float
        frame time (seconds)

    For one sided data
    ------------------
    - scales/phases should be one-sided
    - time should be one-sided
    - analysis_time should be one-sided
    - set onesided=True

    For two-sided data
    ------------------
    - scales/phases should be one-sided
    - time should be two-sided
    - analysis_time should be one-sided
    - set onesided=False

    Returns
    -------
    is_failed : bool / None
        did the gif creation fail; None if animating in the GUI
    """
    if stop_animation:
        return self.stop_animation()

    # expand the user request into per-frame case ids / scales / phases
    phases, icases, isteps, scales, analysis_time, onesided = setup_animation(
        scale, istep=istep,
        animate_scale=animate_scale, animate_phase=animate_phase, animate_time=animate_time,
        icase=icase,
        icase_start=icase_start, icase_end=icase_end, icase_delta=icase_delta,
        time=time, animation_profile=animation_profile,
        fps=fps)

    parent = self
    #animate_in_gui = True
    # kill any previous animation timer before starting a new one
    self.stop_animation()
    if len(icases) == 1:
        pass
    elif animate_in_gui:
        class vtkAnimationCallback(object):
            """
            Per-timer-tick callback that advances the animation by one frame.

            http://www.vtk.org/Wiki/VTK/Examples/Python/Animation
            """
            def __init__(self):
                self.timer_count = 0
                self.cycler = cycle(range(len(icases)))
                self.icase0 = -1
                self.ncases = len(icases)

            def execute(self, obj, event):
                iren = obj
                i = self.timer_count % self.ncases
                #j = next(self.cycler)
                istep = isteps[i]
                icase = icases[i]
                scale = scales[i]
                phase = phases[i]
                if icase != self.icase0:
                    # only re-apply the result when the case changes
                    #self.cycle_results(case=icase)
                    parent.cycle_results_explicit(icase, explicit=True)
                try:
                    parent.update_grid_by_icase_scale_phase(icase, scale, phase=phase)
                except AttributeError:
                    parent.log_error('Invalid Case %i' % icase)
                    parent.stop_animation()
                self.icase0 = icase

                parent.vtk_interactor.Render()
                self.timer_count += 1

        # Sign up to receive TimerEvent
        callback = vtkAnimationCallback()

        observer_name = self.iren.AddObserver('TimerEvent', callback.execute)
        self.observers['TimerEvent'] = observer_name

        # total_time not needed
        # fps
        # -> frames_per_second = 1/fps
        delay = int(1. / fps * 1000)
        timer_id = self.iren.CreateRepeatingTimer(delay)  # time in milliseconds
        return

    is_failed = True
    try:
        is_failed = self.make_gif_helper(
            gif_filename, icases, scales,
            phases=phases, isteps=isteps,
            max_value=max_value, min_value=min_value,
            time=time, analysis_time=analysis_time, fps=fps, magnify=magnify,
            onesided=onesided, nrepeat=nrepeat,
            make_images=make_images, delete_images=delete_images, make_gif=make_gif)
    except Exception as e:
        self.log_error(str(e))
        raise
        #self.log_error(traceback.print_stack(f))
        #self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)

    if not is_failed:
        msg = (
            'make_gif(%r, %s, istep=%s,\n'
            ' min_value=%s, max_value=%s,\n'
            ' animate_scale=%s, animate_phase=%s, animate_time=%s,\n'
            ' icase=%s, icase_start=%s, icase_end=%s, icase_delta=%s,\n'
            " time=%s, animation_profile=%r,\n"
            ' nrepeat=%s, fps=%s, magnify=%s,\n'
            ' make_images=%s, delete_images=%s, make_gif=%s, stop_animation=%s,\n'
            ' animate_in_gui=%s)\n' % (
                gif_filename, scale, istep, min_value, max_value,
                animate_scale, animate_phase, animate_time,
                icase, icase_start, icase_end, icase_delta, time, animation_profile,
                nrepeat, fps, magnify, make_images, delete_images, make_gif, stop_animation,
                animate_in_gui)
        )
        self.log_command(msg)

    return is_failed
def stop_animation(self):
    """removes the animation timer (if any) and restores the default mouse bindings"""
    is_failed = False
    observer_name = self.observers.pop('TimerEvent', None)
    if observer_name is not None:
        self.iren.RemoveObserver(observer_name)
        self.setup_mouse_buttons(mode='default', force=True)
    return is_failed
def make_gif_helper(self, gif_filename, icases, scales, phases=None, isteps=None,
                    max_value=None, min_value=None,
                    time=2.0, analysis_time=2.0, fps=30, magnify=1,
                    onesided=True, nrepeat=0,
                    make_images=True, delete_images=False, make_gif=True):
    """
    Makes an animated gif

    Parameters
    ----------
    gif_filename : str
        path to the output gif & png folder
    icases : int / List[int]
        the result case to plot the deflection for
    scales : List[float]
        List[float] : the deflection scale factors; true scale
    phases : List[float]; default=None
        List[float] : the phase angles (degrees)
        None -> animate scale
    max_value : float; default=None
        the max value on the plot (not supported)
    min_value : float; default=None
        the min value on the plot (not supported)
    isteps : List[int]
        the png file numbers (let's you pick a subset of images)
        useful for when you press ``Step``
    time : float; default=2.0
        the runtime of the gif (seconds)
    analysis_time : float; default=2.0
        The time we actually need to simulate (seconds).
        We don't need to take extra pictures if they're just copies.
    fps : int; default=30
        the frames/second

    Options
    -------
    onesided : bool; default=True
        should the animation go up and back down
    nrepeat : int; default=0
        0 : loop infinitely
        1 : loop 1 time
        2 : loop 2 times

    Final Control Options
    ---------------------
    make_images : bool; default=True
        make the images
    delete_images : bool; default=False
        cleanup the png files at the end
    make_gif : bool; default=True
        actually make the gif at the end

    Other local variables
    ---------------------
    duration : float
        frame time (seconds)

    For one sided data
    ------------------
    - scales/phases should be one-sided
    - time should be one-sided
    - analysis_time should be one-sided
    - set onesided=True

    For two-sided data
    ------------------
    - scales/phases should be one-sided
    - time should be two-sided
    - analysis_time should be one-sided
    - set onesided=False

    Returns
    -------
    is_failed : bool
        did the gif creation fail
    """
    assert fps >= 1, fps
    nframes = ceil(analysis_time * fps)
    assert nframes >= 2, nframes
    duration = time / nframes
    nframes = int(nframes)

    # NOTE(review): os.path.abspath(None) raises, so gif_filename=None
    # cannot actually reach the "gif_filename is not None" guards below
    # (fmt/png_filenames would also be unbound then) — confirm the
    # intended None handling
    png_dirname = os.path.dirname(os.path.abspath(gif_filename))
    if not os.path.exists(png_dirname):
        os.makedirs(png_dirname)

    phases, icases, isteps, scales = update_animation_inputs(
        phases, icases, isteps, scales, analysis_time, fps)

    if gif_filename is not None:
        png_filenames = []
        # e.g. 'model_%03i.png' with enough digits for nframes
        fmt = gif_filename[:-4] + '_%%0%ii.png' % (len(str(nframes)))

    icase0 = -1
    is_failed = True
    if make_images:
        for istep, icase, scale, phase in zip(isteps, icases, scales, phases):
            # NOTE(review): icase0 is never updated, so this branch runs
            # every frame — confirm whether "icase0 = icase" is missing
            if icase != icase0:
                #self.cycle_results(case=icase)
                self.cycle_results_explicit(icase, explicit=True,
                                            min_value=min_value, max_value=max_value)
            self.update_grid_by_icase_scale_phase(icase, scale, phase=phase)

            if gif_filename is not None:
                png_filename = fmt % istep
                self.on_take_screenshot(fname=png_filename, magnify=magnify)
                png_filenames.append(png_filename)
    else:
        # reuse previously-written images
        for istep in isteps:
            png_filename = fmt % istep
            png_filenames.append(png_filename)
            assert os.path.exists(png_filename), 'png_filename=%s' % png_filename

    if png_filenames:
        is_failed = write_gif(
            gif_filename, png_filenames, time=time,
            onesided=onesided,
            nrepeat=nrepeat, delete_images=delete_images,
            make_gif=make_gif)
    return is_failed
def add_geometry(self):
    """
    Builds the main grid mapper/actor and adds them to the renderer.

    #(N,) for stress, x-disp
    #(N,3) for warp vectors/glyphs
    grid_result = vtk.vtkFloatArray()

    point_data = self.grid.GetPointData()
    cell_data = self.grid.GetCellData()

    self.grid.GetCellData().SetScalars(grid_result)
    self.grid.GetPointData().SetScalars(grid_result)

    self.grid_mapper   <-input-> self.grid
    vtkDataSetMapper() <-input-> vtkUnstructuredGrid()

    self.grid_mapper   <--map--> self.geom_actor <-add-> self.rend
    vtkDataSetMapper() <--map--> vtkActor()      <-add-> vtkRenderer()
    """
    if self.is_groups:
        # groups mode: render a masked subset of the grid
        # solid_bending: eids 1-182
        self._setup_element_mask()
        #eids = np.arange(172)
        #eids = arange(172)
        #self.update_element_mask(eids)
    else:
        self.grid_selected = self.grid
    #print('grid_selected =', self.grid_selected)

    self.grid_mapper = vtk.vtkDataSetMapper()
    # the mapper API changed between vtk 5 and 6
    if self.vtk_version[0] <= 5:
        #self.grid_mapper.SetInput(self.grid_selected) ## OLD
        self.grid_mapper.SetInputConnection(self.grid_selected.GetProducerPort())
    else:
        self.grid_mapper.SetInputData(self.grid_selected)

    #if 0:
        #self.warp_filter = vtk.vtkWarpVector()
        #self.warp_filter.SetScaleFactor(50.0)
        #self.warp_filter.SetInput(self.grid_mapper.GetUnstructuredGridOutput())

        #self.geom_filter = vtk.vtkGeometryFilter()
        #self.geom_filter.SetInput(self.warp_filter.GetUnstructuredGridOutput())

        #self.geom_mapper = vtk.vtkPolyDataMapper()
        #self.geom_actor.setMapper(self.geom_mapper)

    #if 0:
        #from vtk.numpy_interface import algorithms
        #arrow = vtk.vtkArrowSource()
        #arrow.PickableOff()

        #self.glyph_transform = vtk.vtkTransform()
        #self.glyph_transform_filter = vtk.vtkTransformPolyDataFilter()
        #self.glyph_transform_filter.SetInputConnection(arrow.GetOutputPort())
        #self.glyph_transform_filter.SetTransform(self.glyph_transform)

        #self.glyph = vtk.vtkGlyph3D()
        #self.glyph.setInput(xxx)
        #self.glyph.SetSource(self.glyph_transform_filter.GetOutput())

        #self.glyph.SetVectorModeToUseVector()
        #self.glyph.SetColorModeToColorByVector()
        #self.glyph.SetScaleModeToScaleByVector()
        #self.glyph.SetScaleFactor(1.0)

        #self.append_filter = vtk.vtkAppendFilter()
        #self.append_filter.AddInputConnection(self.grid.GetOutput())

    #self.warpVector = vtk.vtkWarpVector()
    #self.warpVector.SetInput(self.grid_mapper.GetUnstructuredGridOutput())
    #grid_mapper.SetInput(Filter.GetOutput())

    self.geom_actor = vtk.vtkLODActor()
    self.geom_actor.DragableOff()
    self.geom_actor.SetMapper(self.grid_mapper)

    #geometryActor.AddPosition(2, 0, 2)
    #geometryActor.GetProperty().SetDiffuseColor(0, 0, 1) # blue
    #self.geom_actor.GetProperty().SetDiffuseColor(1, 0, 0) # red

    #if 0:
        #id_filter = vtk.vtkIdFilter()

        #ids = np.array([1, 2, 3], dtype='int32')
        #id_array = numpy_to_vtk(
            #num_array=ids,
            #deep=True,
            #array_type=vtk.VTK_INT,
        #)

        #id_filter.SetCellIds(id_array.GetOutputPort())
        #id_filter.CellIdsOff()
        #self.grid_mapper.SetInputConnection(id_filter.GetOutputPort())
    self.rend.AddActor(self.geom_actor)
    self.build_glyph()
def build_glyph(self):
    """builds the glyph (vector arrow) actor and stores its pipeline objects
    on self; the actor starts hidden"""
    grid = self.grid

    glyphs = vtk.vtkGlyph3D()
    #if filter_small_forces:
        #glyphs.SetRange(0.5, 1.)

    glyphs.SetVectorModeToUseVector()
    #apply_color_to_glyph = False
    #if apply_color_to_glyph:
        #glyphs.SetScaleModeToScaleByScalar()
    glyphs.SetScaleModeToScaleByVector()
    glyphs.SetColorModeToColorByScale()
    #glyphs.SetColorModeToColorByScalar()  # super tiny
    #glyphs.SetColorModeToColorByVector()  # super tiny

    glyphs.ScalingOn()
    glyphs.ClampingOn()
    #glyphs.Update()

    glyph_source = vtk.vtkArrowSource()
    #glyph_source.InvertOn()  # flip this arrow direction

    # the glyph input API changed between vtk 5 and 6
    if self.vtk_version[0] == 5:
        glyphs.SetInput(grid)
    elif self.vtk_version[0] in [6, 7, 8]:
        glyphs.SetInputData(grid)
    else:
        raise NotImplementedError(vtk.VTK_VERSION)

    glyphs.SetSourceConnection(glyph_source.GetOutputPort())
    #glyphs.SetScaleModeToDataScalingOff()
    #glyphs.SetScaleFactor(10.0)  # bwb
    #glyphs.SetScaleFactor(1.0)  # solid-bending

    glyph_mapper = vtk.vtkPolyDataMapper()
    glyph_mapper.SetInputConnection(glyphs.GetOutputPort())
    glyph_mapper.ScalarVisibilityOff()

    arrow_actor = vtk.vtkLODActor()
    arrow_actor.SetMapper(glyph_mapper)

    prop = arrow_actor.GetProperty()
    prop.SetColor(1., 0., 0.)
    self.rend.AddActor(arrow_actor)
    #self.grid.GetPointData().SetActiveVectors(None)

    # the arrows start hidden
    arrow_actor.SetVisibility(False)

    self.glyph_source = glyph_source
    self.glyphs = glyphs
    self.glyph_mapper = glyph_mapper
    self.arrow_actor = arrow_actor
def _add_alt_actors(self, grids_dict, names_to_ignore=None):
if names_to_ignore is None:
names_to_ignore = ['main']
names = set(list(grids_dict.keys()))
names_old = set(list(self.geometry_actors.keys()))
names_old = names_old - set(names_to_ignore)
#print('names_old1 =', names_old)
#names_to_clear = names_old - names
#self._remove_alt_actors(names_to_clear)
#print('names_old2 =', names_old)
#print('names =', names)
for name in names:
#print('adding %s' % name)
grid = grids_dict[name]
self._add_alt_geometry(grid, name)
def _remove_alt_actors(self, names=None):
if names is None:
names = list(self.geometry_actors.keys())
names.remove('main')
for name in names:
actor = self.geometry_actors[name]
self.rend.RemoveActor(actor)
del actor
def _add_alt_geometry(self, grid, name, color=None, line_width=None,
                      opacity=None, representation=None):
    """Adds (or updates) an alternate geometry actor for *grid*.

    NOTE: color, line_width, opacity are ignored if name already exists

    Bug fix: the old code read self.geometry_properties[name].is_pickable
    unconditionally at the top, so a *new* name raised KeyError before the
    AltGeometry-creation branch could ever run.  The properties lookup now
    happens first, and the mapper is only built for brand-new actors.
    """
    # look up (or create) the display properties before anything touches them
    if name in self.geometry_properties:
        geom = self.geometry_properties[name]
    else:
        geom = AltGeometry(self, name, color=color, line_width=line_width,
                           opacity=opacity, representation=representation)
        self.geometry_properties[name] = geom
    is_pickable = geom.is_pickable

    if name in self.geometry_actors:
        # reuse the existing actor; just repoint its mapper at the new grid
        alt_geometry_actor = self.geometry_actors[name]
        if self.vtk_version[0] >= 6:
            alt_geometry_actor.GetMapper().SetInputData(grid)
        else:
            alt_geometry_actor.GetMapper().SetInput(grid)
    else:
        # only build a mapper when we actually need a new actor
        quad_mapper = vtk.vtkDataSetMapper()
        if self.vtk_version[0] >= 6:
            quad_mapper.SetInputData(grid)
        else:
            quad_mapper.SetInput(grid)
        alt_geometry_actor = vtk.vtkActor()
        if not is_pickable:
            alt_geometry_actor.PickableOff()
            alt_geometry_actor.DragableOff()
        alt_geometry_actor.SetMapper(quad_mapper)
        self.geometry_actors[name] = alt_geometry_actor
    #geometryActor.AddPosition(2, 0, 2)

    color = geom.color_float
    opacity = geom.opacity
    point_size = geom.point_size
    representation = geom.representation
    line_width = geom.line_width
    #print('color_2014[%s] = %s' % (name, str(color)))
    assert isinstance(color[0], float), color
    assert color[0] <= 1.0, color

    prop = alt_geometry_actor.GetProperty()
    #prop.SetInterpolationToFlat()     # 0
    #prop.SetInterpolationToGouraud()  # 1
    #prop.SetInterpolationToPhong()    # 2
    prop.SetDiffuseColor(color)
    prop.SetOpacity(opacity)
    #prop.Update()
    if representation == 'point':
        prop.SetRepresentationToPoints()
        prop.SetPointSize(point_size)
    elif representation in ['surface', 'toggle']:
        prop.SetRepresentationToSurface()
        prop.SetLineWidth(line_width)
    elif representation == 'wire':
        prop.SetRepresentationToWireframe()
        prop.SetLineWidth(line_width)

    self.rend.AddActor(alt_geometry_actor)
    vtk.vtkPolyDataMapper().SetResolveCoincidentTopologyToPolygonOffset()
    if geom.is_visible:
        alt_geometry_actor.VisibilityOn()
    else:
        alt_geometry_actor.VisibilityOff()
    #print('current_actors = ', self.geometry_actors.keys())
    # legacy vtk grids expose Update(); newer ones do not
    if hasattr(grid, 'Update'):
        grid.Update()
    alt_geometry_actor.Modified()
def on_update_scalar_bar(self, title, min_value, max_value, data_format):
    """Validates *data_format* and stores the new scalar-bar settings.

    On an invalid format string the error is logged (with the offending
    format interpolated — the old code logged a raw '%r' placeholder) and
    the previous self.data_format is left untouched.
    """
    self.title = str(title)
    self.min_value = float(min_value)
    self.max_value = float(max_value)
    try:
        data_format % 1
    except (TypeError, ValueError):  # narrowed from a bare except
        # %% escapes so the examples survive the interpolation below
        msg = ("failed applying the data formatter format=%r and "
               "should be of the form: '%%i', '%%8f', '%%.2f', '%%e', etc."
               ) % data_format
        self.log_error(msg)
        return
    self.data_format = data_format
    self.log_command('on_update_scalar_bar(%r, %r, %r, %r)' % (
        title, min_value, max_value, data_format))
def ResetCamera(self):
    """Resets the active camera to frame the whole scene."""
    camera = self.GetCamera()
    camera.ResetCamera()
def GetCamera(self):
    """Returns the renderer's active camera."""
    camera = self.rend.GetActiveCamera()
    return camera
def update_camera(self, code):
    """Snaps the camera to one of the six axis-aligned views.

    Parameters
    ----------
    code : str
        one of '+x', '-x', '+y', '-y', '+z', '-z'; anything else is
        logged as an error and ignored
    """
    # code -> (view_up, position); the focal point is always the origin
    views = {
        '+x': ((0., 0., 1.), (1., 0., 0.)),    # +z up, +y right, looking forward
        '-x': ((0., 0., 1.), (-1., 0., 0.)),   # +z up, +y left (right wing), looking aft
        '+y': ((0., 0., 1.), (0., 1., 0.)),    # +z up, view from right wing
        '-y': ((0., 0., 1.), (0., -1., 0.)),   # +z up, view from left wing
        '+z': ((0., 1., 0.), (0., 0., 1.)),    # +x aft, +y up; top view
        '-z': ((0., -1., 0.), (0., 0., -1.)),  # +x aft, -y down; bottom view
    }
    camera = self.GetCamera()
    try:
        view_up, position = views[code]
    except KeyError:
        self.log_error('invalid camera code...%r' % code)
        return
    camera.SetFocalPoint(0., 0., 0.)
    camera.SetViewUp(*view_up)
    camera.SetPosition(*position)
    self._update_camera(camera)
    self.rend.ResetCamera()
    self.log_command('update_camera(%r)' % code)
def _simulate_key_press(self, key):
"""
A little hack method that simulates pressing the key for the VTK
interactor. There is no easy way to instruct VTK to e.g. change mouse
style to 'trackball' (as by pressing 't' key),
(see http://public.kitware.com/pipermail/vtkusers/2011-November/119996.html)
therefore we trick VTK to think that a key has been pressed.
Parameters
----------
key : str
a key that VTK should be informed about, e.g. 't'
"""
print("key_key_press = ", key)
if key == 'f': # change focal point
#print('focal_point!')
return
self.vtk_interactor._Iren.SetEventInformation(0, 0, 0, 0, key, 0, None)
self.vtk_interactor._Iren.KeyPressEvent()
self.vtk_interactor._Iren.CharEvent()
#if key in ['y', 'z', 'X', 'Y', 'Z']:
#self.update_camera(key)
def _set_results(self, form, cases):
assert len(cases) > 0, cases
if isinstance(cases, OrderedDict):
self.case_keys = list(cases.keys())
else:
self.case_keys = sorted(cases.keys())
assert isinstance(cases, dict), type(cases)
self.result_cases = cases
if len(self.case_keys) > 1:
self.icase = -1
self.ncases = len(self.result_cases) # number of keys in dictionary
elif len(self.case_keys) == 1:
self.icase = -1
self.ncases = 1
else:
self.icase = -1
self.ncases = 0
self.set_form(form)
def _finish_results_io2(self, form, cases, reset_labels=True):
    """
    Adds results to the Sidebar

    Parameters
    ----------
    form : List[pairs]
        There are two types of pairs
        header_pair : (str, None, List[pair])
            defines a heading
            str : the sidebar label
            None : flag that there are sub-results
            List[pair] : more header/result pairs
        result_pair : (str, int, List[])
            str : the sidebar label
            int : the case id
            List[] : flag that there are no sub-results
    cases : dict[case_id] = result
        case_id : int
            the case id
        result : GuiResult
            the class that stores the result
    reset_labels : bool; default=True
        should the label actors be reset

    form = [
        'Model', None, [
            ['NodeID', 0, []],
            ['ElementID', 1, []]
            ['PropertyID', 2, []]
        ],
        'time=0.0', None, [
            ['Stress', 3, []],
            ['Displacement', 4, []]
        ],
        'time=1.0', None, [
            ['Stress', 5, []],
            ['Displacement', 6, []]
        ],
    ]
    cases = {
        0 : GuiResult(...),  # NodeID
        1 : GuiResult(...),  # ElementID
        2 : GuiResult(...),  # PropertyID
        3 : GuiResult(...),  # Stress; t=0.0
        4 : GuiResult(...),  # Displacement; t=0.0
        5 : GuiResult(...),  # Stress; t=1.0
        6 : GuiResult(...),  # Displacement; t=1.0
    }
    case_keys = [0, 1, 2, 3, 4, 5, 6]
    """
    self.turn_text_on()
    self._set_results(form, cases)
    # assert len(cases) > 0, cases
    # if isinstance(cases, OrderedDict):
    #     self.case_keys = cases.keys()
    # else:
    #     self.case_keys = sorted(cases.keys())
    # assert isinstance(cases, dict), type(cases)
    self.on_update_geometry_properties(self.geometry_properties, write_log=False)
    # self.result_cases = cases
    #print("cases =", cases)
    #print("case_keys =", self.case_keys)
    self.reset_labels(reset_minus1=reset_labels)
    self.cycle_results_explicit()  # start at nCase=0
    if self.ncases:
        self.scalarBar.VisibilityOn()
        self.scalarBar.Modified()
    #data = [
    #    ('A', []),
    #    ('B', []),
    #    ('C', []),
    #]
    # NOTE(review): `data` built below is never used; update_results is
    # called with `form` directly — confirm whether this loop (besides
    # its isinstance asserts) is vestigial
    data = []
    for key in self.case_keys:
        assert isinstance(key, integer_types), key
        obj, (i, name) = self.result_cases[key]
        t = (i, [])
        data.append(t)
    self.res_widget.update_results(form, self.name)
    # pick the 'method' label for the first case
    key = self.case_keys[0]
    location = self.get_case_location(key)
    method = 'centroid' if location else 'nodal'
    data2 = [(method, None, [])]
    self.res_widget.update_methods(data2)
    if self.is_groups:
        if self.element_ids is None:
            raise RuntimeError('implement self.element_ids for this format')
        #eids = np.arange(172)
        #eids = []
        #self.hide_elements_mask(eids)
        elements_pound = self.element_ids[-1]
        main_group = Group(
            'main', '', elements_pound,
            editable=False)
        main_group.element_ids = self.element_ids
        self.groups['main'] = main_group
        self.post_group(main_group)
        #self.show_elements_mask(np.arange(self.nelements))
def get_result_by_cell_id(self, cell_id, world_position, icase=None):
    """should handle multiple cell_ids

    Looks up the current (or given) case's value for *cell_id* and picks
    a representative xyz: the node-average for shells/line elements, or
    the picked world position for solids.

    Returns
    -------
    (result_name, result_values, xyz)

    Raises
    ------
    IndexError
        when cell_id is outside the case array
    NotImplementedError
        for unhandled VTK cell types
    """
    if icase is None:
        icase = self.icase
    case_key = self.case_keys[icase]  # int for object
    result_name = self.result_name
    case = self.result_cases[case_key]
    (obj, (i, res_name)) = case
    subcase_id = obj.subcase_id
    case = obj.get_result(i, res_name)
    try:
        result_values = case[cell_id]
    except IndexError:
        msg = ('case[cell_id] is out of bounds; length=%s\n'
               'result_name=%r cell_id=%r case_key=%r\n' % (
                   len(case), result_name, cell_id, case_key))
        raise IndexError(msg)

    cell = self.grid_selected.GetCell(cell_id)
    nnodes = cell.GetNumberOfPoints()
    points = cell.GetPoints()
    cell_type = cell.GetCellType()

    if cell_type in [5, 9, 22, 23, 28]:  # CTRIA3, CQUAD4, CTRIA6, CQUAD8, CQUAD
        # shells: average of the cell's nodes
        node_xyz = np.zeros((nnodes, 3), dtype='float32')
        for ipoint in range(nnodes):
            point = points.GetPoint(ipoint)
            node_xyz[ipoint, :] = point
        xyz = node_xyz.mean(axis=0)
    elif cell_type in [10, 12, 13, 14]:  # CTETRA4, CHEXA8, CPENTA6, CPYRAM5
        # TODO: No idea how to get the center of the face
        #       vs. a point on a face that's not exposed
        #faces = cell.GetFaces()
        #nfaces = cell.GetNumberOfFaces()
        #for iface in range(nfaces):
        #    face = cell.GetFace(iface)
        #    points = face.GetPoints()
        #faces
        xyz = world_position
    elif cell_type in [24, 25, 26, 27]:  # CTETRA10, CHEXA20, CPENTA15, CPYRAM13
        xyz = world_position
    elif cell_type in [3]:  # CBAR, CBEAM, CELASx, CDAMPx, CBUSHx
        node_xyz = np.zeros((nnodes, 3), dtype='float32')
        for ipoint in range(nnodes):
            point = points.GetPoint(ipoint)
            node_xyz[ipoint, :] = point
        xyz = node_xyz.mean(axis=0)
    elif cell_type in [21]:  # CBEND
        # 21-QuadraticEdge
        node_xyz = np.zeros((nnodes, 3), dtype='float32')
        for ipoint in range(nnodes):
            point = points.GetPoint(ipoint)
            node_xyz[ipoint, :] = point
        xyz = node_xyz.mean(axis=0)
    else:
        #self.log.error(msg)
        msg = 'cell_type=%s nnodes=%s; icase=%s result_values=%s' % (
            cell_type, nnodes, icase, result_values)
        self.log.error(msg)
        #VTK_LINE = 3
        #VTK_TRIANGLE = 5
        #VTK_QUADRATIC_TRIANGLE = 22
        #VTK_QUAD = 9
        #VTK_QUADRATIC_QUAD = 23
        #VTK_TETRA = 10
        #VTK_QUADRATIC_TETRA = 24
        #VTK_WEDGE = 13
        #VTK_QUADRATIC_WEDGE = 26
        #VTK_HEXAHEDRON = 12
        #VTK_QUADRATIC_HEXAHEDRON = 25
        #VTK_PYRAMID = 14
        #VTK_QUADRATIC_PYRAMID = 27
        raise NotImplementedError(msg)
    return result_name, result_values, xyz
def cell_centroid(self, cell_id):
    """gets the cell centroid (float32 xyz average of the cell's points)"""
    cell = self.grid_selected.GetCell(cell_id)
    points = cell.GetPoints()
    npoints = cell.GetNumberOfPoints()
    total = np.zeros(3, dtype='float32')
    for i in range(npoints):
        total += np.array(points.GetPoint(i), dtype='float32')
    return total / npoints
def get_result_by_xyz_cell_id(self, node_xyz, cell_id):
    """won't handle multiple cell_ids/node_xyz

    Finds the node of *cell_id* closest to *node_xyz* (argmin over
    squared distance — no sqrt needed for an argmin) and returns that
    node's value from the current case.

    Returns
    -------
    (result_name, result_values, node_id, xyz)
    """
    case_key = self.case_keys[self.icase]
    result_name = self.result_name

    cell = self.grid_selected.GetCell(cell_id)
    nnodes = cell.GetNumberOfPoints()
    points = cell.GetPoints()
    #node_xyz = array(node_xyz, dtype='float32')
    #point0 = array(points.GetPoint(0), dtype='float32')
    #dist_min = norm(point0 - node_xyz)
    point0 = points.GetPoint(0)
    dist_min = vtk.vtkMath.Distance2BetweenPoints(point0, node_xyz)
    point_min = point0
    imin = 0
    for ipoint in range(1, nnodes):
        #point = array(points.GetPoint(ipoint), dtype='float32')
        #dist = norm(point - node_xyz)
        point = points.GetPoint(ipoint)
        dist = vtk.vtkMath.Distance2BetweenPoints(point, node_xyz)
        if dist < dist_min:
            dist_min = dist
            imin = ipoint
            point_min = point
    # map the cell-local point index back to the grid's node id
    node_id = cell.GetPointId(imin)
    xyz = np.array(point_min, dtype='float32')
    case = self.result_cases[case_key]
    assert isinstance(case_key, integer_types), case_key
    (obj, (i, res_name)) = case
    subcase_id = obj.subcase_id
    case = obj.get_result(i, res_name)
    result_values = case[node_id]
    assert not isinstance(xyz, int), xyz
    return result_name, result_values, node_id, xyz
@property
def result_name(self):
    """The name of the currently-displayed result.

    Looks up self.result_cases[self.case_keys[self.icase]], which holds
    (obj, (i, name)), and returns the name.
    """
    # case keys are plain ints, e.g. cases[1] -> ('ElementID', ...)
    key = self.case_keys[self.icase]
    assert isinstance(key, integer_types), key
    unused_obj, (unused_i, name) = self.result_cases[key]
    return name
def finish_io(self, cases):
    """Legacy results loader: stores *cases*, cycles to the first result,
    and shows the scalar bar when there is anything to show."""
    self.result_cases = cases
    self.case_keys = sorted(cases.keys())
    #print("case_keys = ", self.case_keys)
    ncases = len(cases)
    if ncases <= 1:
        # note: an empty case dict still reports ncases=1 (legacy behavior)
        self.ncases = 1
        self.icase = 0
    else:
        self.ncases = ncases - 1  # number of keys in dictionary
        self.icase = -1
    self.cycle_results()  # start at nCase=0
    if self.ncases:
        self.scalarBar.VisibilityOn()
        self.scalarBar.Modified()
def _finish_results_io(self, cases):
self.result_cases = cases
self.case_keys = sorted(cases.keys())
if len(self.case_keys) > 1:
self.icase = -1
self.ncases = len(self.result_cases) # number of keys in dictionary
elif len(self.case_keys) == 1:
self.icase = -1
self.ncases = 1
else:
self.icase = -1
self.ncases = 0
self.reset_labels()
self.cycle_results_explicit() # start at nCase=0
if self.ncases:
self.scalarBar.VisibilityOn()
self.scalarBar.Modified()
#data = [
# ('A',[]),
# ('B',[]),
# ('C',[]),
#]
#self.case_keys = [
# (1, 'ElementID', 1, 'centroid', '%.0f'), (1, 'Region', 1, 'centroid', '%.0f')
#]
data = []
for i, key in enumerate(self.case_keys):
t = (key[1], i, [])
data.append(t)
i += 1
self.res_widget.update_results(data)
data2 = [('node/centroid', None, [])]
self.res_widget.update_methods(data2)
def clear_application_log(self, force=False):
    """
    Clears the application log

    Parameters
    ----------
    force : bool; default=False
        clears the dialog without asking
    """
    if not force:
        # popup menu: confirm with the user first
        widget = QWidget()
        title = 'Clear Application Log'
        msg = 'Are you sure you want to clear the Application Log?'
        result = QMessageBox.question(widget, title, msg,
                                      QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if result != QMessageBox.Yes:
            return
    self.log_widget.clear()
    self.log_command('clear_application_log(force=%s)' % force)
def delete_actor(self, name):
    """deletes an actor and associated properties; 'main' is never deleted"""
    if name == 'main':
        return
    actor = self.geometry_actors.pop(name, None)
    if actor is not None:
        self.rend.RemoveActor(actor)
    self.geometry_properties.pop(name, None)
    self.Render()
def reset_labels(self, reset_minus1=True):
    """
    Wipe all labels and regenerate the key slots based on the case keys.
    This is used when changing the model.

    NOTE(review): the parameter is immediately overridden below
    (reset_minus1 = True), so the False branch is currently dead code —
    confirm whether the override is an intentional workaround before
    removing it.
    """
    self._remove_labels()
    reset_minus1 = True  # forces the 'new geometry' path; see NOTE above
    # new geometry
    if reset_minus1:
        self.label_actors = {-1 : []}
    else:
        for idi in self.label_actors:
            if idi == -1:
                continue
            self.label_actors[idi] = []
    self.label_ids = {}

    #self.case_keys = [
    #    (1, 'ElementID', 1, 'centroid', '%.0f'),
    #    (1, 'Region', 1, 'centroid', '%.0f')
    #]
    # one empty slot per case
    for icase in self.case_keys:
        #result_name = self.get_result_name(icase)
        self.label_actors[icase] = []
        self.label_ids[icase] = set([])
    #print(self.label_actors)
    #print(self.label_ids)
def _remove_labels(self):
"""
Remove all labels from the current result case.
This happens when the user explictly selects the clear label button.
"""
if len(self.label_actors) == 0:
self.log.warning('No actors to remove')
return
# existing geometry
for icase, actors in iteritems(self.label_actors):
if icase == -1:
continue
for actor in actors:
self.rend.RemoveActor(actor)
del actor
self.label_actors[icase] = []
self.label_ids[icase] = set([])
def clear_labels(self):
    """
    This clears out all labels from all result cases.
    (In practice: only the labels of the active case, self.icase.)
    """
    if not self.label_actors:
        self.log.warning('No actors to clear')
        return
    # existing geometry
    #icase = self.case_keys[self.icase]
    icase = self.icase
    # property read kept: result_name asserts the case bookkeeping
    result_name = self.result_name
    for actor in self.label_actors[icase]:
        self.rend.RemoveActor(actor)
    self.label_actors[icase] = []
    self.label_ids[icase] = set([])
def resize_labels(self, case_keys=None, show_msg=True):
    """
    This resizes labels for all result cases.

    TODO: not done...
    NOTE(review): the body currently mirrors hide_labels() — it calls
    VisibilityOff() rather than resizing anything, and `names` keeps
    literal '%s' placeholders that are never interpolated; confirm the
    intended behavior before relying on this method.
    """
    if case_keys is None:
        names = 'None) # None -> all'
        case_keys = sorted(self.label_actors.keys())
    else:
        mid = '%s,' * len(case_keys)
        names = '[' + mid[:-1] + '])'

    count = 0
    for icase in case_keys:
        actors = self.label_actors[icase]
        for actor in actors:
            actor.VisibilityOff()
            count += 1
    if count and show_msg:
        self.log_command('resize_labels(%s)' % names)
def hide_labels(self, case_keys=None, show_msg=True):
if case_keys is None:
names = 'None) # None -> all'
case_keys = sorted(self.label_actors.keys())
else:
mid = '%s,' * len(case_keys)
names = '[' + mid[:-1] + '])'
count = 0
for icase in case_keys:
actors = self.label_actors[icase]
for actor in actors:
actor.VisibilityOff()
#prop = actor.GetProperty()
count += 1
if count and show_msg:
self.log_command('hide_labels(%s)' % names)
def show_labels(self, case_keys=None, show_msg=True):
if case_keys is None:
names = 'None) # None -> all'
case_keys = sorted(self.label_actors.keys())
else:
mid = '%s,' * len(case_keys)
names = mid[:-1] % case_keys + ')'
count = 0
for icase in case_keys:
try:
actors = self.label_actors[icase]
except KeyError:
msg = 'Cant find label_actors for icase=%r; keys=%s' % (
icase, self.label_actors.keys())
self.log.error(msg)
continue
for actor in actors:
actor.VisibilityOn()
count += 1
if count and show_msg:
# yes the ) is intentionally left off because it's already been added
self.log_command('show_labels(%s)' % names)
def update_scalar_bar(self, title, min_value, max_value, norm_value,
                      data_format,
                      nlabels=None, labelsize=None,
                      ncolors=None, colormap='jet',
                      is_low_to_high=True, is_horizontal=True,
                      is_shown=True):
    """
    Updates the Scalar Bar (pure pass-through to self.scalar_bar.update)

    Parameters
    ----------
    title : str
        the scalar bar title
    min_value : float
        the blue value
    max_value :
        the red value
    norm_value : float
        the normalization span (max - min)
    data_format : str
        '%g','%f','%i', etc.
    nlabels : int (default=None -> auto)
        the number of labels
    labelsize : int (default=None -> auto)
        the label size
    ncolors : int (default=None -> auto)
        the number of colors
    colormap : varies
        str :
            the name
        ndarray : (N, 3) float ndarry
            red-green-blue array
    is_low_to_high : bool; default=True
        flips the order of the RGB points
    is_horizontal : bool; default=True
        makes the scalar bar horizontal
    is_shown : bool
        show the scalar bar
    """
    #print("update_scalar_bar min=%s max=%s norm=%s" % (min_value, max_value, norm_value))
    self.scalar_bar.update(title, min_value, max_value, norm_value, data_format,
                           nlabels=nlabels, labelsize=labelsize,
                           ncolors=ncolors, colormap=colormap,
                           is_low_to_high=is_low_to_high, is_horizontal=is_horizontal,
                           is_shown=is_shown)
#---------------------------------------------------------------------------------------
# CAMERA MENU
def view_camera(self):
    """Opens the camera menu (delegates to the module-level set_camera_menu)."""
    set_camera_menu(self)
#def _apply_camera(self, data):
#name = data['name']
#self.cameras = deepcopy(data['cameras'])
#self.on_set_camera(name)
def on_set_camera(self, name, show_log=True):
    """Applies a previously saved camera (by name) from self.cameras."""
    #position, clip_range, focal_point, view_up, distance = camera_data
    camera_data = self.cameras[name]
    self.on_set_camera_data(camera_data, show_log=show_log)
def get_camera_data(self):
    """Packs the active camera's state into a list.

    Layout: [position, focal_point, view_angle, view_up, clip_range,
             parallel_scale, parallel_proj, distance]
    (the same order on_set_camera_data unpacks).
    """
    camera = self.rend.GetActiveCamera()
    # TODO: are clip_range / parallel_scale actually needed? (carried over)
    # parallel_proj is a placeholder constant; GetParallelProjection is
    # not actually queried
    parallel_proj = 32.
    return [
        camera.GetPosition(),
        camera.GetFocalPoint(),
        camera.GetViewAngle(),
        camera.GetViewUp(),
        camera.GetClippingRange(),
        camera.GetParallelScale(),
        parallel_proj,
        camera.GetDistance(),
    ]
def on_set_camera_data(self, camera_data, show_log=True):
    """
    Sets the current camera from a packed state list.

    camera_data layout
    ------------------
    position : (float, float, float)
        where am I is xyz space
    focal_point : (float, float, float)
        where am I looking
    view_angle : float
        field of view (angle); perspective only?
    view_up : (float, float, float)
        up on the screen vector
    clip_range : (float, float)
        start/end distance from camera where clipping starts
    parallel_scale : float
        ???
    parallel_proj : bool (0/1)
        flag?  TODO: not used
    distance : float
        distance to the camera
    """
    (position, focal_point, view_angle, view_up, clip_range,
     parallel_scale, parallel_proj, distance) = camera_data
    camera = self.rend.GetActiveCamera()
    # apply each piece of state in the same order as before
    setters = (
        (camera.SetPosition, position),
        (camera.SetFocalPoint, focal_point),
        (camera.SetViewAngle, view_angle),
        (camera.SetViewUp, view_up),
        (camera.SetClippingRange, clip_range),
        (camera.SetParallelScale, parallel_scale),
        (camera.SetDistance, distance),
    )
    for setter, value in setters:
        setter(value)
    #parallel_proj is deliberately not applied
    camera.Modified()
    self.vtk_interactor.Render()
    if show_log:
        self.log_command(
            'on_set_camera_data([%s, %s, %s, %s, %s, %s, %s, %s])'
            % (position, focal_point, view_angle, view_up,
               clip_range, parallel_scale, parallel_proj, distance))
#---------------------------------------------------------------------------------------
# PICKER
@property
def node_picker_size(self):
    """Gets the node picker size"""
    # NOTE(review): this is the vtk picker tolerance — presumably a
    # fraction of the render-window size; confirm against the picker docs
    return self.node_picker.GetTolerance()

@node_picker_size.setter
def node_picker_size(self, size):
    """Sets the node picker size (must be >= 0)"""
    assert size >= 0., size
    self.node_picker.SetTolerance(size)
@property
def element_picker_size(self):
    """Gets the element picker size"""
    # NOTE(review): vtk cell-picker tolerance; same units as
    # node_picker_size — confirm against the picker docs
    return self.cell_picker.GetTolerance()

@element_picker_size.setter
def element_picker_size(self, size):
    """Sets the element picker size (must be >= 0)"""
    assert size >= 0., size
    self.cell_picker.SetTolerance(size)
#---------------------------------------------------------------------------------------
def set_preferences_menu(self):
    """
    Opens a dialog box to set:

    +--------+----------+
    |  Min   |  Float   |
    +--------+----------+

    Delegates to the module-level set_preferences_menu(self) — inside the
    method body the bare name resolves to the module-level function, not
    this method.
    """
    set_preferences_menu(self)
#---------------------------------------------------------------------------------------
# CLIPPING MENU
def set_clipping(self):
    """
    Opens a dialog box to set:

    +--------+----------+
    |  Min   |  Float   |
    +--------+----------+
    |  Max   |  Float   |
    +--------+----------+

    Delegates to the module-level set_clipping_menu(self).
    """
    set_clipping_menu(self)
def _apply_clipping(self, data):
min_clip = data['clipping_min']
max_clip = data['clipping_max']
self.on_update_clipping(min_clip, max_clip)
def on_update_clipping(self, min_clip=None, max_clip=None):
    """Sets the camera clipping range.

    Unspecified bounds keep their current camera value.

    Bug fix: the logged command previously said 'min_value=%s', which is
    not a kwarg of this method, so the logged command could not be
    replayed.
    """
    camera = self.GetCamera()
    _min_clip, _max_clip = camera.GetClippingRange()
    if min_clip is None:
        min_clip = _min_clip
    if max_clip is None:
        max_clip = _max_clip
    camera.SetClippingRange(min_clip, max_clip)
    self.log_command('self.on_update_clipping(min_clip=%s, max_clip=%s)'
                     % (min_clip, max_clip))
#---------------------------------------------------------------------------------------
def on_set_anti_aliasing(self, scale=0):
    """Enables line/polygon/point smoothing and sets the multisample count.

    Parameters
    ----------
    scale : int; default=0
        the number of multisamples (0 disables MSAA)
    """
    assert isinstance(scale, int), 'scale=%r; type=%r' % (scale, type(scale))
    renwin = self.render_window
    for turn_on in (renwin.LineSmoothingOn,
                    renwin.PolygonSmoothingOn,
                    renwin.PointSmoothingOn):
        turn_on()
    renwin.SetMultiSamples(scale)
    self.vtk_interactor.Render()
    self.log_command('on_set_anti_aliasing(%r)' % (scale))
#---------------------------------------------------------------------------------------
# LEGEND MENU
def set_legend(self):
    """
    Opens a dialog box to set:

    +--------+----------+
    |  Name  |  String  |
    +--------+----------+
    |  Min   |  Float   |
    +--------+----------+
    |  Max   |  Float   |
    +--------+----------+
    | Format | pyString |
    +--------+----------+

    Delegates to the module-level set_legend_menu(self).
    """
    set_legend_menu(self)
def update_legend(self, icase, name, min_value, max_value, data_format, scale, phase,
                  nlabels, labelsize, ncolors, colormap,
                  is_low_to_high, is_horizontal_scalar_bar):
    """Pushes the current and default legend settings for *icase* to the
    legend dialog; a no-op while the dialog is not shown.

    Note: the *name* parameter is shadowed by the result name unpacked
    from result_cases below.
    """
    if not self._legend_window_shown:
        return
    self._legend_window._updated_legend = True

    key = self.case_keys[icase]
    assert isinstance(key, integer_types), key
    (obj, (i, name)) = self.result_cases[key]
    #subcase_id = obj.subcase_id
    #case = obj.get_result(i, name)
    #result_type = obj.get_title(i, name)
    #vector_size = obj.get_vector_size(i, name)
    #location = obj.get_location(i, name)
    #data_format = obj.get_data_format(i, name)
    #scale = obj.get_scale(i, name)
    #label2 = obj.get_header(i, name)
    # defaults come from the result object so the dialog can offer a reset
    default_data_format = obj.get_default_data_format(i, name)
    default_min, default_max = obj.get_default_min_max(i, name)
    default_scale = obj.get_default_scale(i, name)
    default_title = obj.get_default_title(i, name)
    default_phase = obj.get_default_phase(i, name)
    out_labels = obj.get_default_nlabels_labelsize_ncolors_colormap(i, name)
    default_nlabels, default_labelsize, default_ncolors, default_colormap = out_labels
    is_normals = obj.is_normal_result(i, name)
    assert isinstance(scale, float), 'scale=%s' % scale
    self._legend_window.update_legend(
        icase,
        name, min_value, max_value, data_format, scale, phase,
        nlabels, labelsize,
        ncolors, colormap,
        default_title, default_min, default_max, default_data_format,
        default_scale, default_phase,
        default_nlabels, default_labelsize,
        default_ncolors, default_colormap,
        is_low_to_high, is_horizontal_scalar_bar, is_normals, font_size=self.font_size)
    #self.scalar_bar.set_visibility(self._legend_shown)
    #self.vtk_interactor.Render()
def _apply_legend(self, data):
title = data['name']
min_value = data['min']
max_value = data['max']
scale = data['scale']
phase = data['phase']
data_format = data['format']
is_low_to_high = data['is_low_to_high']
is_discrete = data['is_discrete']
is_horizontal = data['is_horizontal']
is_shown = data['is_shown']
nlabels = data['nlabels']
labelsize = data['labelsize']
ncolors = data['ncolors']
colormap = data['colormap']
#print('is_shown1 =', is_shown)
self.on_update_legend(title=title, min_value=min_value, max_value=max_value,
scale=scale, phase=phase, data_format=data_format,
is_low_to_high=is_low_to_high,
is_discrete=is_discrete, is_horizontal=is_horizontal,
nlabels=nlabels, labelsize=labelsize,
ncolors=ncolors, colormap=colormap,
is_shown=is_shown)
def on_update_legend(self, title='Title', min_value=0., max_value=1., scale=0.0,
                     phase=0.0,
                     data_format='%.0f',
                     is_low_to_high=True, is_discrete=True, is_horizontal=True,
                     nlabels=None, labelsize=None, ncolors=None, colormap='jet',
                     is_shown=True):
    """
    Updates the legend/model

    Parameters
    ----------
    scale : float
        displacemnt scale factor; true scale

    Vector (3-component) results take the update_3d early-return path;
    scalar results fall through to the grid/scalar-bar update below.
    """
    #print('is_shown2 =', is_shown)
    #assert is_shown == False, is_shown
    key = self.case_keys[self.icase]
    name_vector = None
    plot_value = self.result_cases[key]  # scalar
    vector_size1 = 1
    update_3d = False
    assert isinstance(key, integer_types), key
    (obj, (i, res_name)) = self.result_cases[key]
    subcase_id = obj.subcase_id
    #print('plot_value =', plot_value)

    result_type = obj.get_title(i, res_name)
    vector_size = obj.get_vector_size(i, res_name)
    if vector_size == 3:
        # vector result: store scale/phase on the result object and take
        # the update_3d path below
        plot_value = obj.get_plot_value(i, res_name)  # vector
        update_3d = True
        #print('setting scale=%s' % scale)
        assert isinstance(scale, float), scale
        obj.set_scale(i, res_name, scale)
        obj.set_phase(i, res_name, phase)
    else:
        scalar_result = obj.get_scalar(i, res_name)
        location = obj.get_location(i, res_name)
    obj.set_min_max(i, res_name, min_value, max_value)
    obj.set_data_format(i, res_name, data_format)
    obj.set_nlabels_labelsize_ncolors_colormap(
        i, res_name, nlabels, labelsize, ncolors, colormap)
    #data_format = obj.get_data_format(i, res_name)
    #obj.set_format(i, res_name, data_format)
    #obj.set_data_format(i, res_name, data_format)
    subtitle, label = self.get_subtitle_label(subcase_id)
    name_vector = (vector_size1, subcase_id, result_type, label,
                   min_value, max_value, scale)
    assert vector_size1 == 1, vector_size1

    #if isinstance(key, integer_types):  # vector 3
    #    norm_plot_value = norm(plot_value, axis=1)
    #    min_value = norm_plot_value.min()
    #    max_value = norm_plot_value.max()
    #    print('norm_plot_value =', norm_plot_value)

    if update_3d:
        # vector path: re-display the case and stop; the scalar-bar /
        # grid updates below only apply to scalar results
        self.is_horizontal_scalar_bar = is_horizontal
        self._set_case(self.result_name, self.icase,
                       explicit=False, cycle=False, skip_click_check=True,
                       min_value=min_value, max_value=max_value,
                       is_legend_shown=is_shown)
        return

    subtitle, label = self.get_subtitle_label(subcase_id)
    scale1 = 0.0
    # if vector_size == 3:
    name = (vector_size1, subcase_id, result_type, label, min_value, max_value, scale1)
    if obj.is_normal_result(i, res_name):
        return
    norm_value = float(max_value - min_value)
    # if name not in self._loaded_names:
    #if isinstance(key, integer_types):  # vector 3
    #    norm_plot_value = norm(plot_value, axis=1)
    #    grid_result = self.set_grid_values(name, norm_plot_value, vector_size1,
    #                                       min_value, max_value, norm_value,
    #                                       is_low_to_high=is_low_to_high)
    #else:
    grid_result = self.set_grid_values(name, scalar_result, vector_size1,
                                       min_value, max_value, norm_value,
                                       is_low_to_high=is_low_to_high)

    grid_result_vector = None
    #if name_vector and 0:
    #    vector_size = 3
    #    grid_result_vector = self.set_grid_values(name_vector, plot_value, vector_size,
    #                                              min_value, max_value, norm_value,
    #                                              is_low_to_high=is_low_to_high)

    self.update_scalar_bar(title, min_value, max_value, norm_value,
                           data_format,
                           nlabels=nlabels, labelsize=labelsize,
                           ncolors=ncolors, colormap=colormap,
                           is_low_to_high=is_low_to_high,
                           is_horizontal=is_horizontal, is_shown=is_shown)

    revert_displaced = True
    self._final_grid_update(name, grid_result, None, None, None,
                            1, subcase_id, result_type, location, subtitle, label,
                            revert_displaced=revert_displaced)
    if grid_result_vector is not None:
        # currently unreachable: grid_result_vector is always None (the
        # vector branch above is commented out)
        self._final_grid_update(name_vector, grid_result_vector, obj, i, res_name,
                                vector_size, subcase_id, result_type, location, subtitle, label,
                                revert_displaced=False)
        #if 0:
        #    xyz_nominal, vector_data = obj.get_vector_result(i, res_name)
        #    self._update_grid(vector_data)
        #    self.grid.Modified()
        #    self.geom_actor.Modified()
        #    self.vtk_interactor.Render()
        #revert_displaced = False
        #self._final_grid_update(name, grid_result, None, None, None,
        #                        1, subcase_id, result_type, location, subtitle, label,
        #                        revert_displaced=revert_displaced)
    #self.is_horizontal_scalar_bar = is_horizontal
    icase = i
    msg = ('self.on_update_legend(title=%r, min_value=%s, max_value=%s,\n'
           '                      scale=%r, phase=%r,\n'
           '                      data_format=%r, is_low_to_high=%s, is_discrete=%s,\n'
           '                      nlabels=%r, labelsize=%r, ncolors=%r, colormap=%r,\n'
           '                      is_horizontal=%r, is_shown=%r)'
           % (title, min_value, max_value, scale, phase,
              data_format, is_low_to_high, is_discrete,
              nlabels, labelsize, ncolors, colormap, is_horizontal, is_shown))
    self.log_command(msg)
    #if is_shown:
    #    pass
#---------------------------------------------------------------------------------------
# WingWindow
def on_add_menu(self, text):
    """Logs the add-menu request, then opens the wing window."""
    msg = 'on_add_menu(text=%r)' % text
    self.log_info(msg)
    self.on_wing_window()
def on_wing_window(self):
    """Opens (or refocuses) the WingWindow geometry-edit dialog.

    NOTE(review): `data` is a locally-built dict, so the
    'clicked_ok'/'clicked_cancel' checks at the bottom only make sense if
    WingWindow mutates this dict in place — confirm against WingWindow
    before relying on that tail logic.
    """
    # lazily initialize the shown-flag on first use
    if not hasattr(self, '_edit_geometry_window_shown'):
        self._edit_geometry_window_shown = False
    #data = deepcopy(self.geometry_properties)
    transform = {
        'xyz' : [0., 0., 0.],
        'is_absolute' : False,
        'rot origin(X)' : 0.,
    }
    symmetry = {}
    # default wing-geometry parameters fed to the dialog
    data = {
        'name' : 'WingGeom',
        'color' : (0, 0, 255),
        #'font_size' : 8,
        'num_U' : 16,
        'num_W' : 33,

        'Density' : 1.0,
        'Thin Shell' : False,
        'Mass/Area' : 1.0,
        #'Priority' : 0,
        'Negative Volume' : False,
        'transform' : transform,
        'symmetry' : symmetry,
    }
    data['font_size'] = self.font_size
    if not self._edit_geometry_window_shown:
        # modal: exec_() blocks until the dialog closes
        self._edit_geometry = WingWindow(data, win_parent=self)
        self._edit_geometry.show()
        self._edit_geometry_window_shown = True
        self._edit_geometry.exec_()
    else:
        self._edit_geometry.activateWindow()

    if 'clicked_ok' not in data:
        self._edit_geometry.activateWindow()
        return

    if data['clicked_ok']:
        #self.on_update_geometry_properties(data)
        #self._save_geometry_properties(data)
        del self._edit_geometry
        self._edit_geometry_window_shown = False
    elif data['clicked_cancel']:
        #self.on_update_geometry_properties(self.geometry_properties)
        del self._edit_geometry
        self._edit_geometry_window_shown = False
#---------------------------------------------------------------------------------------
# EDIT ACTOR PROPERTIES
def edit_geometry_properties(self):
    """
    Opens the "Edit Geometry Properties" dialog for the secondary
    geometry actors stored in ``self.geometry_properties``, then
    applies/saves the result once the user clicks OK or Cancel.
    """
    if not hasattr(self, 'case_keys'):
        self.log_error('No model has been loaded.')
        return
    if not len(self.geometry_properties):
        self.log_error('No secondary geometries to edit.')
        return
    #print('geometry_properties.keys() =', self.geometry_properties.keys())
    #key = self.case_keys[self.icase]
    #case = self.result_cases[key]

    # work on a copy so Cancel can revert to self.geometry_properties
    data = deepcopy(self.geometry_properties)
    data['font_size'] = self.font_size
    if not self._edit_geometry_properties_window_shown:
        # first open: modal dialog; exec_() blocks until it closes
        self._edit_geometry_properties = EditGeometryProperties(data, win_parent=self)
        self._edit_geometry_properties.show()
        self._edit_geometry_properties_window_shown = True
        self._edit_geometry_properties.exec_()
    else:
        # already open: just bring it to the front
        self._edit_geometry_properties.activateWindow()
    if 'clicked_ok' not in data:
        # dialog is still open; neither OK nor Cancel was pressed yet
        self._edit_geometry_properties.activateWindow()
        return

    if data['clicked_ok']:
        self.on_update_geometry_properties(data)
        self._save_geometry_properties(data)
        del self._edit_geometry_properties
        self._edit_geometry_properties_window_shown = False
    elif data['clicked_cancel']:
        # revert the actors to the stored (pre-dialog) properties
        self.on_update_geometry_properties(self.geometry_properties)
        del self._edit_geometry_properties
        self._edit_geometry_properties_window_shown = False
def _save_geometry_properties(self, out_data):
    """Stores the dialog's updated actor properties back onto self.geometry_properties."""
    for name, group in iteritems(out_data):
        # skip the dialog's bookkeeping keys
        if name in ['clicked_ok', 'clicked_cancel']:
            continue

        if name not in self.geometry_properties:
            # we've deleted the actor
            continue

        geom_prop = self.geometry_properties[name]
        if isinstance(geom_prop, CoordProperties):
            # coordinate systems carry no color/width/opacity to save
            pass
        elif isinstance(geom_prop, AltGeometry):
            geom_prop.color = group.color
            geom_prop.line_width = group.line_width
            geom_prop.opacity = group.opacity
            geom_prop.point_size = group.point_size
        else:
            raise NotImplementedError(geom_prop)
def on_update_geometry_properties_override_dialog(self, geometry_properties):
    """
    Update the geometry properties and overwrite the options in the
    edit geometry properties dialog if it is open.

    Parameters
    ----------
    geometry_properties : dict {str : CoordProperties or AltGeometry}
        Dictionary from name to properties object. Only the names included in
        ``geometry_properties`` are modified.
    """
    if self._edit_geometry_properties_window_shown:
        # Override the output state in the edit geometry properties dialog
        # if the button is pushed while the dialog is open. This prevents the
        # case where you close the dialog and the state reverts back to
        # before you hit the button.
        for name, prop in iteritems(geometry_properties):
            self._edit_geometry_properties.out_data[name] = prop
            if self._edit_geometry_properties.active_key == name:
                # refresh the row currently displayed in the dialog table
                index = self._edit_geometry_properties.table.currentIndex()
                self._edit_geometry_properties.update_active_key(index)
    self.on_update_geometry_properties(geometry_properties)
def on_set_modify_groups(self):
    """
    Opens the "Modify Groups" dialog; the actual implementation lives in
    the module-level ``on_set_modify_groups`` function, which receives
    this gui instance.
    """
    on_set_modify_groups(self)
def _apply_modify_groups(self, data):
    """called by on_set_modify_groups when apply is clicked"""
    self.on_update_modify_groups(data)
    # re-post the group currently selected in the dialog so the
    # displayed model reflects the edits
    imain = self._modify_groups_window.imain
    name = self._modify_groups_window.keys[imain]
    self.post_group_by_name(name)
def on_update_modify_groups(self, out_data):
    """
    Applies the changed groups to the different groups if
    something changed.
    """
    #self.groups = out_data
    # rebuild the name -> Group mapping, skipping any non-Group
    # bookkeeping entries the dialog may have left in out_data
    self.groups = {
        group.name : group
        for _group_id, group in sorted(iteritems(out_data))
        if isinstance(group, Group)
    }
def on_update_geometry_properties(self, out_data, name=None, write_log=True):
    """
    Applies the changed properties to the different actors if
    something changed.

    Note that some of the values are limited.  This prevents
    points/lines from being shrunk to 0 and also the actor being
    actually "hidden" at the same time.  This prevents confusion
    when you try to show the actor and it's not visible.
    """
    lines = []
    if name is None:
        # update every actor mentioned by the dialog output
        for actor_name, group in iteritems(out_data):
            if actor_name in ('clicked_ok', 'clicked_cancel'):
                continue
            self._update_ith_geometry_properties(actor_name, group, lines, render=False)
    else:
        # update only the requested actor
        self._update_ith_geometry_properties(name, out_data[name], lines, render=False)
    self.vtk_interactor.Render()

    if write_log and lines:
        msg = 'out_data = {\n'
        msg += ''.join(lines)
        msg += '}\n'
        msg += 'self.on_update_geometry_properties(out_data)'
        self.log_command(msg)
def _update_ith_geometry_properties(self, namei, group, lines, render=True):
    """updates a single geometry actor; appends a log fragment to ``lines`` if it changed"""
    if namei not in self.geometry_actors:
        # we've deleted the actor
        return
    actor = self.geometry_actors[namei]
    if isinstance(actor, vtk.vtkActor):
        # full geometry actor: color/line width/opacity/point size/visibility
        alt_prop = self.geometry_properties[namei]
        label_actors = alt_prop.label_actors
        lines += self._update_geometry_properties_actor(namei, group, actor, label_actors)
    elif isinstance(actor, vtk.vtkAxesActor):
        # coordinate-system actor: only supports a visibility toggle
        changed = False
        is_visible1 = bool(actor.GetVisibility())
        is_visible2 = group.is_visible
        if is_visible1 != is_visible2:
            actor.SetVisibility(is_visible2)
            alt_prop = self.geometry_properties[namei]
            alt_prop.is_visible = is_visible2
            actor.Modified()
            changed = True

        if changed:
            lines.append(' %r : CoordProperties(is_visible=%s),\n' % (
                namei, is_visible2))
    else:
        raise NotImplementedError(actor)
    if render:
        self.vtk_interactor.Render()
def _update_geometry_properties_actor(self, name, group, actor, label_actors):
    """
    Updates an actor

    Parameters
    ----------
    name : str
        the geometry property to update
    group : AltGeometry()
        a storage container for all the actor's properties
    actor : vtkActor()
        the actor where the properties will be applied
    label_actors : List[vtkActor]
        label actors whose visibility follows the main actor

    Returns
    -------
    lines : List[str]
        log fragments describing the change (empty if nothing changed)
    """
    lines = []
    changed = False
    #mapper = actor.GetMapper()
    prop = actor.GetProperty()
    backface_prop = actor.GetBackfaceProperty()

    if name == 'main' and backface_prop is None:
        # don't edit these
        # we're lying about the colors to make sure the
        # colors aren't reset for the Normals
        color1 = prop.GetDiffuseColor()
        color2 = color1
        assert color1[1] <= 1.0, color1
    else:
        color1 = prop.GetDiffuseColor()
        assert color1[1] <= 1.0, color1
        color2 = group.color_float
        #print('line2646 - name=%s color1=%s color2=%s' % (name, str(color1), str(color2)))
        #color2 = group.color

    opacity1 = prop.GetOpacity()
    opacity2 = group.opacity
    opacity2 = max(0.1, opacity2)  # clamp so the actor can't become fully invisible

    line_width1 = prop.GetLineWidth()
    line_width2 = group.line_width
    line_width2 = max(1, line_width2)  # clamp so lines can't shrink to 0

    point_size1 = prop.GetPointSize()
    point_size2 = group.point_size
    point_size2 = max(1, point_size2)  # clamp so points can't shrink to 0

    representation = group.representation
    alt_prop = self.geometry_properties[name]
    #representation = alt_prop.representation
    #is_visible1 = alt_prop.is_visible
    is_visible1 = bool(actor.GetVisibility())
    is_visible2 = group.is_visible
    #print('is_visible1=%s is_visible2=%s' % (is_visible1, is_visible2))
    bar_scale1 = alt_prop.bar_scale
    bar_scale2 = group.bar_scale
    # bar_scale2 = max(0.0, bar_scale2)

    #print('name=%s color1=%s color2=%s' % (name, str(color1), str(color2)))
    if color1 != color2:
        #print('color_2662[%s] = %s' % (name, str(color1)))
        assert isinstance(color1[0], float), color1
        prop.SetDiffuseColor(color2)
        changed = True
    if line_width1 != line_width2:
        line_width2 = max(1, line_width2)
        prop.SetLineWidth(line_width2)
        changed = True
    if opacity1 != opacity2:
        #if backface_prop is not None:
            #backface_prop.SetOpacity(opacity2)
        prop.SetOpacity(opacity2)
        changed = True
    if point_size1 != point_size2:
        prop.SetPointSize(point_size2)
        changed = True
    if representation == 'bar' and bar_scale1 != bar_scale2:
        #print('name=%s rep=%r bar_scale1=%s bar_scale2=%s' % (
            #name, representation, bar_scale1, bar_scale2))
        self.set_bar_scale(name, bar_scale2)
    if is_visible1 != is_visible2:
        actor.SetVisibility(is_visible2)
        alt_prop.is_visible = is_visible2
        #prop.SetViPointSize(is_visible2)
        actor.Modified()
        # labels follow the visibility of their parent actor
        for label_actor in label_actors:
            label_actor.SetVisibility(is_visible2)
            label_actor.Modified()
        changed = True

    if changed:
        lines.append(' %r : AltGeometry(self, %r, color=(%s, %s, %s), '
                     'line_width=%s, opacity=%s, point_size=%s, bar_scale=%s, '
                     'representation=%r, is_visible=%s),\n' % (
                         name, name, color2[0], color2[1], color2[2], line_width2,
                         opacity2, point_size2, bar_scale2, representation, is_visible2))
        prop.Modified()
    return lines
def set_bar_scale(self, name, bar_scale):
    """
    Scales the stored bar axis vectors and moves the corresponding
    grid points accordingly.

    Parameters
    ----------
    name : str
        the parameter to scale (e.g. TUBE_y, TUBE_z)
    bar_scale : float
        the scaling factor
    """
    #print('set_bar_scale - GuiCommon2; name=%s bar_scale=%s' % (name, bar_scale))
    # non-positive scales are ignored rather than raising
    if bar_scale <= 0.0:
        return
    assert bar_scale > 0.0, 'bar_scale=%r' % bar_scale

    # bar_y : (nbars, 6) float ndarray
    #     the xyz coordinates for (node1, node2) of the y/z axis of the bar
    #     xyz1 is the centroid
    #     xyz2 is the end point of the axis with a length_xyz with a bar_scale of 1.0
    bar_y = self.bar_lines[name]

    #dy = c - yaxis
    #dz = c - zaxis
    #print('bary:\n%s' % bar_y)
    xyz1 = bar_y[:, :3]
    xyz2 = bar_y[:, 3:]
    dxyz = xyz2 - xyz1

    # vectorized version of L = sqrt(dx^2 + dy^2 + dz^2)
    length_xyz = np.linalg.norm(dxyz, axis=1)
    izero = np.where(length_xyz == 0.0)[0]
    if len(izero):
        # zero-length axes are only reported, not skipped, below
        bad_eids = self.bar_eids[name][izero]
        self.log.error('The following elements have zero length...%s' % bad_eids)

    # v = dxyz / length_xyz * bar_scale
    # xyz2 = xyz1 + v
    nnodes = len(length_xyz)
    grid = self.alt_grids[name]

    points = grid.GetPoints()
    for i in range(nnodes):
        # point 2*i+1 is the axis end point (2*i is the centroid)
        p = points.GetPoint(2*i+1)
        #print(p)
        # NOTE(review): this MULTIPLIES by length_xyz[i] although the comment
        # above divides by it; confirm the intended scaling convention.
        node = xyz1[i, :] + length_xyz[i] * bar_scale * dxyz[i, :]
        #print(p, node)
        points.SetPoint(2 * i + 1, *node)

    if hasattr(grid, 'Update'):
        # VTK <= 5 requires an explicit Update()
        #print('update....')
        grid.Update()
    grid.Modified()
    #print('update2...')
def _add_user_points_from_csv(self, csv_points_filename, name, color, point_size=4):
    """
    Helper method for adding csv nodes to the gui

    Parameters
    ----------
    csv_points_filename : str
        CSV filename that defines one xyz point per line
    name : str
        name of the geometry actor
    color : List[float, float, float]
        RGB values; [0. to 1.]
    point_size : int; default=4
        the nominal point size

    Returns
    -------
    is_failed : bool
        True if the CSV could not be read; False on success
    """
    is_failed = True
    try:
        assert os.path.exists(csv_points_filename), print_bad_path(csv_points_filename)
        # read input file
        try:
            user_points = np.loadtxt(csv_points_filename, delimiter=',')
        except ValueError:
            # fall back to the more forgiving reader
            user_points = loadtxt_nice(csv_points_filename, delimiter=',')
            # can't handle leading spaces?
            #raise
    except ValueError as e:
        # NOTE(review): an AssertionError from the exists-check above is NOT
        # caught here (only ValueError is), so a missing file propagates.
        #self.log_error(traceback.print_stack(f))
        self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.log_error(str(e))
        return is_failed

    self._add_user_points(user_points, name, color, csv_points_filename, point_size=point_size)
    is_failed = False
    return False
def _add_user_points(self, user_points, name, color, csv_points_filename='', point_size=4):
    """
    Helper method for adding csv nodes to the gui

    Parameters
    ----------
    user_points : (n, 3) float ndarray
        the points to add
    name : str
        name of the geometry actor
    color : List[float, float, float]
        RGB values; [0. to 1.]
    csv_points_filename : str; default=''
        the source filename (used only in error messages)
    point_size : int; default=4
        the nominal point size

    Raises
    ------
    ValueError
        if ``name`` is empty or already used by another actor
    RuntimeError
        if the point array is empty
    """
    if name in self.geometry_actors:
        msg = 'Name: %s is already in geometry_actors\nChoose a different name.' % name
        raise ValueError(msg)
    if len(name) == 0:
        msg = 'Invalid Name: name=%r' % name
        raise ValueError(msg)

    # create grid
    self.create_alternate_vtk_grid(name, color=color, line_width=5, opacity=1.0,
                                   point_size=point_size, representation='point')

    npoints = user_points.shape[0]
    if npoints == 0:
        raise RuntimeError('npoints=0 in %r' % csv_points_filename)
    if len(user_points.shape) == 1:
        # promote a single xyz row to a (1, 3) array
        # NOTE(review): npoints still holds the pre-reshape value here;
        # confirm the allocation/loop below are intended to use it.
        user_points = user_points.reshape(1, npoints)

    # allocate grid
    self.alt_grids[name].Allocate(npoints, 1000)

    # set points
    points = vtk.vtkPoints()
    points.SetNumberOfPoints(npoints)

    for i, point in enumerate(user_points):
        points.InsertPoint(i, *point)
        elem = vtk.vtkVertex()
        elem.GetPointIds().SetId(0, i)
        self.alt_grids[name].InsertNextCell(elem.GetCellType(), elem.GetPointIds())
    self.alt_grids[name].SetPoints(points)

    # create actor/mapper
    self._add_alt_geometry(self.alt_grids[name], name)

    # set representation to points
    self.geometry_properties[name].representation = 'point'
    actor = self.geometry_actors[name]
    prop = actor.GetProperty()
    prop.SetRepresentationToPoints()
    prop.SetPointSize(point_size)
|
nilq/baby-python
|
python
|
#
# Copyright (c) 2010 Testrepository Contributors
#
# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
# license at the users choice. A copy of both licenses are available in the
# project source as Apache-2.0 and BSD. You may not use this file except in
# compliance with one of these two licences.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# license you chose for the specific language governing permissions and
# limitations under that license.
"""Tests for matchers used by or for testing testrepository."""
import sys
from testtools import TestCase
class TestWildcard(TestCase):
    """Behavioural checks for the Wildcard comparison object."""

    def test_wildcard_equals_everything(self):
        # Wildcard compares equal to arbitrary values, on either side.
        from testrepository.tests import Wildcard
        for value in (5, 'orange'):
            self.assertTrue(Wildcard == value)
            self.assertTrue(value == Wildcard)

    def test_wildcard_not_equals_nothing(self):
        # Wildcard never claims inequality, whatever the other operand is.
        from testrepository.tests import Wildcard
        self.assertFalse(Wildcard != 5)
        self.assertFalse(Wildcard != 'orange')
|
nilq/baby-python
|
python
|
import re
import h5py
import numpy as np
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.corpus import stopwords
from os.path import join,exists
from os import listdir
from paper import paper
def html_reader(input_dir):
    """
    read the html file at input_dir, and parse the html file, return a paper object
    -------------------------------------------------------------------------------
    parameter:
        input_dir: str, dir of the html file
    ------------------------------------------------------------------------------
    return:
        paper: paper.paper object
    """
    # read data from the html file, dropping the 4-line header/footer
    with open(input_dir, 'r') as html_file:
        content = html_file.read()
    content = (content.split('\n'))[4:-4]

    # drop lines that look like numeric noise (tab-number or page numbers)
    num = re.compile(r"(.*\t\d.*)|(\d*\d\.\d*)")
    information = [line for line in content if num.match(line) is None]
    information = information[:-1]

    # data parsing: locate the date line and the meta-data line
    # NOTE(review): index_date/index_meta are unbound if no line matches
    Date = re.compile('( ?CACM|June)')
    Meta = re.compile(r"(CA\d\d\d\d\d\d|June)")
    for i in range(len(information)):
        if Date.match(information[i]) is not None:
            index_date = i
        if Meta.match(information[i]) is not None:
            index_meta = i

    content = information[:index_date]
    others = information[index_date+2:index_meta]

    # split the body into title / abstract at the first blank line
    # NOTE(review): title/abstract are unbound if the body has no blank line
    for i in range(len(content)):
        if content[i] == "":
            title = content[:i]
            abstract = content[i+1:]
            break

    # get author and other: author lines look like "Last, F. ..."
    author = []
    other = []
    for i in range(len(others)):
        if others[i] == "":
            if re.match(r"[A-Z].*, ?[A-Z].*\..*", others[0]) is not None:
                author = others[:i]
                other = others[i+1:]
            else:
                other = others
            break

    # normalize author names: "Last, F." -> "f.last" style lowercase token
    for i in range(len(author)):
        if re.match(r"[A-Z].*, ?[A-Z].*\..*", author[i]) is not None:
            name = author[i].split(",")
            author[i] = (name[1] + name[0])
            author[i] = author[i].replace(" ", "")
            author[i] = author[i].replace("\t", "")
            author[i] = author[i].lower()

    # parse date (year + month name)
    date = []
    date.append(re.search(r"19\d\d", information[index_date]).group())
    date.append(re.search("(January|February|March|April|May|June|JUly|July|August|September|October|November|December)", information[index_date]).group().lower())

    # parse meta data
    meta = []
    meta.append(re.search(r"CA\d\d\d\d\d\d\w?", information[index_meta]).group().lower())  # 0: document id
    meta.append(re.search(r"[a-z0-9] [A-Z]{2}[A-Z]?", information[index_meta]).group()[2:].lower())  # 1: initials
    meta.append(re.search("(January|February|March|April|May|June|JUly|July|August|September|October|November|December)", information[index_meta]).group().lower())  # 2: month
    meta.append(re.search(r"\w \d\d?", information[index_meta]).group()[2:])  # 3: day
    meta.append(re.search(r"\d?\d:\d\d", information[index_meta]).group())  # 4: time
    meta.append(re.search("(AM|PM)", information[index_meta]).group().lower())  # 5: am/pm
    meta.append(re.search(r"19\d\d", information[index_meta]).group())  # 6: year

    # build corpus
    # PERF: load the stopword list once as a set; the original called
    # stopwords.words("english") (a corpus read + O(n) list scan) per token.
    stop_words = set(stopwords.words("english"))
    lemmatizer = WordNetLemmatizer()
    corpus = set()

    def _clean_and_collect(lines, pattern):
        """strip punctuation in place and add lemmatized non-stopwords to corpus"""
        for j in range(len(lines)):
            lines[j] = re.sub(pattern, " ", lines[j])
            for word in word_tokenize(lines[j]):
                normal_word = word.lower()
                if normal_word not in stop_words:
                    corpus.add(lemmatizer.lemmatize(normal_word))

    # the title pattern intentionally omits ',' (matches the original code)
    _clean_and_collect(title, r"\(|\)|-|\d\d?\d?|:|/|\.|`|\?")
    _clean_and_collect(abstract, r"\(|\)|-|\d\d?\d?|:|/|\.|`|\?|,")
    _clean_and_collect(other, r"\(|\)|-|\d\d?\d?|:|/|\.|`|\?|,")
    corpus = list(corpus)

    return paper(author=author, other=other, metadata=meta, date=date, title=title,
                 abstract=abstract, id=int(input_dir[-9:-5]), corpus=corpus)
def convert(num):
    """
    format the number like "0001","0012","0123","1234"
    -------------------------------------------------------------------------
    parameter:
        num: int, the number to be formatted
    -------------------------------------------------------------------------
    return:
        num: str, the formatted number
    """
    # Zero-padded printf formatting replaces the digit-counting if/elif chain.
    # Generalization: inputs with 5+ digits now return their own string form
    # instead of the original's implicit None.
    return "%04i" % num
|
nilq/baby-python
|
python
|
from .experiment import Experiment # noqa: F401
from .trial import Trial # noqa: F401
|
nilq/baby-python
|
python
|
import os
import argparse
import imageio

parser = argparse.ArgumentParser()
parser.add_argument('--name', required=True, type=str)
args = parser.parse_args()

# Render every saved particle frame in numeric order.
# BUG FIX: a plain lexicographic sort orders '10.bin' before '2.bin';
# sort by the numeric frame index instead.
files = os.listdir(os.path.join('outputs', args.name, 'particles'))
for file in sorted(files, key=lambda f: int(f.replace('.bin', ''))):
    i = int(file.replace('.bin', ''))
    print('frame %d' % i, flush=True)
    # NOTE(review): shell string built from --name; fine for trusted CLI use,
    # but subprocess.run([...]) would be safer for untrusted input.
    os.system('python utils/render.py --name %s --frame %d --imshow 0' % (args.name, i))
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 14:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a database index on
    # Payment.modified to speed up filtering/sorting by modification time.

    dependencies = [
        ('payment', '0014_add_indices'),
    ]

    operations = [
        migrations.AddIndex(
            model_name='payment',
            index=models.Index(fields=['modified'], name='payment_pay_modifie_c1f247_idx'),
        ),
    ]
|
nilq/baby-python
|
python
|
###########################################################
# compare.py -Script that compares any two painting's hex
# hex values for similarity or rank in the
# frequency list and quantifies it with a
# percentage
# Author: Shaedil Dider
###########################################################
import colors
from statistics import mean
# Extract the two color sets and put them in a variable
# Process them into RGB>LAB values using colors library
# Comparison algorithm using delta E algorithm
# https://stackoverflow.com/a/52453462/6273236
def compare_color_sets(first_file, second_file):
    """
    Compares the color sets of two paintings stored under ./dataset/ and
    prints how perceptibly different they are (mean delta-E).
    """
    # SECURITY: eval() executes arbitrary code from the dataset files;
    # only use with trusted data (ast.literal_eval would be safer).
    color_set1 = eval(open("./dataset/" + first_file).read())
    color_set2 = eval(open("./dataset/" + second_file).read())
    converted_set1 = convert_color_set_to_LAB(color_set1)
    converted_set2 = convert_color_set_to_LAB(color_set2)
    mean_similarity = calculate_mean_simularity(converted_set1, converted_set2)
    print_level_of_perceptibility(mean_similarity)
def convert_color_set_to_LAB(input_color_set):
    """
    Convert a color set of hex values to a sorted list of LAB colors.

    parameter:
        input_color_set: iterable of hex color values
    return:
        list of LAB color values, sorted
    """
    color_set_in_LAB = []
    for each_color_scheme in input_color_set:
        each_color_scheme_in_RBG = colors.hexToRGB(each_color_scheme)
        each_color_scheme_in_LAB = colors.rgb2lab(each_color_scheme_in_RBG)
        color_set_in_LAB.append(each_color_scheme_in_LAB)
    # BUG FIX: list.sort() sorts in place and returns None, so the original
    # `return color_set_in_LAB.sort()` always returned None and broke the
    # caller's zip() over the result.  Sort, then return the list.
    color_set_in_LAB.sort()
    return color_set_in_LAB
def calculate_mean_simularity(color_set_in_LAB1, color_set_in_LAB2):
    """Return the mean delta-E distance between pairwise-matched LAB colors."""
    paired = zip(color_set_in_LAB1, color_set_in_LAB2)
    return mean(colors.deltaE(first, second) for first, second in paired)
def print_level_of_perceptibility(mean_similarity):
    """Print the delta-E band and a human-readable interpretation of the score."""
    # (upper bound, delta-E line, perception line), checked in ascending order
    bands = [
        (1.0, "Delta E: <= 1.0",
         "Perception: The difference in the color set of the paintings are not perceptible by human eyes"),
        (2.0, "Delta E: 1 - 2",
         "Perception: The difference in the color set of the paintings are perceptible through close observation"),
        (11.0, "Delta E: 2 - 10",
         "Perception: The difference in the color set of the paintings are perceptible at a glance"),
        (49.0, "Delta E: 11 - 49",
         "Preception: The color set of the paintings are more similar than opposite"),
    ]
    for upper_bound, delta_line, perception_line in bands:
        if mean_similarity <= upper_bound:
            print(delta_line)
            print(perception_line)
            return
    print("Delta E: 49 - 100")
    print("Perception: The color set of the paintings are exact opposites")
|
nilq/baby-python
|
python
|
from selenium import webdriver
from utils.logging import init_logger
from utils.s3_manager.manage import S3Manager
class SeleniumCrawler:
    """
    Base class for Selenium-driven crawlers that persist results to S3.

    Parameters
    ----------
    base_url : str
        root url the crawler starts from
    bucket_name : str
        target S3 bucket for crawled data
    key : str
        key prefix under which results are stored
    head : bool
        if False (default), Chrome runs headless
    """
    def __init__(self, base_url, bucket_name, key, head=False):
        self.logger = init_logger()

        self.bucket_name = bucket_name
        self.s3_manager = S3Manager(bucket_name=self.bucket_name)
        self.prefix = key

        # NOTE(review): hard-coded Windows chromedriver path; consider making
        # this configurable (constructor argument or environment variable).
        self.chrome_path = "C:/chromedriver"
        options = webdriver.ChromeOptions()
        if head is False:
            options.add_argument('headless')
        self.driver = webdriver.Chrome(executable_path=self.chrome_path, chrome_options=options)

        self.base_url = base_url

    # TODO: click elements sequentially
    def click_element_by_xpath(self, xpath: str):
        # locate by XPath and click; Selenium raises if the element is absent
        ele = self.driver.find_element_by_xpath(xpath=xpath)
        ele.click()

    def click_element_by_class_name(self, name: str):
        # locate by CSS class name and click
        ele = self.driver.find_element_by_class_name(name=name)
        ele.click()

    def click_element_by_tag_name(self, name: str):
        # locate by HTML tag name and click
        ele = self.driver.find_element_by_tag_name(name=name)
        ele.click()

    def process(self):
        # subclasses implement the actual crawl loop
        pass
|
nilq/baby-python
|
python
|
from ._accuracy_per_cell_type import accuracy_per_cell_type
from ._cd_ratio import cd_ratio
from ._cumulative_node import cumulative_node
from ._cumulative_node_group import cumulative_node_group
from ._dists_distr import dists_distr
from ._mean_node_per_cell_type import mean_node_per_cell_type
from ._metric_heatmap import metric_heatmap
from ._node_distr import node_distr
from ._river_plot import river_plot
from ._river_plot_2_omics import river_plot_2_omics
from ._pairwise_distance import pairwise_distance
from ._silhouette import silhouette
from ._metrics_scatterplot import metrics_scatterplot
import scanpy


def _scanpy_version_tuple(version):
    """Return (major, minor) parsed from a scanpy version string.

    BUG FIX: the original compared int(''.join(version.split('.'))) < 180,
    which crashes on versions with non-numeric parts (e.g. '1.4.4.post1')
    and digit-joins multi-digit components.  Compare (major, minor) tuples
    instead; non-digit suffixes within a component are stripped.
    """
    parts = []
    for token in version.split('.')[:2]:
        digits = ''.join(ch for ch in token if ch.isdigit())
        parts.append(int(digits) if digits else 0)
    return tuple(parts)


# check if scanpy version is below 1.8.0 or higher
if _scanpy_version_tuple(scanpy.__version__) < (1, 8):
    from ._umap_barcodes import umap_barcodes
else:
    from ._umap_barcodes_2 import umap_barcodes
|
nilq/baby-python
|
python
|
import socket
class Triton200:
    """
    Create an instance of the Triton200 class.

    Supported modes: IP

    :param str ip_address: The IP address of the Triton 200.
    :param int port_number: The associated port number of the Triton 200 (default: 33576)
    :param int timeout: How long to wait for a response (default: 10000)
    :param int bytes_to_read: How many bytes to accept from the response (default: 2048)
    """

    # temperature-sensor channel numbers (see `temperature_channel`)
    CERNOX_CHANNEL = '5'
    RUO2_CHANNEL = '6'

    def __init__(self, ip_address, port_number=33576, timeout=10000, bytes_to_read=2048):
        self._address = (str(ip_address), int(port_number))
        # NOTE(review): socket.settimeout() takes seconds, so 10000 is ~2.8
        # hours -- confirm whether milliseconds were intended.
        self._timeout = timeout
        self._bytes_to_read = bytes_to_read

        # BUG FIX: Triton200.RUO2_CHANNEL was referenced here but never
        # defined anywhere in the class, so instantiation raised
        # AttributeError.  The class constants above restore it ('6' per
        # the temperature_channel property docstring).
        self._temperature_channel = Triton200.RUO2_CHANNEL
        self._temperature_setpoint = 0.0
        self._heater_range = 0.0
        self._heater_channel = '1'
        self._turbo_channel = '1'

    @property
    def temperature_channel(self):
        """
        :returns str: The temperature channel, either the cernox (5) or the RuO2 (6)
        """
        return self._temperature_channel

    @temperature_channel.setter
    def temperature_channel(self, value):
        self._temperature_channel = str(value)

    @property
    def temperature_setpoint(self):
        """The temperature set point in kelvin."""
        return self._temperature_setpoint

    @temperature_setpoint.setter
    def temperature_setpoint(self, value):
        # accept any real number (plain ints were previously rejected by
        # the float-only isinstance check)
        if not isinstance(value, (int, float)):
            raise RuntimeError("Make sure the temperature set point is a number.")
        elif 0 <= value < 10:
            self._temperature_setpoint = value
        else:
            print("Keep an eye on the turbo pump if you ramp!!!")
            self._temperature_setpoint = value

    @property
    def temperature(self):
        """The temperature reading from the current temperature channel."""
        noun = 'DEV:T' + str(self.temperature_channel) + ':TEMP:SIG:TEMP'
        command = 'READ:' + noun + '\r\n'
        response = self.query_and_receive(command)

        return self.extract_value(response, noun, 'K')

    def update_heater(self):
        """
        Associates the heater with the current temperature channel and changes the heater current to
        preset values given the temperature set point.
        """
        heater_range = ['0.316', '1', '3.16', '10', '31.6', '100']
        command = 'SET:DEV:T' + str(self.temperature_channel) + ':TEMP:LOOP:HTR:H' + str(self._heater_channel) + '\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Changing of heater focus unsuccessful.")

        # pick the preset current band for the requested set point:
        # each True comparison bumps the index up one band
        heater_index = ((self.temperature_setpoint > 0.030)
                        + (self.temperature_setpoint > 0.050)
                        + (self.temperature_setpoint > 0.300)
                        + (self.temperature_setpoint > 1.000)
                        + (self.temperature_setpoint > 1.500))
        heater_current = heater_range[heater_index]

        command = 'SET:DEV:T' + str(self.temperature_channel) + ':TEMP:LOOP:RANGE:' + heater_current + '\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Changing of heater range unsuccessful.")

    def controlled_ramp_on(self):
        """Starts a temperature sweep for the current temperature channel."""
        # BUG FIX: the command was missing the ':' between the channel and
        # 'TEMP' (cf. every other command built in this class).
        command = 'SET:DEV:T' + str(self.temperature_channel) + ':TEMP:LOOP:RAMP:ENAB:ON\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Enabling of temperature ramp unsuccessful.")

    def controlled_ramp_off(self):
        """Stops a temperature sweep for the current temperature channel."""
        # BUG FIX: same missing ':' as controlled_ramp_on.
        command = 'SET:DEV:T' + str(self.temperature_channel) + ':TEMP:LOOP:RAMP:ENAB:OFF\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Disabling of temperature ramp unsuccessful.")

    def turbo_on(self):
        """Turns on a turbo pump.

        WARNING: Do not use this unless you know what you are doing."""
        command = 'SET:DEV:TURB' + self._turbo_channel + ':PUMP:SIG:STATE:ON\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Enabling of turbo pump unsuccessful.")

    def turbo_off(self):
        """Turns off a turbo pump.

        WARNING: Do not use this unless you know what you are doing."""
        command = 'SET:DEV:TURB' + self._turbo_channel + ':PUMP:SIG:STATE:OFF\r\n'
        response = self.query_and_receive(command)
        if not response:
            raise RuntimeError("Disabling of turbo pump unsuccessful.")

    def query_and_receive(self, command):
        """
        Queries the Oxford Triton 200 with the given command.

        :param command: Specifies a read/write of a property.
        :returns str: the decoded response string
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect(self._address)
            s.settimeout(self._timeout)
            s.sendall(command.encode())

            response = s.recv(self._bytes_to_read).decode()

        return response

    @staticmethod
    def extract_value(response, noun, unit):
        """Parses ``'STAT:<noun>:<value><unit>'`` and returns the value as a float."""
        expected_response = 'STAT:' + noun + ':'
        value = float(response.replace(expected_response, '').strip('\n').replace(unit, ''))
        return value
|
nilq/baby-python
|
python
|
import random
import itertools
import json
import networkx as nx
import sys, getopt
import dcop_instance as dcop
def generate(G, dsize=2, p2=1.0, cost_range=(0, 10), def_cost=0, int_cost=True, outfile=''):
    """
    Builds a random DCOP instance from the constraint graph ``G``.

    Parameters
    ----------
    G : nx.Graph (or any object exposing ``nodes()`` and ``edges()``)
        constraint graph; one variable/agent per node, one constraint per edge
    dsize : int
        domain size of every variable
    p2 : float
        kept for interface compatibility (only validated, not used)
    cost_range : (low, high)
        range the random constraint costs are drawn from
    def_cost : number
        default cost stored on each constraint
    int_cost : bool
        draw integer costs (randint) if True, floats (uniform) otherwise
    outfile : str
        kept for interface compatibility (not used)

    Returns
    -------
    (agts, vars, doms, cons) : tuple of dicts
    """
    assert (0.0 < p2 <= 1.0)
    agts = {}
    variables = {}
    doms = {'0': list(range(0, dsize))}
    cons = {}

    for i in range(0, len(G.nodes())):
        agts[str(i)] = None
        variables[str(i)] = {'dom': '0', 'agt': str(i)}

    # BUG FIX / generalization: the value tuples hard-coded the binary
    # domain [0, 1] even though `dsize` parameterizes the domain; use
    # range(dsize) (identical to the original when dsize == 2).
    domain_values = list(range(dsize))

    cid = 0
    for e in G.edges():
        arity = len(e)
        cons[str(cid)] = {'arity': arity, 'def_cost': def_cost,
                          'scope': [str(x) for x in e], 'values': []}
        for assignments in itertools.product(*([domain_values] * arity)):
            val = {'tuple': list(assignments)}
            if int_cost:
                val['cost'] = random.randint(*cost_range)
            else:
                val['cost'] = random.uniform(*cost_range)
            cons[str(cid)]['values'].append(val)
        cid += 1

    return agts, variables, doms, cons
def main(argv):
    """Parse the CLI options; return the tuple (agts, max_arity, max_cost, name, out_file)."""
    agts, max_arity, max_cost = 10, 2, 10
    name, out_file = '', ''

    def _usage_error():
        # print usage and abort; exactly five options are required
        print('Input Error. Usage:\nmain.py -a -r -c -n -o <outputfile>')
        sys.exit(2)

    try:
        opts, _args = getopt.getopt(
            argv, "a:r:c:n:o:h",
            ["agts=", "max_arity=", "max_cost=", "name=", "ofile=", "help"])
    except getopt.GetoptError:
        _usage_error()
    if len(opts) != 5:
        _usage_error()

    for opt, arg in opts:
        if opt in ('-h', '--help'):
            print('main.py -i <inputfile> -o <outputfile>')
            sys.exit()
        if opt in ('-a', '--agts'):
            agts = int(arg)
        elif opt in ('-r', '--max_arity'):
            max_arity = int(arg)
        elif opt in ('-c', '--max_cost'):
            max_cost = int(arg)
        elif opt in ('-n', '--name'):
            name = arg
        elif opt in ('-o', '--ofile'):
            out_file = arg

    return agts, max_arity, max_cost, name, out_file
if __name__ == '__main__':
    # parse CLI args, build a connected grid graph, then dump the DCOP
    # instance in three formats (xml / wcsp / json)
    nagts, maxarity, maxcost, name, outfile = main(sys.argv[1:])

    G = nx.grid_graph([nagts, nagts]).to_undirected()
    while not nx.is_connected(G):
        # NOTE(review): the retry uses nx.grid_graph(nagts) rather than
        # [nagts, nagts] as above -- confirm which dimensions are intended
        G = nx.grid_graph(nagts).to_undirected()

    # Normalize Graph: relabel the grid's tuple nodes as integers 0..n-1
    Gn = nx.empty_graph(nagts)
    map_nodes = {}
    nid = 0
    for n in G.nodes():
        map_nodes[n] = nid
        nid += 1
    for e in G.edges():
        Gn.add_edge(map_nodes[e[0]], map_nodes[e[1]])

    agts, vars, doms, cons = generate(Gn, cost_range=(0,maxcost))

    print('Creating DCOP instance' + name, ' G nodes: ', len(Gn.nodes()), ' G edges:', len(Gn.edges()))
    dcop.create_xml_instance(name, agts, vars, doms, cons, outfile+'.xml')
    dcop.create_wcsp_instance(name, agts, vars, doms, cons, outfile+'.wcsp')
    dcop.create_json_instance(name, agts, vars, doms, cons, outfile+'.json')
|
nilq/baby-python
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.