Dataset columns:
    code         string (22 to 1.05M characters)
    apis         list (1 to 3.31k items)
    extract_api  string (75 to 3.25M characters)
from contextlib import contextmanager
import torch.nn.functional as F
import torch.nn as nn
import torch
import math


# Initializes a layer with normally-distributed weights.
def normal_weights(layer):
    classname = layer.__class__.__name__
    if classname.find('Linear') != -1:
        n = layer.in_features
        y = 1.0 / math.sqrt(n)
        layer.weight.data.normal_(0, y)


# A Dueling DQN.
class QNetwork(nn.Module):
    def __init__(self, state_size, action_size, seed=1337):
        super(QNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)

        state_val_net_layer_dims = [
            state_size,
            128,
            32,  # 1
        ]
        advantage_net_layer_dims = [
            state_size,
            128,
            32,  # 4
        ]

        # V(s)
        self.state_val_net = nn.Sequential(
            *self.gen_linear_layers(state_val_net_layer_dims),
            nn.Linear(state_val_net_layer_dims[-1], 1)
        )
        # A(s, a)
        self.advantage_net = nn.Sequential(
            *self.gen_linear_layers(advantage_net_layer_dims),
            nn.Linear(advantage_net_layer_dims[-1], action_size)
        )
        self.apply(normal_weights)

    def gen_linear_layers(self, layer_dims):
        return [
            nn.Sequential(
                nn.Linear(layer_dims[i], layer_dims[i + 1]),
                nn.BatchNorm1d(layer_dims[i + 1]),
                nn.ReLU(),
            )
            for i in range(len(layer_dims) - 1)
        ]

    def forward(self, state):
        state_vals = self.state_val_net(state)
        advantages = self.advantage_net(state)
        # Q(s, a) = V(s) + A(s, a) - mean(A(s, a'))
        return state_vals + advantages - advantages.mean()

    # Use this to interact with the environment
    # since action ranks don't change with V(s).
    def get_advantages(self, state):
        return self.advantage_net(state)

    @contextmanager
    def eval_no_grad(self):
        with torch.no_grad():
            try:
                self.eval()
                yield
            finally:
                self.train()
[ "torch.nn.ReLU", "math.sqrt", "torch.manual_seed", "torch.nn.BatchNorm1d", "torch.nn.Linear", "torch.no_grad" ]
[((550, 573), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (567, 573), False, 'import torch\n'), ((331, 343), 'math.sqrt', 'math.sqrt', (['n'], {}), '(n)\n', (340, 343), False, 'import math\n'), ((951, 993), 'torch.nn.Linear', 'nn.Linear', (['state_val_net_layer_dims[-1]', '(1)'], {}), '(state_val_net_layer_dims[-1], 1)\n', (960, 993), True, 'import torch.nn as nn\n'), ((1142, 1194), 'torch.nn.Linear', 'nn.Linear', (['advantage_net_layer_dims[-1]', 'action_size'], {}), '(advantage_net_layer_dims[-1], action_size)\n', (1151, 1194), True, 'import torch.nn as nn\n'), ((2017, 2032), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2030, 2032), False, 'import torch\n'), ((1347, 1390), 'torch.nn.Linear', 'nn.Linear', (['layer_dims[i]', 'layer_dims[i + 1]'], {}), '(layer_dims[i], layer_dims[i + 1])\n', (1356, 1390), True, 'import torch.nn as nn\n'), ((1408, 1441), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['layer_dims[i + 1]'], {}), '(layer_dims[i + 1])\n', (1422, 1441), True, 'import torch.nn as nn\n'), ((1459, 1468), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1466, 1468), True, 'import torch.nn as nn\n')]
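For context, the row above is a dueling-architecture Q-network. A minimal usage sketch follows; the state size, action size, batch of random states, and greedy-action selection are illustrative assumptions, not part of the dataset row. Note that forward() subtracts advantages.mean() over the whole batch, whereas the standard dueling aggregation subtracts the per-state mean over actions.

# Hypothetical usage sketch; sizes and inputs are assumptions.
import torch

net = QNetwork(state_size=8, action_size=4)
states = torch.randn(16, 8)  # BatchNorm1d expects a batch dimension

with net.eval_no_grad():
    q_values = net(states)                              # shape: [16, 4]
    actions = net.get_advantages(states).argmax(dim=1)  # greedy actions

# Standard dueling aggregation would instead compute:
#   state_vals + advantages - advantages.mean(dim=1, keepdim=True)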
##############################################################################
#
# Copyright (c) 2017 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################

import unittest


class SafeUnicodeTests(unittest.TestCase):

    def _makeOne(self, value):
        from ZPublisher.utils import safe_unicode
        return safe_unicode(value)

    def test_ascii(self):
        self.assertEqual(self._makeOne('foo'), 'foo')
        self.assertEqual(self._makeOne(b'foo'), 'foo')

    def test_latin_1(self):
        self.assertEqual(self._makeOne(b'fo\xf6'), 'fo\ufffd')

    def test_unicode(self):
        self.assertEqual(self._makeOne('foö'), 'foö')

    def test_utf_8(self):
        self.assertEqual(self._makeOne('test\xc2\xae'), 'test\xc2\xae')
        self.assertEqual(self._makeOne(b'test\xc2\xae'), 'test\xae')


class NoUpdatePropertyManager:
    """PropertyManager without _updateProperty method.

    This is a simplified version of the original PropertyManager,
    with only the methods we need.
    """
    _properties = ()

    def _setPropValue(self, id, value):
        if type(value) == list:
            value = tuple(value)
        setattr(self, id, value)

    def _setProperty(self, id, value, type='string'):
        self._properties = self._properties + ({'id': id, 'type': type},)
        self._setPropValue(id, value)

    def hasProperty(self, id):
        for p in self._properties:
            if id == p['id']:
                return 1
        return 0

    def getProperty(self, id, d=None):
        if self.hasProperty(id):
            return getattr(self, id)
        return d

    def getPropertyType(self, id):
        for md in self._properties:
            if md['id'] == id:
                return md.get('type', 'string')
        return None

    def _propertyMap(self):
        return self._properties

    def propertyMap(self):
        return tuple(dict.copy() for dict in self._propertyMap())


class NoPropertiesManager(NoUpdatePropertyManager):
    """PropertyManager with _updateProperty method but without _properties."""
    _properties = None

    def _updateProperty(self, id, value):
        self._setPropValue(id, value)


class FixPropertiesTests(unittest.TestCase):

    def _makeOne(self):
        from OFS.PropertyManager import PropertyManager
        return PropertyManager()

    def test_lines(self):
        from ZPublisher.utils import fix_properties
        obj = self._makeOne()
        obj._setProperty("mixed", ["text and", b"bytes"], "lines")
        self.assertEqual(obj.getProperty("mixed"), ("text and", b"bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "lines")
        fix_properties(obj)
        self.assertEqual(obj.getProperty("mixed"), ("text and", "bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "lines")

    def test_ulines(self):
        from ZPublisher.utils import fix_properties
        obj = self._makeOne()
        obj._setProperty("mixed", ["text and", b"bytes"], "ulines")
        self.assertEqual(obj.getProperty("mixed"), ("text and", b"bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "ulines")
        fix_properties(obj)
        self.assertEqual(obj.getProperty("mixed"), ("text and", "bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "lines")

    def test_utokens(self):
        from ZPublisher.utils import fix_properties
        obj = self._makeOne()
        obj._setProperty("mixed", ["text", "and", b"bytes"], "utokens")
        self.assertEqual(obj.getProperty("mixed"), ("text", "and", b"bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "utokens")
        fix_properties(obj)
        self.assertEqual(obj.getProperty("mixed"), ("text", "and", "bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "tokens")

    def test_utext(self):
        from ZPublisher.utils import fix_properties
        obj = self._makeOne()
        obj._setProperty("prop1", "multiple\nlines", "utext")
        self.assertEqual(obj.getProperty("prop1"), "multiple\nlines")
        self.assertEqual(obj.getPropertyType("prop1"), "utext")
        fix_properties(obj)
        self.assertEqual(obj.getProperty("prop1"), "multiple\nlines")
        self.assertEqual(obj.getPropertyType("prop1"), "text")

    def test_ustring(self):
        from ZPublisher.utils import fix_properties
        obj = self._makeOne()
        obj._setProperty("prop1", "single line", "ustring")
        self.assertEqual(obj.getProperty("prop1"), "single line")
        self.assertEqual(obj.getPropertyType("prop1"), "ustring")
        fix_properties(obj)
        self.assertEqual(obj.getProperty("prop1"), "single line")
        self.assertEqual(obj.getPropertyType("prop1"), "string")

    def test_no_update(self):
        # Test that an object without _updateProperty method does not trip up
        # our code.
        from ZPublisher.utils import fix_properties
        obj = NoUpdatePropertyManager()
        obj._setProperty("mixed", ["text and", b"bytes"], "lines")
        self.assertEqual(obj.getProperty("mixed"), ("text and", b"bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "lines")
        # This should not raise an error.
        fix_properties(obj)
        # The properties should have remained the same.
        self.assertEqual(obj.getProperty("mixed"), ("text and", b"bytes"))
        self.assertEqual(obj.getPropertyType("mixed"), "lines")

    def test_no_properties(self):
        # Test that an object with a failing propertyMap method,
        # due to _properties=None, does not trip up our code.
        from ZPublisher.utils import fix_properties
        obj = NoPropertiesManager()
        # This should not raise an error.
        fix_properties(obj)
[ "OFS.PropertyManager.PropertyManager", "ZPublisher.utils.fix_properties", "ZPublisher.utils.safe_unicode" ]
[((795, 814), 'ZPublisher.utils.safe_unicode', 'safe_unicode', (['value'], {}), '(value)\n', (807, 814), False, 'from ZPublisher.utils import safe_unicode\n'), ((2792, 2809), 'OFS.PropertyManager.PropertyManager', 'PropertyManager', ([], {}), '()\n', (2807, 2809), False, 'from OFS.PropertyManager import PropertyManager\n'), ((3135, 3154), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (3149, 3154), False, 'from ZPublisher.utils import fix_properties\n'), ((3621, 3640), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (3635, 3640), False, 'from ZPublisher.utils import fix_properties\n'), ((4116, 4135), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (4130, 4135), False, 'from ZPublisher.utils import fix_properties\n'), ((4593, 4612), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (4607, 4612), False, 'from ZPublisher.utils import fix_properties\n'), ((5059, 5078), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (5073, 5078), False, 'from ZPublisher.utils import fix_properties\n'), ((5689, 5708), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (5703, 5708), False, 'from ZPublisher.utils import fix_properties\n'), ((6205, 6224), 'ZPublisher.utils.fix_properties', 'fix_properties', (['obj'], {}), '(obj)\n', (6219, 6224), False, 'from ZPublisher.utils import fix_properties\n')]
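The SafeUnicodeTests above pin down the behavior of ZPublisher.utils.safe_unicode. A behavior-equivalent sketch inferred purely from those assertions, not the actual Zope implementation:

# Inferred from the tests: bytes are decoded as UTF-8 with invalid byte
# sequences replaced by U+FFFD; str values pass through unchanged.
def safe_unicode_sketch(value):
    if isinstance(value, bytes):
        return value.decode('utf-8', 'replace')
    return value

assert safe_unicode_sketch(b'foo') == 'foo'
assert safe_unicode_sketch(b'fo\xf6') == 'fo\ufffd'       # invalid UTF-8 byte
assert safe_unicode_sketch(b'test\xc2\xae') == 'test\xae'  # valid UTF-8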
import random
from random import randint

from app import app
from models import (
    db, Actor, Movie, Role,
    ETHNICITY_TYPE, HAIR_COLOR_TYPE, BODY_TYPE, GENDER_TYPE, EYE_COLOR_TYPE
)

PHONES = [
    '+1-202-555-0169', '+1-202-555-0125', '+1-202-555-0133',
    '+1-202-555-0143', '+1-202-555-0163', '+1-202-555-0159',
    '+1-202-555-0166', '+1-202-555-0169', '+1-202-555-0172',
    '+1-202-555-0102', '+1-202-555-0183', '+1-202-555-0137'
]

EMAILS = [
    '<EMAIL>', '<EMAIL>', '<EMAIL>', '<EMAIL>',
    '<EMAIL>', '<EMAIL>', '<EMAIL>', '<EMAIL>',
    '<EMAIL>', '<EMAIL>', '<EMAIL>', '<EMAIL>'
]


def insert_movie(title, release_date, company, description):
    movie = Movie(
        title=title,
        release_date=release_date,
        company=company,
        description=description)
    movie.insert()


def insert_actor(name, age, gender, location):
    actor = Actor(name=name, age=age, gender=gender, location=location)
    actor.passport = random.choice([True, False])
    actor.driver_license = random.choice([True, False])
    actor.ethnicity = random.choice(ETHNICITY_TYPE)
    actor.hair_color = random.choice(HAIR_COLOR_TYPE)
    actor.eye_color = random.choice(EYE_COLOR_TYPE)
    actor.body_type = random.choice(BODY_TYPE)
    actor.height = randint(160, 200)
    actor.phone = random.choice(PHONES)
    actor.email = random.choice(EMAILS)
    actor.insert()


def insert_role(movie_id, name, gender, min_age, max_age):
    role = Role(movie_id=movie_id, name=name, gender=gender,
                min_age=min_age, max_age=max_age)
    role.insert()


MOVIES = [
    [
        'Blancanieves', '2021-05-30', 'Nix Films',
        ('A twist on the Snow White fairy tale that'
         ' is set in 1920s Seville and centered on a female bullfighter.')
    ],
    [
        'Aufschneider', '2021-01-01', 'Nix Films',
        ('About a pathologist with a complicated life.'
         ' His problems with himself, his colleagues and'
         ' patients who come down to him, dead or alive.')
    ],
    [
        'Edge of Darkness', '2020-12-30', 'BBC Films',
        ('As homicide detective <NAME> investigates'
         ' the murder of his activist daughter, he uncovers'
         ' a corporate cover-up and government conspiracy that'
         ' attracts an agent tasked with cleaning up the evidence.')
    ],
    [
        'A Crime', '2022-10-21', 'BBC Films',
        ("Vincent's life is on hold until he finds his wife's killer."
         " Alice, his neighbor, is convinced she can make him happy. "
         "She decides to invent a culprit, so that Vincent can find "
         "revenge and leave the past behind. But there is no ideal "
         "culprit and no perfect crime.")
    ],
    [
        'Diabolique', '2022-05-05', 'ABC Productions',
        ("The wife and mistress of the sadistic dean of an "
         "exclusive prep school conspire to murder him.")
    ],
    [
        '<NAME>', '2021-08-10', 'ABC Productions',
        ("Everything changes for 15-year-old Mia when her "
         "mum brings home a new boyfriend.")
    ],
    [
        "Manderlay", "2022-07-15", "ABC Productions",
        "A story of slavery, set in the southern U.S. in the 1930s."
    ],
    [
        "Precious", "2021-05-15", "ABC Productions",
        ("In New York City's Harlem circa 1987, an overweight, abused,"
         " illiterate teen who is pregnant with her second child is "
         "invited to enroll in an alternative school in hopes that"
         " her life can head in a new direction.")
    ],
    [
        "The Last Temptation of Christ", "2023-10-01", "<NAME>",
        ("The life of Jesus Christ, his journey through life as he faces"
         " the struggles all humans do, and his final "
         "temptation on the cross.")
    ],
    [
        "Palace Beach Hotel", "2023-01-01", "<NAME>",
        ("Three young soldiers who participated in a military operation"
         " that went wrong, and where one of their comrades had been "
         "killed before their eyes, are placed in a luxury hotel to "
         "prevent a scandal. Despite the help of a young military "
         "psychiatrist, the young trio denies any trauma suffered, "
         "but they seem to hold a very different secret truth.")
    ],
    [
        "Nordwand", "2021-03-28", "<NAME>",
        ("Based on a true story, North Face is a survival drama film"
         " about a competition to climb the most dangerous rock face"
         " in the Alps. Set in 1936, as Nazi propaganda urges the "
         "nation's Alpinists to conquer the unclimbed north face of"
         " the Swiss massif - the Eiger - two reluctant German "
         "climbers begin their daring ascent.")
    ],
    [
        "Das Zeugenhaus", "2020-12-20", "<NAME>",
        ("Witnesses about to testify at the Nuremberg War Trials "
         "needed a safe place to wait. All under one roof, each "
         "with their own secrets. And the countess assigned to "
         "take care of them. What was her secret?")
    ],
    [
        "<NAME>", "2021-04-12", "Met film",
        ("A look at the life of philosopher and political "
         "theorist <NAME>, who reported for 'The New "
         "Yorker' on the trial of the Nazi leader Adolf "
         "Eichmann in Jerusalem.")
    ],
    [
        "<NAME>", "2021-04-13", "Met film",
        ("A woman inexplicably finds herself cut off from"
         " all human contact when an invisible, unyielding"
         " wall suddenly surrounds the countryside. Accompanied"
         " by her loyal dog Lynx, she becomes immersed in a world"
         " untouched by civilization and ruled by the laws of nature.")
    ],
    [
        "Winnetou", "2022-02-02", "Met film",
        ("When violent conflict breaks out between greedy railroaders"
         " and a tribe of Mescalero Apaches, only two men, destined "
         "to be blood brothers, can prevent all-out war: chief's son"
         " Winnetou and German engineer <NAME>.")
    ],
    [
        "Am Limit", "2023-09-09", "Met film",
        ("Daredevil mountain climbers on their attempt to"
         " break yet another speed climbing record.")
    ],
    [
        "<NAME>, Anna!", "2024-06-06", "Netflix",
        "Anna life story"
    ],
    [
        "Momentversagen", "2024-07-17", "Netflix",
        ("In a trendy restaurant, public prosecutor <NAME>"
         " and his colleagues toast his promotion. On the way home,"
         " he meets a quarrelling junkie couple in a park. Manuel "
         "wants to help the woman, who is beaten and strangled, "
         "and intervenes.")
    ],
    [
        "Fake Movie", "2024-08-28", "Matrix film",
        "Just Fake one"
    ],
    [
        "Fun Movie", "2025-01-01", "Matrix film",
        "Just Fun one"
    ]
]

ACTORS = [
    ['<NAME>', 24, 'female', 'LA'],
    ['<NAME>', 62, 'male', 'CA'],
    ['<NAME>', 33, 'female', 'LA'],
    ['<NAME>', 40, 'female', 'CA'],
    ['<NAME>', 52, 'male', 'MA'],
    ['<NAME>', 15, 'male', 'MA'],
    ['<NAME>', 22, 'female', 'MA'],
    ['<NAME>', 38, 'male', 'KA'],
    ['<NAME>', 40, 'female', 'KA'],
    ['<NAME>', 30, 'male', 'KA']
]

ROLES = [
    [1, 'kimmich', 'male', 25, 30],
    [1, 'revan', 'male', 30, 35],
    [2, 'jack', 'male', 15, 25],
    [2, 'mich', 'female', 10, 15],
    [3, 'kim', 'female', 20, 25],
    [3, 'lee', 'female', 20, 25],
    [4, 'park', 'male', 50, 60],
    [4, 'park', 'male', 70, 80],
]

for movie in MOVIES:
    insert_movie(movie[0], movie[1], movie[2], movie[3])

for actor in ACTORS:
    insert_actor(actor[0], actor[1], actor[2], actor[3])

for role in ROLES:
    insert_role(role[0], role[1], role[2], role[3], role[4])
[ "models.Role", "models.Movie", "random.randint", "random.choice", "models.Actor" ]
[((774, 866), 'models.Movie', 'Movie', ([], {'title': 'title', 'release_date': 'release_date', 'company': 'company', 'description': 'description'}), '(title=title, release_date=release_date, company=company, description=\n description)\n', (779, 866), False, 'from models import db, Actor, Movie, Role, ETHNICITY_TYPE, HAIR_COLOR_TYPE, BODY_TYPE, GENDER_TYPE, EYE_COLOR_TYPE\n'), ((1007, 1066), 'models.Actor', 'Actor', ([], {'name': 'name', 'age': 'age', 'gender': 'gender', 'location': 'location'}), '(name=name, age=age, gender=gender, location=location)\n', (1012, 1066), False, 'from models import db, Actor, Movie, Role, ETHNICITY_TYPE, HAIR_COLOR_TYPE, BODY_TYPE, GENDER_TYPE, EYE_COLOR_TYPE\n'), ((1088, 1116), 'random.choice', 'random.choice', (['[True, False]'], {}), '([True, False])\n', (1101, 1116), False, 'import random\n'), ((1144, 1172), 'random.choice', 'random.choice', (['[True, False]'], {}), '([True, False])\n', (1157, 1172), False, 'import random\n'), ((1195, 1224), 'random.choice', 'random.choice', (['ETHNICITY_TYPE'], {}), '(ETHNICITY_TYPE)\n', (1208, 1224), False, 'import random\n'), ((1248, 1278), 'random.choice', 'random.choice', (['HAIR_COLOR_TYPE'], {}), '(HAIR_COLOR_TYPE)\n', (1261, 1278), False, 'import random\n'), ((1301, 1330), 'random.choice', 'random.choice', (['EYE_COLOR_TYPE'], {}), '(EYE_COLOR_TYPE)\n', (1314, 1330), False, 'import random\n'), ((1353, 1377), 'random.choice', 'random.choice', (['BODY_TYPE'], {}), '(BODY_TYPE)\n', (1366, 1377), False, 'import random\n'), ((1397, 1414), 'random.randint', 'randint', (['(160)', '(200)'], {}), '(160, 200)\n', (1404, 1414), False, 'from random import randint\n'), ((1433, 1454), 'random.choice', 'random.choice', (['PHONES'], {}), '(PHONES)\n', (1446, 1454), False, 'import random\n'), ((1473, 1494), 'random.choice', 'random.choice', (['EMAILS'], {}), '(EMAILS)\n', (1486, 1494), False, 'import random\n'), ((1586, 1674), 'models.Role', 'Role', ([], {'movie_id': 'movie_id', 'name': 'name', 'gender': 'gender', 'min_age': 'min_age', 'max_age': 'max_age'}), '(movie_id=movie_id, name=name, gender=gender, min_age=min_age, max_age=\n max_age)\n', (1590, 1674), False, 'from models import db, Actor, Movie, Role, ETHNICITY_TYPE, HAIR_COLOR_TYPE, BODY_TYPE, GENDER_TYPE, EYE_COLOR_TYPE\n')]
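One caveat with the seeding script above: it imports `app` but never activates it. If the models are backed by Flask-SQLAlchemy (an assumption; only `from app import app` is visible in this row), the inserts need to run inside an application context, e.g.:

# Hedged sketch: wrap the seeding loops in an app context, assuming `app`
# is a Flask instance and the models use Flask-SQLAlchemy.
with app.app_context():
    for movie in MOVIES:
        insert_movie(*movie)
    for actor in ACTORS:
        insert_actor(*actor)
    for role in ROLES:
        insert_role(*role)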
import logging

import numpy as np

from .dataset import DataSet
from .markers import markers_to_events


def sliding_window_indices(window_size, window_step, sig_len):
    '''Returns indices for a sliding window with shape [nwindows x window_size]'''
    nwindows = int(np.floor((sig_len - window_size + window_step) / float(window_step)))
    print(nwindows)
    starts = np.arange(nwindows).reshape(nwindows, 1) * window_step
    return starts + np.arange(window_size)


def sliding_window(signal, window_size, window_step, win_func=None):
    '''Apply a sliding window to a 1D signal. Returns [#windows x window_size].'''
    signal = np.asarray(signal)
    if signal.ndim != 1:
        raise ValueError('Sliding window works on 1D arrays only!')
    if win_func is not None:
        if win_func.size != window_size:
            raise ValueError('window_size (%d) does not match win_func.size (%d)' % (
                window_size, win_func.size))
    indices = sliding_window_indices(window_size, window_step, signal.shape[0])
    windows = signal.take(indices=indices)
    if win_func is not None:
        windows = windows * win_func  # broadcasting matches from last dim
    return windows


def stft(signal, nfft, stepsize):
    '''Calculate the short-time Fourier transform (STFT).

    Returns [windows x FFT coefficients]'''
    wins = sliding_window(signal, nfft, stepsize, win_func=np.hanning(nfft))
    return np.fft.rfft(wins, axis=1)


def spectrogram(signal, nfft, stepsize):
    '''
    Calculate a spectrogram using STFT.
    Returns [windows x frequencies], in units related to power.
    Equivalent to power spectral density.
    '''
    spec = stft(signal, nfft, stepsize)

    # convert to power. The abs() is the magnitude of a complex number
    spec = np.abs(spec) ** 2 / nfft

    # compensate for missing negative frequencies
    spec[:, 1:-1] *= 2

    # correct for window
    spec /= np.mean(np.abs(np.hanning(nfft)) ** 2)

    # compensate for overlapping windows
    nwins = spec.shape[0]
    overlap = stepsize / float(nfft)
    spec *= (1 + (nwins - 1) * overlap) / nwins

    return spec


def get_samplerate(d, axis=1):
    '''
    Derive the sample rate from the timestamps given in either ``feat_lab``
    or ``d.ids``. The median of the difference between consecutive time
    stamps is taken to be the sample rate.

    Parameters
    ----------
    d : :class:`psychic.DataSet`
        The data to estimate the sample rate of. Must contain time stamps
        in ``d.ids``
    axis : int (default 1)
        The axis along which time samples are stored. If the last axis is
        specified here, time stamps are taken from the ``ids`` property,
        otherwise they are taken from the corresponding index of
        ``feat_lab``.

    Returns
    -------
    sample_rate : float
        The estimated samplerate.
    '''
    assert axis < d.data.ndim, 'Invalid axis specified'
    if axis == d.data.ndim - 1:
        return np.round(1. / np.median(np.diff(d.ids[0])))
    else:
        return np.round(1. / np.median(np.diff([float(x) for x in d.feat_lab[axis]])))


def find_segments(events, event_indices, start_mark, end_mark):
    '''Helper to find matching start/end markers in an event array'''
    events, event_indices = np.asarray(events), np.asarray(event_indices)
    assert events.size == event_indices.size
    mask = (events == start_mark) | (events == end_mark)
    sevents, sevent_ids = events[mask], event_indices[mask]
    stack, result = [], []
    for si in range(sevent_ids.size):
        if sevents[si] == start_mark:
            stack.append(sevent_ids[si])
        else:
            assert stack != [], 'Missing start marker'
            result.append((stack.pop(), sevent_ids[si]))
    if not stack == []:
        logging.getLogger('psychic.utils.find_segments').warning(
            'Did not end start marker(s) at %s' % repr(stack))
    return result


def cut_segments(d, marker_tuples, offsets=[0, 0]):
    '''
    Cut a dataset into segments using (start_marker, end_marker) tuples.

    Parameters
    ----------
    d : :class:`psychic.DataSet`
        Continuous data to cut into segments.
    marker_tuples : list of tuples
        A list of (start_marker, end_marker) marker codes delimiting each
        type of segment.

    Returns
    -------
    data : list of :class:`psychic.DataSet`
        A list with datasets.
    '''
    start_off, end_off = offsets
    segments = []
    e, ei, _ = markers_to_events(d.labels.flat)
    for (sm, em) in marker_tuples:
        segments.extend(find_segments(e, ei, sm, em))
    segments.sort()
    return [d[s + start_off:e + end_off] for (s, e) in segments]


def wolpaw_bitr(N, P):
    assert 0 <= P <= 1
    assert 2 <= N

    result = np.log2(N)
    if P > 0:
        result += P * np.log2(P)
    if P < 1:
        result += (1 - P) * np.log2((1 - P) / (N - 1.))

    return result


def split_in_bins(d, order, n, legend=lambda i, b: 'slice %d' % i, ascending=True):
    idx = np.argsort(order)
    if not ascending:
        idx = idx[::-1]
    bin_size = int(len(order) / float(n))
    bins = [idx[i * bin_size:(i + 1) * bin_size] for i in range(n)]

    labels = np.zeros((n, d.ninstances), dtype=np.bool)
    for i, b in enumerate(bins):
        labels[i, b] = True
    cl_lab = [legend(i, bins[i]) for i in range(n)]

    return (bins, DataSet(labels=labels, cl_lab=cl_lab, default=d))
[ "numpy.fft.rfft", "numpy.abs", "numpy.log2", "numpy.asarray", "numpy.zeros", "numpy.argsort", "numpy.diff", "numpy.arange", "numpy.hanning", "logging.getLogger" ]
[((627, 645), 'numpy.asarray', 'np.asarray', (['signal'], {}), '(signal)\n', (637, 645), True, 'import numpy as np\n'), ((1364, 1389), 'numpy.fft.rfft', 'np.fft.rfft', (['wins'], {'axis': '(1)'}), '(wins, axis=1)\n', (1375, 1389), True, 'import numpy as np\n'), ((4481, 4491), 'numpy.log2', 'np.log2', (['N'], {}), '(N)\n', (4488, 4491), True, 'import numpy as np\n'), ((4704, 4721), 'numpy.argsort', 'np.argsort', (['order'], {}), '(order)\n', (4714, 4721), True, 'import numpy as np\n'), ((4879, 4921), 'numpy.zeros', 'np.zeros', (['(n, d.ninstances)'], {'dtype': 'np.bool'}), '((n, d.ninstances), dtype=np.bool)\n', (4887, 4921), True, 'import numpy as np\n'), ((442, 464), 'numpy.arange', 'np.arange', (['window_size'], {}), '(window_size)\n', (451, 464), True, 'import numpy as np\n'), ((3108, 3126), 'numpy.asarray', 'np.asarray', (['events'], {}), '(events)\n', (3118, 3126), True, 'import numpy as np\n'), ((3128, 3153), 'numpy.asarray', 'np.asarray', (['event_indices'], {}), '(event_indices)\n', (3138, 3153), True, 'import numpy as np\n'), ((1337, 1353), 'numpy.hanning', 'np.hanning', (['nfft'], {}), '(nfft)\n', (1347, 1353), True, 'import numpy as np\n'), ((1702, 1714), 'numpy.abs', 'np.abs', (['spec'], {}), '(spec)\n', (1708, 1714), True, 'import numpy as np\n'), ((4523, 4533), 'numpy.log2', 'np.log2', (['P'], {}), '(P)\n', (4530, 4533), True, 'import numpy as np\n'), ((4570, 4598), 'numpy.log2', 'np.log2', (['((1 - P) / (N - 1.0))'], {}), '((1 - P) / (N - 1.0))\n', (4577, 4598), True, 'import numpy as np\n'), ((369, 388), 'numpy.arange', 'np.arange', (['nwindows'], {}), '(nwindows)\n', (378, 388), True, 'import numpy as np\n'), ((1847, 1863), 'numpy.hanning', 'np.hanning', (['nfft'], {}), '(nfft)\n', (1857, 1863), True, 'import numpy as np\n'), ((3572, 3620), 'logging.getLogger', 'logging.getLogger', (['"""psychic.utils.find_segments"""'], {}), "('psychic.utils.find_segments')\n", (3589, 3620), False, 'import logging\n'), ((2838, 2855), 'numpy.diff', 'np.diff', (['d.ids[0]'], {}), '(d.ids[0])\n', (2845, 2855), True, 'import numpy as np\n')]
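A worked example of the index arithmetic in sliding_window_indices above; the values are chosen for illustration. A signal of length 10 with windows of size 4 and step 2 yields nwindows = floor((10 - 4 + 2) / 2) = 4:

idx = sliding_window_indices(4, 2, 10)
# starts = [[0], [2], [4], [6]]; adding arange(4) broadcasts to:
# [[0 1 2 3]
#  [2 3 4 5]
#  [4 5 6 7]
#  [6 7 8 9]]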
from boa3.neo.cryptography import hash160
from boa3.neo.vm.type.String import String
from boa3_test.tests.boa_test import BoaTest
from boa3_test.tests.test_classes.testengine import TestEngine


class TestClass(BoaTest):

    default_folder: str = 'test_sc/class_test'

    def test_notification_get_variables(self):
        path = self.get_contract_path('NotificationGetVariables.py')
        output, manifest = self.compile_and_save(path)
        script = hash160(output)

        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'script_hash', [],
                                         expected_result_type=bytes)
        self.assertEqual(len(engine.notifications), 0)
        self.assertEqual(bytes(20), result)

        result = self.run_smart_contract(engine, path, 'event_name', [])
        self.assertEqual(len(engine.notifications), 0)
        self.assertEqual('', result)

        result = self.run_smart_contract(engine, path, 'state', [])
        self.assertEqual(len(engine.notifications), 0)
        self.assertEqual([], result)

        result = self.run_smart_contract(engine, path, 'script_hash', [1])
        self.assertEqual(len(engine.notifications), 1)
        self.assertEqual(script, result)
        engine.reset_engine()

        result = self.run_smart_contract(engine, path, 'event_name', [1])
        self.assertEqual(len(engine.notifications), 1)
        self.assertEqual('notify', result)
        engine.reset_engine()

        result = self.run_smart_contract(engine, path, 'state', [1])
        self.assertEqual(len(engine.notifications), 1)
        self.assertEqual([1], result)
        engine.reset_engine()

        result = self.run_smart_contract(engine, path, 'state', ['1'])
        self.assertEqual(len(engine.notifications), 1)
        self.assertEqual(['1'], result)

    def test_notification_set_variables(self):
        path = self.get_contract_path('NotificationSetVariables.py')
        output, manifest = self.compile_and_save(path)
        script = hash160(output)

        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'script_hash', script,
                                         expected_result_type=bytes)
        self.assertEqual(script, result)

        result = self.run_smart_contract(engine, path, 'event_name', 'unit test')
        self.assertEqual('unit test', result)

        result = self.run_smart_contract(engine, path, 'state', (1, 2, 3))
        self.assertEqual([1, 2, 3], result)

    def test_contract_constructor(self):
        path = self.get_contract_path('ContractConstructor.py')
        output, manifest = self.compile_and_save(path)

        engine = TestEngine()
        result = self.run_smart_contract(engine, path, 'new_contract')
        self.assertEqual(5, len(result))
        if isinstance(result[2], str):
            result[2] = String(result[2]).to_bytes()
        if isinstance(result[3], str):
            result[3] = String(result[3]).to_bytes()
        self.assertEqual(0, result[0])
        self.assertEqual(0, result[1])
        self.assertEqual(bytes(20), result[2])
        self.assertEqual(bytes(), result[3])
        self.assertEqual({}, result[4])
[ "boa3.neo.vm.type.String.String", "boa3_test.tests.test_classes.testengine.TestEngine", "boa3.neo.cryptography.hash160" ]
[((459, 474), 'boa3.neo.cryptography.hash160', 'hash160', (['output'], {}), '(output)\n', (466, 474), False, 'from boa3.neo.cryptography import hash160\n'), ((493, 505), 'boa3_test.tests.test_classes.testengine.TestEngine', 'TestEngine', ([], {}), '()\n', (503, 505), False, 'from boa3_test.tests.test_classes.testengine import TestEngine\n'), ((2030, 2045), 'boa3.neo.cryptography.hash160', 'hash160', (['output'], {}), '(output)\n', (2037, 2045), False, 'from boa3.neo.cryptography import hash160\n'), ((2064, 2076), 'boa3_test.tests.test_classes.testengine.TestEngine', 'TestEngine', ([], {}), '()\n', (2074, 2076), False, 'from boa3_test.tests.test_classes.testengine import TestEngine\n'), ((2693, 2705), 'boa3_test.tests.test_classes.testengine.TestEngine', 'TestEngine', ([], {}), '()\n', (2703, 2705), False, 'from boa3_test.tests.test_classes.testengine import TestEngine\n'), ((2882, 2899), 'boa3.neo.vm.type.String.String', 'String', (['result[2]'], {}), '(result[2])\n', (2888, 2899), False, 'from boa3.neo.vm.type.String import String\n'), ((2974, 2991), 'boa3.neo.vm.type.String.String', 'String', (['result[3]'], {}), '(result[3])\n', (2980, 2991), False, 'from boa3.neo.vm.type.String import String\n')]
# Generated by Django 3.0.8 on 2020-09-13 18:33

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('auctions', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AuctionListing',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=64)),
                ('description', models.CharField(max_length=500)),
                ('category', models.CharField(max_length=64)),
                ('startingbid', models.FloatField()),
                ('piclink', models.CharField(default='https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcRqEWgS0uxxEYJ0PsOb2OgwyWvC0Gjp8NUdPw&usqp=CAU', max_length=200)),
                ('currentbid', models.FloatField()),
                ('isactive', models.BooleanField(default=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_auctions', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Watchlist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_in_watchlist', to='auctions.AuctionListing')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='watchlist_of_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Comments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.CharField(max_length=200)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments_on_title', to='auctions.AuctionListing')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments_by_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Bids',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bid', models.FloatField()),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bid_on_title', to='auctions.AuctionListing')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bids_by_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
[ "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.FloatField", "django.db.models.BooleanField", "django.db.models.AutoField", "django.db.models.DateTimeField" ]
[((394, 487), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (410, 487), False, 'from django.db import migrations, models\n'), ((512, 543), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (528, 543), False, 'from django.db import migrations, models\n'), ((578, 610), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (594, 610), False, 'from django.db import migrations, models\n'), ((642, 673), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (658, 673), False, 'from django.db import migrations, models\n'), ((708, 727), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (725, 727), False, 'from django.db import migrations, models\n'), ((758, 913), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcRqEWgS0uxxEYJ0PsOb2OgwyWvC0Gjp8NUdPw&usqp=CAU"""', 'max_length': '(200)'}), "(default=\n 'https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcRqEWgS0uxxEYJ0PsOb2OgwyWvC0Gjp8NUdPw&usqp=CAU'\n , max_length=200)\n", (774, 913), False, 'from django.db import migrations, models\n'), ((937, 956), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (954, 956), False, 'from django.db import migrations, models\n'), ((988, 1021), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1007, 1021), False, 'from django.db import migrations, models\n'), ((1055, 1094), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1075, 1094), False, 'from django.db import migrations, models\n'), ((1122, 1258), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""user_auctions"""', 'to': 'settings.AUTH_USER_MODEL'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='user_auctions', to=settings.AUTH_USER_MODEL)\n", (1139, 1258), False, 'from django.db import migrations, models\n'), ((1389, 1482), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1405, 1482), False, 'from django.db import migrations, models\n'), ((1506, 1637), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""item_in_watchlist"""', 'to': '"""auctions.AuctionListing"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='item_in_watchlist', to='auctions.AuctionListing')\n", (1523, 1637), False, 'from django.db import migrations, models\n'), ((1660, 1790), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""watchlist_of_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='watchlist_of_user', to=settings.AUTH_USER_MODEL)\n", (1677, 1790), False, 'from django.db import migrations, models\n'), ((1919, 2012), 'django.db.models.AutoField', 'models.AutoField', ([], 
{'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1935, 2012), False, 'from django.db import migrations, models\n'), ((2039, 2071), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2055, 2071), False, 'from django.db import migrations, models\n'), ((2105, 2144), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2125, 2144), False, 'from django.db import migrations, models\n'), ((2172, 2303), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""comments_on_title"""', 'to': '"""auctions.AuctionListing"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='comments_on_title', to='auctions.AuctionListing')\n", (2189, 2303), False, 'from django.db import migrations, models\n'), ((2326, 2455), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""comments_by_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='comments_by_user', to=settings.AUTH_USER_MODEL)\n", (2343, 2455), False, 'from django.db import migrations, models\n'), ((2580, 2673), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2596, 2673), False, 'from django.db import migrations, models\n'), ((2696, 2715), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (2713, 2715), False, 'from django.db import migrations, models\n'), ((2743, 2869), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""bid_on_title"""', 'to': '"""auctions.AuctionListing"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='bid_on_title', to='auctions.AuctionListing')\n", (2760, 2869), False, 'from django.db import migrations, models\n'), ((2892, 3017), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""bids_by_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='bids_by_user', to=settings.AUTH_USER_MODEL)\n", (2909, 3017), False, 'from django.db import migrations, models\n')]
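For reference, a hypothetical models.py that would generate the AuctionListing table in the migration above; every field is read directly from the CreateModel operation, and Watchlist, Comments, and Bids follow the same pattern:

from django.conf import settings
from django.db import models

# Default image URL copied verbatim from the migration.
PIC_DEFAULT = ('https://encrypted-tbn0.gstatic.com/images'
               '?q=tbn%3AANd9GcRqEWgS0uxxEYJ0PsOb2OgwyWvC0Gjp8NUdPw&usqp=CAU')


class AuctionListing(models.Model):
    title = models.CharField(max_length=64)
    description = models.CharField(max_length=500)
    category = models.CharField(max_length=64)
    startingbid = models.FloatField()
    piclink = models.CharField(max_length=200, default=PIC_DEFAULT)
    currentbid = models.FloatField()
    isactive = models.BooleanField(default=True)
    created_on = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True,
                             on_delete=models.CASCADE,
                             related_name='user_auctions')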
# -*- coding: utf-8 -*-
"""
Created on Mon Nov  2 01:28:58 2020
"""
import unittest

from htsinfer.read_motif_v6 import find_overlaps


class Test_Inputs(unittest.TestCase):

    def test_call(self):
        # 0 arguments, not acceptable
        with self.assertRaises(TypeError):
            find_overlaps()
        # 1 argument, not acceptable
        with self.assertRaises(TypeError):
            find_overlaps("")
        # 2 arguments, not acceptable
        with self.assertRaises(TypeError):
            find_overlaps("", "")
        # 3 or 4 arguments, acceptable
        find_overlaps("A", "A", 1)
        find_overlaps("A", "A", 1, True)
        # 5 arguments, not acceptable
        with self.assertRaises(TypeError):
            find_overlaps("", "", 0, True, 0)

    def test_positional_arguments(self):
        # Check that argument 1 is of type string
        with self.assertRaises(TypeError):
            find_overlaps(0, "", 1, True)
        # Check that argument 2 is of type string
        with self.assertRaises(TypeError):
            find_overlaps("", 0, 1, True)
        # Check that argument 3 is of type int
        with self.assertRaises(TypeError):
            find_overlaps("", "", "", True)
        # Check that argument 4 is of type bool
        with self.assertRaises(TypeError):
            find_overlaps("", "", 1, "")

    def test_arguments_types(self):
        # Check that motif is of type string
        with self.assertRaises(TypeError):
            find_overlaps(motif=0, read="", min_overlap=1, full_contain=False)
        # Check that read is of type string
        with self.assertRaises(TypeError):
            find_overlaps(motif="", read=0, min_overlap=1, full_contain=False)
        # Check that min_overlap is of type int
        with self.assertRaises(TypeError):
            find_overlaps(motif="", read="", min_overlap="", full_contain=False)
        # Check that full_contain is of type bool
        with self.assertRaises(TypeError):
            find_overlaps(motif="", read="", min_overlap=1, full_contain=6)

    def test_argument_range(self):
        # Check that motif is not accepted if it is an empty string
        with self.assertRaises(ValueError):
            find_overlaps(motif="", read="A", min_overlap=1, full_contain=False)
        # Check that read is not accepted if it is an empty string
        with self.assertRaises(ValueError):
            find_overlaps(motif="A", read="", min_overlap=1, full_contain=False)
        # Check that min_overlap is not accepted if smaller than 1
        with self.assertRaises(ValueError):
            find_overlaps(motif="A", read="A", min_overlap=0, full_contain=False)
        # check that the read is not shorter than the motif
        with self.assertRaises(ValueError):
            find_overlaps(motif="AAAAAA", read="A", min_overlap=1, full_contain=False)
        # check that the motif contains no lowercase characters
        with self.assertRaises(ValueError):
            find_overlaps(motif="a", read="AAAAAA", min_overlap=1, full_contain=False)
        # check that the read contains no lowercase characters
        with self.assertRaises(ValueError):
            find_overlaps(motif="A", read="Aa", min_overlap=1, full_contain=False)

    def test_return_value(self):
        rv = find_overlaps(motif="G", read="AAAA", min_overlap=1, full_contain=False)
        self.assertTrue(isinstance(rv, list))


class TestMatchFull(unittest.TestCase):

    def test_single_match(self):
        rv = find_overlaps(motif="GGA", read="TACGGGACGAT",
                           min_overlap=1, full_contain=False)
        self.assertTrue(len(rv) == 1)
        self.assertTrue(rv[0] == (4, 1))

    def test_single_match_start(self):
        rv = find_overlaps(motif="ACGGG", read="ACGGGACGAT",
                           min_overlap=1, full_contain=False)
        self.assertTrue(len(rv) == 1)
        self.assertTrue(rv[0] == (0, 1))

    def test_single_match_end(self):
        rv = find_overlaps(motif="CGA", read="TACGGGACGA",
                           min_overlap=1, full_contain=False)
        self.assertTrue(len(rv) == 1)
        self.assertTrue(rv[0] == (7, 1))

    def test_multi_match_not_overlapping(self):
        rv = find_overlaps(motif="CGA", read="TATTCGATTAGCGAAT",
                           min_overlap=1, full_contain=False)
        self.assertTrue(len(rv) == 2)
        self.assertTrue(rv[0] == (4, 1))
        self.assertTrue(rv[1] == (11, 1))

    def test_multi_match_overlapping(self):
        rv = find_overlaps(motif="CGACGA", read="TATTCGACGACGATTAGCGAAT",
                           min_overlap=1, full_contain=False)
        self.assertTrue(len(rv) == 2)
        self.assertTrue(rv[0] == (4, 1))
        self.assertTrue(rv[1] == (7, 1))

    def test_return_value_full(self):
        rv = find_overlaps(motif="AAA", read="AAAA",
                           min_overlap=1, full_contain=True)
        self.assertTrue(rv[0] == (0, 1))


class TestMatchPartial(unittest.TestCase):

    def test_match_start(self):
        rv = find_overlaps(motif="GTA", read="TACGGGACGA",
                           min_overlap=2, full_contain=False)
        self.assertTrue(len(rv) == 1)
        self.assertTrue(rv[0] == (0, 2/3))

    def test_match_end(self):
        rv = find_overlaps(motif="GTAAA", read="TACGGGACGAGT",
                           min_overlap=2, full_contain=False)
        self.assertTrue(len(rv) == 1)
        self.assertTrue(rv[0] == (10, 2/5))


class TestMatchMixed(unittest.TestCase):

    def test_multi_match_start(self):
        rv = find_overlaps(motif="GTA", read="TACGGGTAGA",
                           min_overlap=2, full_contain=False)
        self.assertTrue(len(rv) == 2)
        self.assertTrue(rv[0] == (0, 2/3))
        self.assertTrue(rv[1] == (5, 1))

    def test_multi_match_end(self):
        rv = find_overlaps(motif="GTAAA", read="ACGGTAAAAGT",
                           min_overlap=2, full_contain=False)
        self.assertTrue(len(rv) == 2)
        self.assertTrue(rv[0] == (3, 1))
        self.assertTrue(rv[1] == (9, 2/5))


# Further stress tests still to be implemented.
class NoMatch(unittest.TestCase):

    def test_noMatch(self):
        rv = find_overlaps(motif="GTA", read="AAAAAAAAAAA",
                           min_overlap=2, full_contain=False)
        self.assertTrue(len(rv) == 0)


class TestLongReads(unittest.TestCase):

    def test_file_1(self):
        pass


if __name__ == '__main__':
    unittest.main()
[ "unittest.main", "htsinfer.read_motif_v6.find_overlaps" ]
[((6663, 6678), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6676, 6678), False, 'import unittest\n'), ((584, 610), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['"""A"""', '"""A"""', '(1)'], {}), "('A', 'A', 1)\n", (597, 610), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((619, 651), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['"""A"""', '"""A"""', '(1)', '(True)'], {}), "('A', 'A', 1, True)\n", (632, 651), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((3437, 3509), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""G"""', 'read': '"""AAAA"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='G', read='AAAA', min_overlap=1, full_contain=False)\n", (3450, 3509), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((3672, 3758), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GGA"""', 'read': '"""TACGGGACGAT"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='GGA', read='TACGGGACGAT', min_overlap=1, full_contain=\n False)\n", (3685, 3758), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((3913, 4000), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""ACGGG"""', 'read': '"""ACGGGACGAT"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='ACGGG', read='ACGGGACGAT', min_overlap=1, full_contain\n =False)\n", (3926, 4000), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((4153, 4238), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""CGA"""', 'read': '"""TACGGGACGA"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='CGA', read='TACGGGACGA', min_overlap=1, full_contain=False\n )\n", (4166, 4238), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((4402, 4492), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""CGA"""', 'read': '"""TATTCGATTAGCGAAT"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='CGA', read='TATTCGATTAGCGAAT', min_overlap=1,\n full_contain=False)\n", (4415, 4492), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((4695, 4794), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""CGACGA"""', 'read': '"""TATTCGACGACGATTAGCGAAT"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='CGACGA', read='TATTCGACGACGATTAGCGAAT', min_overlap=1,\n full_contain=False)\n", (4708, 4794), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((4990, 5063), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""AAA"""', 'read': '"""AAAA"""', 'min_overlap': '(1)', 'full_contain': '(True)'}), "(motif='AAA', read='AAAA', min_overlap=1, full_contain=True)\n", (5003, 5063), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((5223, 5308), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GTA"""', 'read': '"""TACGGGACGA"""', 'min_overlap': '(2)', 'full_contain': '(False)'}), "(motif='GTA', read='TACGGGACGA', min_overlap=2, full_contain=False\n )\n", (5236, 5308), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((5456, 5544), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GTAAA"""', 'read': '"""TACGGGACGAGT"""', 'min_overlap': '(2)', 'full_contain': '(False)'}), "(motif='GTAAA', read='TACGGGACGAGT', min_overlap=2,\n full_contain=False)\n", (5469, 5544), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((5745, 5830), 
'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GTA"""', 'read': '"""TACGGGTAGA"""', 'min_overlap': '(2)', 'full_contain': '(False)'}), "(motif='GTA', read='TACGGGTAGA', min_overlap=2, full_contain=False\n )\n", (5758, 5830), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((6025, 6112), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GTAAA"""', 'read': '"""ACGGTAAAAGT"""', 'min_overlap': '(2)', 'full_contain': '(False)'}), "(motif='GTAAA', read='ACGGTAAAAGT', min_overlap=2,\n full_contain=False)\n", (6038, 6112), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((6400, 6486), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""GTA"""', 'read': '"""AAAAAAAAAAA"""', 'min_overlap': '(2)', 'full_contain': '(False)'}), "(motif='GTA', read='AAAAAAAAAAA', min_overlap=2, full_contain=\n False)\n", (6413, 6486), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((293, 308), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {}), '()\n', (306, 308), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((401, 418), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""'], {}), "('')\n", (414, 418), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((512, 533), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""', '""""""'], {}), "('', '')\n", (525, 533), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((745, 778), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""', '""""""', '(0)', '(True)', '(0)'], {}), "('', '', 0, True, 0)\n", (758, 778), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((926, 955), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['(0)', '""""""', '(1)', '(True)'], {}), "(0, '', 1, True)\n", (939, 955), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1061, 1090), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""', '(0)', '(1)', '(True)'], {}), "('', 0, 1, True)\n", (1074, 1090), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1193, 1224), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""', '""""""', '""""""', '(True)'], {}), "('', '', '', True)\n", (1206, 1224), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1328, 1356), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', (['""""""', '""""""', '(1)', '""""""'], {}), "('', '', 1, '')\n", (1341, 1356), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1494, 1560), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '(0)', 'read': '""""""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif=0, read='', min_overlap=1, full_contain=False)\n", (1507, 1560), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1660, 1726), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '""""""', 'read': '(0)', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='', read=0, min_overlap=1, full_contain=False)\n", (1673, 1726), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((1830, 1898), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '""""""', 'read': '""""""', 'min_overlap': '""""""', 'full_contain': '(False)'}), "(motif='', read='', min_overlap='', full_contain=False)\n", (1843, 1898), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((2023, 2086), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', 
([], {'motif': '""""""', 'read': '""""""', 'min_overlap': '(1)', 'full_contain': '(6)'}), "(motif='', read='', min_overlap=1, full_contain=6)\n", (2036, 2086), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((2247, 2315), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '""""""', 'read': '"""A"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='', read='A', min_overlap=1, full_contain=False)\n", (2260, 2315), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((2465, 2533), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""A"""', 'read': '""""""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='A', read='', min_overlap=1, full_contain=False)\n", (2478, 2533), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((2683, 2752), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""A"""', 'read': '"""A"""', 'min_overlap': '(0)', 'full_contain': '(False)'}), "(motif='A', read='A', min_overlap=0, full_contain=False)\n", (2696, 2752), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((2880, 2954), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""AAAAAA"""', 'read': '"""A"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='AAAAAA', read='A', min_overlap=1, full_contain=False)\n", (2893, 2954), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((3087, 3161), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""a"""', 'read': '"""AAAAAA"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='a', read='AAAAAA', min_overlap=1, full_contain=False)\n", (3100, 3161), False, 'from htsinfer.read_motif_v6 import find_overlaps\n'), ((3293, 3363), 'htsinfer.read_motif_v6.find_overlaps', 'find_overlaps', ([], {'motif': '"""A"""', 'read': '"""Aa"""', 'min_overlap': '(1)', 'full_contain': '(False)'}), "(motif='A', read='Aa', min_overlap=1, full_contain=False)\n", (3306, 3363), False, 'from htsinfer.read_motif_v6 import find_overlaps\n')]
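The test suite above fully specifies find_overlaps' contract: it returns (start_in_read, matched_fraction) tuples for every alignment of the motif against the read with at least min_overlap matching characters, allowing the motif to hang off either end of the read unless full_contain is set. A reference sketch satisfying those assertions (not the htsinfer implementation; input validation omitted):

def find_overlaps_sketch(motif, read, min_overlap, full_contain=False):
    hits = []
    m, r = len(motif), len(read)
    # offset = position of motif[0] relative to read[0]; negative offsets
    # hang the motif off the start of the read.
    for offset in range(-(m - min_overlap), r - min_overlap + 1):
        lo, hi = max(0, offset), min(r, offset + m)  # overlap region in read
        if hi - lo < min_overlap:
            continue
        if full_contain and (offset < 0 or offset + m > r):
            continue  # motif must lie entirely inside the read
        if read[lo:hi] == motif[lo - offset:hi - offset]:
            hits.append((lo, (hi - lo) / m))          # (start, fraction)
    return hits

assert find_overlaps_sketch('GGA', 'TACGGGACGAT', 1) == [(4, 1.0)]
assert find_overlaps_sketch('GTA', 'TACGGGTAGA', 2) == [(0, 2 / 3), (5, 1.0)]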
# todo 13. Scrambled-word game. Build a game in which the user has to guess
# a word shown with its letters shuffled. The program reads a list of words
# from a text file and picks one at random. The player gets six attempts to
# guess the word. At the end, the word is shown on screen, along with
# whether the user won or lost the game.
from random import shuffle, choice
from os import system

from cores import *

with open("palavras.txt", "r") as file:
    allText = file.read()

words = list(map(str, allText.split()))
palavra = choice(words)
lista = list()
nova = ''
tentativa = 6

system('cls')  # clears the terminal on Windows; use 'clear' on Unix
for i in range(0, len(palavra)):
    lista.append(palavra[i])
shuffle(lista)
for i in lista:
    nova += i

while True:
    print(f'Scrambled word > {cor["amarelo"]}{nova}{cor["limpar"]} <\n')
    guess = str(input(f'{tentativa} attempts left: ')).lower().strip()
    system('cls')
    if guess == palavra:
        print(f'{cor["verde"]}Congratulations, you won!!{cor["limpar"]}')
        print(f'The word was {palavra}')
        break
    else:
        tentativa -= 1
        if tentativa == 0:
            print(f'{cor["vermelho"]}GAME-OVER{cor["limpar"]}')
            print(f'The word was {palavra}')
            break
[ "random.shuffle", "random.choice", "os.system" ]
[((663, 676), 'os.system', 'system', (['"""cls"""'], {}), "('cls')\n", (669, 676), False, 'from os import system\n'), ((741, 755), 'random.shuffle', 'shuffle', (['lista'], {}), '(lista)\n', (748, 755), False, 'from random import shuffle, choice\n'), ((608, 621), 'random.choice', 'choice', (['words'], {}), '(words)\n', (614, 621), False, 'from random import shuffle, choice\n'), ((960, 973), 'os.system', 'system', (['"""cls"""'], {}), "('cls')\n", (966, 973), False, 'from os import system\n')]
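The game imports a `cor` dict from a local cores module that is not part of this row. A minimal sketch that satisfies the four keys used above; the ANSI escape values are assumptions, only the key names come from the source:

# Hypothetical cores.py; only the keys referenced by the game are defined.
cor = {
    'limpar':   '\033[m',    # reset
    'amarelo':  '\033[33m',  # yellow
    'verde':    '\033[32m',  # green
    'vermelho': '\033[31m',  # red
}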
""" This module contains all the required backend stuff (logic) required to handle and download from the wallpaper website. URL signature looks like: https://wall.alphacoders.com/search.php?search={searchKey}&page={PageNo} The website may internally store some popular keywords like 'spiderman' in collections and serve them with collection ids, need to look out for those variations. """ import os, sys, logging, time import threading, requests, bs4 from logger import mainlogger from exceptions import (InvalidDownloadNum, MaxRetriesCrossed, SearchReturnedNone) # get module logger downloadLogger = logging.getLogger('main.downloader') """ A Wallpaper Downloader Class for https://wall.alphacoders.com """ class AlphaDownloader: queryStr = \ 'https://wall.alphacoders.com/search.php?search=%(searchKey)s&page=%(pageNo)d' headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) ' 'AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/72.0.3626.28 Safari/537.36' } prefixes = ('Movie ', 'Video ', 'Comics ', 'TV Show ') chunksize = 10000000 # For current session (total) totalSize = 0 totalDownloads = 0 printFormat = ("Current Run :\n" "Images Downloaded : %(numDownloaded)d, Time taken: %(lastDownloadTime)d secs\n" "Number of Pages : %(numPages)d, Downloaded: %(downloadSize).3f MB\n\n" "Session Details:\n" "Total Images : %(totalDownloads)d, Total Size: %(totalSize).3f MB\n") def __init__(self, trace=False): " initialize attributes for object " self.imageMetaDict = dict() self.trace = trace self.mutex = threading.Lock() self._queryStrServed = None self.downloadSession = requests.Session() self.downloadSession.headers.update(self.headers) def startDownload(self, searchKey, numImages, downloadDir = os.curdir, maxretries = 2, imgPerThread = 5): """ toplevel method for starting download, handle and check actual download success """ # PreDownload Hooks if numImages <= 0: raise InvalidDownloadNum # Make sure download dir exists os.makedirs(downloadDir, exist_ok=True) downloadLogger.info(f'{downloadDir = }') self.downloadDir = downloadDir # For current run self.searchKey = searchKey self.numImages = numImages self.numPages = 0 self.numDownloaded = 0 self.downloadSize = 0 self.lastDownloadTime = None MaxRetries = maxretries start = time.time() self._queryStrServed = None # query string returned by website # (may be collection id) retries = 0 # Try until actual number of images downloaded is less than # given number; and retries is less than max retries while self.numDownloaded < self.numImages and retries < MaxRetries: self._runDownload(imgPerThread) retries += 1 self.lastDownloadTime = time.time() - start self.totalDownloads += self.numDownloaded self.sessionDict = dict( numDownloaded = self.numDownloaded, lastDownloadTime = self.lastDownloadTime, numPages = self.numPages, downloadSize = self.bytesToMiB(self.downloadSize), totalDownloads = self.totalDownloads, totalSize = self.bytesToMiB(self.totalSize), ) if self.trace: print('\n', ' Stats: '.center(50, '*')) print(self.printFormat % self.sessionDict) if retries >= MaxRetries and self.numDownloaded < self.numImages: raise MaxRetriesCrossed("Max Retries; check log for error details") def _downloadSq(self, imgList): " Target Function for threading " for imgname, imglink in imgList: self.downloadImage(imglink, imgname) def _runDownload(self, ImgPerThread=5): """ Threaded Download Logic; Perform Download assuming every link works, doesn't check if the actual number of download satisfies the required number given Not to be invoked directly, use wrapper method 
startDownload()
        """
        threads = []
        imgArg = []
        finished = False
        imgLinksFetched = 0
        while not finished:
            self.numPages += 1
            for imgTuple in self.fetchLinks(self.searchKey, self.numPages):
                if imgLinksFetched >= self.numImages:
                    finished = True
                else:
                    imgArg.append(imgTuple)
                # if length becomes equal to image per thread
                # or image links are fetched but not processed
                # (not a multiple of imgPerThread)
                if len(imgArg) == ImgPerThread \
                        or (finished and imgArg):
                    downloadLogger.info(f'{len(imgArg) = }')
                    downloadLogger.debug(f'{imgLinksFetched = }')
                    downloadLogger.debug(f'{self.numPages = }')
                    thread = threading.Thread(target=self._downloadSq, args=(imgArg,))
                    threads.append(thread)
                    thread.start()
                    imgArg = []
                imgLinksFetched += 1
                if finished:
                    break  # break inner loop if download
                    # number satisfied
        for thread in threads:
            thread.join()

    def downloadImage(self, link, name=''):
        " download given image link "
        # Use the trailing id of the image link: ('1149.jpg')
        # to make the image name truly unique
        imgfilename = os.path.join(self.downloadDir,
                                   name + '_' + os.path.basename(link))
        # Abort Download (return) if:
        # 1) Filename exists,
        if os.path.exists(imgfilename):
            downloadLogger.warning(f'{imgfilename} exists; possible bug')
            return
        try:
            image = self.downloadSession.get(link)
            image.raise_for_status()
        # 2) Download error
        except Exception as exc:
            downloadLogger.error(f'Error saving image: {link}\n{str(exc)}')
            return
        # save downloaded image (try to delegate os-specific filename
        # restrictions to underlying platform by encoding filename)
        with open(imgfilename.encode(), 'wb') as imgfile:
            for chunk in image.iter_content(self.chunksize):
                imgfile.write(chunk)
        with self.mutex:
            imgSize = os.path.getsize(imgfilename)
            self.downloadSize += imgSize
            self.totalSize += imgSize
            self.numDownloaded += 1
            self.imageMetaDict[name] = link
        if self.trace:
            print(f'Downloaded: {name}...')
        self.imgfilename = imgfilename  # save filename for subclass

    def restoreMetadata(self, imageMetaDict, imgPerThread=5):
        " Download images from a previously saved name-image dict "
        imgList = [(name, link) for name, link in imageMetaDict.items()]
        threads = []
        while imgList:
            imgArg, imgList = imgList[:imgPerThread], imgList[imgPerThread:]
            thread = threading.Thread(target=self._downloadSq, args=(imgArg,))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
        # report restore completion through the module logger; no messagebox
        # module is available in this backend-only module
        downloadLogger.info('Previous session was successfully restored')

    def fetchLinks(self, searchKey, start=1, stop=None, step=1):
        """
        Generate the image links for pages start to stop (non-inclusive)
        Optional:
        Stop: if not given, scrape links for start page only,
        Step: default 1, can travel backwards if given negative value
        """
        if stop is None:  # generate links for given page only
            stop = start + 1
        downloadLogger.info(f'{start = }, {stop = }, {step = }')
        for pageNum in range(start, stop, step):
            # construct page url, if first pass, use base query, else fetched
            # query string
            pageInfoDict = dict(searchKey=searchKey, pageNo=pageNum)
            pageUrl = self._queryStrServed + f'&page={pageNum}' \
                if self._queryStrServed \
                else self.queryStr % pageInfoDict
            downloadLogger.info(f'{pageUrl = }')
            # fetch page
            try:
                pageResponse = self.downloadSession.get(pageUrl)
                pageResponse.raise_for_status()
                downloadLogger.info(f'{pageResponse.status_code = }')
            except Exception as exc:
                downloadLogger.error(f'Error Downloading Page: {pageNum}\n{str(exc)}')
                continue
            # parse and get the image links
            mainPageSoup = bs4.BeautifulSoup(pageResponse.text, 'lxml')
            # get the served query string (may give a collection
# id for selected keywords)
            if self._queryStrServed is None:
                try:
                    pageUrl = mainPageSoup.select('div.page_container')[0].get('data-url')
                except IndexError:
                    raise SearchReturnedNone("Target Not found") from None
                self._queryStrServed = pageUrl
                downloadLogger.debug(f'{pageUrl = }')
            # get the image elements with class='img-responsive'
            imageTags = mainPageSoup.select('img.img-responsive')
            downloadLogger.debug(f'{len(imageTags) = }')
            # generate imagename, imagelink for every image found
            for imageTag in imageTags:
                # drop the boilerplate suffix before truncating; str.rstrip()
                # removes a character set, not a trailing substring
                imageName = imageTag.get('alt')
                suffix = ' HD Wallpaper | Background Image'
                if imageName.endswith(suffix):
                    imageName = imageName[:-len(suffix)]
                imageName = imageName[:50]
                # strip unnecessary prefixes (if present); slicing avoids the
                # character-set semantics of str.lstrip()
                for prefix in self.prefixes:
                    if imageName.startswith(prefix):
                        imageName = imageName[len(prefix):]
                        break
                imageLink = imageTag.get('src').replace('thumbbig-', '')
                yield imageName, imageLink

    @staticmethod
    def bytesToMiB(sizeInBy):
        " Return size in bytes to MiB "
        return sizeInBy / (1024 * 1024)
[ "threading.Thread", "os.makedirs", "os.path.basename", "os.path.getsize", "requests.Session", "os.path.exists", "time.time", "threading.Lock", "exceptions.MaxRetriesCrossed", "exceptions.SearchReturnedNone", "bs4.BeautifulSoup", "logging.getLogger" ]
[((628, 664), 'logging.getLogger', 'logging.getLogger', (['"""main.downloader"""'], {}), "('main.downloader')\n", (645, 664), False, 'import os, sys, logging, time\n'), ((1745, 1761), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1759, 1761), False, 'import threading, requests, bs4\n'), ((1829, 1847), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1845, 1847), False, 'import threading, requests, bs4\n'), ((2398, 2437), 'os.makedirs', 'os.makedirs', (['downloadDir'], {'exist_ok': '(True)'}), '(downloadDir, exist_ok=True)\n', (2409, 2437), False, 'import os, sys, logging, time\n'), ((2797, 2808), 'time.time', 'time.time', ([], {}), '()\n', (2806, 2808), False, 'import os, sys, logging, time\n'), ((6166, 6193), 'os.path.exists', 'os.path.exists', (['imgfilename'], {}), '(imgfilename)\n', (6180, 6193), False, 'import os, sys, logging, time\n'), ((3272, 3283), 'time.time', 'time.time', ([], {}), '()\n', (3281, 3283), False, 'import os, sys, logging, time\n'), ((3967, 4028), 'exceptions.MaxRetriesCrossed', 'MaxRetriesCrossed', (['"""Max Retries; check log for error details"""'], {}), "('Max Retries; check log for error details')\n", (3984, 4028), False, 'from exceptions import InvalidDownloadNum, MaxRetriesCrossed, SearchReturnedNone\n'), ((6888, 6916), 'os.path.getsize', 'os.path.getsize', (['imgfilename'], {}), '(imgfilename)\n', (6903, 6916), False, 'import os, sys, logging, time\n'), ((7568, 7625), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._downloadSq', 'args': '(imgArg,)'}), '(target=self._downloadSq, args=(imgArg,))\n', (7584, 7625), False, 'import threading, requests, bs4\n'), ((9223, 9267), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['pageResponse.text', '"""lxml"""'], {}), "(pageResponse.text, 'lxml')\n", (9240, 9267), False, 'import threading, requests, bs4\n'), ((6062, 6084), 'os.path.basename', 'os.path.basename', (['link'], {}), '(link)\n', (6078, 6084), False, 'import os, sys, logging, time\n'), ((5401, 5458), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._downloadSq', 'args': '(imgArg,)'}), '(target=self._downloadSq, args=(imgArg,))\n', (5417, 5458), False, 'import threading, requests, bs4\n'), ((9592, 9630), 'exceptions.SearchReturnedNone', 'SearchReturnedNone', (['"""Target Not found"""'], {}), "('Target Not found')\n", (9610, 9630), False, 'from exceptions import InvalidDownloadNum, MaxRetriesCrossed, SearchReturnedNone\n')]
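A minimal usage sketch for the downloader row above, assuming the module is importable as downloader alongside its logger and exceptions helpers; the search key, count, and directory are illustrative only.

from downloader import AlphaDownloader
from exceptions import MaxRetriesCrossed, SearchReturnedNone

dl = AlphaDownloader(trace=True)
try:
    # fetch ten wallpapers matching the search key into ./wallpapers
    dl.startDownload('landscape', numImages=10, downloadDir='wallpapers')
except (MaxRetriesCrossed, SearchReturnedNone) as exc:
    print(f'Download aborted: {exc}')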
from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop from Components.config import config from Components.ActionMap import ActionMap from Components.GUIComponent import GUIComponent from Components.Pixmap import Pixmap from Screens.Screen import Screen from Tools.Directories import fileExists from Tools.Log import Log from twisted.web.client import downloadPage class MyPixmap(Pixmap): def postWidgetCreate(self, instance): Pixmap.postWidgetCreate(self, instance) self.setupAnimation() def setupAnimation(self): if self.instance: self.instance.setShowHideAnimation(PhotoScreensaver.ANIMATION_KEY_FADE) self.instance.setScale(ePixmap.SCALE_TYPE_WIDTH) class PhotoScreensaver(Screen): skin = """<screen name="Screensaver" title="Screensaver" position="center,center" size="fill_parent,fill_parent" backgroundColor="#000000"> <widget name="wallpaper" position="0,0" size="fill_parent,fill_parent" zPosition="1"/> </screen>""" TEMPFILE = "/tmp/wallpaper" ANIMATION_KEY_FADE = "wallpaper_slow_fade" def __init__(self, session): Screen.__init__(self, session) self["actions"] = ActionMap(["OkCancelActions"], { "ok": self._onOk, "cancel": self.close}, - 2) self.highPrioActionSlot = eActionMap.getInstance().bindAction('', -0x7FFFFFFF, self._onKeypress) #highest prio self._pixmap = MyPixmap() self["wallpaper"] = self._pixmap self._setupAnimation() #picload setup size = getDesktop(0).size() width, height = size.width(), size.height() self._picload = ePicLoad() self.__picload_conn = self._picload.PictureData.connect(self._onPixmapReady) self._picload.setPara((width, height, width, height, False, 1, '#ff000000')) self._nextPixmap = None self._timer = eTimer() self.__timer_conn = self._timer.timeout.connect(self._onTimeout) self._inactivityTimer = eTimer() self.__inactivityTimer_conn = self._inactivityTimer.timeout.connect(self._onInactivityTimeout) self._immediateShow = True self._isEnabled = False self._isInitial = True self.onShow.append(self._onShow) self.onClose.append(self._onClose) config.plugins.screensaver.photo.speed.addNotifier(self._setupAnimation, initial_call = False) def _onShow(self):# self._immediateShow = self._isInitial if not self._immediateShow: self._restartTimer() self._check() def _onClose(self): config.plugins.screensaver.photo.speed.removeNotifier(self._setupAnimation) def _setupAnimation(self, *args): animset = eWindowAnimationSet.create() animset.setKey(PhotoScreensaver.ANIMATION_KEY_FADE) animset.setName("Slow wallpaper fade") animset.setInternal(True) interpolator = eLinearInterpolator.create() duration = int(config.plugins.screensaver.photo.speed.value) * 1000 animset.setAlpha(eFloatAnimation.create(duration, 0.0, 1.0, False, interpolator)) eWindowAnimationManager.setAnimationSet(animset) self._pixmap.setupAnimation() def _check(self): if fileExists(self.TEMPFILE): self._onFileReady() else: self._loadNext() def isEnabled(self): return self._isEnabled def setEnabled(self, enabled): Log.i("%s" %(enabled,)) if enabled == self._isEnabled: return self._isEnabled = enabled if self._isEnabled: self._onKeypress() self._check() else: self._reset() enabled = property(isEnabled, setEnabled) def _reset(self): self._nextPixmap = None self._timer.stop() self._inactivityTimer.stop() def _onKeypress(self, *args): self.hide() self._reset() if self._isEnabled: self._inactivityTimer.startLongTimer(int(config.plugins.screensaver.delay.value)) return 0 def _onInactivityTimeout(self): self.show() def 
_onOk(self): pass def _loadNext(self): Log.i("Getting next photo") url = "https://source.unsplash.com/random/1920x1080" self._d = downloadPage(url, self.TEMPFILE).addCallbacks(self._onFileReady, self._failed) def _onFileReady(self, *args): self._picload.startDecode(self.TEMPFILE) def _failed(self, *args): Log.w(args) def _onPixmapReady(self, picInfo=None): Log.d(picInfo) if not self._isEnabled: self._reset() return self._picInfo = picInfo self._nextPixmap = self._picload.getData() if self._immediateShow: self._immediateShow = False self._onTimeout() def _restartTimer(self): self._timer.startLongTimer(int(config.plugins.screensaver.photo.retention.value)) def _showNext(self): if not self._isEnabled: self._reset() return if self._nextPixmap: self._isInitial = False self._pixmap.setPixmap(self._nextPixmap) self._nextPixmap = None self._restartTimer() return True return False def _onTimeout(self): if self._showNext(): self._loadNext() self._restartTimer() else: self._immediateShow = True
[ "enigma.eWindowAnimationManager.setAnimationSet", "Tools.Log.Log.i", "enigma.eActionMap.getInstance", "Components.config.config.plugins.screensaver.photo.speed.removeNotifier", "enigma.getDesktop", "enigma.eFloatAnimation.create", "Components.config.config.plugins.screensaver.photo.speed.addNotifier", "twisted.web.client.downloadPage", "Components.Pixmap.Pixmap.postWidgetCreate", "Screens.Screen.Screen.__init__", "Tools.Directories.fileExists", "Tools.Log.Log.w", "enigma.eTimer", "enigma.eWindowAnimationSet.create", "enigma.eLinearInterpolator.create", "enigma.ePicLoad", "Components.ActionMap.ActionMap", "Tools.Log.Log.d" ]
[((531, 570), 'Components.Pixmap.Pixmap.postWidgetCreate', 'Pixmap.postWidgetCreate', (['self', 'instance'], {}), '(self, instance)\n', (554, 570), False, 'from Components.Pixmap import Pixmap\n'), ((1155, 1185), 'Screens.Screen.Screen.__init__', 'Screen.__init__', (['self', 'session'], {}), '(self, session)\n', (1170, 1185), False, 'from Screens.Screen import Screen\n'), ((1206, 1282), 'Components.ActionMap.ActionMap', 'ActionMap', (["['OkCancelActions']", "{'ok': self._onOk, 'cancel': self.close}", '(-2)'], {}), "(['OkCancelActions'], {'ok': self._onOk, 'cancel': self.close}, -2)\n", (1215, 1282), False, 'from Components.ActionMap import ActionMap\n'), ((1610, 1620), 'enigma.ePicLoad', 'ePicLoad', ([], {}), '()\n', (1618, 1620), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((1821, 1829), 'enigma.eTimer', 'eTimer', ([], {}), '()\n', (1827, 1829), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((1923, 1931), 'enigma.eTimer', 'eTimer', ([], {}), '()\n', (1929, 1931), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((2185, 2281), 'Components.config.config.plugins.screensaver.photo.speed.addNotifier', 'config.plugins.screensaver.photo.speed.addNotifier', (['self._setupAnimation'], {'initial_call': '(False)'}), '(self._setupAnimation,\n initial_call=False)\n', (2235, 2281), False, 'from Components.config import config\n'), ((2436, 2511), 'Components.config.config.plugins.screensaver.photo.speed.removeNotifier', 'config.plugins.screensaver.photo.speed.removeNotifier', (['self._setupAnimation'], {}), '(self._setupAnimation)\n', (2489, 2511), False, 'from Components.config import config\n'), ((2560, 2588), 'enigma.eWindowAnimationSet.create', 'eWindowAnimationSet.create', ([], {}), '()\n', (2586, 2588), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((2729, 2757), 'enigma.eLinearInterpolator.create', 'eLinearInterpolator.create', ([], {}), '()\n', (2755, 2757), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((2914, 2962), 'enigma.eWindowAnimationManager.setAnimationSet', 'eWindowAnimationManager.setAnimationSet', (['animset'], {}), '(animset)\n', (2953, 2962), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((3020, 3045), 'Tools.Directories.fileExists', 'fileExists', (['self.TEMPFILE'], {}), '(self.TEMPFILE)\n', (3030, 3045), False, 'from Tools.Directories import fileExists\n'), ((3181, 3205), 'Tools.Log.Log.i', 'Log.i', (["('%s' % (enabled,))"], {}), "('%s' % (enabled,))\n", (3186, 3205), False, 'from Tools.Log import Log\n'), ((3783, 3810), 'Tools.Log.Log.i', 'Log.i', (['"""Getting next photo"""'], {}), "('Getting next photo')\n", (3788, 3810), False, 'from Tools.Log import Log\n'), ((4063, 4074), 'Tools.Log.Log.w', 'Log.w', (['args'], {}), '(args)\n', (4068, 4074), False, 'from Tools.Log import Log\n'), ((4119, 4133), 'Tools.Log.Log.d', 'Log.d', (['picInfo'], {}), '(picInfo)\n', (4124, 4133), 
False, 'from Tools.Log import Log\n'), ((2847, 2910), 'enigma.eFloatAnimation.create', 'eFloatAnimation.create', (['duration', '(0.0)', '(1.0)', '(False)', 'interpolator'], {}), '(duration, 0.0, 1.0, False, interpolator)\n', (2869, 2910), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((1325, 1349), 'enigma.eActionMap.getInstance', 'eActionMap.getInstance', ([], {}), '()\n', (1347, 1349), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((1525, 1538), 'enigma.getDesktop', 'getDesktop', (['(0)'], {}), '(0)\n', (1535, 1538), False, 'from enigma import ePicLoad, eTimer, eWindowAnimationSet, eFloatAnimation, eLinearInterpolator, eWindowAnimationManager, ePixmap, eActionMap, getDesktop\n'), ((3878, 3910), 'twisted.web.client.downloadPage', 'downloadPage', (['url', 'self.TEMPFILE'], {}), '(url, self.TEMPFILE)\n', (3890, 3910), False, 'from twisted.web.client import downloadPage\n')]
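The _loadNext/_onFileReady pair in the screensaver above is bound to twisted and enigma2; a plain-Python equivalent of just the fetch step, useful for checking the image source outside a set-top box, could look like this (requests stands in for downloadPage):

import requests

# download one random 1920x1080 image to the same temp path the plugin uses
resp = requests.get('https://source.unsplash.com/random/1920x1080', timeout=30)
resp.raise_for_status()
with open('/tmp/wallpaper', 'wb') as f:
    f.write(resp.content)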
from common import SUNNY, CLOUDY, RAINY, SNOWY """ MATRIX = " MINUSACHTNOLL" + \ "EINZWOIVIERDRÜ" + \ "ZWÖLFNÜN FÖFÜF" + \ "ESEBENSÄCHSEIS" + \ "DRISGIVIERTELF" + \ "ZWÄNZGZÄHKOMMA" + \ "VORAB ESCHALBI" + \ "ELFI RACHTIDRÜ" + \ " KEISÄCHSINÜNI" + \ "SEBNIG NZÄHNI " + \ "FÜFISEBEZWÖLFI" + \ "ZWOI VIERIGRAD" """ class CharacterMatrix: MATRIX = "BMINUSACHTNOLL" + \ "EINZWOIVIERDRÜ" + \ "ZWÖLFNÜNRFÖFÜF" + \ "ESEBENSÄCHSEIS" + \ "DRISGIVIERTELF" + \ "ZWÄNZGZÄHKOMMA" + \ "VORABUESCHALBI" + \ "ELFINRACHTIDRÜ" + \ "OKEISÄCHSINÜNI" + \ "SEBNIGMNZÄHNIU" + \ "FÜFISEBEZWÖLFI" + \ "ZWOIEVIERIGRAD" ROW_LEN = 14 @classmethod def findTexts(cls, texts_array): result_coordinates = [] pos_in_matrix = 0 for text in texts_array: found_in_one_row = False while not found_in_one_row: found_start = cls.MATRIX.find(text.upper(), pos_in_matrix) # print("found", text, "at", found_start) if found_start < 0: return [] found_end = found_start + len(text) if found_start % cls.ROW_LEN + len(text) <= cls.ROW_LEN: # result is on one line result_coordinates.extend([(p // cls.ROW_LEN, p % cls.ROW_LEN) for p in range(found_start, found_end)]) found_in_one_row = True pos_in_matrix = found_end return result_coordinates class TextFinder: PIXEL_NUMBERS=[[[0,1], [0,2], [1,0], [1,3], [2,0], [2,3], [3,0], [3,3], [4,0], [4,3], [5,1], [5,2]], [[3,0], [2,1], [1,2], [0,3], [1,3], [2,3], [3,3], [4,3], [5,3]], [[1,0], [0,1], [0,2], [1,3], [2,3], [3,2], [4,1], [5,0], [5,1], [5,2], [5,3]], [[0,0], [0,1], [0,2], [1,3], [2,1], [2,2], [3,3], [4,3], [5,0], [5,1], [5,2]], [[2,0], [1,1], [0,2], [1,2], [2,2], [3,0], [3,1], [3,2], [3,3], [4,2], [5,2]], [[0,0], [0,1], [0,2], [0,3], [1,0], [2,0], [2,1], [2,2], [3,3], [4,3], [5,0], [5,1], [5,2]], [[0,1], [0,2], [0,3], [1,0], [2,0], [3,0], [4,0], [2,1], [2,2], [3,3], [4,3], [5,1], [5,2]], [[0,0], [0,1], [0,2], [0,3], [1,3], [2,2], [3,2], [4,2], [5,2]], [[0,1], [0,2], [1,0], [1,3], [2,1], [2,2], [3,0], [3,3], [4,0], [4,3], [5,1], [5,2]], [[0,1], [0,2], [1,0], [1,3], [2,0], [2,3], [3,1], [3,2], [3,3], [4,3], [5,0], [5,1], [5,2]]] WEATHER = {SUNNY: [[0,5], [1,5], [2,5], [3,5], [4,5], [5,5]], CLOUDY: [[5,1], [6,1], [7,1], [8,1], [9,1]], RAINY: [[7,5], [8,5], [9,5], [10,5]], SNOWY: [[6,7], [7,7], [8,7], [9,7], [10,7], [11,7]]} PERCENT = [[4,10], [4,13], [5,12], [6,11], [7,10], [7,13]] LUM = [[9, 3], [10, 3], [11, 3], [10, 5], [11, 5], [11, 6], [10, 7], [11, 7], [10, 9], [11, 9], [10, 10], [10, 11], [11, 11], [10, 12], [10, 13], [11, 13]] MINUTES_TEXTS = [["ES", "ESCH"], ["EIS", "AB"], ["ZWOI", "AB"], ["DRÜ", "AB"], ["VIER", "AB"], ["FÜF", "AB"], ["SÄCHS", "AB"], ["SEBE", "AB"], ["ACHT", "AB"], ["NÜN", "AB"], ["ZÄH", "AB"], ["ELF", "AB"], ["ZWÖLF", "AB"], ["DRI", "ZÄH", "AB"], ["VIER", "ZÄH", "AB"], ["VIERTEL", "AB"], ["SÄCH", "ZÄH", "AB"], ["SEB", "ZÄH", "AB"], ["ACHT", "ZÄH", "AB"], ["NÜN", "ZÄH", "AB"], ["ZWÄNZG", "AB"], ["EIN", "E", "ZWÄNZG", "AB"], ["ZWOI", "E", "ZWÄNZG", "AB"], ["DRÜ", "E", "ZWÄNZG", "AB"], ["VIER", "E", "ZWÄNZG", "AB"], ["FÜF", "VOR", "HALBI"], ["VIER", "VOR", "HALBI"], ["DRÜ", "VOR", "HALBI"], ["ZWOI", "VOR", "HALBI"], ["EIS", "VOR", "HALBI"], ["HALBI"], ["EIS", "AB", "HALBI"], ["ZWOI", "AB", "HALBI"], ["DRÜ", "AB", "HALBI"], ["VIER", "AB", "HALBI"], ["FÜF", "AB", "HALBI"], ["SÄCHS", "AB", "HALBI"], ["SEBE", "AB", "HALBI"], ["ACHT", "AB", "HALBI"], ["NÜN", "AB", "HALBI"], ["ZWÄNZG", "VOR"], ["NÜN", "ZÄH", "VOR"], ["ACHT", "ZÄH", "VOR"], ["SEB", "ZÄH", "VOR"], ["SÄCH", "ZÄH", "VOR"], ["VIERTEL", "VOR"], ["VIER", "ZÄH", "VOR"], ["DRI", "ZÄH", "VOR"], ["ZWÖLF", "VOR"], ["ELF", "VOR"], 
["ZÄH", "VOR"], ["NÜN", "VOR"], ["ACHT", "VOR"], ["SEBE", "VOR"], ["SÄCHS", "VOR"], ["FÜF", "VOR"], ["VIER", "VOR"], ["DRÜ", "VOR"], ["ZWOI", "VOR"], ["EIS", "VOR"]] HOURS_TEXTS = ["ZWÖLFI", "EIS", "ZWOI", "DRÜ", "VIERI", "FÜFI", "SÄCHSI", "SEBNI", "ACHTI", "NÜNI", "ZÄHNI", "ELFI"] TEMP_BEFORE_DIGIT = [["NOLL"], ["EIS"], ["ZWOI"], ["DRÜ"], ["VIER"], ["FÜF"], ["SÄCHS"], ["SEBE"], ["ACHT"], ["NÜN"], ["ZÄH"], ["ELF"], ["ZWÖLF"], ["DRI", "ZÄH"], ["VIER", "ZÄH"], ["FÖF", "ZÄH"], ["SÄCH", "ZÄH"], ["SEBE", "ZÄH"], ["ACHT", "ZÄH"], ["NÜN", "ZÄH"], ["ZWÄNZG"], ["EIN", "E", "ZWÄNZG"], ["ZWOI", "E", "ZWÄNZG"], ["DRÜ", "E", "ZWÄNZG"], ["VIER", "E", "ZWÄNZG"], ["FÜF", "E", "ZWÄNZG"], ["SÄCHS", "E", "ZWÄNZG"], ["SEBEN", "E", "ZWÄNZG"], ["ACHT", "E", "ZWÄNZG"], ["NÜN", "E", "ZWÄNZG"], ["DRISG"], ["EIN", "E", "DRISG"], ["ZWOI", "E", "DRISG"], ["DRÜ", "E", "DRISG"], ["VIER", "E", "DRISG"], ["FÜF", "E", "DRISG"], ["SÄCHS", "E", "DRISG"], ["SEBEN", "E", "DRISG"], ["ACHT", "E", "DRISG"], ["NÜN", "E", "DRISG"]] TEMP_AFTER_DIGIT = [[], ["EIS"], ["ZWOI"], ["DRÜ"], ["VIER"], ["FÜF"], ["SÄCHS"], ["SEBE"], ["ACHT"], ["NÜN"]] MINUS = "MINUS" DOT = "KOMMA" DEGREE = "GRAD" def __init__(self): self._matrix = CharacterMatrix #@classmethod def _get_minutes_text(self, minutes): #try: return self.MINUTES_TEXTS[minutes] #except IndexError: # print(f"Illegal Minute Value: {minutes}") # return [] #@classmethod def _get_hours_text(self, hours): return [self.HOURS_TEXTS[hours % 12]] # zero == twelve, 13..24 == 1..12 def get_time_positions(self, hours, minutes): print("Searching", hours, ":", minutes) if minutes >= 25: # We say "Halbi <Next Hour>" and "zäh vor <Next Hour>" hours = hours + 1 return self._matrix.findTexts(self._get_minutes_text(minutes) + self._get_hours_text(hours)) def get_temperature_positions(self, temperature): print("Searching Temp.", temperature) sign = [self.MINUS] if temperature < 0 else [] before = int(abs(temperature)) after = int(round(abs(temperature) * 10, 0)) % 10 after_texts = [self.DOT] + self.TEMP_AFTER_DIGIT[after] if after != 0 else [] return self._matrix.findTexts(sign + self.TEMP_BEFORE_DIGIT[before] + after_texts + [self.DEGREE]) def get_humidity_positions(self, humidity): print("Searching Hum.", humidity) humidity_int = int(round(humidity,0)) ten_positions = self.PIXEL_NUMBERS[humidity_int // 10] one_positions = self.PIXEL_NUMBERS[humidity_int % 10] return [[p[0]+3, p[1]] for p in ten_positions] + [[p[0]+3, p[1]+5] for p in one_positions] + self.PERCENT def get_date_positions(self, day, month): print("Searching date", day, month) positions = [[p[0], p[1]+8] for p in self.PIXEL_NUMBERS[day % 10]] if day >= 10: positions += [[p[0], p[1]+3] for p in self.PIXEL_NUMBERS[day // 10]] positions += [[p[0]+6, p[1]+8] for p in self.PIXEL_NUMBERS[month % 10]] if month >= 10: positions += [[p[0]+6, p[1]+3] for p in self.PIXEL_NUMBERS[month // 10]] return positions + [[5, 13], [11, 13]] def get_luminance_position(self, luminance): print("Searching Lum.", luminance) luminance_int = min(int(round(luminance,0)), 999) if luminance_int >= 100: hun_positions = self.PIXEL_NUMBERS[luminance_int // 100] else: hun_positions = [] if luminance_int >= 10: ten_positions = self.PIXEL_NUMBERS[(luminance_int // 10) % 10] else: ten_positions = [] one_positions = self.PIXEL_NUMBERS[luminance_int % 10] return [[p[0]+2, p[1]] for p in hun_positions] + \ [[p[0]+2, p[1]+5] for p in ten_positions] + \ [[p[0]+2, p[1]+10] for p in one_positions] + \ self.LUM def get_weather_positions(self, weather_code): return 
self.WEATHER[weather_code] if __name__ == "__main__": import time def debugPrintPositions(positions): out = [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "] for r, c in positions: out[r] = out[r][:c] + CharacterMatrix.MATRIX[r*CharacterMatrix.ROW_LEN + c] + out[r][c+1:] print("-------------") print("\n".join(out)) start = time.time() finder = TextFinder() for h in range(13): for m in range(60): positions = finder.get_time_positions(h, m) # debugPrintPositions(positions) print(time.time() - start)
[ "time.time" ]
[((9473, 9484), 'time.time', 'time.time', ([], {}), '()\n', (9482, 9484), False, 'import time\n'), ((9682, 9693), 'time.time', 'time.time', ([], {}), '()\n', (9691, 9693), False, 'import time\n')]
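A quick interactive check of the word-clock lookup above; importing the module only needs the weather constants from common, and get_time_positions is pure Python:

finder = TextFinder()
# 10:25 reads "FÜF VOR HALBI ELFI"; the result is a list of (row, column)
# coordinates into CharacterMatrix.MATRIX
positions = finder.get_time_positions(10, 25)
print(positions)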
#!/usr/bin/env python3 from setuptools import setup setup( packages = ['ARIMA','LSTM'] )
[ "setuptools.setup" ]
[((52, 85), 'setuptools.setup', 'setup', ([], {'packages': "['ARIMA', 'LSTM']"}), "(packages=['ARIMA', 'LSTM'])\n", (57, 85), False, 'from setuptools import setup\n')]
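Given the minimal setup.py above, the ARIMA and LSTM package directories are expected to sit next to it; a typical editable install followed by an import check (assuming those directories exist and contain __init__.py files) would be:

# pip install -e .
import ARIMA
import LSTM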
# coding: utf-8

'''CLONE DATASETS – <NAME> – Esri Canada 2017
Creates new datasets (feature classes, tables, or relationship classes plus
domains) using existing datasets as templates'''

# All literal strings will be Unicode instead of bytes
from __future__ import unicode_literals

# Import modules
import arcpy

## IN-CODE PARAMETERS #################
params = {
    "datasets": [],
    "outGDB": r"",
    "overwrite": False
}
## END ################################


##MAIN CODE########################################################################################
def execute(datasetList, outGDB, overwrite):
    '''Run through and clone datasets'''
    arcpy.SetProgressor("step", None, 0, len(datasetList), 1)
    results = {"successes": 0, "failures": 0}

    # Loop through datasets
    relationshipClasses = []
    for dataset in datasetList:
        arcpy.SetProgressorLabel("Cloning {0}".format(dataset.split(".")[-1]))
        success = None
        try:
            desc = arcpy.Describe(dataset)

            # Feature classes
            if desc.dataType == "FeatureClass":
                success = cloneFeatureClass(desc, outGDB, overwrite)

            # Tables
            elif desc.dataType == "Table":
                success = cloneTables(desc, outGDB, overwrite)

            # Relationship Classes
            #(kept for last, ensuring related tables copied first)
            elif desc.dataType == "RelationshipClass":
                relationshipClasses.append(desc)

            # All other types are unsupported
            else:
                success = False
                arcpy.AddError("Dataset {0} is of an unsupported type ({1})".format(dataset, desc.dataType))
        except Exception:
            success = False
            arcpy.AddError("An error occurred while cloning {0}".format(dataset))

        if success is not None:
            arcpy.SetProgressorPosition()
            results["successes" if success else "failures"] += 1

    # Relationship Classes
    for desc in relationshipClasses:
        arcpy.SetProgressorLabel("Cloning {0}".format(desc.name.split(".")[-1]))
        success = None
        try:
            success = cloneRelationshipClass(desc, outGDB)
        except Exception:
            success = False
            arcpy.AddError("An error occurred while cloning the {0} relationship class".format(desc.name))
        arcpy.SetProgressorPosition()
        results["successes" if success else "failures"] += 1

    return results


##CLONING FUNCTIONS################################################################################
def cloneFeatureClass(desc, outGDB, overwrite):
    '''Clone a feature class (name, shape type, schema, and domains)'''
    success = True

    # Cannot clone FCs without a shape type
    if desc.shapeType == "Any":
        arcpy.AddError("Unable to clone {0} as the shape type is not defined".format(desc.name))
        success = False

    # Cannot clone non-simple feature classes
    elif desc.featureType != "Simple":
        arcpy.AddError("Unable to clone {0} as it is not a simple feature class".format(desc.name))
        success = False

    else:
        cloneDomains(desc, outGDB)

        # Translate properties to parameters
        name = desc.name.split(".")[-1]
        shape = desc.shapeType.upper()
        template = "{0}\\{1}".format(desc.path, desc.name)
        SAT = "SAME_AS_TEMPLATE"

        if existsOrReplace(outGDB, name, overwrite):
            arcpy.CreateFeatureclass_management(outGDB, name, shape, template, SAT, SAT, template)
            arcpy.AddMessage("Cloned Feature Class {0}".format(name))
    return success


def cloneTables(desc, outGDB, overwrite):
    '''Clone a GDB table (name, schema and domains)'''
    success = True
    cloneDomains(desc, outGDB)

    name = desc.name.split(".")[-1]
    template = "{0}\\{1}".format(desc.path, desc.name)

    if existsOrReplace(outGDB, name, overwrite):
        arcpy.CreateTable_management(outGDB, name, template)
        arcpy.AddMessage("Cloned Table {0}".format(name))
    return success


def cloneDomains(datasetDesc, outGDB):
    '''Clone all domains attached to a dataset and not yet present in output
GDB''' # Get all domains in dataset not yet in output GDB missingDomains = [] gdbDesc = arcpy.Describe(outGDB) for field in datasetDesc.fields: if field.domain and field.domain not in gdbDesc.domains and field.domain not in missingDomains: missingDomains.append(field.domain) # Add missing domains to output GDB if len(missingDomains) > 0: domainList = arcpy.da.ListDomains(datasetDesc.path) #pylint: disable=E1101 for domainName in missingDomains: domain = [e for e in domainList if e.name == domainName][0] # Translate properties to parameters name = domain.name description = domain.description fieldType = domain.type.upper() domainType = {"CodedValue": "CODED", "Range": "RANGE"}[domain.domainType] splitPolicy = {"DefaultValue": "DEFAULT", "Duplicate": "DUPLICATE", "GeometryRatio": "GEOMETRY_RATIO"}[domain.splitPolicy] mergePolicy = {"AreaWeighted": "AREA_WEIGHTED", "DefaultValue": "DEFAULT", "SumValues": "SUM_VALUES"}[domain.mergePolicy] # Create the domain arcpy.management.CreateDomain(outGDB, name, description, fieldType, domainType, splitPolicy, mergePolicy) # Add Values if domainType == "CODED": for key, value in domain.codedValues.iteritems(): arcpy.management.AddCodedValueToDomain(outGDB, name, key, value) else: arcpy.management.SetValueForRangeDomain(outGDB, name, domain.range[0], domain.range[1]) arcpy.AddMessage("Cloned Domain {0}".format(domainName)) return def cloneRelationshipClass(desc, outGDB): '''Clone a relationship class (all properties)''' success = True name = desc.name.split(".")[-1] # Derive origin/destination tables paths for the output GDB originTableName = desc.originClassNames[0].split(".")[-1] originTable = "{0}\\{1}".format(outGDB, originTableName) destinTableName = desc.destinationClassNames[0].split(".")[-1] destinTable = "{0}\\{1}".format(outGDB, destinTableName) # Ensure origin/destination tables exists in output GDB if not arcpy.Exists(originTable): arcpy.AddError("Can't clone {0} as the {1} origin table is missing".format(name, originTableName)) success = False elif not arcpy.Exists(destinTable): arcpy.AddError("Can't clone {0} as the {1} destination table is missing".format(name, destinTableName)) success = False else: # Translate properties to parameters path_name = "{0}\\{1}".format(outGDB, name) relType = "COMPOSITE" if desc.isComposite else "SIMPLE" fLabel = desc.forwardPathLabel bLabel = desc.backwardPathLabel msg_dir = {"None": "NONE", "Forward": "FORWARD", "Backward": "BACK", "Both": "BOTH"}[desc.notification] cardinality = {"OneToOne": "ONE_TO_ONE", "OneToMany": "ONE_TO_MANY", "ManyToMany": "MANY_TO_MANY"}[desc.cardinality] attributed = "ATTRIBUTED" if desc.isAttributed else "NONE" originKeyPrim = desc.originClassKeys[0][0] originKeyFore = desc.originClassKeys[1][0] if len(desc.destinationClassKeys) > 0: destinKeyPrim = desc.destinationClassKeys[0][0] destinKeyFore = desc.destinationClassKeys[1][0] else: destinKeyPrim = None destinKeyFore = None # If attributed, copy the intermediate table while creating rel. 
# class
        if desc.isAttributed:
            fields = [e.name for e in desc.fields]
            table = arcpy.CreateTable_management("in_memory", "relClass",
                                                 "{0}\\{1}".format(desc.path, desc.name))
            arcpy.TableToRelationshipClass_management(originTable, destinTable, path_name, relType,
                                                      fLabel, bLabel, msg_dir, cardinality, table,
                                                      fields, originKeyPrim, originKeyFore,
                                                      destinKeyPrim, destinKeyFore)
            arcpy.Delete_management(table)

        # If not attributed, create a simple relationship class
        else:
            arcpy.CreateRelationshipClass_management(originTable, destinTable, path_name, relType,
                                                     fLabel, bLabel, msg_dir, cardinality,
                                                     attributed, originKeyPrim, originKeyFore,
                                                     destinKeyPrim, destinKeyFore)

        # Check for relationship rules (which are not copied by this tool)
        if len(desc.relationshipRules) > 0:
            arcpy.AddWarning("The {0} relationship class was cloned, but relationship rules could not be copied over".format(name))
        else:
            arcpy.AddMessage("Cloned Relationship Class {0}".format(name))
    return success


##UTILITIES########################################################################################
def existsOrReplace(outGDB, name, overwrite):
    '''Check whether dataset exists, and delete if overwriting'''
    dataset = "{0}\\{1}".format(outGDB, name)
    continueCloning = True

    # Check for dataset existence
    if arcpy.Exists(dataset):
        # If overwriting enabled, delete it, otherwise stop cloning
        if overwrite:
            try:
                arcpy.Delete_management(dataset)
            except Exception:
                arcpy.AddError("Could not delete {0}. Make sure it isn't locked. Dataset not cloned.".format(dataset))
                continueCloning = False
        else:
            continueCloning = False
            arcpy.AddWarning("Could not clone {0} as it already exists in output geodatabase.".format(dataset))
    return continueCloning


##MAIN EXECUTION CODE##############################################################################
if __name__ == "__main__":
    #Execute when running outside Python Toolbox

    # Attempt to retrieve parameters from normal toolbox tool
    datasetsParam = arcpy.GetParameterAsText(0)
    outGDBParam = arcpy.GetParameterAsText(1)
    overwriteParam = arcpy.GetParameterAsText(2).lower() == "true"

    # Process the attributes (GetParameterAsText returns an empty string
    # when the parameter is absent, so test for truthiness, not None)
    if datasetsParam:
        datasetListParam = [x[1:-1] for x in datasetsParam.split(";")]
    # If none provided through parameters, fall-back to in-code parameters
    else:
        datasetListParam = params["datasets"]
        outGDBParam = params["outGDB"]
        overwriteParam = params["overwrite"]

    # Run the processing
    execute(datasetListParam, outGDBParam, overwriteParam)
[ "arcpy.CreateFeatureclass_management", "arcpy.CreateRelationshipClass_management", "arcpy.GetParameterAsText", "arcpy.TableToRelationshipClass_management", "arcpy.Describe", "arcpy.management.SetValueForRangeDomain", "arcpy.management.AddCodedValueToDomain", "arcpy.da.ListDomains", "arcpy.CreateTable_management", "arcpy.Exists", "arcpy.SetProgressorPosition", "arcpy.management.CreateDomain", "arcpy.Delete_management" ]
[((3862, 3884), 'arcpy.Describe', 'arcpy.Describe', (['outGDB'], {}), '(outGDB)\n', (3876, 3884), False, 'import arcpy\n'), ((8276, 8297), 'arcpy.Exists', 'arcpy.Exists', (['dataset'], {}), '(dataset)\n', (8288, 8297), False, 'import arcpy\n'), ((8999, 9026), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(0)'], {}), '(0)\n', (9023, 9026), False, 'import arcpy\n'), ((9042, 9069), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(1)'], {}), '(1)\n', (9066, 9069), False, 'import arcpy\n'), ((2106, 2135), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (2133, 2135), False, 'import arcpy\n'), ((3530, 3582), 'arcpy.CreateTable_management', 'arcpy.CreateTable_management', (['outGDB', 'name', 'template'], {}), '(outGDB, name, template)\n', (3558, 3582), False, 'import arcpy\n'), ((4141, 4179), 'arcpy.da.ListDomains', 'arcpy.da.ListDomains', (['datasetDesc.path'], {}), '(datasetDesc.path)\n', (4161, 4179), False, 'import arcpy\n'), ((5774, 5799), 'arcpy.Exists', 'arcpy.Exists', (['originTable'], {}), '(originTable)\n', (5786, 5799), False, 'import arcpy\n'), ((941, 964), 'arcpy.Describe', 'arcpy.Describe', (['dataset'], {}), '(dataset)\n', (955, 964), False, 'import arcpy\n'), ((1667, 1696), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (1694, 1696), False, 'import arcpy\n'), ((4800, 4909), 'arcpy.management.CreateDomain', 'arcpy.management.CreateDomain', (['outGDB', 'name', 'description', 'fieldType', 'domainType', 'splitPolicy', 'mergePolicy'], {}), '(outGDB, name, description, fieldType,\n domainType, splitPolicy, mergePolicy)\n', (4829, 4909), False, 'import arcpy\n'), ((5932, 5957), 'arcpy.Exists', 'arcpy.Exists', (['destinTable'], {}), '(destinTable)\n', (5944, 5957), False, 'import arcpy\n'), ((3086, 3176), 'arcpy.CreateFeatureclass_management', 'arcpy.CreateFeatureclass_management', (['outGDB', 'name', 'shape', 'template', 'SAT', 'SAT', 'template'], {}), '(outGDB, name, shape, template, SAT, SAT,\n template)\n', (3121, 3176), False, 'import arcpy\n'), ((5096, 5187), 'arcpy.management.SetValueForRangeDomain', 'arcpy.management.SetValueForRangeDomain', (['outGDB', 'name', 'domain.range[0]', 'domain.range[1]'], {}), '(outGDB, name, domain.range[0],\n domain.range[1])\n', (5135, 5187), False, 'import arcpy\n'), ((7131, 7339), 'arcpy.TableToRelationshipClass_management', 'arcpy.TableToRelationshipClass_management', (['originTable', 'destinTable', 'path_name', 'relType', 'fLabel', 'bLabel', 'msg_dir', 'cardinality', 'table', 'fields', 'originKeyPrim', 'originKeyFore', 'destinKeyPrim', 'destinKeyFore'], {}), '(originTable, destinTable,\n path_name, relType, fLabel, bLabel, msg_dir, cardinality, table, fields,\n originKeyPrim, originKeyFore, destinKeyPrim, destinKeyFore)\n', (7172, 7339), False, 'import arcpy\n'), ((7335, 7365), 'arcpy.Delete_management', 'arcpy.Delete_management', (['table'], {}), '(table)\n', (7358, 7365), False, 'import arcpy\n'), ((7438, 7642), 'arcpy.CreateRelationshipClass_management', 'arcpy.CreateRelationshipClass_management', (['originTable', 'destinTable', 'path_name', 'relType', 'fLabel', 'bLabel', 'msg_dir', 'cardinality', 'attributed', 'originKeyPrim', 'originKeyFore', 'destinKeyPrim', 'destinKeyFore'], {}), '(originTable, destinTable,\n path_name, relType, fLabel, bLabel, msg_dir, cardinality, attributed,\n originKeyPrim, originKeyFore, destinKeyPrim, destinKeyFore)\n', (7478, 7642), False, 'import arcpy\n'), ((8392, 8424), 'arcpy.Delete_management', 'arcpy.Delete_management', 
(['dataset'], {}), '(dataset)\n', (8415, 8424), False, 'import arcpy\n'), ((9088, 9115), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(2)'], {}), '(2)\n', (9112, 9115), False, 'import arcpy\n'), ((5014, 5078), 'arcpy.management.AddCodedValueToDomain', 'arcpy.management.AddCodedValueToDomain', (['outGDB', 'name', 'key', 'value'], {}), '(outGDB, name, key, value)\n', (5052, 5078), False, 'import arcpy\n')]
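A hypothetical invocation of the cloning tool above from a Python session where ArcGIS (and therefore arcpy) is available; both geodatabase paths are placeholders:

import clone_datasets  # assuming the module above is saved as clone_datasets.py

results = clone_datasets.execute(
    [r"C:\data\source.gdb\Parcels"],  # datasets to use as templates
    r"C:\data\target.gdb",            # output geodatabase
    overwrite=True)
print(results)  # e.g. {'successes': 1, 'failures': 0}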
import click


@click.group('delete')
@click.pass_obj
def delete(ctx):
    """Delete Azure DevOps resources"""


@delete.command('repo')
@click.option('-p', '--project', 'project', help='Project name or id the repository is in')
@click.argument('repository_names', nargs=-1, required=True)
@click.pass_obj
def delete_repo(ctx, project, repository_names):
    """Delete an Azure DevOps repository"""
    try:
        click.confirm('Are you sure you want to delete these repositories?', default=False, abort=True)
        _git_client = ctx.connection.clients.get_git_client()
        if not project:
            project = ctx._azure_devops_project
        for repo_name in repository_names:
            repository = _git_client.get_repository(repo_name, project)
            _git_client.delete_repository(repository.id, repository.project.name)
            click.echo('Deleted repository ' + repo_name + ' within project ' + project)
    except Exception as err:
        raise click.UsageError(err)


@delete.command('build-definition')
@click.option('-p', '--project', 'project', help='Project name or id the build definition is in')
@click.argument('build_definitions', nargs=-1, required=True)
@click.pass_obj
def delete_build_definition(ctx, project, build_definitions):
    """Delete an Azure DevOps build definition"""
    try:
        click.confirm('Are you sure you want to delete these build definitions?', default=False, abort=True)
        _build_client = ctx.connection.clients.get_build_client()
        if not project:
            project = ctx._azure_devops_project
        for build_definition in build_definitions:
            definition = _build_client.get_definitions(project, build_definition)
            _build_client.delete_definition(project, definition[0].id)
    except Exception as err:
        raise err
[ "click.argument", "click.confirm", "click.option", "click.echo", "click.UsageError", "click.group" ]
[((16, 37), 'click.group', 'click.group', (['"""delete"""'], {}), "('delete')\n", (27, 37), False, 'import click\n'), ((138, 230), 'click.option', 'click.option', (['"""-p"""', '"""--project"""', '"""project"""'], {'help': '"""Project name or id the repository in"""'}), "('-p', '--project', 'project', help=\n 'Project name or id the repository in')\n", (150, 230), False, 'import click\n'), ((241, 300), 'click.argument', 'click.argument', (['"""repository_names"""'], {'nargs': '(-1)', 'required': '(True)'}), "('repository_names', nargs=-1, required=True)\n", (255, 300), False, 'import click\n'), ((1069, 1167), 'click.option', 'click.option', (['"""-p"""', '"""--project"""', '"""project"""'], {'help': '"""Project name or id the build definition in"""'}), "('-p', '--project', 'project', help=\n 'Project name or id the build definition in')\n", (1081, 1167), False, 'import click\n'), ((1178, 1238), 'click.argument', 'click.argument', (['"""build_definitions"""'], {'nargs': '(-1)', 'required': '(True)'}), "('build_definitions', nargs=-1, required=True)\n", (1192, 1238), False, 'import click\n'), ((427, 526), 'click.confirm', 'click.confirm', (['"""Are you sure you want to delete these repositories?"""'], {'default': '(False)', 'abort': '(True)'}), "('Are you sure you want to delete these repositories?',\n default=False, abort=True)\n", (440, 526), False, 'import click\n'), ((1384, 1488), 'click.confirm', 'click.confirm', (['"""Are you sure you want to delete these build definitions?"""'], {'default': '(False)', 'abort': '(True)'}), "('Are you sure you want to delete these build definitions?',\n default=False, abort=True)\n", (1397, 1488), False, 'import click\n'), ((888, 964), 'click.echo', 'click.echo', (["('Deleted repository ' + repo_name + ' within project ' + project)"], {}), "('Deleted repository ' + repo_name + ' within project ' + project)\n", (898, 964), False, 'import click\n'), ((1008, 1029), 'click.UsageError', 'click.UsageError', (['err'], {}), '(err)\n', (1024, 1029), False, 'import click\n')]
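Both subcommands above prompt for confirmation and need a live Azure DevOps context, so the cheapest smoke test is click's CliRunner with --help, which returns before any client object is touched:

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(delete, ['repo', '--help'], obj=None)
print(result.output)  # usage text for the 'repo' subcommand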
import tensorflow.keras
import tensorflow.keras.models
import tensorflow.keras.layers
import tensorflow.keras.regularizers
import json


def model_skeleton_from_simple_config_file(config_filename):
    with open(config_filename) as f:
        configuration = json.load(f)
    return model_skeleton_from_simple_config(configuration)


def model_skeleton_from_simple_config(configuration):
    activation = configuration['activation']
    # default to no regularization; an l2 key takes precedence over an l1 key
    # when both are given
    regularizer = None
    if 'l1_regularization' in configuration:
        regularizer = tensorflow.keras.regularizers.l1(configuration['l1_regularization'])
    if 'l2_regularization' in configuration:
        regularizer = tensorflow.keras.regularizers.l2(configuration['l2_regularization'])
    network_topology = configuration['network_topology']
    model = tensorflow.keras.models.Sequential()
    model.add(tensorflow.keras.layers.Dense(network_topology[1],
                                            input_shape=(network_topology[0],),
                                            activation=activation,
                                            kernel_regularizer=regularizer))
    for layer in network_topology[2:-1]:
        model.add(tensorflow.keras.layers.Dense(layer, activation=activation,
                                                kernel_regularizer=regularizer))
    model.add(tensorflow.keras.layers.Dense(network_topology[-1]))
    return model
[ "json.load" ]
[((260, 272), 'json.load', 'json.load', (['f'], {}), '(f)\n', (269, 272), False, 'import json\n')]
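An example configuration accepted by model_skeleton_from_simple_config above; the topology lists the input width first and the output width last, and the regularization keys are optional (TensorFlow must be installed to run this):

config = {
    'activation': 'relu',
    'l2_regularization': 1e-4,
    'network_topology': [8, 64, 32, 1],
}
model = model_skeleton_from_simple_config(config)
model.summary()  # Dense(64) -> Dense(32) -> Dense(1)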
#!/usr/bin/python3 from db_query import DBQuery from trigger import Trigger import time import os service_interval=list(map(float,os.environ["SERVICE_INTERVAL"].split(","))) office=list(map(float, os.environ["OFFICE"].split(","))) dbhost=os.environ["DBHOST"] class ImbalanceTrigger(Trigger): def __init__(self): super(ImbalanceTrigger,self).__init__() self._dbs=DBQuery(index="sensors",office=office,host=dbhost) self._dba=DBQuery(index="algorithms",office=office,host=dbhost) def trigger(self): time.sleep(service_interval[2]) info=[] try: nsensors={ "total": self._dbs.count("sensor:*"), "streaming": self._dbs.count("status:'streaming'"), "idle": self._dbs.count("status:'idle'"), } nalgorithms={ "total": self._dba.count("name:*"), } except Exception as e: print("Exception: "+str(e), flush=True) return info if nsensors["total"]>nsensors["streaming"]+nsensors["idle"]: info.append({ "fatal": [{ "message": "Check sensor: #disconnected="+str(nsensors["total"]-nsensors["streaming"]-nsensors["idle"]), "args": nsensors, }] }) if nalgorithms["total"]>nsensors["streaming"]+nsensors["idle"]: info.append({ "warning": [{ "message": "Imbalance: #analytics="+str(nalgorithms["total"])+",#sensors="+str(nsensors["streaming"]+nsensors["idle"]), "args": { "nalgorithms": nalgorithms["total"], "nsensors": nsensors["streaming"]+nsensors["idle"], }, }], }) return info
[ "db_query.DBQuery", "time.sleep" ]
[((385, 437), 'db_query.DBQuery', 'DBQuery', ([], {'index': '"""sensors"""', 'office': 'office', 'host': 'dbhost'}), "(index='sensors', office=office, host=dbhost)\n", (392, 437), False, 'from db_query import DBQuery\n'), ((454, 509), 'db_query.DBQuery', 'DBQuery', ([], {'index': '"""algorithms"""', 'office': 'office', 'host': 'dbhost'}), "(index='algorithms', office=office, host=dbhost)\n", (461, 509), False, 'from db_query import DBQuery\n'), ((540, 571), 'time.sleep', 'time.sleep', (['service_interval[2]'], {}), '(service_interval[2])\n', (550, 571), False, 'import time\n')]
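The trigger module above reads its configuration from the environment at import time, so any local experiment has to set these variables first; the values below are placeholders, and a reachable database behind DBHOST is assumed:

import os

os.environ['SERVICE_INTERVAL'] = '10,10,10'  # the third value is the sleep used by trigger()
os.environ['OFFICE'] = '43.65,-79.38'
os.environ['DBHOST'] = 'localhost'

import imbalance_trigger  # assumed file name for the module above
print(imbalance_trigger.ImbalanceTrigger().trigger())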
from SimulatorExceptions import ValueOutOfRangeException class BoundedInteger: def __init__(self, minimum, maximum, default): assert (minimum <= maximum) assert (default >= minimum) assert (default <= maximum) self.minimum = minimum self.maximum = maximum self.default = default self.value = default def get(self): value = self.value assert (value >= self.minimum) assert (value <= self.maximum) return value def set(self, value): if ((value >= self.minimum) and (value <= self.maximum)): self.value = value else: raise ValueOutOfRangeException(value)
[ "SimulatorExceptions.ValueOutOfRangeException" ]
[((665, 696), 'SimulatorExceptions.ValueOutOfRangeException', 'ValueOutOfRangeException', (['value'], {}), '(value)\n', (689, 696), False, 'from SimulatorExceptions import ValueOutOfRangeException\n')]
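Typical use of the bounded value above; the only dependency is ValueOutOfRangeException from the accompanying SimulatorExceptions module:

counter = BoundedInteger(minimum=0, maximum=100, default=50)
print(counter.get())  # 50
counter.set(75)
print(counter.get())  # 75
counter.set(101)      # raises ValueOutOfRangeException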
# Copyright (c) 2017 Cloudbase Solutions Srl # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Command-line interface sub-commands related to endpoints. """ import json from cliff import command from cliff import lister from cliff import show from coriolisclient import exceptions from coriolisclient.cli import formatter class EndpointFormatter(formatter.EntityFormatter): columns = ("ID", "Name", "Type", "Description", ) def _get_sorted_list(self, obj_list): return sorted(obj_list, key=lambda o: o.created_at) def _get_formatted_data(self, obj): data = (obj.id, obj.name, obj.type, obj.description or "", ) return data class EndpointDetailFormatter(formatter.EntityFormatter): def __init__(self, show_instances_data=False): self.columns = [ "id", "name", "type", "description", "connection_info", "last_updated", ] def _get_formatted_data(self, obj): data = [obj.id, obj.name, obj.type, obj.description or "", obj.connection_info.to_dict(), obj.created_at, obj.updated_at, ] return data class CreateEndpoint(show.ShowOne): """Creates a new endpoint""" def get_parser(self, prog_name): parser = super(CreateEndpoint, self).get_parser(prog_name) parser.add_argument('--name', required=True, help='The endpoints\'s name') parser.add_argument('--provider', required=True, help='The provider, e.g.: ' 'vmware_vsphere, openstack') parser.add_argument('--description', help='A description for this endpoint') parser.add_argument('--connection', help='JSON encoded connection data') parser.add_argument('--connection-secret', help='The url of the Barbican secret containing ' 'the connection info') parser.add_argument('--skip-validation', dest='skip_validation', action='store_true', help='Whether to skip validating the connection ' 'when creating the endpoint.') return parser def take_action(self, args): if args.connection_secret and args.connection: raise exceptions.CoriolisException( "Please specify either --connection or " "--connection-secret, but not both") conn_info = None if args.connection_secret: conn_info = {"secret_ref": args.connection_secret} if args.connection: conn_info = json.loads(args.connection) endpoint = self.app.client_manager.coriolis.endpoints.create( args.name, args.provider, conn_info, args.description) if not args.skip_validation: valid, message = ( self.app.client_manager.coriolis.endpoints.validate_connection( endpoint.id)) if not valid: raise exceptions.EndpointConnectionValidationFailed(message) return EndpointDetailFormatter().get_formatted_entity(endpoint) class UpdateEndpoint(show.ShowOne): """Updates an endpoint""" def get_parser(self, prog_name): parser = super(UpdateEndpoint, self).get_parser(prog_name) parser.add_argument('id', help='The endpoint\'s id') parser.add_argument('--name', help='The endpoints\'s name') parser.add_argument('--description', help='A description for this endpoint') parser.add_argument('--connection', help='JSON encoded connection data') parser.add_argument('--connection-secret', help='The url of the Barbican secret containing ' 'the connection info') return parser def 
take_action(self, args): if args.connection_secret and args.connection: raise exceptions.CoriolisException( "Please specify either --connection or " "--connection-secret, but not both") conn_info = None if args.connection_secret: conn_info = {"secret_ref": args.connection_secret} if args.connection: conn_info = json.loads(args.connection) updated_values = {} if args.name is not None: updated_values["name"] = args.name if args.description is not None: updated_values["description"] = args.description if conn_info: updated_values["connection_info"] = conn_info endpoint = self.app.client_manager.coriolis.endpoints.update( args.id, updated_values) return EndpointDetailFormatter().get_formatted_entity(endpoint) class ShowEndpoint(show.ShowOne): """Show an endpoint""" def get_parser(self, prog_name): parser = super(ShowEndpoint, self).get_parser(prog_name) parser.add_argument('id', help='The endpoint\'s id') return parser def take_action(self, args): endpoint = self.app.client_manager.coriolis.endpoints.get(args.id) return EndpointDetailFormatter().get_formatted_entity(endpoint) class DeleteEndpoint(command.Command): """Delete an endpoint""" def get_parser(self, prog_name): parser = super(DeleteEndpoint, self).get_parser(prog_name) parser.add_argument('id', help='The endpoint\'s id') return parser def take_action(self, args): self.app.client_manager.coriolis.endpoints.delete(args.id) class ListEndpoint(lister.Lister): """List endpoints""" def get_parser(self, prog_name): parser = super(ListEndpoint, self).get_parser(prog_name) return parser def take_action(self, args): obj_list = self.app.client_manager.coriolis.endpoints.list() return EndpointFormatter().list_objects(obj_list) class EndpointValidateConnection(command.Command): """validates an edpoint's connection""" def get_parser(self, prog_name): parser = super(EndpointValidateConnection, self).get_parser(prog_name) parser.add_argument('id', help='The endpoint\'s id') return parser def take_action(self, args): endpoints = self.app.client_manager.coriolis.endpoints valid, message = endpoints.validate_connection(args.id) if not valid: raise exceptions.EndpointConnectionValidationFailed(message)
[ "coriolisclient.exceptions.CoriolisException", "coriolisclient.exceptions.EndpointConnectionValidationFailed", "json.loads" ]
[((3136, 3244), 'coriolisclient.exceptions.CoriolisException', 'exceptions.CoriolisException', (['"""Please specify either --connection or --connection-secret, but not both"""'], {}), "(\n 'Please specify either --connection or --connection-secret, but not both')\n", (3164, 3244), False, 'from coriolisclient import exceptions\n'), ((3452, 3479), 'json.loads', 'json.loads', (['args.connection'], {}), '(args.connection)\n', (3462, 3479), False, 'import json\n'), ((4873, 4981), 'coriolisclient.exceptions.CoriolisException', 'exceptions.CoriolisException', (['"""Please specify either --connection or --connection-secret, but not both"""'], {}), "(\n 'Please specify either --connection or --connection-secret, but not both')\n", (4901, 4981), False, 'from coriolisclient import exceptions\n'), ((5189, 5216), 'json.loads', 'json.loads', (['args.connection'], {}), '(args.connection)\n', (5199, 5216), False, 'import json\n'), ((7325, 7379), 'coriolisclient.exceptions.EndpointConnectionValidationFailed', 'exceptions.EndpointConnectionValidationFailed', (['message'], {}), '(message)\n', (7370, 7379), False, 'from coriolisclient import exceptions\n'), ((3885, 3939), 'coriolisclient.exceptions.EndpointConnectionValidationFailed', 'exceptions.EndpointConnectionValidationFailed', (['message'], {}), '(message)\n', (3930, 3939), False, 'from coriolisclient import exceptions\n')]
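The --connection flag above takes raw JSON that is parsed with json.loads before being sent to the API; a payload shaped like the following (the exact fields depend on the provider plugin, so this is illustrative only) passes the parsing step:

import json

conn_info = json.dumps({
    'auth_url': 'http://keystone.example.com:5000/v3',
    'username': 'admin',
    'password': 'secret',
})
print(conn_info)  # value to pass on the command line via --connection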
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'about.ui' # # Created by: PyQt5 UI code generator 5.15.4 # # WARNING: Any manual changes made to this file will be lost when pyuic5 is # run again. Do not edit this file unless you know what you are doing. from PyQt5 import QtCore, QtGui, QtWidgets class Ui_about_dialog(object): def setupUi(self, about_dialog): about_dialog.setObjectName("about_dialog") about_dialog.setWindowModality(QtCore.Qt.ApplicationModal) about_dialog.resize(400, 331) about_dialog.setFocusPolicy(QtCore.Qt.NoFocus) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap("ui/images/passkey.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) about_dialog.setWindowIcon(icon) about_dialog.setModal(True) self.verticalLayout = QtWidgets.QVBoxLayout(about_dialog) self.verticalLayout.setObjectName("verticalLayout") self.ablout_label = QtWidgets.QLabel(about_dialog) font = QtGui.QFont() font.setPointSize(15) self.ablout_label.setFont(font) self.ablout_label.setAlignment(QtCore.Qt.AlignCenter) self.ablout_label.setObjectName("ablout_label") self.verticalLayout.addWidget(self.ablout_label) self.about_field = QtWidgets.QTextBrowser(about_dialog) self.about_field.setFocusPolicy(QtCore.Qt.NoFocus) self.about_field.setAutoFillBackground(False) self.about_field.setStyleSheet("background: rgba(0, 255, 0, 0)") self.about_field.setFrameShape(QtWidgets.QFrame.NoFrame) self.about_field.setObjectName("about_field") self.verticalLayout.addWidget(self.about_field) self.cryptex_image = QtWidgets.QLabel(about_dialog) self.cryptex_image.setText("") self.cryptex_image.setPixmap(QtGui.QPixmap("ui/images/cryptex.png")) self.cryptex_image.setAlignment(QtCore.Qt.AlignCenter) self.cryptex_image.setObjectName("cryptex_image") self.verticalLayout.addWidget(self.cryptex_image) self.author_label = QtWidgets.QLabel(about_dialog) self.author_label.setLayoutDirection(QtCore.Qt.LeftToRight) self.author_label.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.author_label.setObjectName("author_label") self.verticalLayout.addWidget(self.author_label) self.horizontalLayout = QtWidgets.QHBoxLayout() self.horizontalLayout.setObjectName("horizontalLayout") spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem) self.close_button = QtWidgets.QPushButton(about_dialog) self.close_button.setMaximumSize(QtCore.QSize(75, 16777215)) self.close_button.setLayoutDirection(QtCore.Qt.LeftToRight) self.close_button.setAutoFillBackground(False) self.close_button.setObjectName("close_button") self.horizontalLayout.addWidget(self.close_button) self.verticalLayout.addLayout(self.horizontalLayout) self.retranslateUi(about_dialog) QtCore.QMetaObject.connectSlotsByName(about_dialog) def retranslateUi(self, about_dialog): _translate = QtCore.QCoreApplication.translate about_dialog.setWindowTitle(_translate("about_dialog", "About...")) self.ablout_label.setText(_translate("about_dialog", "CryptEX")) self.about_field.setPlaceholderText(_translate("about_dialog", "Blah-blah-blah~")) self.author_label.setText(_translate("about_dialog", "Author")) self.close_button.setText(_translate("about_dialog", "Close"))
[ "PyQt5.QtWidgets.QTextBrowser", "PyQt5.QtWidgets.QLabel", "PyQt5.QtGui.QIcon", "PyQt5.QtWidgets.QHBoxLayout", "PyQt5.QtWidgets.QPushButton", "PyQt5.QtCore.QSize", "PyQt5.QtGui.QFont", "PyQt5.QtWidgets.QSpacerItem", "PyQt5.QtWidgets.QVBoxLayout", "PyQt5.QtGui.QPixmap", "PyQt5.QtCore.QMetaObject.connectSlotsByName" ]
[((627, 640), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (638, 640), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((848, 883), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['about_dialog'], {}), '(about_dialog)\n', (869, 883), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((972, 1002), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['about_dialog'], {}), '(about_dialog)\n', (988, 1002), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1018, 1031), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1029, 1031), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1304, 1340), 'PyQt5.QtWidgets.QTextBrowser', 'QtWidgets.QTextBrowser', (['about_dialog'], {}), '(about_dialog)\n', (1326, 1340), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1731, 1761), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['about_dialog'], {}), '(about_dialog)\n', (1747, 1761), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2085, 2115), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['about_dialog'], {}), '(about_dialog)\n', (2101, 2115), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2437, 2460), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (2458, 2460), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2546, 2644), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(40)', '(20)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (2567, 2644), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2718, 2753), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['about_dialog'], {}), '(about_dialog)\n', (2739, 2753), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3172, 3223), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['about_dialog'], {}), '(about_dialog)\n', (3209, 3223), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((664, 702), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""ui/images/passkey.png"""'], {}), "('ui/images/passkey.png')\n", (677, 702), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1838, 1876), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""ui/images/cryptex.png"""'], {}), "('ui/images/cryptex.png')\n", (1851, 1876), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2795, 2821), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(75)', '(16777215)'], {}), '(75, 16777215)\n', (2807, 2821), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
from flask import request, jsonify
from server import app
from server.database import *
from server.utils import json_abort, list_exists, has_json
from server.models import *


# MARK: Task routes
@app.route('/api/lists/<string:list_id>/tasks', methods=['GET'])
@list_exists
def get_tasks(list_id):
    response = {}
    response['tasks'] = [t.__dict__ for t in db_get_tasks_for_list(list_id)]
    return jsonify(response)


# CREATE ROUTE
@app.route('/api/lists/<string:list_id>/tasks', methods=['POST'])
@list_exists
@has_json
def create_task(list_id):
    ''' creates a new task for a list '''
    data = request.get_json()
    title = data.get('title', None)
    if title == None:
        json_abort(400, 'Invalid request parameters')
    newTask = db_create_task(list_id, title)
    if newTask == None:
        json_abort(400, 'Could not create task')
    return jsonify(newTask.__dict__)


# DESTROY ROUTE
@app.route('/api/lists/<string:list_id>/tasks/<string:task_id>', methods=['DELETE'])
@list_exists
def remove_task(list_id, task_id):
    db_delete_task(task_id)
    return jsonify({'result': True})


# UPDATE ROUTE
@app.route('/api/lists/<string:list_id>/tasks/<string:task_id>', methods=['PUT'])
@list_exists
@has_json
def update_task(list_id, task_id):
    data = request.get_json()
    task = db_get_task(list_id, task_id)
    if task == None:
        json_abort(404, 'Task not found')

    title = data.get('title', None)
    status = data.get('status', None)
    description = data.get('description', None)
    due = data.get('due', None)
    revision = data.get('revision', None)
    if title == None or status == None or description == None or \
            due == None or revision == None:
        json_abort(400, 'Invalid request parameters')

    # Only update the task when there is no newer version on the server
    if revision < task.revision:
        json_abort(409, 'Newer version of task available')

    task.title = title
    task.status = status
    task.description = description
    task.due = due
    task.revision = task.revision + 1
    task = db_update_task(list_id, task)
    if task == None:
        json_abort(500, 'Could not update task')
    return jsonify(task.__dict__)
[ "server.app.route", "flask.jsonify", "flask.request.get_json", "server.utils.json_abort" ]
[((198, 261), 'server.app.route', 'app.route', (['"""/api/lists/<string:list_id>/tasks"""'], {'methods': "['GET']"}), "('/api/lists/<string:list_id>/tasks', methods=['GET'])\n", (207, 261), False, 'from server import app\n'), ((440, 504), 'server.app.route', 'app.route', (['"""/api/lists/<string:list_id>/tasks"""'], {'methods': "['POST']"}), "('/api/lists/<string:list_id>/tasks', methods=['POST'])\n", (449, 504), False, 'from server import app\n'), ((915, 1003), 'server.app.route', 'app.route', (['"""/api/lists/<string:list_id>/tasks/<string:task_id>"""'], {'methods': "['DELETE']"}), "('/api/lists/<string:list_id>/tasks/<string:task_id>', methods=[\n 'DELETE'])\n", (924, 1003), False, 'from server import app\n'), ((1129, 1214), 'server.app.route', 'app.route', (['"""/api/lists/<string:list_id>/tasks/<string:task_id>"""'], {'methods': "['PUT']"}), "('/api/lists/<string:list_id>/tasks/<string:task_id>', methods=['PUT']\n )\n", (1138, 1214), False, 'from server import app\n'), ((405, 422), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (412, 422), False, 'from flask import request, jsonify\n'), ((607, 625), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (623, 625), False, 'from flask import request, jsonify\n'), ((871, 896), 'flask.jsonify', 'jsonify', (['newTask.__dict__'], {}), '(newTask.__dict__)\n', (878, 896), False, 'from flask import request, jsonify\n'), ((1086, 1111), 'flask.jsonify', 'jsonify', (["{'result': True}"], {}), "({'result': True})\n", (1093, 1111), False, 'from flask import request, jsonify\n'), ((1279, 1297), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1295, 1297), False, 'from flask import request, jsonify\n'), ((2186, 2208), 'flask.jsonify', 'jsonify', (['task.__dict__'], {}), '(task.__dict__)\n', (2193, 2208), False, 'from flask import request, jsonify\n'), ((693, 738), 'server.utils.json_abort', 'json_abort', (['(400)', '"""Invalid request parameters"""'], {}), "(400, 'Invalid request parameters')\n", (703, 738), False, 'from server.utils import json_abort, list_exists, has_json\n'), ((818, 858), 'server.utils.json_abort', 'json_abort', (['(400)', '"""Could not create task"""'], {}), "(400, 'Could not create task')\n", (828, 858), False, 'from server.utils import json_abort, list_exists, has_json\n'), ((1369, 1402), 'server.utils.json_abort', 'json_abort', (['(404)', '"""Task not found"""'], {}), "(404, 'Task not found')\n", (1379, 1402), False, 'from server.utils import json_abort, list_exists, has_json\n'), ((1713, 1758), 'server.utils.json_abort', 'json_abort', (['(400)', '"""Invalid request parameters"""'], {}), "(400, 'Invalid request parameters')\n", (1723, 1758), False, 'from server.utils import json_abort, list_exists, has_json\n'), ((1870, 1920), 'server.utils.json_abort', 'json_abort', (['(409)', '"""Newer version of task available"""'], {}), "(409, 'Newer version of task available')\n", (1880, 1920), False, 'from server.utils import json_abort, list_exists, has_json\n'), ((2133, 2173), 'server.utils.json_abort', 'json_abort', (['(500)', '"""Could not update task"""'], {}), "(500, 'Could not update task')\n", (2143, 2173), False, 'from server.utils import json_abort, list_exists, has_json\n')]
from config import *
from flask import Flask, request

app = Flask(__name__)


@app.route('/ping', methods=['GET', 'POST'])
def index():
    if request.method == 'GET':
        print('received a get request')
    else:
        # request.json is a property, not a method; calling it raises TypeError
        print(request.json)
    return b'success', 200
[ "flask.request.json", "flask.Flask" ]
[((62, 77), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (67, 77), False, 'from flask import Flask, request\n'), ((234, 248), 'flask.request.json', 'request.json', ([], {}), '()\n', (246, 248), False, 'from flask import Flask, request\n')]
from directory_components.mixins import CountryDisplayMixin, GA360Mixin from django.views.generic import TemplateView from django.views.generic.edit import FormView from django.urls import reverse_lazy from django.conf import settings from contact import forms from core.mixins import LocalisedURLsMixin, InvestEnableTranslationsMixin class ActiveViewNameMixin: def get_context_data(self, *args, **kwargs): return super().get_context_data( active_view_name=self.active_view_name, *args, **kwargs ) class ContactFormView( ActiveViewNameMixin, InvestEnableTranslationsMixin, LocalisedURLsMixin, CountryDisplayMixin, GA360Mixin, FormView, ): success_url = reverse_lazy('contact-success') template_name = 'contact/contact.html' form_class = forms.ContactForm active_view_name = 'contact' available_languages = settings.LANGUAGES def __init__(self): super().__init__() self.set_ga360_payload( page_id='InvestContactForm', business_unit='Invest', site_section='Contact' ) def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['utm_data'] = self.request.utm kwargs['submission_url'] = self.request.path return kwargs def form_valid(self, form): form.save() return super().form_valid(form) class ContactFormSuccessView( ActiveViewNameMixin, LocalisedURLsMixin, InvestEnableTranslationsMixin, CountryDisplayMixin, GA360Mixin, TemplateView, ): template_name = 'contact/contact_form_success_page.html' active_view_name = 'contact' available_languages = settings.LANGUAGES def __init__(self): super().__init__() self.set_ga360_payload( page_id='InvestContactFormSuccess', business_unit='Invest', site_section='Contact', site_subsection='ContactSuccess' )
[ "django.urls.reverse_lazy" ]
[((743, 774), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""contact-success"""'], {}), "('contact-success')\n", (755, 774), False, 'from django.urls import reverse_lazy\n')]
import tkinter as tk chlamydia_info = """ General Facts A common sexually transmitted infection caused by the bacteria Chlamydia trachomatis. The infection is transmitted through vaginal, oral, and anal unprotected sex. It can be passed on from an infected mother to the child during childbirth. Chlamydia eye infection can occur through genital contact with the eyes. Risk Factors Having multiple partners. Unprotected sex. History of STI. Symptoms Usually, no symptoms during the initial stages of infection. Women Vaginal discharge and itching Bleeding between periods Painful sexual intercourse Men Pain and swelling in testicles Discharge from penis Diagnosis Urine culture for men. Swab test of cervix for women. Treatment Antibiotics to kill bacteria such as Azithromycin or Doxycycline. Specialist to consult OB GYN Urologist If left untreated: Pelvic inflammatory disease (PID), infertility and ectopic pregnancy in women. """ gonorrhea_info = """ Gonorrhea General Facts A sexually transmitted bacterial infection caused by the bacteria Neisseria gonorrhea. It often affects the urethra, rectum, or throat. Symptoms: Men: Frequent urination Puss-like discharge from the penis or pain in the testicle Persistent sore throat Women: Discharge from the vagina Pain or burning sensation while urinating Heavier periods or spotting Pain during sexual intercourse Sharp pain in the lower abdomen Sore throat, fever Causes: It is caused by the bacterium Neisseria gonorrhea. Affects the mouth, throat, eyes, rectum and female reproductive tract. It spreads through unprotected sex. Can be passed from an infected mother to her baby during delivery. Prevention: Stay away from unprotected sex Always use a condom Get tested if suspicious of infection Complications: Pelvic inflammatory disease in women (PID) Blockage or scarring of fallopian tubes Scarring in the urethra in men Ectopic pregnancy Painful abscess may develop in the interior of the penis. Diagnosis: Swab test: a sample is collected either from the genitals or mouth and tested for the presence of bacteria. Treatment: Antibiotics to kill the bacteria such as Ceftriaxone and Azithromycin Self-care Strategies: Avoid sexual intercourse during the treatment period. 
Specialist to Consult:
Gynecologist
Urologist
"""

hpv_info = """
ENTER TEXT HERE
"""

syphilis_info = """
ENTER TEXT HERE
"""

trichomoniasis_info = """
ENTER TEXT HERE
"""

genitalHerpes_info = """
ENTER TEXT HERE
"""

#ACTIONS
def click():
    print("Chlamydia")
    window = tk.Toplevel(main_window)
    window.title("Chlamydia Information")
    info = tk.Label(window, text=chlamydia_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

def click1():
    print("Gonorrhea")
    window = tk.Toplevel(main_window)
    window.title("Gonorrhea Information")
    info = tk.Label(window, text=gonorrhea_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

def click2():
    print("HPV")
    window = tk.Toplevel(main_window)
    window.title("HPV Information")
    info = tk.Label(window, text=hpv_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

def click3():
    print("Syphilis")
    window = tk.Toplevel(main_window)
    window.title("Syphilis Information")
    info = tk.Label(window, text=syphilis_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

def click4():
    print("Trichomoniasis")
    window = tk.Toplevel(main_window)
    window.title("Trichomoniasis Information")
    info = tk.Label(window, text=trichomoniasis_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

def click5():
    print("<NAME>")
    window = tk.Toplevel(main_window)
    window.title("Genital Herpes Information")
    info = tk.Label(window, text=genitalHerpes_info, foreground="black")
    info.config(font=('Georgia', 12))
    info.grid(row=0, column=0, columnspan=3)

#SETUP
def click_setup():
    button1 = tk.Button(text='Chlamydia')
    button1.config(command=click)  # performs call back of function
    button1.config(height=5, width=25)
    button1.config(font=('Comic Sans', 15, 'bold'))
    button1.config(bg='orange')
    button1.config(fg='white')
    button1.grid(row=0, column=0)

def click1_setup():
    button2 = tk.Button(text='Gonorrhea')
    button2.config(command=click1)  # performs call back of function
    button2.config(height=5, width=25)
    button2.config(font=('Comic Sans', 15, 'bold'))
    button2.config(bg='#DE1F27')
    button2.config(fg='white')
    button2.grid(row=1, column=0)

def click2_setup():
    button3 = tk.Button(text='Human Papillomavirus')
    button3.config(command=click2)  # performs call back of function
    button3.config(height=5, width=25)
    button3.config(font=('Comic Sans', 15, 'bold'))
    button3.config(bg='#1FDED3')
    button3.config(fg='white')
    button3.grid(row=2, column=0)

def click3_setup():
    button4 = tk.Button(text='Syphilis')
    button4.config(command=click3)  # performs call back of function
    button4.config(height=5, width=25)
    button4.config(font=('Comic Sans', 15, 'bold'))
    button4.config(bg='#B6DE1F')
    button4.config(fg='white')
    button4.grid(row=3, column=0)

def click4_setup():
    print("Trichomoniasis")
    button5 = tk.Button(text='Trichomoniasis')
    button5.config(command=click4)  # performs call back of function
    button5.config(height=5, width=25)
    button5.config(font=('Comic Sans', 15, 'bold'))
    button5.config(bg='#1FDED6')
    button5.config(fg='white')
    button5.grid(row=4, column=0)

def click5_setup():
    print("<NAME>")
    # distinct widget, callback and grid row, so this button does not
    # overlap the Trichomoniasis button in row 4
    button6 = tk.Button(text='<NAME>')
    button6.config(command=click5)  # performs call back of function
    button6.config(height=5, width=25)
    button6.config(font=('Comic Sans', 15, 'bold'))
    button6.config(bg='#DE1FBC')
    button6.config(fg='white')
    button6.grid(row=5, column=0)

main_window = tk.Tk()
main_window.title("STI Educational Health App") click_setup() click1_setup() click2_setup() click3_setup() click4_setup() click5_setup() main_window.mainloop()
[ "tkinter.Button", "tkinter.Toplevel", "tkinter.Label", "tkinter.Tk" ]
[((6539, 6546), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (6544, 6546), True, 'import tkinter as tk\n'), ((2941, 2965), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (2952, 2965), True, 'import tkinter as tk\n'), ((3019, 3076), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'chlamydia_info', 'foreground': '"""black"""'}), "(window, text=chlamydia_info, foreground='black')\n", (3027, 3076), True, 'import tkinter as tk\n'), ((3212, 3236), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (3223, 3236), True, 'import tkinter as tk\n'), ((3290, 3347), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'gonorrhea_info', 'foreground': '"""black"""'}), "(window, text=gonorrhea_info, foreground='black')\n", (3298, 3347), True, 'import tkinter as tk\n'), ((3476, 3500), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (3487, 3500), True, 'import tkinter as tk\n'), ((3548, 3599), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'hpv_info', 'foreground': '"""black"""'}), "(window, text=hpv_info, foreground='black')\n", (3556, 3599), True, 'import tkinter as tk\n'), ((3733, 3757), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (3744, 3757), True, 'import tkinter as tk\n'), ((3810, 3866), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'syphilis_info', 'foreground': '"""black"""'}), "(window, text=syphilis_info, foreground='black')\n", (3818, 3866), True, 'import tkinter as tk\n'), ((4006, 4030), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (4017, 4030), True, 'import tkinter as tk\n'), ((4089, 4151), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'trichomoniasis_info', 'foreground': '"""black"""'}), "(window, text=trichomoniasis_info, foreground='black')\n", (4097, 4151), True, 'import tkinter as tk\n'), ((4283, 4307), 'tkinter.Toplevel', 'tk.Toplevel', (['main_window'], {}), '(main_window)\n', (4294, 4307), True, 'import tkinter as tk\n'), ((4366, 4427), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'genitalHerpes_info', 'foreground': '"""black"""'}), "(window, text=genitalHerpes_info, foreground='black')\n", (4374, 4427), True, 'import tkinter as tk\n'), ((4553, 4580), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Chlamydia"""'}), "(text='Chlamydia')\n", (4562, 4580), True, 'import tkinter as tk\n'), ((4877, 4904), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Gonorrhea"""'}), "(text='Gonorrhea')\n", (4886, 4904), True, 'import tkinter as tk\n'), ((5201, 5239), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Human Papillomavirus"""'}), "(text='Human Papillomavirus')\n", (5210, 5239), True, 'import tkinter as tk\n'), ((5536, 5562), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Syphilis"""'}), "(text='Syphilis')\n", (5545, 5562), True, 'import tkinter as tk\n'), ((5888, 5920), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Trichomoniasis"""'}), "(text='Trichomoniasis')\n", (5897, 5920), True, 'import tkinter as tk\n'), ((6237, 6261), 'tkinter.Button', 'tk.Button', ([], {'text': '"""<NAME>"""'}), "(text='<NAME>')\n", (6246, 6261), True, 'import tkinter as tk\n')]
from meta_requests.utils.request import * from tests.utils import get_response_with_text def test_response_detect_blocking_messages(): blocked_message: str = "You got blocked" text = get_response_with_text(blocked_message) assert response_detect_blocking_messages(text, [blocked_message])
[ "tests.utils.get_response_with_text" ]
[((193, 232), 'tests.utils.get_response_with_text', 'get_response_with_text', (['blocked_message'], {}), '(blocked_message)\n', (215, 232), False, 'from tests.utils import get_response_with_text\n')]
import pandas as pd import numpy as np from GPfates import GPfates etpm = pd.read_table('tapio_tcell_tpm.txt', index_col=0) etpm = etpm[(etpm > 2).sum(1) > 2] logexp = np.log10(etpm + 1) tcells = pd.read_csv('tcells_rebuttal.csv', index_col=0) m = GPfates.GPfates(tcells, logexp) # m.dimensionality_reduction() # # m.store_dr() # # m.infer_pseudotime(priors=m.s.day_int, s_columns=['bgplvm_0', 'bgplvm_1']) # m.infer_pseudotime(priors=m.s.day_int, s_columns=['bgplvm_2d_0', 'bgplvm_2d_1']) # GPfates.plt.scatter(m.s.scaled_pseudotime, m.s.pseudotime); GPfates.plt.show() # m.model_fates(X=['bgplvm_1']) m.model_fates(X=['bgplvm_2d_1']) # p = m.identify_bifurcation_point() # print(p) # m.calculate_bifurcation_statistics() # m.fate_model.plot(); GPfates.plt.show() m.make_fates_viz(['bgplvm_2d_0', 'bgplvm_2d_1']) m.fates_viz.plot(); GPfates.plt.show()
[ "pandas.read_csv", "GPfates.GPfates.GPfates", "GPfates.GPfates.plt.show", "pandas.read_table", "numpy.log10" ]
[((76, 125), 'pandas.read_table', 'pd.read_table', (['"""tapio_tcell_tpm.txt"""'], {'index_col': '(0)'}), "('tapio_tcell_tpm.txt', index_col=0)\n", (89, 125), True, 'import pandas as pd\n'), ((170, 188), 'numpy.log10', 'np.log10', (['(etpm + 1)'], {}), '(etpm + 1)\n', (178, 188), True, 'import numpy as np\n'), ((199, 246), 'pandas.read_csv', 'pd.read_csv', (['"""tcells_rebuttal.csv"""'], {'index_col': '(0)'}), "('tcells_rebuttal.csv', index_col=0)\n", (210, 246), True, 'import pandas as pd\n'), ((252, 283), 'GPfates.GPfates.GPfates', 'GPfates.GPfates', (['tcells', 'logexp'], {}), '(tcells, logexp)\n', (267, 283), False, 'from GPfates import GPfates\n'), ((849, 867), 'GPfates.GPfates.plt.show', 'GPfates.plt.show', ([], {}), '()\n', (865, 867), False, 'from GPfates import GPfates\n')]
# $Id$
#
# Copyright (C) 2003 <NAME> and Rational Discovery LLC
# All Rights Reserved
#
""" functionality to allow adjusting composite model contents

"""
from __future__ import print_function

import copy

import numpy


def BalanceComposite(model, set1, set2, weight, targetSize, names1=None, names2=None):
  """ adjusts the contents of the composite model so as to maximize
  the weighted classification accuracy across the two data sets.
  The resulting composite model, with _targetSize_ models, is returned.

  **Notes**:

    - if _names1_ and _names2_ are not provided, _set1_ and _set2_ should
      have the same ordering of columns and _model_ should already have
      had _SetInputOrder()_ called.

  """
  #
  # adjust the weights to be proportional to the size of the two data sets
  # The normalization we do here assures that a perfect model contributes
  # a score of S1+S2 to the final
  #
  S1 = len(set1)
  S2 = len(set2)
  weight1 = float(S1 + S2) * (1 - weight) / S1
  weight2 = float(S1 + S2) * weight / S2
  # print('\t:::', S1, S2, weight1, weight2)
  # print('nModels:', len(model))

  # start with a copy so that we get all the additional schnick-schnack
  res = copy.copy(model)
  res.modelList = []
  res.errList = []
  res.countList = []
  res.quantizationRequirements = []

  startSize = len(model)
  # numpy.float was removed from modern NumPy; use the concrete dtype instead
  scores = numpy.zeros(startSize, numpy.float64)
  actQuantBounds = model.GetActivityQuantBounds()
  if names1 is not None:
    model.SetInputOrder(names1)
  for pt in set1:
    pred, conf = model.ClassifyExample(pt)
    if actQuantBounds:
      ans = model.QuantizeActivity(pt)[-1]
    else:
      ans = pt[-1]
    votes = model.GetVoteDetails()
    for i in range(startSize):
      if votes[i] == ans:
        scores[i] += weight1

  if names2 is not None:
    model.SetInputOrder(names2)
  for pt in set2:
    pred, conf = model.ClassifyExample(pt)
    if actQuantBounds:
      ans = model.QuantizeActivity(pt)[-1]
    else:
      ans = pt[-1]
    votes = model.GetVoteDetails()
    for i in range(startSize):
      if votes[i] == ans:
        scores[i] += weight2

  # normalize the scores
  nPts = S1 + S2
  scores /= nPts

  # sort them:
  bestOrder = list(numpy.argsort(scores))
  bestOrder.reverse()
  print('\tTAKE:', bestOrder[:targetSize])

  # and now take the best set:
  for i in range(targetSize):
    idx = bestOrder[i]
    mdl = model.modelList[idx]
    res.modelList.append(mdl)
    res.errList.append(1. - scores[idx])
    res.countList.append(1)
    # FIX: this should probably be more general:
    res.quantizationRequirements.append(0)

  return res
[ "numpy.zeros", "numpy.argsort", "copy.copy" ]
[((1217, 1233), 'copy.copy', 'copy.copy', (['model'], {}), '(model)\n', (1226, 1233), False, 'import copy\n'), ((1368, 1403), 'numpy.zeros', 'numpy.zeros', (['startSize', 'numpy.float'], {}), '(startSize, numpy.float)\n', (1379, 1403), False, 'import numpy\n'), ((2215, 2236), 'numpy.argsort', 'numpy.argsort', (['scores'], {}), '(scores)\n', (2228, 2236), False, 'import numpy\n')]
#!/usr/bin/python
"""
This application simply reads a config file created by the HackPSUconfig
module and prints the output to the console
"""
import HackPSUconfig as config

configFile = input('Please enter the name of a configuration file: ')
# avoid shadowing the built-in dict type
properties = config.getProperties(configFile)
print('Dictionary:')
for key in properties:
    print(key + ':' + properties[key])
print('Dictionary complete')
[ "HackPSUconfig.getProperties" ]
[((253, 285), 'HackPSUconfig.getProperties', 'config.getProperties', (['configFile'], {}), '(configFile)\n', (273, 285), True, 'import HackPSUconfig as config\n')]
from django.http import HttpResponse
from django.shortcuts import render
from django.db.models import Q

from applications.movies.models import *
import applications.utils as utils
import visualizer.settings as settings

import json
import os


def index(request):
    return render(request, 'movies/index.html', {})


def top_k_neighbours(request):
    return render(request, 'movies/top_k_neighbours.html', {})


def feature_visualization(request):
    return render(request, 'movies/embeddings.html', {})


def ajax_get_stats(request):
    movie_ids = None
    genres = []
    if 'movie_ids[]' in request.GET:
        movie_ids = request.GET.getlist('movie_ids[]')

    if movie_ids != None:
        queries = [Q(movie__id=movie_id) for movie_id in movie_ids]
        query = queries.pop()
        for item in queries:
            query |= item
        genres = list(MovieToGenre.objects.filter(query).values_list('genre__name', flat=True))

    return HttpResponse(json.dumps({
        'genres': genres
    }), content_type="application/json", status=200)


def ajax_get_embeddings(request):
    syear = None
    eyear = None
    categories = None
    andopr = None
    feature = None
    if 'syear' in request.GET:
        syear = request.GET['syear']
    if 'eyear' in request.GET:
        eyear = request.GET['eyear']
    if 'category[]' in request.GET:
        categories = request.GET.getlist('category[]')
    if 'andopr' in request.GET:
        andopr = request.GET['andopr']
        andopr = True if (andopr == 'true') else False
    if 'feature' in request.GET:
        feature = request.GET['feature']

    movies = get_movies_range(syear=syear, eyear=eyear, categories=categories, andopr=andopr)

    if len(movies) > 0:
        # X_t, Y_t, I_t = utils.preprocess_data(settings.FEATURES[feature], settings.DATASET, movies)
        # plot = utils.visualize_features(X_t, Y_t, I_t, min(settings.E_PCA, X_t.shape[0]))
        X_cor, Y_cor, I_t = utils.get_plot_values(settings.DATASET, movies, feature)
        for i in range(len(movies)):
            movies[i]['x'] = X_cor[i]
            movies[i]['y'] = Y_cor[i]
        plot = utils.bokeh_plot(I_t, X_cor, Y_cor)
    else:
        return HttpResponse(json.dumps({
            'error': 'No movies in this category'
        }), content_type="application/json", status=200)

    return HttpResponse(json.dumps({
        'plot': plot,
        'movies': movies
    }), content_type="application/json", status=200)


def ajax_get_movies(request):
    syear = None
    eyear = None
    categories = None
    andopr = None
    if 'syear' in request.GET:
        syear = request.GET['syear']
    if 'eyear' in request.GET:
        eyear = request.GET['eyear']
    if 'category[]' in request.GET:
        categories = request.GET.getlist('category[]')
    if 'andopr' in request.GET:
        andopr = request.GET['andopr']
        andopr = True if (andopr == 'true') else False

    movies = get_movies_range(syear=syear, eyear=eyear, categories=categories, andopr=andopr)

    return HttpResponse(json.dumps({
        'total': len(movies),
        'movies': movies
    }), content_type="application/json", status=200)


def ajax_get_genres(request):
    genres = get_genres()
    return HttpResponse(json.dumps({
        'total': len(genres),
        'genres': genres
    }), content_type="application/json", status=200)


def ajax_get_features(request):
    features = get_features()
    return HttpResponse(json.dumps({
        'total': len(features),
        'features': features
    }), content_type="application/json", status=200)


def ajax_get_top_neighbours(request):
    image = None
    k = None
    feature = None
    if 'image' in request.GET:
        image = request.GET['image']
    if 'k' in request.GET:
        k = int(request.GET['k'])
    if 'feature' in request.GET:
        feature = request.GET['feature']

    if feature not in settings.FEATURES:
        raise Exception('path for this feature not specified')

    movies = get_movies()
    movies = utils.get_top_neighbours(settings.FEATURES[feature], image, movies, k)

    for movie in
movies: movie['genres'] = get_genres_by_movie(movie) return HttpResponse(json.dumps({ 'total': len(movies), 'movies': movies }), content_type="application/json", status=200) def get_genres_by_movie(movie): movie = Movie.objects.filter(id=movie['id']).first() items = MovieToGenre.objects.filter(movie=movie) genres = [] for item in items: genres.append(item.genre.serialize()['name']) return genres def get_movies(year=None, category=None): items = MovieToGenre.objects.select_related('movie', 'genre') if year != None: items = items.filter(movie__year=year) if category != None: items = items.filter(genre__name=category) movies = [item.movie.serialize() for item in items] movies = utils.filter_unique(movies, 'image') return movies def get_genres(): items = Genre.objects.all() genres = [item.serialize() for item in items] return genres def get_features(): items = Feature.objects.all() features = [item.serialize() for item in items] return features def get_movies_range(syear, eyear, categories, andopr): items = MovieToGenre.objects.select_related('movie', 'genre') if syear != None: items = items.filter(movie__year__gte=syear) if eyear != None: items = items.filter(movie__year__lte=eyear) if categories != None and andopr != None: if andopr == True: movies = set(items.values_list('movie', flat=True)) for category in categories: movies = movies.intersection(set(items.filter(genre__name=category).values_list('movie', flat=True))) movies = list(movies) if len(movies) > 0: queries = [Q(id=movie_id) for movie_id in movies] query = queries.pop() for item in queries: query |= item items = Movie.objects.filter(query) movies = [item.serialize() for item in items] return movies return [] else: queries = [Q(genre__name=category) for category in categories] query = queries.pop() for item in queries: query |= item items = items.filter(query) movies = [item.movie.serialize() for item in items] movies = utils.filter_unique(movies, 'image') return movies
[ "applications.utils.filter_unique", "django.db.models.Q", "json.dumps", "applications.utils.get_plot_values", "applications.utils.get_top_neighbours", "django.shortcuts.render", "applications.utils.bokeh_plot" ]
[((271, 311), 'django.shortcuts.render', 'render', (['request', '"""movies/index.html"""', '{}'], {}), "(request, 'movies/index.html', {})\n", (277, 311), False, 'from django.shortcuts import render\n'), ((352, 403), 'django.shortcuts.render', 'render', (['request', '"""movies/top_k_neighbours.html"""', '{}'], {}), "(request, 'movies/top_k_neighbours.html', {})\n", (358, 403), False, 'from django.shortcuts import render\n'), ((449, 494), 'django.shortcuts.render', 'render', (['request', '"""movies/embeddings.html"""', '{}'], {}), "(request, 'movies/embeddings.html', {})\n", (455, 494), False, 'from django.shortcuts import render\n'), ((3632, 3702), 'applications.utils.get_top_neighbours', 'utils.get_top_neighbours', (['settings.FEATURES[feature]', 'image', 'movies', 'k'], {}), '(settings.FEATURES[feature], image, movies, k)\n', (3656, 3702), True, 'import applications.utils as utils\n'), ((4430, 4466), 'applications.utils.filter_unique', 'utils.filter_unique', (['movies', '"""image"""'], {}), "(movies, 'image')\n", (4449, 4466), True, 'import applications.utils as utils\n'), ((5746, 5782), 'applications.utils.filter_unique', 'utils.filter_unique', (['movies', '"""image"""'], {}), "(movies, 'image')\n", (5765, 5782), True, 'import applications.utils as utils\n'), ((901, 931), 'json.dumps', 'json.dumps', (["{'genres': genres}"], {}), "({'genres': genres})\n", (911, 931), False, 'import json\n'), ((1792, 1848), 'applications.utils.get_plot_values', 'utils.get_plot_values', (['settings.DATASET', 'movies', 'feature'], {}), '(settings.DATASET, movies, feature)\n', (1813, 1848), True, 'import applications.utils as utils\n'), ((1948, 1983), 'applications.utils.bokeh_plot', 'utils.bokeh_plot', (['I_t', 'X_cor', 'Y_cor'], {}), '(I_t, X_cor, Y_cor)\n', (1964, 1983), True, 'import applications.utils as utils\n'), ((2141, 2185), 'json.dumps', 'json.dumps', (["{'plot': plot, 'movies': movies}"], {}), "({'plot': plot, 'movies': movies})\n", (2151, 2185), False, 'import json\n'), ((676, 697), 'django.db.models.Q', 'Q', ([], {'movie__id': 'movie_id'}), '(movie__id=movie_id)\n', (677, 697), False, 'from django.db.models import Q\n'), ((2013, 2064), 'json.dumps', 'json.dumps', (["{'error': 'No movies in this category'}"], {}), "({'error': 'No movies in this category'})\n", (2023, 2064), False, 'import json\n'), ((5533, 5556), 'django.db.models.Q', 'Q', ([], {'genre__name': 'category'}), '(genre__name=category)\n', (5534, 5556), False, 'from django.db.models import Q\n'), ((5281, 5295), 'django.db.models.Q', 'Q', ([], {'id': 'movie_id'}), '(id=movie_id)\n', (5282, 5295), False, 'from django.db.models import Q\n')]
''' Reference: <NAME>, et al., "IRGAN: A Minimax Game for Unifying Generative and Discriminative Information Retrieval Models." SIGIR 2017. @author: <NAME> ''' from neurec.model.AbstractRecommender import AbstractRecommender import tensorflow as tf import pickle import numpy as np from concurrent.futures import ThreadPoolExecutor from neurec.util import data_gen, reader from neurec.evaluation import Evaluate from neurec.util.properties import Properties class GEN(object): def __init__(self, itemNum, userNum, emb_dim, lamda, param=None, initdelta=0.05, learning_rate=0.05): self.itemNum = itemNum self.userNum = userNum self.emb_dim = emb_dim self.lamda = lamda # regularization parameters self.param = param self.initdelta = initdelta self.learning_rate = learning_rate self.g_params = [] with tf.variable_scope('generator'): if self.param == None: self.user_embeddings = tf.Variable( tf.random_uniform([self.userNum, self.emb_dim], minval=-self.initdelta, maxval=self.initdelta, dtype=tf.float32)) self.item_embeddings = tf.Variable( tf.random_uniform([self.itemNum, self.emb_dim], minval=-self.initdelta, maxval=self.initdelta, dtype=tf.float32)) self.item_bias = tf.Variable(tf.zeros([self.itemNum])) else: self.user_embeddings = tf.Variable(self.param[0]) self.item_embeddings = tf.Variable(self.param[1]) self.item_bias = tf.Variable(param[2]) self.g_params = [self.user_embeddings, self.item_embeddings, self.item_bias] self.u = tf.placeholder(tf.int32) self.i = tf.placeholder(tf.int32) self.reward = tf.placeholder(tf.float32) self.u_embedding = tf.nn.embedding_lookup(self.user_embeddings, self.u) self.i_embedding = tf.nn.embedding_lookup(self.item_embeddings, self.i) self.i_bias = tf.gather(self.item_bias, self.i) self.all_logits = tf.reduce_sum(tf.multiply(self.u_embedding, self.item_embeddings), 1) + self.item_bias self.i_prob = tf.gather( tf.reshape(tf.nn.softmax(tf.reshape(self.all_logits, [1, -1])), [-1]), self.i) self.gan_loss = -tf.reduce_mean(tf.log(self.i_prob) * self.reward) + self.lamda * ( tf.nn.l2_loss(self.u_embedding) + tf.nn.l2_loss(self.i_embedding) + tf.nn.l2_loss(self.i_bias)) g_opt = tf.train.GradientDescentOptimizer(self.learning_rate) self.gan_updates = g_opt.minimize(self.gan_loss, var_list=self.g_params) # for test stage, self.u: [batch_size] self.all_rating = tf.matmul(self.u_embedding, self.item_embeddings, transpose_a=False, transpose_b=True) + self.item_bias class DIS(object): def __init__(self, itemNum, userNum, emb_dim, lamda, param=None, initdelta=0.05, learning_rate=0.05): self.itemNum = itemNum self.userNum = userNum self.emb_dim = emb_dim self.lamda = lamda # regularization parameters self.param = param self.initdelta = initdelta self.learning_rate = learning_rate self.d_params = [] with tf.variable_scope('discriminator'): if self.param == None: self.user_embeddings = tf.Variable( tf.random_uniform([self.userNum, self.emb_dim], minval=-self.initdelta, maxval=self.initdelta, dtype=tf.float32)) self.item_embeddings = tf.Variable( tf.random_uniform([self.itemNum, self.emb_dim], minval=-self.initdelta, maxval=self.initdelta, dtype=tf.float32)) self.item_bias = tf.Variable(tf.zeros([self.itemNum])) else: self.user_embeddings = tf.Variable(self.param[0]) self.item_embeddings = tf.Variable(self.param[1]) self.item_bias = tf.Variable(self.param[2]) self.d_params = [self.user_embeddings, self.item_embeddings, self.item_bias] # placeholder definition self.u = tf.placeholder(tf.int32) self.i = tf.placeholder(tf.int32) self.label = tf.placeholder(tf.float32) self.u_embedding = 
tf.nn.embedding_lookup(self.user_embeddings, self.u)
        self.i_embedding = tf.nn.embedding_lookup(self.item_embeddings, self.i)
        self.i_bias = tf.gather(self.item_bias, self.i)

        self.pre_logits = tf.reduce_sum(tf.multiply(self.u_embedding, self.i_embedding), 1) + self.i_bias
        self.pre_loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=self.label,
                                                                logits=self.pre_logits) + self.lamda * (
            tf.nn.l2_loss(self.u_embedding) + tf.nn.l2_loss(self.i_embedding) + tf.nn.l2_loss(self.i_bias)
        )

        d_opt = tf.train.GradientDescentOptimizer(self.learning_rate)
        self.d_updates = d_opt.minimize(self.pre_loss, var_list=self.d_params)

        self.reward_logits = tf.reduce_sum(tf.multiply(self.u_embedding, self.i_embedding), 1) + self.i_bias
        self.reward = 2 * (tf.sigmoid(self.reward_logits) - 0.5)

        # for test stage, self.u: [batch_size]
        self.all_rating = tf.matmul(self.u_embedding, self.item_embeddings, transpose_a=False,
                                   transpose_b=True) + self.item_bias
        self.all_logits = tf.reduce_sum(tf.multiply(self.u_embedding, self.item_embeddings), 1) + self.item_bias
        self.NLL = -tf.reduce_mean(tf.log(
            tf.gather(tf.reshape(tf.nn.softmax(tf.reshape(self.all_logits, [1, -1])), [-1]), self.i))
        )
        # for dns sample
        self.dns_rating = tf.reduce_sum(tf.multiply(self.u_embedding, self.item_embeddings), 1) + self.item_bias


class IRGAN(AbstractRecommender):
    properties = [
        "factors_num",
        "lr",
        "g_reg",
        "d_reg",
        "epochs",
        "g_epoch",
        "d_epoch",
        "batch_size",
        "d_tau",
        "topk",
        "pretrain_file"
    ]

    def __init__(self, sess, dataset):
        super(IRGAN, self).__init__()
        # keep the session and dataset around; train_model and predict use them later
        self.sess = sess
        self.dataset = dataset
        train_matrix = dataset.trainMatrix.tocsr()
        self.num_users, self.num_items = train_matrix.shape
        self.factors_num = self.conf["factors_num"]
        self.lr = self.conf["lr"]
        self.g_reg = self.conf["g_reg"]
        self.d_reg = self.conf["d_reg"]
        self.epochs = self.conf["epochs"]
        self.g_epoch = self.conf["g_epoch"]
        self.d_epoch = self.conf["d_epoch"]
        self.batch_size = self.conf["batch_size"]
        self.d_tau = self.conf["d_tau"]
        self.topK = self.conf["topk"]
        self.pretrain_file = self.conf["pretrain_file"]
        self.loss_function = "None"

        idx_value_dict = {}
        for idx, value in enumerate(train_matrix):
            if any(value.indices):
                idx_value_dict[idx] = value.indices
        self.user_pos_train = idx_value_dict

        self.num_users, self.num_items = dataset.num_users, dataset.num_items
        self.all_items = np.arange(self.num_items)

    def build_graph(self):
        file = reader.lines(self.pretrain_file)
        pretrain_params = pickle.load(file, encoding="latin")
        self.generator = GEN(self.num_items, self.num_users, self.factors_num,
                             self.g_reg, param=pretrain_params, learning_rate=self.lr)
        self.discriminator = DIS(self.num_items, self.num_users, self.factors_num,
                                 self.d_reg, param=None, learning_rate=self.lr)

    def get_train_data(self):
        users_list, items_list, labels_list = [], [], []
        train_users = list(self.user_pos_train.keys())
        with ThreadPoolExecutor() as executor:
            data = executor.map(self.get_train_data_one_user, train_users)
        data = list(data)
        for users, items, labels in data:
            users_list.extend(users)
            items_list.extend(items)
            labels_list.extend(labels)
        return users_list, items_list, labels_list

    def get_train_data_one_user(self, user):
        user_list, items_list, label_list = [], [], []
        pos = self.user_pos_train[user]
        rating = self.sess.run(self.generator.all_rating, {self.generator.u: [user]})
        rating = np.reshape(rating, [-1])
        rating = np.array(rating) / self.d_tau  # Temperature
        exp_rating = np.exp(rating)
        prob = exp_rating / np.sum(exp_rating)
        neg = np.random.choice(self.all_items, size=len(pos), p=prob)
        for i, j in zip(pos, neg):
user_list.append(user) items_list.append(i) label_list.append(1.0) user_list.append(user) items_list.append(j) label_list.append(0.0) return (user_list, items_list, label_list) def train_model(self): for _ in range(self.epochs): for _ in range(self.d_epoch): users_list, items_list, labels_list = self.get_train_data() self.training_discriminator(users_list, items_list, labels_list) for _ in range(self.g_epoch): self.training_generator() Evaluate.test_model(self, self.dataset) def training_discriminator(self, user, item, label): num_training_instances = len(user) for num_batch in np.arange(int(num_training_instances / self.batch_size)): bat_users, bat_items, bat_lables = \ data_gen._get_pointwise_batch_data(user, item, label, num_batch, self.batch_size) feed = {self.discriminator.u: bat_users, self.discriminator.i: bat_items, self.discriminator.label: bat_lables} self.sess.run(self.discriminator.d_updates, feed_dict=feed) def training_generator(self): for user, pos in self.user_pos_train.items(): sample_lambda = 0.2 rating = self.sess.run(self.generator.all_logits, {self.generator.u: user}) exp_rating = np.exp(rating) prob = exp_rating / np.sum(exp_rating) # prob is generator distribution p_\theta pn = (1 - sample_lambda) * prob pn[pos] += sample_lambda * 1.0 / len(pos) # Now, pn is the Pn in importance sampling, prob is generator distribution p_\theta sample = np.random.choice(self.all_items, 2 * len(pos), p=pn) ########################################################################### # Get reward and adapt it with importance sampling ########################################################################### feed = {self.discriminator.u: user, self.discriminator.i: sample} reward = self.sess.run(self.discriminator.reward, feed_dict=feed) reward = reward * prob[sample] / pn[sample] ########################################################################### # Update G ########################################################################### feed = {self.generator.u: user, self.generator.i: sample, self.generator.reward: reward} self.sess.run(self.generator.gan_updates, feed_dict=feed) def predict(self, user_id, items): user_embedding, item_embedding, item_bias = self.sess.run(self.generator.g_params) u_embedding = user_embedding[user_id] item_embedding = item_embedding[items] item_bias = item_bias[items] ratings = np.matmul(u_embedding, item_embedding.T) + item_bias return ratings
[ "numpy.sum", "tensorflow.reshape", "tensorflow.nn.sigmoid_cross_entropy_with_logits", "tensorflow.multiply", "tensorflow.matmul", "pickle.load", "numpy.arange", "numpy.exp", "tensorflow.Variable", "neurec.util.data_gen._get_pointwise_batch_data", "tensorflow.gather", "tensorflow.variable_scope", "tensorflow.placeholder", "numpy.reshape", "neurec.evaluation.Evaluate.test_model", "concurrent.futures.ThreadPoolExecutor", "tensorflow.nn.embedding_lookup", "tensorflow.log", "tensorflow.train.GradientDescentOptimizer", "tensorflow.random_uniform", "neurec.util.reader.lines", "tensorflow.zeros", "numpy.array", "numpy.matmul", "tensorflow.nn.l2_loss", "tensorflow.sigmoid" ]
[((1778, 1802), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (1792, 1802), True, 'import tensorflow as tf\n'), ((1820, 1844), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (1834, 1844), True, 'import tensorflow as tf\n'), ((1867, 1893), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (1881, 1893), True, 'import tensorflow as tf\n'), ((1922, 1974), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.user_embeddings', 'self.u'], {}), '(self.user_embeddings, self.u)\n', (1944, 1974), True, 'import tensorflow as tf\n'), ((2002, 2054), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.item_embeddings', 'self.i'], {}), '(self.item_embeddings, self.i)\n', (2024, 2054), True, 'import tensorflow as tf\n'), ((2077, 2110), 'tensorflow.gather', 'tf.gather', (['self.item_bias', 'self.i'], {}), '(self.item_bias, self.i)\n', (2086, 2110), True, 'import tensorflow as tf\n'), ((2579, 2632), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['self.learning_rate'], {}), '(self.learning_rate)\n', (2612, 2632), True, 'import tensorflow as tf\n'), ((4287, 4311), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (4301, 4311), True, 'import tensorflow as tf\n'), ((4329, 4353), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {}), '(tf.int32)\n', (4343, 4353), True, 'import tensorflow as tf\n'), ((4375, 4401), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (4389, 4401), True, 'import tensorflow as tf\n'), ((4430, 4482), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.user_embeddings', 'self.u'], {}), '(self.user_embeddings, self.u)\n', (4452, 4482), True, 'import tensorflow as tf\n'), ((4510, 4562), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.item_embeddings', 'self.i'], {}), '(self.item_embeddings, self.i)\n', (4532, 4562), True, 'import tensorflow as tf\n'), ((4585, 4618), 'tensorflow.gather', 'tf.gather', (['self.item_bias', 'self.i'], {}), '(self.item_bias, self.i)\n', (4594, 4618), True, 'import tensorflow as tf\n'), ((5048, 5101), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['self.learning_rate'], {}), '(self.learning_rate)\n', (5081, 5101), True, 'import tensorflow as tf\n'), ((7348, 7373), 'numpy.arange', 'np.arange', (['self.num_items'], {}), '(self.num_items)\n', (7357, 7373), True, 'import numpy as np\n'), ((7417, 7449), 'neurec.util.reader.lines', 'reader.lines', (['self.pretrain_file'], {}), '(self.pretrain_file)\n', (7429, 7449), False, 'from neurec.util import data_gen, reader\n'), ((7476, 7511), 'pickle.load', 'pickle.load', (['file'], {'encoding': '"""latin"""'}), "(file, encoding='latin')\n", (7487, 7511), False, 'import pickle\n'), ((8584, 8608), 'numpy.reshape', 'np.reshape', (['rating', '[-1]'], {}), '(rating, [-1])\n', (8594, 8608), True, 'import numpy as np\n'), ((8692, 8706), 'numpy.exp', 'np.exp', (['rating'], {}), '(rating)\n', (8698, 8706), True, 'import numpy as np\n'), ((879, 909), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""generator"""'], {}), "('generator')\n", (896, 909), True, 'import tensorflow as tf\n'), ((2788, 2878), 'tensorflow.matmul', 'tf.matmul', (['self.u_embedding', 'self.item_embeddings'], {'transpose_a': '(False)', 'transpose_b': '(True)'}), '(self.u_embedding, self.item_embeddings, transpose_a=False,\n transpose_b=True)\n', (2797, 
2878), True, 'import tensorflow as tf\n'), ((3350, 3384), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""discriminator"""'], {}), "('discriminator')\n", (3367, 3384), True, 'import tensorflow as tf\n'), ((4750, 4837), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'labels': 'self.label', 'logits': 'self.pre_logits'}), '(labels=self.label, logits=self.\n pre_logits)\n', (4789, 4837), True, 'import tensorflow as tf\n'), ((5473, 5563), 'tensorflow.matmul', 'tf.matmul', (['self.u_embedding', 'self.item_embeddings'], {'transpose_a': '(False)', 'transpose_b': '(True)'}), '(self.u_embedding, self.item_embeddings, transpose_a=False,\n transpose_b=True)\n', (5482, 5563), True, 'import tensorflow as tf\n'), ((7997, 8017), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (8015, 8017), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((8626, 8642), 'numpy.array', 'np.array', (['rating'], {}), '(rating)\n', (8634, 8642), True, 'import numpy as np\n'), ((8735, 8753), 'numpy.sum', 'np.sum', (['exp_rating'], {}), '(exp_rating)\n', (8741, 8753), True, 'import numpy as np\n'), ((9770, 9856), 'neurec.util.data_gen._get_pointwise_batch_data', 'data_gen._get_pointwise_batch_data', (['user', 'item', 'label', 'num_batch', 'self.batch_size'], {}), '(user, item, label, num_batch, self.\n batch_size)\n', (9804, 9856), False, 'from neurec.util import data_gen, reader\n'), ((10323, 10337), 'numpy.exp', 'np.exp', (['rating'], {}), '(rating)\n', (10329, 10337), True, 'import numpy as np\n'), ((11803, 11843), 'numpy.matmul', 'np.matmul', (['u_embedding', 'item_embedding.T'], {}), '(u_embedding, item_embedding.T)\n', (11812, 11843), True, 'import numpy as np\n'), ((1522, 1548), 'tensorflow.Variable', 'tf.Variable', (['self.param[0]'], {}), '(self.param[0])\n', (1533, 1548), True, 'import tensorflow as tf\n'), ((1588, 1614), 'tensorflow.Variable', 'tf.Variable', (['self.param[1]'], {}), '(self.param[1])\n', (1599, 1614), True, 'import tensorflow as tf\n'), ((1648, 1669), 'tensorflow.Variable', 'tf.Variable', (['param[2]'], {}), '(param[2])\n', (1659, 1669), True, 'import tensorflow as tf\n'), ((2152, 2203), 'tensorflow.multiply', 'tf.multiply', (['self.u_embedding', 'self.item_embeddings'], {}), '(self.u_embedding, self.item_embeddings)\n', (2163, 2203), True, 'import tensorflow as tf\n'), ((3997, 4023), 'tensorflow.Variable', 'tf.Variable', (['self.param[0]'], {}), '(self.param[0])\n', (4008, 4023), True, 'import tensorflow as tf\n'), ((4063, 4089), 'tensorflow.Variable', 'tf.Variable', (['self.param[1]'], {}), '(self.param[1])\n', (4074, 4089), True, 'import tensorflow as tf\n'), ((4123, 4149), 'tensorflow.Variable', 'tf.Variable', (['self.param[2]'], {}), '(self.param[2])\n', (4134, 4149), True, 'import tensorflow as tf\n'), ((4660, 4707), 'tensorflow.multiply', 'tf.multiply', (['self.u_embedding', 'self.i_embedding'], {}), '(self.u_embedding, self.i_embedding)\n', (4671, 4707), True, 'import tensorflow as tf\n'), ((5225, 5272), 'tensorflow.multiply', 'tf.multiply', (['self.u_embedding', 'self.i_embedding'], {}), '(self.u_embedding, self.i_embedding)\n', (5236, 5272), True, 'import tensorflow as tf\n'), ((5361, 5391), 'tensorflow.sigmoid', 'tf.sigmoid', (['self.reward_logits'], {}), '(self.reward_logits)\n', (5371, 5391), True, 'import tensorflow as tf\n'), ((5654, 5705), 'tensorflow.multiply', 'tf.multiply', (['self.u_embedding', 'self.item_embeddings'], {}), '(self.u_embedding, self.item_embeddings)\n', (5665, 
5705), True, 'import tensorflow as tf\n'), ((5947, 5998), 'tensorflow.multiply', 'tf.multiply', (['self.u_embedding', 'self.item_embeddings'], {}), '(self.u_embedding, self.item_embeddings)\n', (5958, 5998), True, 'import tensorflow as tf\n'), ((9481, 9520), 'neurec.evaluation.Evaluate.test_model', 'Evaluate.test_model', (['self', 'self.dataset'], {}), '(self, self.dataset)\n', (9500, 9520), False, 'from neurec.evaluation import Evaluate\n'), ((10370, 10388), 'numpy.sum', 'np.sum', (['exp_rating'], {}), '(exp_rating)\n', (10376, 10388), True, 'import numpy as np\n'), ((1018, 1134), 'tensorflow.random_uniform', 'tf.random_uniform', (['[self.userNum, self.emb_dim]'], {'minval': '(-self.initdelta)', 'maxval': 'self.initdelta', 'dtype': 'tf.float32'}), '([self.userNum, self.emb_dim], minval=-self.initdelta,\n maxval=self.initdelta, dtype=tf.float32)\n', (1035, 1134), True, 'import tensorflow as tf\n'), ((1242, 1358), 'tensorflow.random_uniform', 'tf.random_uniform', (['[self.itemNum, self.emb_dim]'], {'minval': '(-self.initdelta)', 'maxval': 'self.initdelta', 'dtype': 'tf.float32'}), '([self.itemNum, self.emb_dim], minval=-self.initdelta,\n maxval=self.initdelta, dtype=tf.float32)\n', (1259, 1358), True, 'import tensorflow as tf\n'), ((1439, 1463), 'tensorflow.zeros', 'tf.zeros', (['[self.itemNum]'], {}), '([self.itemNum])\n', (1447, 1463), True, 'import tensorflow as tf\n'), ((2295, 2331), 'tensorflow.reshape', 'tf.reshape', (['self.all_logits', '[1, -1]'], {}), '(self.all_logits, [1, -1])\n', (2305, 2331), True, 'import tensorflow as tf\n'), ((2534, 2560), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.i_bias'], {}), '(self.i_bias)\n', (2547, 2560), True, 'import tensorflow as tf\n'), ((3493, 3609), 'tensorflow.random_uniform', 'tf.random_uniform', (['[self.userNum, self.emb_dim]'], {'minval': '(-self.initdelta)', 'maxval': 'self.initdelta', 'dtype': 'tf.float32'}), '([self.userNum, self.emb_dim], minval=-self.initdelta,\n maxval=self.initdelta, dtype=tf.float32)\n', (3510, 3609), True, 'import tensorflow as tf\n'), ((3717, 3833), 'tensorflow.random_uniform', 'tf.random_uniform', (['[self.itemNum, self.emb_dim]'], {'minval': '(-self.initdelta)', 'maxval': 'self.initdelta', 'dtype': 'tf.float32'}), '([self.itemNum, self.emb_dim], minval=-self.initdelta,\n maxval=self.initdelta, dtype=tf.float32)\n', (3734, 3833), True, 'import tensorflow as tf\n'), ((3914, 3938), 'tensorflow.zeros', 'tf.zeros', (['[self.itemNum]'], {}), '([self.itemNum])\n', (3922, 3938), True, 'import tensorflow as tf\n'), ((4994, 5020), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.i_bias'], {}), '(self.i_bias)\n', (5007, 5020), True, 'import tensorflow as tf\n'), ((2402, 2421), 'tensorflow.log', 'tf.log', (['self.i_prob'], {}), '(self.i_prob)\n', (2408, 2421), True, 'import tensorflow as tf\n'), ((2466, 2497), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.u_embedding'], {}), '(self.u_embedding)\n', (2479, 2497), True, 'import tensorflow as tf\n'), ((2500, 2531), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.i_embedding'], {}), '(self.i_embedding)\n', (2513, 2531), True, 'import tensorflow as tf\n'), ((4926, 4957), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.u_embedding'], {}), '(self.u_embedding)\n', (4939, 4957), True, 'import tensorflow as tf\n'), ((4960, 4991), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['self.i_embedding'], {}), '(self.i_embedding)\n', (4973, 4991), True, 'import tensorflow as tf\n'), ((5817, 5853), 'tensorflow.reshape', 'tf.reshape', (['self.all_logits', '[1, -1]'], {}), 
'(self.all_logits, [1, -1])\n', (5827, 5853), True, 'import tensorflow as tf\n')]
# PyFluids # Copyright (c) 2021 <NAME> import pytest from pyfluids import * class TestHAInputs: @pytest.mark.parametrize("name", list(HAInput)) def test_with_value(self, name): assert name.with_value(0).value == 0 @pytest.mark.parametrize( "name, coolprop_key", [ (HAInput.Density, "Vha"), (HAInput.DewTemperature, "D"), (HAInput.Enthalpy, "Hha"), (HAInput.Entropy, "Sha"), (HAInput.Humidity, "W"), (HAInput.PartialPressure, "P_w"), (HAInput.Pressure, "P"), (HAInput.RelativeHumidity, "R"), (HAInput.Temperature, "T"), (HAInput.WBTemperature, "B"), ], ) def test_coolprop_key(self, name, coolprop_key): assert name.coolprop_key == coolprop_key @pytest.mark.parametrize("name", list(HAInput)) def test_value(self, name): assert name.value is None
[ "pytest.mark.parametrize" ]
[((242, 593), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name, coolprop_key"""', "[(HAInput.Density, 'Vha'), (HAInput.DewTemperature, 'D'), (HAInput.Enthalpy,\n 'Hha'), (HAInput.Entropy, 'Sha'), (HAInput.Humidity, 'W'), (HAInput.\n PartialPressure, 'P_w'), (HAInput.Pressure, 'P'), (HAInput.\n RelativeHumidity, 'R'), (HAInput.Temperature, 'T'), (HAInput.\n WBTemperature, 'B')]"], {}), "('name, coolprop_key', [(HAInput.Density, 'Vha'), (\n HAInput.DewTemperature, 'D'), (HAInput.Enthalpy, 'Hha'), (HAInput.\n Entropy, 'Sha'), (HAInput.Humidity, 'W'), (HAInput.PartialPressure,\n 'P_w'), (HAInput.Pressure, 'P'), (HAInput.RelativeHumidity, 'R'), (\n HAInput.Temperature, 'T'), (HAInput.WBTemperature, 'B')])\n", (265, 593), False, 'import pytest\n')]
import pandas as pd

from ..utils import constants, plot, utils
import numpy as np
from warnings import warn
from shapely.geometry import Polygon, Point
import geopandas as gpd
from .flowdataframe import FlowDataFrame
from skmob.preprocessing import routing


class TrajSeries(pd.Series):
    @property
    def _constructor(self):
        return TrajSeries

    @property
    def _constructor_expanddim(self):
        return TrajDataFrame


class TrajDataFrame(pd.DataFrame):

    _metadata = ['_parameters', '_crs']  # All the metadata that should be accessible must be also in the metadata method

    def __init__(self, data, latitude=constants.LATITUDE, longitude=constants.LONGITUDE,
                 datetime=constants.DATETIME, user_id=constants.UID, trajectory_id=constants.TID,
                 timestamp=False, crs={"init": "epsg:4326"}, parameters={}):

        original2default = {latitude: constants.LATITUDE,
                            longitude: constants.LONGITUDE,
                            datetime: constants.DATETIME,
                            user_id: constants.UID,
                            trajectory_id: constants.TID}

        columns = None

        if isinstance(data, pd.DataFrame):
            tdf = data.rename(columns=original2default)
            columns = tdf.columns

        # Dictionary
        elif isinstance(data, dict):
            tdf = pd.DataFrame.from_dict(data).rename(columns=original2default)
            columns = tdf.columns

        # List
        elif isinstance(data, list) or isinstance(data, np.ndarray):
            tdf = data
            columns = []
            num_columns = len(data[0])
            for i in range(num_columns):
                try:
                    columns += [original2default[i]]
                except KeyError:
                    columns += [i]

        elif isinstance(data, pd.core.internals.BlockManager):
            tdf = data

        else:
            raise TypeError('DataFrame constructor called with incompatible data and dtype: {e}'.format(e=type(data)))

        super(TrajDataFrame, self).__init__(tdf, columns=columns)

        # Check crs consistency
        if crs is None:
            warn("crs will be set to the default crs WGS84 (EPSG:4326).")
            # actually fall back to the default, as the warning promises
            crs = {"init": "epsg:4326"}

        if not isinstance(crs, dict):
            raise TypeError('crs must be a dict type.')
        self._crs = crs

        if not isinstance(parameters, dict):
            raise AttributeError("parameters must be a dictionary.")
        self._parameters = parameters

        if self._has_traj_columns():
            self._set_traj(timestamp=timestamp, inplace=True)

    def _has_traj_columns(self):
        if (constants.DATETIME in self) and (constants.LATITUDE in self) and (constants.LONGITUDE in self):
            return True
        return False

    def _is_trajdataframe(self):
        if ((constants.DATETIME in self) and pd.core.dtypes.common.is_datetime64_any_dtype(self[constants.DATETIME])) \
                and ((constants.LONGITUDE in self) and pd.core.dtypes.common.is_float_dtype(self[constants.LONGITUDE])) \
                and ((constants.LATITUDE in self) and pd.core.dtypes.common.is_float_dtype(self[constants.LATITUDE])):
            return True
        return False

    def _set_traj(self, timestamp=False, inplace=False):

        if not inplace:
            frame = self.copy()
        else:
            frame = self

        if timestamp:
            frame[constants.DATETIME] = pd.to_datetime(frame[constants.DATETIME], unit='s')

        if not pd.core.dtypes.common.is_datetime64_any_dtype(frame[constants.DATETIME].dtype):
            frame[constants.DATETIME] = pd.to_datetime(frame[constants.DATETIME])

        if not pd.core.dtypes.common.is_float_dtype(frame[constants.LONGITUDE].dtype):
            frame[constants.LONGITUDE] = frame[constants.LONGITUDE].astype('float')

        if not pd.core.dtypes.common.is_float_dtype(frame[constants.LATITUDE].dtype):
            frame[constants.LATITUDE] = frame[constants.LATITUDE].astype('float')

        frame.parameters = self._parameters
        frame.crs = self._crs

        if not inplace:
            return frame

    def to_flowdataframe(self, tessellation, remove_na=False, self_loops=True):
        """
        :param tessellation: GeoDataFrame describing the tessellation to map the trajectory onto
        :param remove_na: if True, drop points that do not fall in any tile (default False)
        :param self_loops: if True, it counts self movements (default True)
        :return: a FlowDataFrame with the flows between tiles
        """

        # Step 1: order the dataframe by
# user_id, traj_id, datetime
        self.sort_values(by=self.__operate_on(), ascending=True, inplace=True)

        # Step 2: map the trajectory onto the tessellation
        flow = self.mapping(tessellation, remove_na=remove_na)

        # Step 3: groupby tile_id and sum to obtain the flow
        flow.loc[:, constants.DESTINATION] = flow[constants.TILE_ID].shift(-1)
        flow = flow.groupby([constants.TILE_ID, constants.DESTINATION]).size().reset_index(name=constants.FLOW)
        flow.rename(columns={constants.TILE_ID: constants.ORIGIN}, inplace=True)

        if not self_loops:
            flow = flow[flow[constants.ORIGIN] != flow[constants.DESTINATION]]

        return FlowDataFrame(flow, tessellation=tessellation)

    def to_geodataframe(self):

        gdf = gpd.GeoDataFrame(self.copy(), geometry=gpd.points_from_xy(self[constants.LONGITUDE], self[constants.LATITUDE]), crs=self._crs)

        return gdf

    def mapping(self, tessellation, remove_na=False):
        """
        Method to assign to each point of the TrajDataFrame a corresponding tile_id of a given tessellation.
        :param tessellation: GeoDataFrame containing a tessellation (geometry of points or polygons).
        :param remove_na: (default False) it removes points that do not have a corresponding tile_id
        :return: TrajDataFrame with an additional column containing the tile_ids.
        """
        gdf = self.to_geodataframe()

        if all(isinstance(x, Polygon) for x in tessellation.geometry):
            if remove_na:
                how = 'inner'
            else:
                how = 'left'
            tile_ids = gpd.sjoin(gdf, tessellation, how=how, op='within')[[constants.TILE_ID]]
        elif all(isinstance(x, Point) for x in tessellation.geometry):
            tile_ids = utils.nearest(gdf, tessellation, constants.TILE_ID)

        new_data = self._constructor(self).__finalize__(self)
        new_data = new_data.merge(tile_ids, right_index=True, left_index=True)

        return new_data

    def __getitem__(self, key):
        """
        If the result contains lat, lng and datetime, return a TrajDataFrame, else a pandas DataFrame.
        """
        result = super(TrajDataFrame, self).__getitem__(key)

        if (isinstance(result, TrajDataFrame)) and result._is_trajdataframe():
            result.__class__ = TrajDataFrame
            result.crs = self._crs
            result.parameters = self._parameters

        elif isinstance(result, TrajDataFrame) and not result._is_trajdataframe():
            result.__class__ = pd.DataFrame

        return result

    def settings_from(self, trajdataframe):
        """
        Method to copy attributes from another TrajDataFrame.
        :param trajdataframe: TrajDataFrame from which to copy the attributes.
""" for k in trajdataframe.metadata: value = getattr(trajdataframe, k) setattr(self, k, value) @classmethod def from_file(cls, filename, latitude=constants.LATITUDE, longitude=constants.LONGITUDE, datetime=constants.DATETIME, user_id=constants.UID, trajectory_id=constants.TID, usecols=None, header='infer', timestamp=False, crs={"init": "epsg:4326"}, sep=",", parameters=None): df = pd.read_csv(filename, sep=sep, header=header, usecols=usecols) if parameters is None: # Init prop dictionary parameters = {'from_file': filename} return cls(df, latitude=latitude, longitude=longitude, datetime=datetime, user_id=user_id, trajectory_id=trajectory_id, parameters=parameters, crs=crs, timestamp=timestamp) @property def lat(self): if constants.LATITUDE not in self: raise AttributeError("The TrajDataFrame does not contain the column '%s.'" % constants.LATITUDE) return self[constants.LATITUDE] @property def lng(self): if constants.LONGITUDE not in self: raise AttributeError("The TrajDataFrame does not contain the column '%s.'"%constants.LONGITUDE) return self[constants.LONGITUDE] @property def datetime(self): if constants.DATETIME not in self: raise AttributeError("The TrajDataFrame does not contain the column '%s.'"%constants.DATETIME) return self[constants.DATETIME] @property def _constructor(self): return TrajDataFrame @property def _constructor_sliced(self): return TrajSeries @property def _constructor_expanddim(self): return TrajDataFrame @property def metadata(self): md = ['crs', 'parameters'] # Add here all the metadata that are accessible from the object return md def __finalize__(self, other, method=None, **kwargs): """propagate metadata from other to self """ # merge operation: using metadata of the left object if method == 'merge': for name in self._metadata: object.__setattr__(self, name, getattr(other.left, name, None)) # concat operation: using metadata of the first object elif method == 'concat': for name in self._metadata: object.__setattr__(self, name, getattr(other.objs[0], name, None)) else: for name in self._metadata: object.__setattr__(self, name, getattr(other, name, None)) return self def set_parameter(self, key, param): self._parameters[key] = param @property def crs(self): return self._crs @crs.setter def crs(self, crs): self._crs = crs @property def parameters(self): return self._parameters @parameters.setter def parameters(self, parameters): self._parameters = dict(parameters) def __operate_on(self): """ Check which optional fields are present and return a list of them plus mandatory fields to which apply built-in pandas functions such as sort_values or groupby. :return: list """ cols = [] if constants.UID in self: cols.append(constants.UID) if constants.TID in self: cols.append(constants.TID) cols.append(constants.DATETIME) return cols # Sorting def sort_by_uid_and_datetime(self): if constants.UID in self.columns: return self.sort_values(by=[constants.UID, constants.DATETIME], ascending=[True, True]) else: return self.sort_values(by=[constants.DATETIME], ascending=[True]) # Plot methods def plot_trajectory(self, map_f=None, max_users=10, max_points=1000, style_function=plot.traj_style_function, tiles='cartodbpositron', zoom=12, hex_color=-1, weight=2, opacity=0.75, start_end_markers=True): """ :param map_f: folium.Map `folium.Map` object where the trajectory will be plotted. If `None`, a new map will be created. :param max_users: int maximum number of users whose trajectories should be plotted. :param max_points: int maximum number of points per user to plot. 
If necessary, a user's trajectory will be down-sampled to have at most `max_points` points. :param style_function: lambda function function specifying the style (weight, color, opacity) of the GeoJson object. :param tiles: str folium's `tiles` parameter. :param zoom: int initial zoom. :param hex_color: str or int hex color of the trajectory line. If `-1` a random color will be generated for each trajectory. :param weight: float thickness of the trajectory line. :param opacity: float opacity (alpha level) of the trajectory line. :param start_end_markers: bool add markers on the start and end points of the trajectory. :return: `folium.Map` object with the plotted trajectories. """ return plot.plot_trajectory(self, map_f=map_f, max_users=max_users, max_points=max_points, style_function=style_function, tiles=tiles, zoom=zoom, hex_color=hex_color, weight=weight, opacity=opacity, start_end_markers=start_end_markers) def plot_stops(self, map_f=None, max_users=10, tiles='cartodbpositron', zoom=12, hex_color=-1, opacity=0.3, radius=12, popup=True): """ Requires a TrajDataFrame with stops or clusters, output of `preprocessing.detection.stops` or `preprocessing.clustering.cluster`. The column `constants.LEAVING_DATETIME` must be present. :param map_f: folium.Map `folium.Map` object where the stops will be plotted. If `None`, a new map will be created. :param max_users: int maximum number of users whose stops should be plotted. :param tiles: str folium's `tiles` parameter. :param zoom: int initial zoom. :param hex_color: str or int hex color of the stop markers. If `-1` a random color will be generated for each user. :param opacity: float opacity (alpha level) of the stop makers. :param radius: float size of the markers. :param popup: bool if `True`, when clicking on a marker a popup window displaying information on the stop will appear. :return: `folium.Map` object with the plotted stops. """ return plot.plot_stops(self, map_f=map_f, max_users=max_users, tiles=tiles, zoom=zoom, hex_color=hex_color, opacity=opacity, radius=radius, popup=popup) def plot_diary(self, user, start_datetime=None, end_datetime=None, ax=None): """ Requires a TrajDataFrame with clusters, output of `preprocessing.clustering.cluster`. The column `constants.CLUSTER` must be present. :param user: str or int user ID whose diary should be plotted. :param start_datetime: datetime.datetime Only stops made after this date will be plotted. If `None` the datetime of the oldest stop will be selected. :param end_datetime: datetime.datetime Only stops made before this date will be plotted. If `None` the datetime of the newest stop will be selected. :param ax: matplotlib.axes axes where the diary will be plotted. :return: `matplotlib.axes` of the plotted diary. """ return plot.plot_diary(self, user, start_datetime=start_datetime, end_datetime=end_datetime, ax=ax) def route(self, G=None, index_origin=0, index_destin=-1): return routing.route(self, G=G, index_origin=index_origin, index_destin=index_destin) def timezone_conversion(self, from_timezone, to_timezone): """ :param from_timezone: str current timezone (e.g. 'GMT') :param to_timezone: str new timezone (e.g. 'Asia/Shanghai') """ self.rename(columns={'datetime': 'original_datetime'}, inplace=True) self['datetime'] = self['original_datetime']. \ dt.tz_localize(from_timezone). \ dt.tz_convert(to_timezone). 
\ dt.tz_localize(None) self.drop(columns=['original_datetime'], inplace=True) def nparray_to_trajdataframe(trajectory_array, columns, parameters={}): df = pd.DataFrame(trajectory_array, columns=columns) tdf = TrajDataFrame(df, parameters=parameters) return tdf def _dataframe_set_geometry(self, col, timestampe=False, drop=False, inplace=False, crs=None): if inplace: raise ValueError("Can't do inplace setting when converting from" " DataFrame to GeoDataFrame") gf = TrajDataFrame(self) # this will copy so that BlockManager gets copied return gf._set_traj() #.set_geometry(col, drop=drop, inplace=False, crs=crs) pd.DataFrame._set_traj = _dataframe_set_geometry
[ "pandas.DataFrame", "pandas.DataFrame.from_dict", "pandas.core.dtypes.common.is_float_dtype", "pandas.read_csv", "geopandas.sjoin", "pandas.to_datetime", "skmob.preprocessing.routing.route", "geopandas.points_from_xy", "pandas.core.dtypes.common.is_datetime64_any_dtype", "warnings.warn" ]
[((16078, 16125), 'pandas.DataFrame', 'pd.DataFrame', (['trajectory_array'], {'columns': 'columns'}), '(trajectory_array, columns=columns)\n', (16090, 16125), True, 'import pandas as pd\n'), ((7820, 7882), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'sep': 'sep', 'header': 'header', 'usecols': 'usecols'}), '(filename, sep=sep, header=header, usecols=usecols)\n', (7831, 7882), True, 'import pandas as pd\n'), ((15355, 15433), 'skmob.preprocessing.routing.route', 'routing.route', (['self'], {'G': 'G', 'index_origin': 'index_origin', 'index_destin': 'index_destin'}), '(self, G=G, index_origin=index_origin, index_destin=index_destin)\n', (15368, 15433), False, 'from skmob.preprocessing import routing\n'), ((2193, 2254), 'warnings.warn', 'warn', (['"""crs will be set to the default crs WGS84 (EPSG:4326)."""'], {}), "('crs will be set to the default crs WGS84 (EPSG:4326).')\n", (2197, 2254), False, 'from warnings import warn\n'), ((3477, 3528), 'pandas.to_datetime', 'pd.to_datetime', (['frame[constants.DATETIME]'], {'unit': '"""s"""'}), "(frame[constants.DATETIME], unit='s')\n", (3491, 3528), True, 'import pandas as pd\n'), ((3545, 3623), 'pandas.core.dtypes.common.is_datetime64_any_dtype', 'pd.core.dtypes.common.is_datetime64_any_dtype', (['frame[constants.DATETIME].dtype'], {}), '(frame[constants.DATETIME].dtype)\n', (3590, 3623), True, 'import pandas as pd\n'), ((3665, 3706), 'pandas.to_datetime', 'pd.to_datetime', (['frame[constants.DATETIME]'], {}), '(frame[constants.DATETIME])\n', (3679, 3706), True, 'import pandas as pd\n'), ((3723, 3793), 'pandas.core.dtypes.common.is_float_dtype', 'pd.core.dtypes.common.is_float_dtype', (['frame[constants.LONGITUDE].dtype'], {}), '(frame[constants.LONGITUDE].dtype)\n', (3759, 3793), True, 'import pandas as pd\n'), ((3895, 3964), 'pandas.core.dtypes.common.is_float_dtype', 'pd.core.dtypes.common.is_float_dtype', (['frame[constants.LATITUDE].dtype'], {}), '(frame[constants.LATITUDE].dtype)\n', (3931, 3964), True, 'import pandas as pd\n'), ((2898, 2969), 'pandas.core.dtypes.common.is_datetime64_any_dtype', 'pd.core.dtypes.common.is_datetime64_any_dtype', (['self[constants.DATETIME]'], {}), '(self[constants.DATETIME])\n', (2943, 2969), True, 'import pandas as pd\n'), ((3027, 3090), 'pandas.core.dtypes.common.is_float_dtype', 'pd.core.dtypes.common.is_float_dtype', (['self[constants.LONGITUDE]'], {}), '(self[constants.LONGITUDE])\n', (3063, 3090), True, 'import pandas as pd\n'), ((3148, 3210), 'pandas.core.dtypes.common.is_float_dtype', 'pd.core.dtypes.common.is_float_dtype', (['self[constants.LATITUDE]'], {}), '(self[constants.LATITUDE])\n', (3184, 3210), True, 'import pandas as pd\n'), ((5287, 5358), 'geopandas.points_from_xy', 'gpd.points_from_xy', (['self[constants.LONGITUDE]', 'self[constants.LATITUDE]'], {}), '(self[constants.LONGITUDE], self[constants.LATITUDE])\n', (5305, 5358), True, 'import geopandas as gpd\n'), ((6178, 6228), 'geopandas.sjoin', 'gpd.sjoin', (['gdf', 'tessellation'], {'how': 'how', 'op': '"""within"""'}), "(gdf, tessellation, how=how, op='within')\n", (6187, 6228), True, 'import geopandas as gpd\n'), ((1385, 1413), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data'], {}), '(data)\n', (1407, 1413), True, 'import pandas as pd\n')]
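A minimal usage sketch for the TrajDataFrame defined above; it assumes skmob (with its pandas/geopandas dependencies) is installed, and the coordinates, timestamps, and user ID below are made up for illustration.

import pandas as pd
from skmob import TrajDataFrame

raw = pd.DataFrame({
    'lat': [39.984, 39.985, 39.987],   # placeholder coordinates
    'lng': [116.319, 116.320, 116.323],
    'datetime': pd.to_datetime(['2019-01-01 08:00', '2019-01-01 08:05', '2019-01-01 08:10']),
    'uid': [1, 1, 1],
})
# the constructor renames the given columns to skmob's default column names
tdf = TrajDataFrame(raw, latitude='lat', longitude='lng', datetime='datetime', user_id='uid')
print(tdf.sort_by_uid_and_datetime())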
"""Definitions for the primitive `composite_full`.""" from myia.lib import ( SHAPE, TYPE, VALUE, AbstractArray, AbstractScalar, AbstractType, abstract_array, distribute, force_pending, scalar_cast, u64tup_typecheck, ) from myia.operations import primitives as P from myia.xtype import NDArray def pyimpl_composite_full(shape, fill_value, abstract_scalar_type): """Implement `composite_full`.""" scalar_value = scalar_cast(fill_value, abstract_scalar_type) return distribute( P.scalar_to_array(scalar_value, abstract_array(shape, scalar_value)), shape, ) async def infer_composite_full( self, engine, shape: u64tup_typecheck, fill_value: AbstractScalar, dtype: AbstractType, ): """Infer the return type of primitive `composite_full`.""" return AbstractArray( AbstractScalar( { TYPE: await force_pending(dtype.element.xtype()), VALUE: fill_value.xvalue(), } ), { SHAPE: tuple( self.require_constant(e, argnum=f'"0:shape[{edx}]"') for edx, e in enumerate(shape.elements) ), TYPE: NDArray, }, )
[ "myia.lib.abstract_array", "myia.lib.scalar_cast" ]
[((464, 509), 'myia.lib.scalar_cast', 'scalar_cast', (['fill_value', 'abstract_scalar_type'], {}), '(fill_value, abstract_scalar_type)\n', (475, 509), False, 'from myia.lib import SHAPE, TYPE, VALUE, AbstractArray, AbstractScalar, AbstractType, abstract_array, distribute, force_pending, scalar_cast, u64tup_typecheck\n'), ((573, 608), 'myia.lib.abstract_array', 'abstract_array', (['shape', 'scalar_value'], {}), '(shape, scalar_value)\n', (587, 608), False, 'from myia.lib import SHAPE, TYPE, VALUE, AbstractArray, AbstractScalar, AbstractType, abstract_array, distribute, force_pending, scalar_cast, u64tup_typecheck\n')]
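Behaviorally, the primitive above corresponds to numpy.full: allocate an array of the given shape, filled with a value cast to the requested scalar dtype. A plain-numpy illustration of the semantics (not myia's abstract-type machinery):

import numpy as np

# shape + fill value + dtype cast, mirroring composite_full's contract
out = np.full((2, 3), 7, dtype=np.float32)
print(out)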
import abc

import six
from typing import Any, Union, List, Optional, Tuple


@six.add_metaclass(abc.ABCMeta)
class ICache(object):
    @abc.abstractmethod
    def put(self, key, value, expiry=None):
        # type: (str, Any, Optional[int]) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def put_many(self, values, expiry=None):
        # type: (List[Tuple[str, Any]], Optional[int]) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def get(self, key):
        # type: (str) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def extract(self, key):
        # type: (str) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def has(self, key):
        # type: (str) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def touch(self, keys):
        # type: (Union[List[str], str]) -> dict
        raise NotImplementedError

    @abc.abstractmethod
    def forget(self, keys):
        # type: (Union[List[str], str]) -> dict
        raise NotImplementedError
[ "six.add_metaclass" ]
[((78, 108), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (95, 108), False, 'import six\n')]
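A minimal in-memory implementation sketch of the ICache interface above. The interface only fixes that each method returns a dict, so the exact return-value shapes here are assumptions.

import time

class DictCache(ICache):
    def __init__(self):
        self._store = {}  # key -> (value, expires_at or None)

    def put(self, key, value, expiry=None):
        expires_at = time.time() + expiry if expiry else None
        self._store[key] = (value, expires_at)
        return {key: value}

    def put_many(self, values, expiry=None):
        return {k: self.put(k, v, expiry)[k] for k, v in values}

    def _alive(self, key):
        value, expires_at = self._store[key]
        return expires_at is None or expires_at > time.time()

    def get(self, key):
        if key in self._store and self._alive(key):
            return {key: self._store[key][0]}
        return {}

    def extract(self, key):
        # get-and-delete in one step
        result = self.get(key)
        self._store.pop(key, None)
        return result

    def has(self, key):
        return {key: key in self._store and self._alive(key)}

    def touch(self, keys):
        # interpretation assumption: report which keys are still present
        keys = [keys] if isinstance(keys, str) else keys
        return {k: k in self._store for k in keys}

    def forget(self, keys):
        keys = [keys] if isinstance(keys, str) else keys
        return {k: self._store.pop(k, None) is not None for k in keys}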
import numpy as np import os import h5py import sys import scipy import scipy.io.wavfile from scipy.signal import butter # Reconsider the Handling of SN_L, Gp_L, and Gp in the Freq_Bin Commands # Command for Initiallizing work space with Access to both: All the Data and Ephysflow Commands def initiate_path(): """ This Code is used to construct a path to the Data Folder using both the os and sys modules please :return: Path to the Bird Song Data """ experiment_folder = '/net/expData/birdSong/' ss_data_folder = os.path.join(experiment_folder, 'ss_data') # Path to All Awake Bird Data sys.path.append(os.path.join(experiment_folder, 'ephysflow')) # Appends the module created by Gentner Lab return ss_data_folder def get_birds_data(Bird_Id=str, Session=str, ss_data_folder=str): """ This code is used to grab the data from the Awake Free Behaving Experiments done by Zeke and store them in a format that works with the Python Environment :param Bird_Id: Specify the Specific Bird you are going to be looking at :param Session: Specify which Session you will be working with :param ss_data_folder: This Parameter is created by the initiate_path :return: Returns a List containing the Designated Experiments Results, and the Labels for its Motifs """ bird_id = Bird_Id sess_name = Session kwd_file_folder = os.path.join(ss_data_folder, bird_id, sess_name) kwd_files = [f for f in os.listdir(kwd_file_folder) if f.endswith('.kwd')] assert (len(kwd_files) == 1) kwd_file = kwd_files[0] print(kwd_file) # Sanity Check to Make Sure You are working with the Correct File # open the file in read mode kwd_file = h5py.File(os.path.join(kwd_file_folder, kwd_file), 'r') # Dynamic Members Size Num_Member = kwd_file.get('recordings') # Test for making the For Loop for HD5 file dynamic Num_Members = Num_Member.keys() P = len(Num_Members) # Import Data from the .kwd File. Entire_trial = [] File_loc = 'recordings/' k = '' j = 0 # Isolate and Store Data into Numpy Array. Then Store Numpy Array into a List. for j in range(0, P): k = File_loc + str(j) + '/data' print(k) # This is a Sanity Check to Ensure the Correct Data is accessed Epoch_data = np.array(kwd_file.get(k)) Entire_trial.append(Epoch_data) j += 1 # File Structure Part 2 kwe_files = [f for f in os.listdir(kwd_file_folder) if f.endswith('.kwe')] assert (len(kwe_files) == 1) kwe_file = kwe_files[0] print(kwe_file) # Sanity Check to Make Sure You are working with the Correct File # open the file in read mode kwe_file = h5py.File(os.path.join(kwd_file_folder, kwe_file), 'r') # Import Data from the .kwe File. # Store the Labels and Markers to Variables epoch_label = np.array(kwe_file.get('event_types/singing/motiff_1/recording')) print('Number of Motifs:', epoch_label.size) # Good to Know/Sanity Check # print('') start_time = np.array(kwe_file.get('event_types/singing/motiff_1/time_samples')) print('Number of Start Times:', start_time.size) # Sanity Check The Two Numbers should be equal assert (start_time.size == epoch_label.size) # Check to Make Sure they are the same Length print('') print(epoch_label) print('') print(start_time) return Entire_trial, epoch_label, start_time def clip_all_motifs(Entire_trial, Labels=np.ndarray, Starts=np.ndarray, song_length=str, Gaps=str): """ Command that Clips and Store Motifs or Bouts with a given Set of Parameters: Song Length, and Gap Length. 
:param Entire_trial: list of per-epoch data arrays (output of get_birds_data)
    :param Labels: epoch index of each motif
    :param Starts: start time (in samples) of each motif
    :param song_length: expected song duration in seconds
    :param Gaps: time in seconds to pad before and after each motif
    :return: list of clipped motifs, plus SN_L, Gp_L, and Gp (all in samples)
    """
    All_Songs = []
    Motif_T = []
    Epoch_w_motif = []

    Song_length = song_length  # Expected Song Duration in Seconds
    Gap = Gaps  # How much time before and after to add
    SN_L = int(Song_length * 30000)  # song length in samples (30 kHz sampling rate)
    Gp = int(Gap * 30000)            # gap length in samples
    Gp_L = Gp * 2
    fs = 30000.0  # 30 kHz
    lowcut = 400.0
    highcut = 10000.0

    z = Labels.size
    stop_time = Starts + 30000 * Song_length

    for i in range(0, z):
        j = int(Labels[i])
        Epoch_w_motif = Entire_trial[j]
        Motif_T = Epoch_w_motif[int(Starts[i] - Gp):int(stop_time[i] + Gp), :]
        # band-pass filter the audio channel (channel 16) before storing
        Holder = butter_bandpass_filter(Motif_T[:, 16], lowcut, highcut, fs, order=2)
        Motif_T[:, 16] = Holder
        All_Songs.append(Motif_T[:, :])
    print('Song Motifs Acquired')
    return All_Songs, SN_L, Gp_L, Gp


def butter_bandpass(lowcut, highcut, fs, order=5):
    nyq = 0.5 * fs
    low = lowcut / nyq
    high = highcut / nyq
    b, a = butter(order, [low, high], btype='bandpass')
    return b, a


def butter_bandpass_filter(data, lowcut, highcut, fs, order=5):
    b, a = butter_bandpass(lowcut, highcut, fs, order=order)
    y = scipy.signal.filtfilt(b, a, data)
    return y
[ "os.listdir", "os.path.join", "scipy.signal.butter", "scipy.signal.filtfilt" ]
[((537, 579), 'os.path.join', 'os.path.join', (['experiment_folder', '"""ss_data"""'], {}), "(experiment_folder, 'ss_data')\n", (549, 579), False, 'import os\n'), ((1385, 1433), 'os.path.join', 'os.path.join', (['ss_data_folder', 'bird_id', 'sess_name'], {}), '(ss_data_folder, bird_id, sess_name)\n', (1397, 1433), False, 'import os\n'), ((5134, 5178), 'scipy.signal.butter', 'butter', (['order', '[low, high]'], {'btype': '"""bandpass"""'}), "(order, [low, high], btype='bandpass')\n", (5140, 5178), False, 'from scipy.signal import butter\n'), ((5357, 5390), 'scipy.signal.filtfilt', 'scipy.signal.filtfilt', (['b', 'a', 'data'], {}), '(b, a, data)\n', (5378, 5390), False, 'import scipy\n'), ((635, 679), 'os.path.join', 'os.path.join', (['experiment_folder', '"""ephysflow"""'], {}), "(experiment_folder, 'ephysflow')\n", (647, 679), False, 'import os\n'), ((1720, 1759), 'os.path.join', 'os.path.join', (['kwd_file_folder', 'kwd_file'], {}), '(kwd_file_folder, kwd_file)\n', (1732, 1759), False, 'import os\n'), ((2714, 2753), 'os.path.join', 'os.path.join', (['kwd_file_folder', 'kwe_file'], {}), '(kwd_file_folder, kwe_file)\n', (2726, 2753), False, 'import os\n'), ((1462, 1489), 'os.listdir', 'os.listdir', (['kwd_file_folder'], {}), '(kwd_file_folder)\n', (1472, 1489), False, 'import os\n'), ((2456, 2483), 'os.listdir', 'os.listdir', (['kwd_file_folder'], {}), '(kwd_file_folder)\n', (2466, 2483), False, 'import os\n')]
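A hedged usage sketch of the pipeline above; the bird ID and session name are placeholders, and the .kwd/.kwe folder layout must match the lab's conventions for initiate_path to resolve.

ss_data_folder = initiate_path()
trials, labels, starts = get_birds_data('bird01', 'session01', ss_data_folder)  # placeholder IDs
motifs, SN_L, Gp_L, Gp = clip_all_motifs(trials, labels, starts, song_length=0.7, Gaps=0.1)
print(len(motifs), 'motifs clipped, each', SN_L + Gp_L, 'samples long')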
import tensorflow as tf from tensorflow import keras from utils import data_utils, argmanager from utils.loss import multinomial_nll import numpy as np import os import json import scipy import sklearn.metrics import scipy.stats from collections import OrderedDict def softmax(x, temp=1): norm_x = x - np.mean(x,axis=1, keepdims=True) return np.exp(temp*norm_x)/np.sum(np.exp(temp*norm_x), axis=1, keepdims=True) def get_jsd(preds, cts, min_tot_cts=10): return np.array([scipy.spatial.distance.jensenshannon(x,y) for x,y in zip(preds, cts) \ if y.sum()>min_tot_cts]) def main(): args = argmanager.fetch_metrics_args() print(args) # load model with keras.utils.CustomObjectScope({'multinomial_nll':multinomial_nll, 'tf':tf}): model = keras.models.load_model(args.model) inputlen = int(model.input_shape[1]) outputlen = int(model.output_shape[0][1]) # load data test_peaks_seqs, test_peaks_cts, \ test_nonpeaks_seqs, test_nonpeaks_cts = data_utils.load_test_data( args.peaks, args.nonpeaks, args.genome, args.bigwig, args.test_chr, inputlen, outputlen ) # predict on peaks and nonpeaks test_peaks_pred_logits, test_peaks_pred_logcts = \ model.predict(test_peaks_seqs, batch_size=args.batch_size, verbose=True) test_nonpeaks_pred_logits, test_nonpeaks_pred_logcts = \ model.predict(test_nonpeaks_seqs, batch_size=args.batch_size, verbose=True) metrics = OrderedDict() # counts metrics all_test_logcts = np.log(1 + np.vstack([test_peaks_cts, test_nonpeaks_cts]).sum(-1)) cur_pair = (all_test_logcts, np.vstack([test_peaks_pred_logcts, test_nonpeaks_pred_logcts]).ravel()) metrics['bpnet_cts_pearson_peaks_nonpeaks'] = scipy.stats.pearsonr(*cur_pair)[0] metrics['bpnet_cts_spearman_peaks_nonpeaks'] = scipy.stats.spearmanr(*cur_pair)[0] cur_pair = ([1]*len(test_peaks_pred_logcts) + [0]*len(test_nonpeaks_pred_logcts), np.vstack([test_peaks_pred_logcts, test_nonpeaks_pred_logcts]).ravel()) metrics['binary_auc'] = sklearn.metrics.roc_auc_score(*cur_pair) peaks_test_logcts = np.log(1 + test_peaks_cts.sum(-1)) cur_pair = (peaks_test_logcts, test_peaks_pred_logcts.ravel()) metrics['bpnet_cts_pearson_peaks'] = scipy.stats.pearsonr(*cur_pair)[0] metrics['bpnet_cts_spearman_peaks'] = scipy.stats.spearmanr(*cur_pair)[0] # profile metrics (all within peaks) cur_pair = (softmax(test_peaks_pred_logits), test_peaks_cts) metrics['bpnet_profile_median_jsd_peaks'] = np.median(get_jsd(*cur_pair)) cur_pair = (softmax(test_peaks_pred_logits), test_peaks_cts[:, np.random.permutation(test_peaks_cts.shape[1])]) metrics['bpnet_profile_median_jsd_peaks_randomized'] = np.median(get_jsd(*cur_pair)) with open(args.output_prefix + ".metrics.json", "w") as f: json.dump(metrics, f, ensure_ascii=False, indent=4) if __name__=="__main__": main()
[ "json.dump", "tensorflow.keras.utils.CustomObjectScope", "tensorflow.keras.models.load_model", "scipy.spatial.distance.jensenshannon", "utils.argmanager.fetch_metrics_args", "scipy.stats.spearmanr", "scipy.stats.pearsonr", "numpy.mean", "numpy.exp", "utils.data_utils.load_test_data", "numpy.random.permutation", "collections.OrderedDict", "numpy.vstack" ]
[((629, 660), 'utils.argmanager.fetch_metrics_args', 'argmanager.fetch_metrics_args', ([], {}), '()\n', (658, 660), False, 'from utils import data_utils, argmanager\n'), ((1020, 1139), 'utils.data_utils.load_test_data', 'data_utils.load_test_data', (['args.peaks', 'args.nonpeaks', 'args.genome', 'args.bigwig', 'args.test_chr', 'inputlen', 'outputlen'], {}), '(args.peaks, args.nonpeaks, args.genome, args.\n bigwig, args.test_chr, inputlen, outputlen)\n', (1045, 1139), False, 'from utils import data_utils, argmanager\n'), ((1667, 1680), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1678, 1680), False, 'from collections import OrderedDict\n'), ((308, 341), 'numpy.mean', 'np.mean', (['x'], {'axis': '(1)', 'keepdims': '(True)'}), '(x, axis=1, keepdims=True)\n', (315, 341), True, 'import numpy as np\n'), ((352, 373), 'numpy.exp', 'np.exp', (['(temp * norm_x)'], {}), '(temp * norm_x)\n', (358, 373), True, 'import numpy as np\n'), ((704, 781), 'tensorflow.keras.utils.CustomObjectScope', 'keras.utils.CustomObjectScope', (["{'multinomial_nll': multinomial_nll, 'tf': tf}"], {}), "({'multinomial_nll': multinomial_nll, 'tf': tf})\n", (733, 781), False, 'from tensorflow import keras\n'), ((797, 832), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['args.model'], {}), '(args.model)\n', (820, 832), False, 'from tensorflow import keras\n'), ((1990, 2021), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['*cur_pair'], {}), '(*cur_pair)\n', (2010, 2021), False, 'import scipy\n'), ((2076, 2108), 'scipy.stats.spearmanr', 'scipy.stats.spearmanr', (['*cur_pair'], {}), '(*cur_pair)\n', (2097, 2108), False, 'import scipy\n'), ((2581, 2612), 'scipy.stats.pearsonr', 'scipy.stats.pearsonr', (['*cur_pair'], {}), '(*cur_pair)\n', (2601, 2612), False, 'import scipy\n'), ((2658, 2690), 'scipy.stats.spearmanr', 'scipy.stats.spearmanr', (['*cur_pair'], {}), '(*cur_pair)\n', (2679, 2690), False, 'import scipy\n'), ((3175, 3226), 'json.dump', 'json.dump', (['metrics', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(metrics, f, ensure_ascii=False, indent=4)\n', (3184, 3226), False, 'import json\n'), ((379, 400), 'numpy.exp', 'np.exp', (['(temp * norm_x)'], {}), '(temp * norm_x)\n', (385, 400), True, 'import numpy as np\n'), ((487, 529), 'scipy.spatial.distance.jensenshannon', 'scipy.spatial.distance.jensenshannon', (['x', 'y'], {}), '(x, y)\n', (523, 529), False, 'import scipy\n'), ((1841, 1903), 'numpy.vstack', 'np.vstack', (['[test_peaks_pred_logcts, test_nonpeaks_pred_logcts]'], {}), '([test_peaks_pred_logcts, test_nonpeaks_pred_logcts])\n', (1850, 1903), True, 'import numpy as np\n'), ((2217, 2279), 'numpy.vstack', 'np.vstack', (['[test_peaks_pred_logcts, test_nonpeaks_pred_logcts]'], {}), '([test_peaks_pred_logcts, test_nonpeaks_pred_logcts])\n', (2226, 2279), True, 'import numpy as np\n'), ((2965, 3011), 'numpy.random.permutation', 'np.random.permutation', (['test_peaks_cts.shape[1]'], {}), '(test_peaks_cts.shape[1])\n', (2986, 3011), True, 'import numpy as np\n'), ((1736, 1782), 'numpy.vstack', 'np.vstack', (['[test_peaks_cts, test_nonpeaks_cts]'], {}), '([test_peaks_cts, test_nonpeaks_cts])\n', (1745, 1782), True, 'import numpy as np\n')]
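A small self-contained check of the profile helpers above on synthetic counts — no trained model or genome files required, since softmax and get_jsd only need arrays.

import numpy as np

rng = np.random.default_rng(0)
logits = rng.normal(size=(4, 100))                 # fake per-base predicted logits
cts = rng.poisson(5, size=(4, 100)).astype(float)  # fake observed per-base counts
print('median JSD:', np.median(get_jsd(softmax(logits), cts)))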
# Generated by Django 3.2.1 on 2021-06-01 13:14 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tr_ars', '0001_initial'), ] operations = [ migrations.AddField( model_name='actor', name='active', field=models.BooleanField(default=True, verbose_name='actor is active'), ), ]
[ "django.db.models.BooleanField" ]
[((321, 386), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""actor is active"""'}), "(default=True, verbose_name='actor is active')\n", (340, 386), False, 'from django.db import migrations, models\n')]
from ckanext.issues import model try: from ckan.new_tests import factories, helpers except ImportError: from ckan.tests import factories, helpers import factory class Issue(factory.Factory): class Meta: model = model.Issue abstract = False title = factory.Sequence(lambda n: 'Test Issue [{n}]'.format(n=n)) description = 'Some description' dataset_id = factory.LazyAttribute(lambda _: factories.Dataset()['id']) @classmethod def _build(cls, target_class, *args, **kwargs): raise NotImplementedError(".build() isn't supported in CKAN") @classmethod def _create(cls, target_class, *args, **kwargs): if args: assert False, "Positional args aren't supported, use keyword args." context = {'user': factories._get_action_user_name(kwargs)} # issue_create is so badly behaved I'm doing this for now data_dict = dict(**kwargs) data_dict.pop('user', None) issue_dict = helpers.call_action('issue_create', context=context, **data_dict) return issue_dict class IssueComment(factory.Factory): class Meta: model = model.IssueComment abstract = False comment = 'some comment' @classmethod def _build(cls, target_class, *args, **kwargs): raise NotImplementedError(".build() isn't supported in CKAN") @classmethod def _create(cls, target_class, *args, **kwargs): if args: assert False, "Positional args aren't supported, use keyword args." context = {'user': factories._get_action_user_name(kwargs)} issue_comment_dict = helpers.call_action('issue_comment_create', context=context, **kwargs) return issue_comment_dict
[ "ckan.tests.helpers.call_action", "ckan.tests.factories.Dataset", "ckan.tests.factories._get_action_user_name" ]
[((994, 1059), 'ckan.tests.helpers.call_action', 'helpers.call_action', (['"""issue_create"""'], {'context': 'context'}), "('issue_create', context=context, **data_dict)\n", (1013, 1059), False, 'from ckan.tests import factories, helpers\n'), ((1718, 1788), 'ckan.tests.helpers.call_action', 'helpers.call_action', (['"""issue_comment_create"""'], {'context': 'context'}), "('issue_comment_create', context=context, **kwargs)\n", (1737, 1788), False, 'from ckan.tests import factories, helpers\n'), ((793, 832), 'ckan.tests.factories._get_action_user_name', 'factories._get_action_user_name', (['kwargs'], {}), '(kwargs)\n', (824, 832), False, 'from ckan.tests import factories, helpers\n'), ((1648, 1687), 'ckan.tests.factories._get_action_user_name', 'factories._get_action_user_name', (['kwargs'], {}), '(kwargs)\n', (1679, 1687), False, 'from ckan.tests import factories, helpers\n'), ((430, 449), 'ckan.tests.factories.Dataset', 'factories.Dataset', ([], {}), '()\n', (447, 449), False, 'from ckan.tests import factories, helpers\n')]
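A hedged usage sketch inside a CKAN test; it assumes a test app with the issues plugin loaded, and passes only fields the factory above clearly supports (the `user` kwarg is popped to build the action context).

user = factories.User()
dataset = factories.Dataset()
issue = Issue(user=user, title='Dataset is broken', dataset_id=dataset['id'])
print(issue['title'])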
import logging
import random
import re
import xml.etree.ElementTree as ET

from tqdm import tqdm


def process_posts(fd_in, fd_out_train, fd_out_test, target_tag, split):
    """Stream an XML posts dump line by line, label each post by whether it
    carries target_tag, and split the rows between the train and test files
    (`split` is the fraction routed to the test file)."""
    line_num = 1
    for line in tqdm(fd_in):
        try:
            # roughly (1 - split) of the rows go to the training file
            fd_out = fd_out_train if random.random() > split else fd_out_test
            attr = ET.fromstring(line).attrib
            pid = attr.get("Id", "")
            label = 1 if target_tag in attr.get("Tags", "") else 0
            title = re.sub(r"\s+", " ", attr.get("Title", "")).strip()
            body = re.sub(r"\s+", " ", attr.get("Body", "")).strip()
            text = title + " " + body
            fd_out.write(f"{pid}\t{label}\t{text}\n")
            line_num += 1
        except Exception as e:
            msg = f"skipping the broken line {line_num}: {e}\n"
            logging.exception(msg)
[ "random.random", "tqdm.tqdm", "logging.exception", "xml.etree.ElementTree.fromstring" ]
[((204, 215), 'tqdm.tqdm', 'tqdm', (['fd_in'], {}), '(fd_in)\n', (208, 215), False, 'from tqdm import tqdm\n'), ((327, 346), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['line'], {}), '(line)\n', (340, 346), True, 'import xml.etree.ElementTree as ET\n'), ((816, 836), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (833, 836), False, 'import logging\n'), ((267, 282), 'random.random', 'random.random', ([], {}), '()\n', (280, 282), False, 'import random\n')]
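A hedged usage sketch for streaming a Stack Exchange `Posts.xml` dump into tab-separated files; the file names and tag are placeholders, and `split` is treated as the test fraction (rows with `random.random() > split` go to the training file).

with open('Posts.xml', encoding='utf-8') as fd_in, \
        open('train.tsv', 'w', encoding='utf-8') as fd_train, \
        open('test.tsv', 'w', encoding='utf-8') as fd_test:
    process_posts(fd_in, fd_train, fd_test, target_tag='<python>', split=0.2)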
#!/usr/bin/env python
"""
"""
import numpy as np
from scipy.odr import Model
from scipy.optimize import leastsq
from scipy import ndimage
from scipy.ndimage import gaussian_gradient_magnitude
from scipy.ndimage import map_coordinates

from common import PIX_ERR
from features import line_profile

def contour(img, A0, R0, phi1=-np.pi/2, phi2=np.pi/2, dphi=np.pi/180, DR=0.2, sigma=3):
    # this is just a rough draft not intended to be working
    y0, x0 = A0
    phi = np.arange(phi1, phi2, dphi)
    x1 = x0+R0*(1-DR)*np.cos(phi)
    y1 = y0+R0*(1-DR)*np.sin(phi)
    x2 = x0+R0*(1+DR)*np.cos(phi)
    y2 = y0+R0*(1+DR)*np.sin(phi)
    rim = []
    Nphi, = phi.shape
    for i in range(Nphi):
        A1 = np.asarray(((y1[i], x1[i]), (PIX_ERR, PIX_ERR)))
        A2 = np.asarray(((y2[i], x2[i]), (PIX_ERR, PIX_ERR)))
        metrics, metrics_err, profile = line_profile(img, A1, A2)
        rel_rim = find_rim(profile, sigma)*metrics
        real_rim = A1 + rel_rim
        rim.append(real_rim)
    return rim

def find_rim(profile, sigma=3):
    # smooth the profile, then locate the steepest gradient
    grad = ndimage.gaussian_gradient_magnitude(
        ndimage.gaussian_filter1d(profile, sigma), sigma)
    return np.argmax(grad)

def line_from_points(point1, point2):
    """
    @param point1: array in numpy order = (y,x)
    @param point2: array in numpy order = (y,x)
    """
    k = (point2 - point1)[0] / (point2 - point1)[1]
    b = point1[0] - k * point1[1]
    return k, b

def line_perpendicular(k, b, x):
    """
    @param k: y=kx+b
    @param b: y=kx+b
    @param x: where the perpendicular has to intersect the line
    """
    # y = k*x+b
    k_perp = -1./k
    b_perp = (k - k_perp) * x + b
    return k_perp, b_perp

def circle_fcn(B, x, y):
    return B[0]**2 - (B[1]-x)**2 - (B[2]-y)**2

def _circle_fjacb(B, x, y):
    fjacb = np.empty((x.shape[0], 3))
    fjacb[:, 0] = 2*B[0]
    fjacb[:, 1] = -2*(B[1]-x)
    fjacb[:, 2] = -2*(B[2]-y)
    return fjacb

def _circle_fjacd(B, x, y):
    fjacd = np.empty((x.shape[0], 2))
    fjacd[:, 0] = 2*(B[1]-x)
    fjacd[:, 1] = 2*(B[2]-y)
    return fjacd

def _circle_est(x, y):
    return np.mean((x.ptp(), y.ptp()))/2.0, x.mean(), y.mean()

def _circle_meta():
    return {'name': 'Equation of a circle'}

circle_model = Model(circle_fcn, estimate=_circle_est,
                    fjacb=_circle_fjacb, fjacd=_circle_fjacd,
                    meta=_circle_meta, implicit=True)

def FitCircle(x, y):
    ''' leastsq without errors '''
    return leastsq(circle_fcn, _circle_est(x, y), (x, y),
                   Dfun=_circle_fjacb, full_output=1)

def section_profile(img, point1, point2):
    '''define the brightness profile along the line defined by 2 points
    coordinates of points with their errors are supplied
    as numpy arrays in notation array((y,x),(dy,dx))!
might as well submit other options to map_coordinates function
    it is assumed that pipette is more or less horizontal
    so that axis intersects left and right image sides
    '''
    # define the line going through 2 points
    y1, x1, dy1, dx1 = point1.flatten()
    y2, x2, dy2, dx2 = point2.flatten()
    k = (y2 - y1) / (x2 - x1)
    dk = np.sqrt(dy1*dy1 + dy2*dy2 + k*k*(dx1*dx1+dx2*dx2))/np.fabs(x2-x1)
    # number of points for profile
    # it is assumed that pipette is more or less horizontal
    # so that axis intersects left and right image sides
    nPoints = int(max(np.fabs(y2-y1), np.fabs(x2-x1)))
    # coordinates of points in the profile
    x = np.linspace(x1, x2, nPoints)
    y = np.linspace(y1, y2, nPoints)
    # calculate profile metric - coefficient for lengths in profile vs pixels
    if np.fabs(k) <= 1:
        metric = np.sqrt(1 + k*k)
        metric_err = np.fabs(k)*dk/metric
    else:
        metric = np.sqrt(1 + 1/(k*k))
        metric_err = dk/np.fabs(metric * k*k*k)
    # output interpolated values at points of profile and profile metric
    return metric, metric_err, map_coordinates(img, [y, x], output=float)

def CircleFunc(r, N=100):
    phi = np.linspace(0, 2*np.pi, N)
    return r*np.cos(phi), r*np.sin(phi)

def VesicleEdge_phc(img, x0, y0, r0, N=100, phi1=0, phi2=2*np.pi, sigma=1):
    Xedge = np.empty(N)
    Yedge = np.empty(N)
    for i, phi in enumerate(np.linspace(phi1, phi2, N)):
        x = x0+r0*np.cos(phi)
        y = y0+r0*np.sin(phi)
        # clamp the probe point to the image bounds
        if x < 0:
            x = 0
            y = y0+(x-x0)*np.tan(phi)
        elif x > img.shape[1]-1:
            x = img.shape[1]-1
            y = y0+(x-x0)*np.tan(phi)
        if y < 0:
            y = 0
            x = x0+(y-y0)/np.tan(phi)
        elif y > img.shape[0]-1:
            y = img.shape[0]-1
            x = x0+(y-y0)/np.tan(phi)
        point1 = np.asarray(((y0, x0), (PIX_ERR, PIX_ERR)))
        point2 = np.asarray(((y, x), (PIX_ERR, PIX_ERR)))
        metric, metric_err, line = section_profile(img, point1, point2)
        grad = gaussian_gradient_magnitude(line, sigma)
        pos = np.argmax(grad)
        Xedge[i] = x0+pos*np.cos(phi)*metric
        Yedge[i] = y0+pos*np.sin(phi)*metric
    return Xedge, Yedge
[ "scipy.ndimage.gaussian_filter1d", "numpy.argmax", "scipy.odr.Model", "numpy.empty", "numpy.asarray", "features.line_profile", "scipy.ndimage.gaussian_gradient_magnitude", "numpy.fabs", "numpy.arange", "numpy.sin", "numpy.linspace", "numpy.cos", "numpy.tan", "scipy.ndimage.map_coordinates", "numpy.sqrt" ]
[((2221, 2341), 'scipy.odr.Model', 'Model', (['circle_fcn'], {'estimate': '_circle_est', 'fjacb': '_circle_fjacb', 'fjacd': '_circle_fjacd', 'meta': '_circle_meta', 'implicit': '(True)'}), '(circle_fcn, estimate=_circle_est, fjacb=_circle_fjacb, fjacd=\n _circle_fjacd, meta=_circle_meta, implicit=True)\n', (2226, 2341), False, 'from scipy.odr import Model\n'), ((479, 506), 'numpy.arange', 'np.arange', (['phi1', 'phi2', 'dphi'], {}), '(phi1, phi2, dphi)\n', (488, 506), True, 'import numpy as np\n'), ((1184, 1199), 'numpy.argmax', 'np.argmax', (['grad'], {}), '(grad)\n', (1193, 1199), True, 'import numpy as np\n'), ((1794, 1819), 'numpy.empty', 'np.empty', (['(x.shape[0], 3)'], {}), '((x.shape[0], 3))\n', (1802, 1819), True, 'import numpy as np\n'), ((1957, 1982), 'numpy.empty', 'np.empty', (['(x.shape[0], 2)'], {}), '((x.shape[0], 2))\n', (1965, 1982), True, 'import numpy as np\n'), ((3442, 3470), 'numpy.linspace', 'np.linspace', (['x1', 'x2', 'nPoints'], {}), '(x1, x2, nPoints)\n', (3453, 3470), True, 'import numpy as np\n'), ((3479, 3507), 'numpy.linspace', 'np.linspace', (['y1', 'y2', 'nPoints'], {}), '(y1, y2, nPoints)\n', (3490, 3507), True, 'import numpy as np\n'), ((3966, 3994), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 'N'], {}), '(0, 2 * np.pi, N)\n', (3977, 3994), True, 'import numpy as np\n'), ((4124, 4135), 'numpy.empty', 'np.empty', (['N'], {}), '(N)\n', (4132, 4135), True, 'import numpy as np\n'), ((4148, 4159), 'numpy.empty', 'np.empty', (['N'], {}), '(N)\n', (4156, 4159), True, 'import numpy as np\n'), ((719, 767), 'numpy.asarray', 'np.asarray', (['((y1[i], x1[i]), (PIX_ERR, PIX_ERR))'], {}), '(((y1[i], x1[i]), (PIX_ERR, PIX_ERR)))\n', (729, 767), True, 'import numpy as np\n'), ((779, 827), 'numpy.asarray', 'np.asarray', (['((y2[i], x2[i]), (PIX_ERR, PIX_ERR))'], {}), '(((y2[i], x2[i]), (PIX_ERR, PIX_ERR)))\n', (789, 827), True, 'import numpy as np\n'), ((866, 897), 'features.line_profile', 'line_profile', (['img', 'A1[i]', 'A2[i]'], {}), '(img, A1[i], A2[i])\n', (878, 897), False, 'from features import line_profile\n'), ((1123, 1164), 'scipy.ndimage.gaussian_filter1d', 'ndimage.gaussian_filter1d', (['profile', 'sigma'], {}), '(profile, sigma)\n', (1148, 1164), False, 'from scipy import ndimage\n'), ((3116, 3180), 'numpy.sqrt', 'np.sqrt', (['(dy1 * dy1 + dy2 * dy2 + k * k * (dx1 * dx1 + dx2 * dx2))'], {}), '(dy1 * dy1 + dy2 * dy2 + k * k * (dx1 * dx1 + dx2 * dx2))\n', (3123, 3180), True, 'import numpy as np\n'), ((3168, 3184), 'numpy.fabs', 'np.fabs', (['(x2 - x1)'], {}), '(x2 - x1)\n', (3175, 3184), True, 'import numpy as np\n'), ((3593, 3603), 'numpy.fabs', 'np.fabs', (['k'], {}), '(k)\n', (3600, 3603), True, 'import numpy as np\n'), ((3626, 3644), 'numpy.sqrt', 'np.sqrt', (['(1 + k * k)'], {}), '(1 + k * k)\n', (3633, 3644), True, 'import numpy as np\n'), ((3712, 3736), 'numpy.sqrt', 'np.sqrt', (['(1 + 1 / (k * k))'], {}), '(1 + 1 / (k * k))\n', (3719, 3736), True, 'import numpy as np\n'), ((3884, 3926), 'scipy.ndimage.map_coordinates', 'map_coordinates', (['img', '[y, x]'], {'output': 'float'}), '(img, [y, x], output=float)\n', (3899, 3926), False, 'from scipy.ndimage import map_coordinates\n'), ((4188, 4214), 'numpy.linspace', 'np.linspace', (['phi1', 'phi2', 'N'], {}), '(phi1, phi2, N)\n', (4199, 4214), True, 'import numpy as np\n'), ((4655, 4697), 'numpy.asarray', 'np.asarray', (['((y0, x0), (PIX_ERR, PIX_ERR))'], {}), '(((y0, x0), (PIX_ERR, PIX_ERR)))\n', (4665, 4697), True, 'import numpy as np\n'), ((4713, 4753), 'numpy.asarray', 'np.asarray', (['((y, 
x), (PIX_ERR, PIX_ERR))'], {}), '(((y, x), (PIX_ERR, PIX_ERR)))\n', (4723, 4753), True, 'import numpy as np\n'), ((4839, 4879), 'scipy.ndimage.gaussian_gradient_magnitude', 'gaussian_gradient_magnitude', (['line', 'sigma'], {}), '(line, sigma)\n', (4866, 4879), False, 'from scipy.ndimage import gaussian_gradient_magnitude\n'), ((4893, 4908), 'numpy.argmax', 'np.argmax', (['grad'], {}), '(grad)\n', (4902, 4908), True, 'import numpy as np\n'), ((530, 541), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (536, 541), True, 'import numpy as np\n'), ((565, 576), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (571, 576), True, 'import numpy as np\n'), ((600, 611), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (606, 611), True, 'import numpy as np\n'), ((635, 646), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (641, 646), True, 'import numpy as np\n'), ((3358, 3374), 'numpy.fabs', 'np.fabs', (['(y2 - y1)'], {}), '(y2 - y1)\n', (3365, 3374), True, 'import numpy as np\n'), ((3374, 3390), 'numpy.fabs', 'np.fabs', (['(x2 - x1)'], {}), '(x2 - x1)\n', (3381, 3390), True, 'import numpy as np\n'), ((3757, 3784), 'numpy.fabs', 'np.fabs', (['(metric * k * k * k)'], {}), '(metric * k * k * k)\n', (3764, 3784), True, 'import numpy as np\n'), ((4004, 4015), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (4010, 4015), True, 'import numpy as np\n'), ((4019, 4030), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (4025, 4030), True, 'import numpy as np\n'), ((3664, 3674), 'numpy.fabs', 'np.fabs', (['k'], {}), '(k)\n', (3671, 3674), True, 'import numpy as np\n'), ((4235, 4246), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (4241, 4246), True, 'import numpy as np\n'), ((4265, 4276), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (4271, 4276), True, 'import numpy as np\n'), ((4339, 4350), 'numpy.tan', 'np.tan', (['phi'], {}), '(phi)\n', (4345, 4350), True, 'import numpy as np\n'), ((4515, 4526), 'numpy.tan', 'np.tan', (['phi'], {}), '(phi)\n', (4521, 4526), True, 'import numpy as np\n'), ((4935, 4946), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (4941, 4946), True, 'import numpy as np\n'), ((4980, 4991), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (4986, 4991), True, 'import numpy as np\n'), ((4441, 4452), 'numpy.tan', 'np.tan', (['phi'], {}), '(phi)\n', (4447, 4452), True, 'import numpy as np\n'), ((4617, 4628), 'numpy.tan', 'np.tan', (['phi'], {}), '(phi)\n', (4623, 4628), True, 'import numpy as np\n')]
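A quick synthetic check of FitCircle above: sample a noisy circle and recover (radius, x0, y0), the parameter order fixed by _circle_est.

import numpy as np

phi = np.linspace(0, 2 * np.pi, 200)
x = 3.0 + 5.0 * np.cos(phi) + np.random.normal(0, 0.05, phi.shape)
y = -1.0 + 5.0 * np.sin(phi) + np.random.normal(0, 0.05, phi.shape)
params, *rest = FitCircle(x, y)   # leastsq with full_output=1 returns extras
r_fit, x0_fit, y0_fit = params
print(r_fit, x0_fit, y0_fit)      # expect roughly (5.0, 3.0, -1.0)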
# Copyright (c) 2017-2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. # SPDX-License-Identifier: Apache-2.0 from dazl import connect from dazl.ledger import ActAs, Admin, ReadAs, User import pytest @pytest.mark.asyncio async def test_ledger_create_user(sandbox_v2) -> None: async with connect(url=sandbox_v2, admin=True) as conn: party_info = await conn.allocate_party() await conn.create_user(User("testuser1", party_info.party)) @pytest.mark.asyncio async def test_ledger_create_user_with_rights(sandbox_v2) -> None: async with connect(url=sandbox_v2, admin=True) as conn: party_info = await conn.allocate_party() await conn.create_user( User("testuser2", party_info.party), [ActAs(party_info.party), ReadAs(party_info.party), Admin], )
[ "dazl.ledger.ActAs", "dazl.connect", "dazl.ledger.ReadAs", "dazl.ledger.User" ]
[((324, 359), 'dazl.connect', 'connect', ([], {'url': 'sandbox_v2', 'admin': '(True)'}), '(url=sandbox_v2, admin=True)\n', (331, 359), False, 'from dazl import connect\n'), ((591, 626), 'dazl.connect', 'connect', ([], {'url': 'sandbox_v2', 'admin': '(True)'}), '(url=sandbox_v2, admin=True)\n', (598, 626), False, 'from dazl import connect\n'), ((449, 484), 'dazl.ledger.User', 'User', (['"""testuser1"""', 'party_info.party'], {}), "('testuser1', party_info.party)\n", (453, 484), False, 'from dazl.ledger import ActAs, Admin, ReadAs, User\n'), ((729, 764), 'dazl.ledger.User', 'User', (['"""testuser2"""', 'party_info.party'], {}), "('testuser2', party_info.party)\n", (733, 764), False, 'from dazl.ledger import ActAs, Admin, ReadAs, User\n'), ((779, 802), 'dazl.ledger.ActAs', 'ActAs', (['party_info.party'], {}), '(party_info.party)\n', (784, 802), False, 'from dazl.ledger import ActAs, Admin, ReadAs, User\n'), ((804, 828), 'dazl.ledger.ReadAs', 'ReadAs', (['party_info.party'], {}), '(party_info.party)\n', (810, 828), False, 'from dazl.ledger import ActAs, Admin, ReadAs, User\n')]
# shazi.py
from threading import Thread
import time
import traceback

from pydub import AudioSegment
from ShazamAPI import Shazam


def shazam(mp3path, outDict=None, checkFull=False):
    '''
    Non-blocking function to get title, artist, and other shazam data from a file
    '''
    if outDict is None:
        outDict = {"out": None}
    sT = Thread(target=shazamAsync, args=[[mp3path, outDict, checkFull]])
    sT.start()
    return outDict


def shazamAsync(data, round=0):
    print('''%%%%%%%%%%% SHAZAMMING %%%%%%%%%%%''')
    t = time.time()
    try:
        mp3path, outDict, checkFull = data
        if checkFull:
            mp3_file_content_to_recognize = open(mp3path, 'rb').read()
        else:
            audio = AudioSegment.from_mp3(mp3path)
            mp3_file_content_to_recognize = audio.export(format="mp3").read()
            start = 0
            seconds = 1.2
            length = len(audio)
            if length > 0:
                # cap the clip length at the available audio
                seconds = seconds if length > seconds else length / 1000
            mp3_file_content_to_recognize = mp3_file_content_to_recognize[start*60*1000:int((start+seconds)*60*1000)]
        outDict["out"] = next(Shazam(mp3_file_content_to_recognize).recognizeSong())
        if outDict is not None:
            firstRes = None
            try:
                firstRes = outDict["out"][1]["track"]
                print(firstRes)
            except Exception:
                print("EEEEE SHAZAM COULD NOT FIND SONG")
                traceback.print_exc()
            if firstRes is not None and "title" in firstRes and "subtitle" in firstRes:
                outDict["title"] = firstRes["title"]
                outDict["artist"] = firstRes["subtitle"]
                print(outDict["title"] + " - " + outDict["artist"])
        print('''%%%%%%%%%%% DONE! %%%%%%%%%%%''', "time", time.time()-t)
    except Exception:
        # current offset & shazam response to recognize requests
        traceback.print_exc()
[ "threading.Thread", "pydub.AudioSegment.from_mp3", "traceback.print_exc", "time.time", "ShazamAPI.Shazam" ]
[((349, 413), 'threading.Thread', 'Thread', ([], {'target': 'shazamAsync', 'args': '[[mp3path, outDict, checkFull]]'}), '(target=shazamAsync, args=[[mp3path, outDict, checkFull]])\n', (355, 413), False, 'from threading import Thread\n'), ((628, 639), 'time.time', 'time.time', ([], {}), '()\n', (637, 639), False, 'import time\n'), ((781, 811), 'pydub.AudioSegment.from_mp3', 'AudioSegment.from_mp3', (['mp3path'], {}), '(mp3path)\n', (802, 811), False, 'from pydub import AudioSegment\n'), ((1990, 2011), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2009, 2011), False, 'import traceback\n'), ((1851, 1862), 'time.time', 'time.time', ([], {}), '()\n', (1860, 1862), False, 'import time\n'), ((1224, 1261), 'ShazamAPI.Shazam', 'Shazam', (['mp3_file_content_to_recognize'], {}), '(mp3_file_content_to_recognize)\n', (1230, 1261), False, 'from ShazamAPI import Shazam\n'), ((1551, 1572), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1570, 1572), False, 'import traceback\n')]
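A hedged usage sketch of the non-blocking wrapper above; the mp3 path is a placeholder, and a fixed sleep stands in for real polling of the shared dict.

result = shazam('some_clip.mp3')   # returns immediately; the worker thread fills it in
time.sleep(10)                     # crude wait while the Shazam lookup completes
print(result.get('title'), '-', result.get('artist'))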
import os import pickle import pandas as pd import matplotlib.pyplot as plt from datetime import datetime from datetime import timedelta from sklearn import preprocessing, svm from lassofeatsel import Lasso_wrapper, edit_features ##################### # Wrapper Function ##################### def model_exploration(df, obj): """This function is the wrapper function of changing time slices for training, validation, and testing sets. It will perform lasso on the training data, allow features to be edited, build a model, and test the model. Then it will ask if the user would like to explore different time slices - this is useful in finding the optimum amount of data necessary to build an adequate model. This takes the entire dataframe (df) and the sensor to build a model for (obj)""" see_another_set = 'y' while see_another_set == 'y': # this while loop is so we don't have to load and reclean etc every # time we want to see a different timeslice of the data train_months_start = input('Input the start date of training data: ') train_months_end = input('Input the end date of training data: ') val_months_start = input('Input the start date of validation data: ') val_months_end = input('Input the end date of validation data: ') train = df[train_months_start: train_months_end] # Training dataframe val_set = df[val_months_start: val_months_end] # Testing (Validation set) feat_mo_og = Lasso_wrapper(val_set, train, obj, 0.1) # get features from lasso, with an initial alpha value of 0.1 # this alpha can be changed by the user during the lasso_wrapper # function features = edit_features(feat_mo_og, train) # this allows the user to change features that don't make sense # df_val and df_test might have some NaN values in them for the # features selected by LASSO- clean those out # val_set = val_set.dropna(subset = features) df_val, savepickleas = build_model(train, val_set, obj, features) # (ability to catch out of calibration) # plot the train, validation: fig2 = plt.figure(figsize=(20, 10), facecolor='w', edgecolor='k') plt.subplot(211) myplot2 = plt.scatter( df_val.index, df_val[obj], color='red', label='val data-actual') plt.scatter( df_val.index, df_val.Predicted, color='blue', label='val data-model', alpha=0.5) plt.scatter(train.index, train[obj], color='green', label='train data') plt.ylabel(obj, fontsize=16) plt.xlabel('Index', fontsize=16) plt.title('Training, Validation, and Test Model of ' + obj, fontsize=28) plt.legend(fontsize=16) plt.xlim() # plot the absolute error between the model and the test data # this is the metric that would be used to "raise an alarm" if sensor # begins to drift allow_error = input( 'Please input the allowable error in ' + 'this sensor (|predicted - actual|): ') # this allows the user to set the amount of drift that is acceptable # before an alarm should be raised plt.subplot(212) myplot3 = plt.plot( df_val.index, df_val['Absolute Error'], color='green') plt.axhline(y=int(allow_error), color='red', linestyle='dashed', label='Allowable Error') plt.ylabel('Absolute Error (sensor dependent unit)', fontsize=16) plt.xlabel('Index', fontsize=16) plt.legend(fontsize=16) plt.show() test_yn = input( 'Would you like to test the model on the month ' + 'subsequent to the validation data? 
If that data' + ' is not available in the folder, answer "n" (y/n): ') if test_yn == 'n': None else: test_initial_start = val_set.index[-1] + timedelta(hours=1) test_initial_end = val_set.index[-1] + timedelta(days=30) # want the first set of testing data to be after the # set validation date range # subsequent test sets will be after the training data df_test = retest_model( savepickleas, features, df, obj, test_initial_start, test_initial_end) # this is testing the model on the test dates - using the # test_initial_start and the test_initial_end # then we plot the test,train, and validation dataframes: plt.figure(figsize=(20, 10), facecolor='w', edgecolor='k') plt.subplot(211) myplot2 = plt.scatter( df_val.index, df_val[obj], color='red', label='val data-actual') plt.scatter( df_val.index, df_val.Predicted, color='blue', label='val data-model', alpha=0.5) plt.scatter( df_test.index, df_test[obj], color='purple', label='test data-actual', alpha=0.5) plt.scatter( df_test.index, df_test.Predicted, color='yellow', label='test data-model', alpha=0.5) plt.scatter( train.index, train[obj], color='green', label='train data', alpha=0.5) plt.ylabel(obj, fontsize=16) plt.xlabel('Index', fontsize=16) plt.title('Training, Validation, and Test Model of ' + obj, fontsize=28) plt.legend(fontsize=16) plt.xlim() plt.subplot(212) myplot3 = plt.plot( df_test.index, df_test['Absolute Error'], color='green') plt.axhline(y=int(allow_error), color='red', linestyle='dashed', label='Allowable Error') plt.ylabel('Absolute Error (sensor dependent unit)', fontsize=16) plt.xlabel('Index', fontsize=16) plt.legend(fontsize=16) plt.show() y_n = input( 'Would you like to remove the out-of-calibration data from ' + 'the training set, re-train, and predict the ' + 'following month? (y/n):') # if the answer is 'y', this while loop starts, removing data. while y_n == 'y': df_train_raw = pd.concat([train, df_test]) df_test = df_test[df_test['Absolute Error'] < int(allow_error)] # adding the df_test section where the sensor error is below # the allowable error add_train = df[df.index.isin(df_test.index)] train = pd.concat([train, add_train]) # adding the "in calibration" data to the training dataframe plt.figure(figsize=(20, 4), facecolor='w', edgecolor='k') plt.scatter( train.index, train[obj], color='green', label='train data') plt.show() y_n2 = input( 'Is there a date range you would like to add ' + 'back in? (y/n): ') # this allows the user to add back in any date ranges # that were removed because they were above the # allowable sensor error. # this could probably be streamlined to have the date # ranges not removed before the user gives input, # since it's easier to see if you want to keep any # ranges while you can see them, before they # are removed. while y_n2 == 'y': start = input('Input the start date: ') end = input('Input the end date: ') add_train2 = df[start:end] train = pd.concat([train, add_train2]) train = train.sort_index() plt.figure(figsize=(20, 4), facecolor='w', edgecolor='k') plt.scatter( train.index, train[obj], color='green', label='train data') plt.show() y_n2 = input('Another date range? 
(y/n): ') if y_n2 == 'n': pass elif y_n2 != 'y' or 'n': break # now we are setting the new test set to thirty days # after the training set test_nmodel_start = df_train_raw.index[-1] + timedelta(hours=1) test_nmodel_end = df_train_raw.index[-1] + timedelta(days=30) # leave val set as the same one inputted at first feat_mo_og = Lasso_wrapper(val_set, train, obj, 0.1) # get the features from LASSO features = edit_features(feat_mo_og, train) # give the user the option to edit those features from LASSO df_val, savepickleas = build_model( train, val_set, obj, features) # building the model based off of the training data and those # edited features df_test = retest_model( savepickleas, features, df, obj, test_nmodel_start, test_nmodel_end) # this is testing the model on the test data # set bound by test_nmodel_start # and test_nmodel_end # now we plot the train and test data sets plt.figure(figsize=(20, 10), facecolor='w', edgecolor='k') plt.subplot(211) myplot2 = plt.scatter( df_val.index, df_val[obj], color='red', label='val data-actual') plt.scatter( df_val.index, df_val.Predicted, color='blue', label='val data-model', alpha=0.5) plt.scatter( df_test.index, df_test[obj], color='purple', label='test data-actual', alpha=0.5) plt.scatter( df_test.index, df_test.Predicted, color='yellow', label='test data-model', alpha=0.5) plt.scatter( train.index, train[obj], color='green', label='train data', alpha=0.5) plt.ylabel(obj, fontsize=16) plt.xlabel('Index', fontsize=16) plt.title('Training and Testing Model of ' + obj, fontsize=28) plt.legend(fontsize=16) plt.xlim() plt.subplot(212) myplot3 = plt.plot( df_test.index, df_test['Absolute Error'], color='green') plt.axhline( y=int(allow_error), color='red', linestyle='dashed', label='Allowable Error') plt.ylabel( 'Absolute Error (sensor dependent unit)', fontsize=16) plt.xlabel('Index', fontsize=16) plt.legend(fontsize=16) plt.show() # asking if we would like to repeat, adding on another month # of training data and retesting on the next month. # can only do this if there is enough data in the # given data folder. y_n = input('Would you like to repeat? (y/n):') if y_n == 'n': pass # this is if you want to change where the initial # training and validation # is - the second and third questions that pop up when the code is ran. see_another_set = input( 'Would you like to see another set of ' + 'training/validation/testing data? (y/n): ') ##################### # Component Functions ##################### def build_model(train, val_set, obj, features): """This function takes a train and validation set (train, val_set), which are both data frames, builds an SVR model for the sensor of interest (obj - a string) using the given features (features - a list of strings) and pickles it. This returns the validation dataframe with the errors and the filename the model was pickled as.""" val_set = val_set.dropna(subset=features) train = train.dropna(subset=features) # set the train and val y values - which is the thing # we are trying to predict. train_y = train[obj] val_y = val_set[obj] # the train and val _x are the features used to predict # the _y train_x = train[features] val_x = val_set[features] # have to normalize the features by l1 train_x_scaled = preprocessing.normalize(train_x, norm='l1') val_x_scaled = preprocessing.normalize(val_x, norm='l1') # gather the filname to save the pickled model as, so # it can be reloaded and referenced later. 
savepickleas = input( 'Input the model name to save this as (example.sav): ') filenamesaveas = 'svr_model' + savepickleas # Change path to save sav files os.chdir(os.path.abspath(os.path.join(os.getcwd(), '..'))) os.chdir(os.getcwd() + '/saved_models') # checks to see if the savepickle as file already exists or not # and asks if we should overwrite it if it does - or gives the # user the option to use a different .sav filename. if os.path.isfile(savepickleas): print('There is already a model for this!') rewrite = input('Would you like to overwrite the file? (y/n): ') if rewrite == 'y': # this is where the linear SVR model for the # sensor (train_y) is being built based off of the # features (train_x) lin_svr = svm.LinearSVR().fit(train_x, train_y) # then we can use that lin_svr to predict the # train and val sets based off of the scaled features trainpred = lin_svr.predict(train_x_scaled) valpred = lin_svr.predict(val_x_scaled) filename = filenamesaveas # then we pickle the model: pickle.dump(lin_svr, open(savepickleas, 'wb')) else: # this is the same as above - just would be a different # filename savepickleas_new = input( 'Input a different name to save this as (example.sav): ') filenamesaveas_new = 'svr_model' + savepickleas_new lin_svr = svm.LinearSVR().fit(train_x, train_y) trainpred = lin_svr.predict(train_x_scaled) valpred = lin_svr.predict(val_x_scaled) filename = filenamesaveas_new pickle.dump(lin_svr, open(savepickleas_new, 'wb')) # this could be changed to overwrite the file else: # this is the same as above - just ran when there # is no previous file with the same name. lin_svr = svm.LinearSVR().fit(train_x, train_y) trainpred = lin_svr.predict(train_x_scaled) valpred = lin_svr.predict(val_x_scaled) filename = filenamesaveas pickle.dump(lin_svr, open(savepickleas, 'wb')) # Should be reducing the number of things we need to type in. # If only focusing on continuous real-time training, the # model will never be reused anyway. # Calls the pickled model loaded_model = pickle.load(open(savepickleas, 'rb')) predict = loaded_model.predict(val_x) # predicting the validation set. result = loaded_model.score(val_x, val_y) # the model score is an R^2 value. print('the model score is: ' + str(result)) df_val = pd.DataFrame(val_y) df_val['Predicted'] = predict df_val['Error'] = (abs(df_val['Predicted'] - df_val[obj]) ) / abs(df_val[obj]) df_val['Absolute Error'] = abs(df_val['Predicted'] - df_val[obj]) print('the mean absolute error is: ' + str(df_val['Absolute Error'].mean())) return df_val, savepickleas def retest_model( savepickleas, features, df, obj, test_model_start, test_model_end): """This function tests the model for the sensor of interests (obj) on data that may or may not be calibrated, in the date range constrained by test_model_start and test_model_end (both strings) in the dataframe loaded (df) Use this function to see if the model retains the accurate levels when the sensor begins to drift. Features is a list of strings of the model features, savepickleas is the .sav filename where the model is saved. 
This function returns the df_test dataframe with calculated absolute errors.""" df_test = df[test_model_start: test_model_end] # Need to clean out of dataframe sets that have nan values # in the features df_test = df_test.dropna(subset=features) test_y = df_test[obj] test_x = df_test[features] loaded_model = pickle.load(open(savepickleas, 'rb')) # load the pickled model predict = loaded_model.predict(test_x) # use that loaded model to predict based off of the features # in the test set. df_test = pd.DataFrame(test_y) df_test['Predicted'] = predict df_test['Error'] = ( abs(df_test['Predicted'] - df_test[obj])) / abs(df_test[obj]) df_test['Absolute Error'] = abs(df_test['Predicted'] - df_test[obj]) # calculate the absolute error. return df_test
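# --- Illustrative sketch (not part of the original script; `df`, `train`,
# `features`, `obj` and `allow_error` are the variables defined above) ---
# The interactive retraining loop above boils down to: flag test rows whose
# absolute error exceeds the allowable sensor error, fold the in-calibration
# rows back into the training set, and refit on the next window, e.g.:
#   in_cal = df_test[df_test['Absolute Error'] < int(allow_error)]
#   train = pd.concat([train, df[df.index.isin(in_cal.index)]])
#   df_val, savepickleas = build_model(train, val_set, obj, features)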
[ "matplotlib.pyplot.title", "os.path.isfile", "matplotlib.pyplot.figure", "pandas.DataFrame", "datetime.timedelta", "pandas.concat", "sklearn.svm.LinearSVR", "matplotlib.pyplot.show", "matplotlib.pyplot.legend", "sklearn.preprocessing.normalize", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.subplot", "matplotlib.pyplot.xlim", "matplotlib.pyplot.plot", "os.getcwd", "matplotlib.pyplot.scatter", "lassofeatsel.Lasso_wrapper", "lassofeatsel.edit_features", "matplotlib.pyplot.xlabel" ]
[((13735, 13778), 'sklearn.preprocessing.normalize', 'preprocessing.normalize', (['train_x'], {'norm': '"""l1"""'}), "(train_x, norm='l1')\n", (13758, 13778), False, 'from sklearn import preprocessing, svm\n'), ((13798, 13839), 'sklearn.preprocessing.normalize', 'preprocessing.normalize', (['val_x'], {'norm': '"""l1"""'}), "(val_x, norm='l1')\n", (13821, 13839), False, 'from sklearn import preprocessing, svm\n'), ((14426, 14454), 'os.path.isfile', 'os.path.isfile', (['savepickleas'], {}), '(savepickleas)\n', (14440, 14454), False, 'import os\n'), ((16632, 16651), 'pandas.DataFrame', 'pd.DataFrame', (['val_y'], {}), '(val_y)\n', (16644, 16651), True, 'import pandas as pd\n'), ((18150, 18170), 'pandas.DataFrame', 'pd.DataFrame', (['test_y'], {}), '(test_y)\n', (18162, 18170), True, 'import pandas as pd\n'), ((1526, 1565), 'lassofeatsel.Lasso_wrapper', 'Lasso_wrapper', (['val_set', 'train', 'obj', '(0.1)'], {}), '(val_set, train, obj, 0.1)\n', (1539, 1565), False, 'from lassofeatsel import Lasso_wrapper, edit_features\n'), ((1747, 1779), 'lassofeatsel.edit_features', 'edit_features', (['feat_mo_og', 'train'], {}), '(feat_mo_og, train)\n', (1760, 1779), False, 'from lassofeatsel import Lasso_wrapper, edit_features\n'), ((2209, 2267), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(figsize=(20, 10), facecolor='w', edgecolor='k')\n", (2219, 2267), True, 'import matplotlib.pyplot as plt\n'), ((2276, 2292), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (2287, 2292), True, 'import matplotlib.pyplot as plt\n'), ((2311, 2387), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val[obj]'], {'color': '"""red"""', 'label': '"""val data-actual"""'}), "(df_val.index, df_val[obj], color='red', label='val data-actual')\n", (2322, 2387), True, 'import matplotlib.pyplot as plt\n'), ((2445, 2542), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val.Predicted'], {'color': '"""blue"""', 'label': '"""val data-model"""', 'alpha': '(0.5)'}), "(df_val.index, df_val.Predicted, color='blue', label=\n 'val data-model', alpha=0.5)\n", (2456, 2542), True, 'import matplotlib.pyplot as plt\n'), ((2607, 2678), 'matplotlib.pyplot.scatter', 'plt.scatter', (['train.index', 'train[obj]'], {'color': '"""green"""', 'label': '"""train data"""'}), "(train.index, train[obj], color='green', label='train data')\n", (2618, 2678), True, 'import matplotlib.pyplot as plt\n'), ((2687, 2715), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['obj'], {'fontsize': '(16)'}), '(obj, fontsize=16)\n', (2697, 2715), True, 'import matplotlib.pyplot as plt\n'), ((2724, 2756), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (2734, 2756), True, 'import matplotlib.pyplot as plt\n'), ((2765, 2837), 'matplotlib.pyplot.title', 'plt.title', (["('Training, Validation, and Test Model of ' + obj)"], {'fontsize': '(28)'}), "('Training, Validation, and Test Model of ' + obj, fontsize=28)\n", (2774, 2837), True, 'import matplotlib.pyplot as plt\n'), ((2864, 2887), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (2874, 2887), True, 'import matplotlib.pyplot as plt\n'), ((2896, 2906), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (2904, 2906), True, 'import matplotlib.pyplot as plt\n'), ((3344, 3360), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (3355, 3360), True, 'import matplotlib.pyplot as 
plt\n'), ((3379, 3442), 'matplotlib.pyplot.plot', 'plt.plot', (['df_val.index', "df_val['Absolute Error']"], {'color': '"""green"""'}), "(df_val.index, df_val['Absolute Error'], color='green')\n", (3387, 3442), True, 'import matplotlib.pyplot as plt\n'), ((3606, 3671), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Absolute Error (sensor dependent unit)"""'], {'fontsize': '(16)'}), "('Absolute Error (sensor dependent unit)', fontsize=16)\n", (3616, 3671), True, 'import matplotlib.pyplot as plt\n'), ((3680, 3712), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (3690, 3712), True, 'import matplotlib.pyplot as plt\n'), ((3721, 3744), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (3731, 3744), True, 'import matplotlib.pyplot as plt\n'), ((3753, 3763), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3761, 3763), True, 'import matplotlib.pyplot as plt\n'), ((4758, 4816), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(figsize=(20, 10), facecolor='w', edgecolor='k')\n", (4768, 4816), True, 'import matplotlib.pyplot as plt\n'), ((4829, 4845), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (4840, 4845), True, 'import matplotlib.pyplot as plt\n'), ((4868, 4944), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val[obj]'], {'color': '"""red"""', 'label': '"""val data-actual"""'}), "(df_val.index, df_val[obj], color='red', label='val data-actual')\n", (4879, 4944), True, 'import matplotlib.pyplot as plt\n'), ((5022, 5119), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val.Predicted'], {'color': '"""blue"""', 'label': '"""val data-model"""', 'alpha': '(0.5)'}), "(df_val.index, df_val.Predicted, color='blue', label=\n 'val data-model', alpha=0.5)\n", (5033, 5119), True, 'import matplotlib.pyplot as plt\n'), ((5208, 5306), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_test.index', 'df_test[obj]'], {'color': '"""purple"""', 'label': '"""test data-actual"""', 'alpha': '(0.5)'}), "(df_test.index, df_test[obj], color='purple', label=\n 'test data-actual', alpha=0.5)\n", (5219, 5306), True, 'import matplotlib.pyplot as plt\n'), ((5395, 5497), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_test.index', 'df_test.Predicted'], {'color': '"""yellow"""', 'label': '"""test data-model"""', 'alpha': '(0.5)'}), "(df_test.index, df_test.Predicted, color='yellow', label=\n 'test data-model', alpha=0.5)\n", (5406, 5497), True, 'import matplotlib.pyplot as plt\n'), ((5586, 5672), 'matplotlib.pyplot.scatter', 'plt.scatter', (['train.index', 'train[obj]'], {'color': '"""green"""', 'label': '"""train data"""', 'alpha': '(0.5)'}), "(train.index, train[obj], color='green', label='train data',\n alpha=0.5)\n", (5597, 5672), True, 'import matplotlib.pyplot as plt\n'), ((5762, 5790), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['obj'], {'fontsize': '(16)'}), '(obj, fontsize=16)\n', (5772, 5790), True, 'import matplotlib.pyplot as plt\n'), ((5803, 5835), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (5813, 5835), True, 'import matplotlib.pyplot as plt\n'), ((5848, 5920), 'matplotlib.pyplot.title', 'plt.title', (["('Training, Validation, and Test Model of ' + obj)"], {'fontsize': '(28)'}), "('Training, Validation, and Test Model of ' + obj, fontsize=28)\n", (5857, 5920), True, 'import matplotlib.pyplot as plt\n'), 
((5955, 5978), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (5965, 5978), True, 'import matplotlib.pyplot as plt\n'), ((5991, 6001), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (5999, 6001), True, 'import matplotlib.pyplot as plt\n'), ((6014, 6030), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (6025, 6030), True, 'import matplotlib.pyplot as plt\n'), ((6053, 6118), 'matplotlib.pyplot.plot', 'plt.plot', (['df_test.index', "df_test['Absolute Error']"], {'color': '"""green"""'}), "(df_test.index, df_test['Absolute Error'], color='green')\n", (6061, 6118), True, 'import matplotlib.pyplot as plt\n'), ((6306, 6371), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Absolute Error (sensor dependent unit)"""'], {'fontsize': '(16)'}), "('Absolute Error (sensor dependent unit)', fontsize=16)\n", (6316, 6371), True, 'import matplotlib.pyplot as plt\n'), ((6384, 6416), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (6394, 6416), True, 'import matplotlib.pyplot as plt\n'), ((6429, 6452), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (6439, 6452), True, 'import matplotlib.pyplot as plt\n'), ((6465, 6475), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6473, 6475), True, 'import matplotlib.pyplot as plt\n'), ((14197, 14208), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (14206, 14208), False, 'import os\n'), ((4095, 4113), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (4104, 4113), False, 'from datetime import timedelta\n'), ((4165, 4183), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (4174, 4183), False, 'from datetime import timedelta\n'), ((6825, 6852), 'pandas.concat', 'pd.concat', (['[train, df_test]'], {}), '([train, df_test])\n', (6834, 6852), True, 'import pandas as pd\n'), ((7133, 7162), 'pandas.concat', 'pd.concat', (['[train, add_train]'], {}), '([train, add_train])\n', (7142, 7162), True, 'import pandas as pd\n'), ((7257, 7314), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 4)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(figsize=(20, 4), facecolor='w', edgecolor='k')\n", (7267, 7314), True, 'import matplotlib.pyplot as plt\n'), ((7331, 7402), 'matplotlib.pyplot.scatter', 'plt.scatter', (['train.index', 'train[obj]'], {'color': '"""green"""', 'label': '"""train data"""'}), "(train.index, train[obj], color='green', label='train data')\n", (7342, 7402), True, 'import matplotlib.pyplot as plt\n'), ((7500, 7510), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7508, 7510), True, 'import matplotlib.pyplot as plt\n'), ((9277, 9316), 'lassofeatsel.Lasso_wrapper', 'Lasso_wrapper', (['val_set', 'train', 'obj', '(0.1)'], {}), '(val_set, train, obj, 0.1)\n', (9290, 9316), False, 'from lassofeatsel import Lasso_wrapper, edit_features\n'), ((9390, 9422), 'lassofeatsel.edit_features', 'edit_features', (['feat_mo_og', 'train'], {}), '(feat_mo_og, train)\n', (9403, 9422), False, 'from lassofeatsel import Lasso_wrapper, edit_features\n'), ((10168, 10226), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(figsize=(20, 10), facecolor='w', edgecolor='k')\n", (10178, 10226), True, 'import matplotlib.pyplot as plt\n'), ((10243, 10259), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (10254, 10259), True, 'import matplotlib.pyplot as 
plt\n'), ((10286, 10362), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val[obj]'], {'color': '"""red"""', 'label': '"""val data-actual"""'}), "(df_val.index, df_val[obj], color='red', label='val data-actual')\n", (10297, 10362), True, 'import matplotlib.pyplot as plt\n'), ((10460, 10557), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_val.index', 'df_val.Predicted'], {'color': '"""blue"""', 'label': '"""val data-model"""', 'alpha': '(0.5)'}), "(df_val.index, df_val.Predicted, color='blue', label=\n 'val data-model', alpha=0.5)\n", (10471, 10557), True, 'import matplotlib.pyplot as plt\n'), ((10670, 10768), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_test.index', 'df_test[obj]'], {'color': '"""purple"""', 'label': '"""test data-actual"""', 'alpha': '(0.5)'}), "(df_test.index, df_test[obj], color='purple', label=\n 'test data-actual', alpha=0.5)\n", (10681, 10768), True, 'import matplotlib.pyplot as plt\n'), ((10881, 10983), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df_test.index', 'df_test.Predicted'], {'color': '"""yellow"""', 'label': '"""test data-model"""', 'alpha': '(0.5)'}), "(df_test.index, df_test.Predicted, color='yellow', label=\n 'test data-model', alpha=0.5)\n", (10892, 10983), True, 'import matplotlib.pyplot as plt\n'), ((11096, 11182), 'matplotlib.pyplot.scatter', 'plt.scatter', (['train.index', 'train[obj]'], {'color': '"""green"""', 'label': '"""train data"""', 'alpha': '(0.5)'}), "(train.index, train[obj], color='green', label='train data',\n alpha=0.5)\n", (11107, 11182), True, 'import matplotlib.pyplot as plt\n'), ((11296, 11324), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['obj'], {'fontsize': '(16)'}), '(obj, fontsize=16)\n', (11306, 11324), True, 'import matplotlib.pyplot as plt\n'), ((11341, 11373), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (11351, 11373), True, 'import matplotlib.pyplot as plt\n'), ((11390, 11452), 'matplotlib.pyplot.title', 'plt.title', (["('Training and Testing Model of ' + obj)"], {'fontsize': '(28)'}), "('Training and Testing Model of ' + obj, fontsize=28)\n", (11399, 11452), True, 'import matplotlib.pyplot as plt\n'), ((11495, 11518), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (11505, 11518), True, 'import matplotlib.pyplot as plt\n'), ((11535, 11545), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (11543, 11545), True, 'import matplotlib.pyplot as plt\n'), ((11562, 11578), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (11573, 11578), True, 'import matplotlib.pyplot as plt\n'), ((11605, 11670), 'matplotlib.pyplot.plot', 'plt.plot', (['df_test.index', "df_test['Absolute Error']"], {'color': '"""green"""'}), "(df_test.index, df_test['Absolute Error'], color='green')\n", (11613, 11670), True, 'import matplotlib.pyplot as plt\n'), ((11935, 12000), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Absolute Error (sensor dependent unit)"""'], {'fontsize': '(16)'}), "('Absolute Error (sensor dependent unit)', fontsize=16)\n", (11945, 12000), True, 'import matplotlib.pyplot as plt\n'), ((12058, 12090), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {'fontsize': '(16)'}), "('Index', fontsize=16)\n", (12068, 12090), True, 'import matplotlib.pyplot as plt\n'), ((12107, 12130), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(16)'}), '(fontsize=16)\n', (12117, 12130), True, 'import matplotlib.pyplot as plt\n'), ((12147, 12157), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (12155, 12157), True, 'import matplotlib.pyplot as plt\n'), ((14163, 14174), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (14172, 14174), False, 'import os\n'), ((15934, 15949), 'sklearn.svm.LinearSVR', 'svm.LinearSVR', ([], {}), '()\n', (15947, 15949), False, 'from sklearn import preprocessing, svm\n'), ((8347, 8377), 'pandas.concat', 'pd.concat', (['[train, add_train2]'], {}), '([train, add_train2])\n', (8356, 8377), True, 'import pandas as pd\n'), ((8445, 8502), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 4)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(figsize=(20, 4), facecolor='w', edgecolor='k')\n", (8455, 8502), True, 'import matplotlib.pyplot as plt\n'), ((8523, 8594), 'matplotlib.pyplot.scatter', 'plt.scatter', (['train.index', 'train[obj]'], {'color': '"""green"""', 'label': '"""train data"""'}), "(train.index, train[obj], color='green', label='train data')\n", (8534, 8594), True, 'import matplotlib.pyplot as plt\n'), ((8712, 8722), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8720, 8722), True, 'import matplotlib.pyplot as plt\n'), ((9083, 9101), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (9092, 9101), False, 'from datetime import timedelta\n'), ((9161, 9179), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (9170, 9179), False, 'from datetime import timedelta\n'), ((14783, 14798), 'sklearn.svm.LinearSVR', 'svm.LinearSVR', ([], {}), '()\n', (14796, 14798), False, 'from sklearn import preprocessing, svm\n'), ((15493, 15508), 'sklearn.svm.LinearSVR', 'svm.LinearSVR', ([], {}), '()\n', (15506, 15508), False, 'from sklearn import preprocessing, svm\n')]
import os
import sys
import copy
import enum
import time
import uuid
import argparse
import subprocess

from dataclasses import dataclass, field
from typing import List, Dict, Set, Tuple


class PlinkWrapper:
    class InputType(enum.Enum):
        BED = 1
        PED = 2
        VCF = 3

        def get_plink_flag(self):
            if self.value == 1:
                return '--bfile'
            elif self.value == 2:
                return '--file'
            elif self.value == 3:
                return '--vcf'

    def __init__(self, args, uid=None):
        self.args = args
        # the original default, `uuid=str(uuid.uuid4())`, was evaluated
        # once at definition time, so every instance shared the same id;
        # generate one per instance instead
        self.uuid = uid if uid is not None else str(uuid.uuid4())
        # Not currently used, can be used for temp files
        self.input_set = False
        self._validate_input_path()

    def _validate_input_path(self):
        def check_input_init():
            if self.input_set:
                print("Maximum of 1 input allowed, found input:")
                print("{}: {}".format(self.input_type, self.input_str))
                print("exiting...")
                sys.exit()

        if self.args.bed is not None:
            check_input_init()
            self.input_type = self.InputType.BED
            self.input_str = self.args.bed
            self.input_set = True
        elif self.args.ped is not None:
            check_input_init()
            self.input_type = self.InputType.PED
            self.input_str = self.args.ped
            self.input_set = True
        elif self.args.vcf is not None:
            check_input_init()
            self.input_type = self.InputType.VCF
            self.input_str = self.args.vcf
            self.input_set = True
        else:
            if not self.input_set:
                print("No valid input file path has been given")
                print("exiting...")
                sys.exit()

    def run(self, options):
        plink_options = [
            self.args.plink_binary,
            self.input_type.get_plink_flag(),
            self.input_str,
        ]
        plink_options += options
        subprocess.run(plink_options, capture_output=True)


def inout(f):
    def in_out(*args, **kw):
        start = time.time()
        a = '...'
        print(f"Entering {f.__name__}{a:25}", end='')
        res = f(*args, **kw)
        end = time.time()
        print(f"Exiting. (Finished in {end-start:2.4} seconds)")
        return res
    return in_out


class Sex(enum.Enum):
    UNKNOWN = 0
    MALE = 1
    FEMALE = 2


@dataclass
class MerlinRecord:
    fid: str
    iid: int
    mid: int
    pid: int
    sex: Sex
    genotypes: List[str]

    def as_string(self):
        line = f"{self.fid}\t{self.iid}\t{self.mid}\t{self.pid}\t{self.sex.value}\t"
        for i in range(0, len(self.genotypes) - 2, 2):
            line += f"{self.genotypes[i]}/{self.genotypes[i+1]}\t"
        line += f"{self.genotypes[-2]}/{self.genotypes[-1]}\n"
        return line


@dataclass
class Individual:
    iid: int
    fid: str
    mid: int
    pid: int
    sex: Sex

    def __eq__(self, obj):
        iid_eq = self.iid == obj.iid
        fid_eq = self.fid == obj.fid
        return iid_eq and fid_eq

    def __hash__(self):
        return hash((self.iid, self.fid))


@dataclass
class Variant:
    chrom: int
    rsid: str
    pos: float
    bp: int

    def __eq__(self, obj):
        return self.rsid == obj.rsid  # sketchy

    def __hash__(self):
        return hash(self.rsid)


def get_individuals_from_fam(filename: str) -> Set[Individual]:
    individual_list = []
    with open(filename, 'r') as f:
        for line in f:
            fid, iid, pid, mid, sex, _ = line.split()
            indiv = Individual(int(iid), fid, int(mid), int(pid), Sex(int(sex)))
            individual_list.append(indiv)
    return set(individual_list)


def add_missing_individuals(indivs: Set[Individual]):
    updated_indivs = []
    for indiv in indivs:
        if indiv.mid == 0 and indiv.pid == 0:
            updated_indivs.append(indiv)
            continue
        tmp_mother = Individual(indiv.mid, indiv.fid, 0, 0, Sex(2))
        tmp_father = Individual(indiv.pid, indiv.fid, 0, 0, Sex(1))
        if tmp_mother not in indivs:
            updated_indivs.append(tmp_mother)
        if tmp_father not in indivs:
            updated_indivs.append(tmp_father)
        updated_indivs.append(indiv)
    return set(updated_indivs)


def generate_family_map(individuals: Set[Individual]) -> Dict[str, List[Individual]]:
    fam_map = {}
    for indiv in individuals:
        if indiv.fid not in fam_map.keys():
            fam_map[indiv.fid] = []
        fam_map[indiv.fid].append(indiv)
    return fam_map


def filter_useful_fams(fam_map: Dict[str, List[Individual]]) -> Dict[str, List[Individual]]:
    filtered = {}
    for fid, indivs in fam_map.items():
        founders = []
        non_founders = []
        for indiv in indivs:
            if indiv.pid == 0 and indiv.mid == 0:
                founders.append(indiv)
            else:
                non_founders.append(indiv)
        if len(non_founders) > 1:
            filtered[fid] = indivs
    return filtered


def find_disjoint_fams(fam_map: Dict[str, List[Individual]]) -> Dict[str, List[set]]:
    fid_disjoint_map = {}
    for fid, indivs in fam_map.items():
        family_sets = []
        for indiv in indivs:
            tmp_fs = set([indiv.iid])
            if indiv.mid != 0:
                tmp_fs.add(indiv.mid)
            if indiv.pid != 0:
                tmp_fs.add(indiv.pid)
            if len(family_sets) == 0:
                family_sets.append(tmp_fs)
                continue
            for fs in family_sets:
                if not fs.isdisjoint(tmp_fs):
                    tmp_fs |= fs
                    break
            family_sets.append(tmp_fs)
        did_merge = True
        while did_merge:
            did_merge = False
            for i in range(len(family_sets)):
                fs = family_sets[i]
                for j in range(i, len(family_sets)):
                    if i == j:
                        continue
                    tmp = family_sets[j]
                    if not fs.isdisjoint(tmp):
                        family_sets.remove(tmp)
                        family_sets.remove(fs)
                        fs |= tmp
                        family_sets.append(fs)
                        did_merge = True
                        break
                if did_merge:
                    break
        fid_disjoint_map[fid] = family_sets
    return fid_disjoint_map


# Rename, and change func sig
def get_fidpid_genotype_map(plink: PlinkWrapper, fm: Dict[str, List[set]]):
    '''
    This is one of the two big bottlenecks in the script, uses plink to
    generate the .ped/.map files from the .bed files. The genotypes could
    be read directly from the binary, but from experience doing this, it's
    very easy to make mistakes and not very easy to know IF you've made a
    mistake. Because of this, this just reads the plaintext genotypes
    from the .ped file.
    '''
    files_to_delete = []
    indiv_tuples = [(indiv.fid, indiv.iid)
                    for indivs in fm.values() for indiv in indivs]
    with open('keep.txt', 'w+') as f:
        for fid, pid in indiv_tuples:
            f.write(f"{fid} {pid}\n")
    files_to_delete.append('keep.txt')
    plink.run([
        '--keep', 'keep.txt',
        '--maf', '0.2',
        '--indep-pairwise', '50', '5', '0.05',
        '--out', 'plink',
    ])
    files_to_delete += ['plink.prune.in', 'plink.prune.out', 'plink.log']
    plink.run([
        '--extract', 'plink.prune.in',
        '--recode',
        '--out', 'pruned'
    ])
    files_to_delete += ['pruned.ped', 'pruned.map', 'pruned.log']
    cm_rsid_set = set()
    variants = []
    indices = []
    index = 0
    with open('pruned.map', 'r') as f:
        for line in f:
            chrom, rsid, pos_cm, pos_bp = line.split()
            if pos_cm in cm_rsid_set:
                indices.append(index)
                continue
            cm_rsid_set.add(pos_cm)
            variants.append(Variant(int(chrom), rsid, float(pos_cm), int(pos_bp)))
            index += 1
    fp_geno_map = {}
    with open('pruned.ped', 'r') as f:
        for line in f:
            data = line.split()
            fid, iid, pid, mid, sex = data[:5]
            genotypes = data[6:]
            for i in reversed(indices):
                del genotypes[i:i+2]
            fp_geno_map[fid+" "+iid] = genotypes
    # plain loop instead of the original side-effect list comprehension
    for fn in files_to_delete:
        os.remove(fn)
    return fp_geno_map, variants


def create_merlin_records(fam_map, disjoint_fams, fp_geno_map):
    def swap_fid(disjoint_fams, fid, pid, iid):
        tmp_pid = pid
        if pid == 0:  # the original `pid is 0` compared identity, not value
            tmp_pid = iid  # Sets founders pid to itself, to get correct fid
        for i in range(len(disjoint_fams[fid])):
            disjoint_fam = disjoint_fams[fid][i]
            if tmp_pid in disjoint_fam:
                return f"{fid}_{i+1}"

    records = []
    genos_len = len(list(fp_geno_map.values())[0])
    for fid, indivs in fam_map.items():
        for indiv in indivs:
            new_fid = swap_fid(disjoint_fams, fid, indiv.pid, indiv.iid)
            genotypes = None
            try:
                genotypes = fp_geno_map[f"{fid} {indiv.iid}"]
            except KeyError:
                genotypes = ['0' for _ in range(genos_len)]
            r = MerlinRecord(new_fid,
indiv.iid, indiv.mid, indiv.pid, indiv.sex, genotypes) records.append(r) return records def write_merlin_ped(records, out_filename): ''' This is the biggest bottleneck, for obvious reasons. Maybe optimising the as_string method of the MerlinRecord class would speed it up, not sure how much though. ''' with open(out_filename, 'w+') as f: for record in records: f.write(record.as_string()) def write_merlin_dat(variants, out_filename): with open(out_filename, 'w+') as f: for variant in variants: f.write(f"M {variant.rsid}\n") def write_merlin_map(variants: List[Variant], outfile: str): with open(outfile, 'w+') as f: f.write("CHROMOSOME\tMARKER\tPOSITION\n") for variant in variants: line = f"{variant.chrom}\t{variant.rsid}\t{variant.pos}\n" f.write(line) @inout def process_chrom(chr_n: int, plink: PlinkWrapper, indir='split_by_chromosome', outdir='merlin_input_files'): ''' This function is not ideal, ideally I would have written the PlinkWrapper object with a better constructor so that I could easily make a new PlinkWrapper object without using output from argparse. Instead I've chosen to just deepcopy the object and then manually change the input string. This function is also very redundant, the family information never changes, only the genotypes... ''' pc = copy.deepcopy(plink) pc.input_str = f"{indir}/{chr_n}" indivs = get_individuals_from_fam(f"{indir}/{chr_n}.fam") indivs = add_missing_individuals(indivs) fam_map = generate_family_map(indivs) # Uncomment line below to only include families with >1 offspring (i.e. # families with sib-pairs) #fam_map = filter_useful_fams(fam_map) disjoint_fams = find_disjoint_fams(fam_map) fp_geno_map, variants = get_fidpid_genotype_map(pc, fam_map) records = create_merlin_records(fam_map, disjoint_fams, fp_geno_map) fp_geno_map = None # Makes sure we're not holding excessive memory try: os.mkdir(outdir) except FileExistsError: pass write_merlin_ped(records, f"{outdir}/{chr_n}.ped") write_merlin_map(variants, f"{outdir}/{chr_n}.map") write_merlin_dat(variants, f"{outdir}/{chr_n}.dat") def plink_split_by_chrom(plink: PlinkWrapper, outdir='split_by_chromosome'): try: os.mkdir(outdir) except FileExistsError: pass for chrom in range(1,23): plink.run([ '--chr', str(chrom), '--make-bed', '--out', f"{outdir}/{chrom}", ]) if __name__ == '__main__': desc = ''' File format converter to take a plink binary file or vcf and convert it to a format which MERLIN accepts, which consists of a .ped and a .dat file. It is important to note that the .ped file here is not the same as the plink .ped file. ''' parser = argparse.ArgumentParser(description=desc) parser.add_argument('--bed', type=str, help='PLINK binary input file path stub') parser.add_argument('--ped', type=str, help='PLINK input file path stub') parser.add_argument('--vcf', type=str, help='VCF file path') parser.add_argument('--plink-binary', type=str, default='plink', help='''Path to PLINK binary, useful if plink isn't globally accessible or you want to use a specific version of plink''') args = parser.parse_args() plink = PlinkWrapper(args) plink_split_by_chrom(plink) for i in range(1, 23): print(f"Processing chromosome {i}...") process_chrom(i, plink)
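# --- Illustrative invocation (not from the original; the script name
# `plink2merlin.py` and the `mydata` file stub are assumed) ---
# With a plink binary on PATH:
#   python plink2merlin.py --bed mydata
# splits the input per chromosome into split_by_chromosome/ and writes
# merlin_input_files/<chrom>.ped/.map/.dat for chromosomes 1-22.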
[ "subprocess.run", "copy.deepcopy", "os.remove", "os.mkdir", "argparse.ArgumentParser", "uuid.uuid4", "time.time", "sys.exit" ]
[((10892, 10912), 'copy.deepcopy', 'copy.deepcopy', (['plink'], {}), '(plink)\n', (10905, 10912), False, 'import copy\n'), ((12419, 12460), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (12442, 12460), False, 'import argparse\n'), ((2012, 2062), 'subprocess.run', 'subprocess.run', (['plink_options'], {'capture_output': '(True)'}), '(plink_options, capture_output=True)\n', (2026, 2062), False, 'import subprocess\n'), ((2124, 2135), 'time.time', 'time.time', ([], {}), '()\n', (2133, 2135), False, 'import time\n'), ((2251, 2262), 'time.time', 'time.time', ([], {}), '()\n', (2260, 2262), False, 'import time\n'), ((8456, 8469), 'os.remove', 'os.remove', (['fn'], {}), '(fn)\n', (8465, 8469), False, 'import os\n'), ((11526, 11542), 'os.mkdir', 'os.mkdir', (['outdir'], {}), '(outdir)\n', (11534, 11542), False, 'import os\n'), ((11847, 11863), 'os.mkdir', 'os.mkdir', (['outdir'], {}), '(outdir)\n', (11855, 11863), False, 'import os\n'), ((558, 570), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (568, 570), False, 'import uuid\n'), ((1030, 1040), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1038, 1040), False, 'import sys\n'), ((1797, 1807), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1805, 1807), False, 'import sys\n')]
import os
import typing

from .types import Author


def write_log(path, log: str):
    """Write the rendered log string to ``path``, creating parent dirs."""
    path = os.path.expanduser(path)
    base_path = os.path.dirname(path)
    if not os.path.exists(base_path):
        os.makedirs(base_path)
    with open(path, 'w') as f:
        f.write(log)


def write_avatars(dirname, avatars_by_author: typing.Dict[Author, bytes]):
    """Dump each author's avatar image bytes into ``dirname``/<author name>."""
    dirname = os.path.expanduser(dirname)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    for author, image in avatars_by_author.items():
        with open(os.path.join(dirname, author.name), 'wb') as f:
            f.write(image)
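# --- Illustrative usage (not part of the original module; `Author` is only
# assumed, from its use above, to expose a `.name` attribute, and the paths
# are hypothetical) ---
#   write_log('~/.cache/stats/commits.log', log_text)
#   write_avatars('~/.cache/stats/avatars', {author: png_bytes})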
[ "os.path.expanduser", "os.makedirs", "os.path.dirname", "os.path.exists", "os.path.join" ]
[((95, 119), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (113, 119), False, 'import os\n'), ((136, 157), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (151, 157), False, 'import os\n'), ((370, 397), 'os.path.expanduser', 'os.path.expanduser', (['dirname'], {}), '(dirname)\n', (388, 397), False, 'import os\n'), ((169, 194), 'os.path.exists', 'os.path.exists', (['base_path'], {}), '(base_path)\n', (183, 194), False, 'import os\n'), ((204, 226), 'os.makedirs', 'os.makedirs', (['base_path'], {}), '(base_path)\n', (215, 226), False, 'import os\n'), ((409, 432), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (423, 432), False, 'import os\n'), ((442, 462), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (453, 462), False, 'import os\n'), ((533, 567), 'os.path.join', 'os.path.join', (['dirname', 'author.name'], {}), '(dirname, author.name)\n', (545, 567), False, 'import os\n')]
import torch from torch import nn import torch.optim as optim import torch.nn.functional as F import math import numpy as np from config import parameters as conf from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence if conf.pretrained_model == "bert": from transformers import BertModel elif conf.pretrained_model == "roberta": from transformers import RobertaModel elif conf.pretrained_model == "finbert": from transformers import BertModel elif conf.pretrained_model == "longformer": from transformers import LongformerModel class Bert_model(nn.Module): def __init__(self, num_decoder_layers, hidden_size, dropout_rate, input_length, program_length, op_list, const_list, num_char_length, num_emb_dim): super(Bert_model, self).__init__() self.op_list_size = len(op_list) self.const_list_size = len(const_list) self.reserved_token_size = self.op_list_size + self.const_list_size self.program_length = program_length self.hidden_size = hidden_size self.const_list = const_list self.op_list = op_list self.input_length = input_length self.num_char_length = num_char_length self.num_emb_dim = num_emb_dim self.reserved_ind = nn.Parameter(torch.arange( 0, self.reserved_token_size), requires_grad=False) self.reserved_go = nn.Parameter(torch.arange(op_list.index( 'GO'), op_list.index('GO') + 1), requires_grad=False) self.reserved_para = nn.Parameter(torch.arange(op_list.index( ')'), op_list.index(')') + 1), requires_grad=False) # masking for decoidng for test time op_ones = nn.Parameter(torch.ones( self.op_list_size), requires_grad=False) op_zeros = nn.Parameter(torch.zeros( self.op_list_size), requires_grad=False) other_ones = nn.Parameter(torch.ones( input_length + self.const_list_size), requires_grad=False) other_zeros = nn.Parameter(torch.zeros( input_length + self.const_list_size), requires_grad=False) self.op_only_mask = nn.Parameter( torch.cat((op_ones, other_zeros), 0), requires_grad=False) self.seq_only_mask = nn.Parameter( torch.cat((op_zeros, other_ones), 0), requires_grad=False) # for ")" para_before_ones = nn.Parameter(torch.ones( op_list.index(')')), requires_grad=False) para_after_ones = nn.Parameter(torch.ones( input_length + self.reserved_token_size - op_list.index(')') - 1), requires_grad=False) para_zero = nn.Parameter(torch.zeros(1), requires_grad=False) self.para_mask = nn.Parameter(torch.cat( (para_before_ones, para_zero, para_after_ones), 0), requires_grad=False) # for step embedding # self.step_masks = [] all_tmp_list = self.op_list + self.const_list self.step_masks = nn.Parameter(torch.zeros( conf.max_step_ind, input_length + self.reserved_token_size), requires_grad=False) for i in range(conf.max_step_ind): this_step_mask_ind = all_tmp_list.index("#" + str(i)) self.step_masks[i, this_step_mask_ind] = 1.0 # self.step_mask_eye = torch.eye(conf.max_step_ind) if conf.pretrained_model == "bert": self.bert = BertModel.from_pretrained( conf.model_size, cache_dir=conf.cache_dir) elif conf.pretrained_model == "roberta": self.bert = RobertaModel.from_pretrained( conf.model_size, cache_dir=conf.cache_dir) elif conf.pretrained_model == "finbert": self.bert = BertModel.from_pretrained( conf.model_size, cache_dir=conf.cache_dir) elif conf.pretrained_model == "longformer": self.bert = LongformerModel.from_pretrained( conf.model_size, cache_dir=conf.cache_dir) self.cls_prj = nn.Linear(hidden_size, hidden_size, bias=True) self.cls_dropout = nn.Dropout(dropout_rate) self.seq_prj = nn.Linear(hidden_size, hidden_size, bias=True) self.seq_dropout = nn.Dropout(dropout_rate) 
self.reserved_token_embedding = nn.Embedding( self.reserved_token_size, hidden_size) self.num_char_embedding = nn.Embedding(self.num_char_length, num_emb_dim) # attentions self.decoder_history_attn_prj = nn.Linear( hidden_size, hidden_size, bias=True) self.decoder_history_attn_dropout = nn.Dropout(dropout_rate) self.question_attn_prj = nn.Linear(hidden_size, hidden_size, bias=True) self.question_attn_dropout = nn.Dropout(dropout_rate) self.question_summary_attn_prj = nn.Linear( hidden_size, hidden_size, bias=True) self.question_summary_attn_dropout = nn.Dropout(dropout_rate) if conf.sep_attention: self.input_embeddings_prj = nn.Linear( hidden_size*3, hidden_size, bias=True) else: self.input_embeddings_prj = nn.Linear( hidden_size*2, hidden_size, bias=True) self.input_embeddings_layernorm = nn.LayerNorm([1, hidden_size]) self.option_embeddings_prj = nn.Linear( hidden_size*2, hidden_size, bias=True) # decoder lstm self.rnn = torch.nn.LSTM(input_size=hidden_size, hidden_size=hidden_size, num_layers=conf.num_decoder_layers, batch_first=True) # num char encoder self.num_bilstm = torch.nn.LSTM(input_size=num_emb_dim, hidden_size=hidden_size // 2, num_layers=conf.num_encoder_layers, bidirectional=True) self.num_char_prj = nn.Linear(hidden_size, hidden_size, bias=True) self.num_char_dropout = nn.Dropout(dropout_rate) # num attention self.num_attn_prj = nn.Linear(hidden_size, hidden_size, bias=True) self.num_attn_dropout = nn.Dropout(dropout_rate) # seq_out_prj self.seqout_prj = nn.Linear(hidden_size * 2, hidden_size, bias=True) self.seqout_dropout = nn.Dropout(dropout_rate) # step vector self.decoder_step_proj = nn.Linear( 3*hidden_size, hidden_size, bias=True) self.decoder_step_proj_dropout = nn.Dropout(dropout_rate) self.step_mix_proj = nn.Linear( hidden_size*2, hidden_size, bias=True) def forward(self, is_training, input_ids, input_mask, segment_ids, option_mask, program_ids, program_mask, num_char_ids, number_mask, num_char_mask, device): bert_outputs = self.bert( input_ids=input_ids, attention_mask=input_mask, token_type_ids=segment_ids) # print("="*30) # print("input_ids.size(), ", input_ids.size()) # [batch, seq_length], [16, 512] # print("number_mask: ", number_mask.size()) # print("input_mask.size(), ", input_mask.size()) # [batch, seq_length], [16, 512] # print("segment_ids.size(), ", segment_ids.size()) # [batch, seq_length], [16, 512] # print("option_mask.size()", option_mask.size()) # [batch, option_length], [16, 556] # print("program_ids.size()", program_ids.size()) # [batch, program_length], [16, 30] # print("program_mask.size()", program_mask.size()) # [batch, program_length], [16, 30] ###### # Step 1: get the sequence, including questions and retrieved text: {h_i^e} ###### bert_sequence_output = bert_outputs.last_hidden_state # [batch, seq_length, hidden], [16, 512, 768] bert_pooled_output = bert_sequence_output[:, 0, :] # [batch, hidden], [16, 768] batch_size, seq_length, bert_dim = list(bert_sequence_output.size()) pooled_output = self.cls_prj(bert_pooled_output) # if conf.sep_attention is True, the pooled_output will not be used pooled_output = self.cls_dropout(pooled_output) option_size = self.reserved_token_size + seq_length # 556 sequence_output = self.seq_prj(bert_sequence_output) sequence_output = self.seq_dropout(sequence_output) # [batch_size, seq_length, hidden], [16, 512, 768] if conf.num_char: ###### # Step new1: get number embeddings and number_hidden_state # the number char_hidden_avg will be concated with the input sequence # therefore, for the word (not number), 
we copy the output of the encoder here
            ######
            num_char_embeddings = self.num_char_embedding(num_char_ids)
            size_a, size_b, size_c, size_d = num_char_embeddings.size()
            num_char_embeddings = num_char_embeddings.reshape(-1, size_c, size_d)  # [16 * 512, 10, 300]

            # add pad, get bilstm output
            num_char_length = num_char_mask.sum(-1).reshape(-1)  # [16 * 512]
            num_char_length += (num_char_length == 0).long()  # add 1 to those has 0 number, we can multiply 0 again to avoid the calculation
            num_char_length = num_char_length.tolist()
            num_char_embeddings_pad = torch.nn.utils.rnn.pack_padded_sequence(input=num_char_embeddings, lengths=num_char_length, batch_first=True, enforce_sorted=False)
            num_char_hidden, _ = self.num_bilstm(num_char_embeddings_pad)
            num_char_hidden, out_len = pad_packed_sequence(num_char_hidden, batch_first=True)
            num_char_hidden = num_char_hidden.reshape(size_a, size_b, size_c, -1)  # because bilstm
            num_char_mask_repeat = num_char_mask.unsqueeze(-1).repeat(1, 1, 1, self.hidden_size)  # batch, seq_length, max_num_length, hidden
            num_char_hidden = num_char_hidden * num_char_mask_repeat  # same as above
            num_char_hidden_sum = num_char_hidden.sum(-2)
            num_char_mask = num_char_mask.sum(-1).unsqueeze(-1).repeat(1, 1, self.hidden_size) + 1e-7
            num_char_hidden_avg = num_char_hidden_sum / num_char_mask  # batch, seq_length, hidden
            num_char_output = self.num_char_prj(num_char_hidden_avg)
            num_char_output = self.num_char_dropout(num_char_output)  # batch, seq_length, hidden

            mask = number_mask.unsqueeze(-1).repeat(1, 1, self.hidden_size)  # batch, seq_length, hidden
            # copy the output of the encoder here: use the char encoding at
            # number positions and the encoder output elsewhere; the original
            # `(mask - 1)` negated the word positions, `(1 - mask)` is the
            # intended complement
            concat_num_word_output = num_char_output * mask + sequence_output * (1 - mask)  # batch, seq_length, hidden

            # number_mask: [batch, seq_length]
            num_attn_vec = self.num_attn_prj(concat_num_word_output)
            num_attn_vec = self.num_attn_dropout(num_attn_vec)  # batch, seq_length, hidden
            num_attn_w = torch.matmul(concat_num_word_output, torch.transpose(num_attn_vec, 1, 2))  # batch, seq_length, seq_length (len_generated)
            attn_mask = number_mask.unsqueeze(-1).repeat(1, 1, num_attn_w.size()[-1])  # batch,
            num_attn_w -= 1e6 * (1 - attn_mask)
            num_attn_w = F.softmax(num_attn_w, dim=1)
            num_ctx_out = torch.matmul(
                torch.transpose(num_attn_w, 1, 2), concat_num_word_output)  # batch, seq_length, hidden

            sequence_output = torch.cat([sequence_output, num_ctx_out], dim=-1)
            sequence_output = self.seqout_prj(sequence_output)
            sequence_output = self.seqout_dropout(sequence_output)

        ######
        # Step 2: get option embeddings: {h_i^s, h_i^m}
        # and concat it with sequence_output: H
        ######
        op_embeddings = self.reserved_token_embedding(self.reserved_ind)
        op_embeddings = op_embeddings.repeat(batch_size, 1, 1)
        # [batch_size, reserved_ind_length, hidden], [16, 44, 768], the length of reserved_ind = len(op_list) + len(const_list)

        # [batch, op + seq len, hidden]
        initial_option_embeddings = torch.cat([op_embeddings, sequence_output], dim=1)

        ######
        # Step 3: init something used for LSTM decoder
        ######
        # for init, only one symbol "GO", so the size of decoder_output is [batch_size, 1, hidden]
        init_decoder_output = self.reserved_token_embedding(self.reserved_go)  # [1, 768]
        decoder_output = init_decoder_output.repeat(batch_size, 1, 1)  # [16, 1, 768]
        if
conf.sep_attention: decoder_history = decoder_output else: decoder_history = torch.unsqueeze(pooled_output, dim=-1) # initialize the hidden state for the LSTM decoder decoder_state_h = torch.zeros(1, batch_size, self.hidden_size, device=device) decoder_state_c = torch.zeros(1, batch_size, self.hidden_size, device=device) ###### # Step 4: prepare something for future use ###### split_program_ids = torch.split(program_ids, 1, dim=1) # len(split_program_ids) = 30, split_program_ids[0].size() = [16, 1] # What's the float_input_mask for? float_input_mask = input_mask.float() float_input_mask = torch.unsqueeze(float_input_mask, dim=-1) # used for updating option embeddings, adding step embedding this_step_new_op_emb = initial_option_embeddings # [batch, option_length, hidden] logits = [] ###### # Step 5: generate program ###### for cur_step in range(self.program_length): ###### # Step 5.1: get decoder history attention: att_h ###### decoder_history_attn_vec = self.decoder_history_attn_prj(decoder_output) # [batch, 1, hidden], [16, 1, 768] decoder_history_attn_vec = self.decoder_history_attn_dropout(decoder_history_attn_vec) decoder_history_attn_w = torch.matmul( decoder_history, torch.transpose(decoder_history_attn_vec, 1, 2)) # [batch, cur_step + 1, 1] decoder_history_attn_w = F.softmax(decoder_history_attn_w, dim=1) # [batch, cur_step + 1, 1] decoder_history_ctx_embeddings = torch.matmul( torch.transpose(decoder_history_attn_w, 1, 2), decoder_history) # [batch, 1, hidden],[16, 1, 768] ###### # Step 5.2: get attention for input sequence: att_p ###### if conf.sep_attention: # input seq att question_attn_vec = self.question_attn_prj(decoder_output) question_attn_vec = self.question_attn_dropout(question_attn_vec) #[batch, 1, hidden],[16, 1, 768] question_attn_w = torch.matmul( sequence_output, torch.transpose(question_attn_vec, 1, 2))#[batch, seq_length, 1],[16, 512, 1] question_attn_w -= 1e6 * (1 - float_input_mask) question_attn_w = F.softmax(question_attn_w, dim=1) # [batch, seq_length, 1], [16, 512, 1] question_ctx_embeddings = torch.matmul( torch.transpose(question_attn_w, 1, 2), sequence_output) # [batch, 1, hidden], [16, 1, 768] ###### # Step 5.3: get another input sequence attention: att_p' ###### question_summary_vec = self.question_summary_attn_prj(decoder_output) question_summary_vec = self.question_summary_attn_dropout(question_summary_vec) # [batch, 1, hidden] question_summary_w = torch.matmul( sequence_output, torch.transpose(question_summary_vec, 1, 2)) #[batch, seq_length, 1],[16, 512, 1] question_summary_w -= 1e6 * (1 - float_input_mask) question_summary_w = F.softmax(question_summary_w, dim=1) # [batch, seq_length, 1], [16, 512, 1] question_summary_embeddings = torch.matmul( torch.transpose(question_summary_w, 1, 2), sequence_output) ###### # Step 5.4: get contextual information C_T ###### if conf.sep_attention: # [batch, 1, hidden * 3], [16, 1, 2304] concat_input_embeddings = torch.cat([decoder_history_ctx_embeddings, question_ctx_embeddings, decoder_output], dim=-1) else: concat_input_embeddings = torch.cat([decoder_history_ctx_embeddings, decoder_output], dim=-1) input_embeddings = self.input_embeddings_prj(concat_input_embeddings) #[batch, 1, hidden],[16, 1, 768] if conf.layer_norm: input_embeddings = self.input_embeddings_layernorm( input_embeddings) ###### # Step 5.5: get all token embeddings: H_T' ###### question_option_vec = this_step_new_op_emb * question_summary_embeddings # [batch, option_size, hidden], [16 556, 768] option_embeddings = torch.cat( 
[this_step_new_op_emb, question_option_vec], dim=-1) # [batch, option_size, hidden*2], [16, 556, 1536] option_embeddings = self.option_embeddings_prj(option_embeddings) # [batch, option_size, hidden], [16, 556, 768] ###### # Step 5.6: get logits ###### option_logits = torch.matmul( option_embeddings, torch.transpose(input_embeddings, 1, 2)) # batch, option_size, 1],[16, 556, 1] option_logits = torch.squeeze(option_logits, dim=2) # [batch, op + seq_len],op + seq_len = option_size option_logits -= 1e6 * (1 - option_mask) logits.append(option_logits) ###### # Step 6: update state ###### if is_training: program_index = torch.unsqueeze(split_program_ids[cur_step], dim=1) # [batch, 1, 1], [16, 1, 1] else: # constrain decoding if cur_step % 4 == 0 or (cur_step + 1) % 4 == 0: # op round option_logits -= 1e6 * self.seq_only_mask else: # number round option_logits -= 1e6 * self.op_only_mask if (cur_step + 1) % 4 == 0: # ")" round option_logits -= 1e6 * self.para_mask # print(program_index) program_index = torch.argmax(option_logits, axis=-1, keepdim=True) program_index = torch.unsqueeze(program_index, dim=1) if (cur_step + 1) % 4 == 0: # update op embeddings this_step_index = cur_step // 4 this_step_list_index = ( self.op_list + self.const_list).index("#" + str(this_step_index)) # ??? integer this_step_mask = self.step_masks[this_step_index, :] # [option_size], [556] decoder_step_vec = self.decoder_step_proj(concat_input_embeddings) decoder_step_vec = self.decoder_step_proj_dropout(decoder_step_vec)#[batch,1,hidden], [16, 1, 768] decoder_step_vec = torch.squeeze(decoder_step_vec) # [batch, hidden], [16, 768] this_step_new_emb = decoder_step_vec # [batch, hidden] this_step_new_emb = torch.unsqueeze(this_step_new_emb, 1) this_step_new_emb = this_step_new_emb.repeat( 1, self.reserved_token_size+self.input_length, 1) # [batch, op seq, hidden] this_step_mask = torch.unsqueeze(this_step_mask, 0) # [1, op seq] this_step_mask = torch.unsqueeze(this_step_mask, 2) # [1, op seq, 1] this_step_mask = this_step_mask.repeat(batch_size, 1, self.hidden_size) # [batch, op seq, hidden] this_step_new_op_emb = torch.where( this_step_mask > 0, this_step_new_emb, initial_option_embeddings) program_index = torch.repeat_interleave(program_index, self.hidden_size, dim=2) # [batch, 1, hidden] input_program_embeddings = torch.gather(option_embeddings, dim=1, index=program_index) decoder_output, (decoder_state_h, decoder_state_c) = self.rnn( input_program_embeddings, (decoder_state_h, decoder_state_c)) decoder_history = torch.cat( [decoder_history, input_program_embeddings], dim=1) # [batch, cur_step + 1, hidden] logits = torch.stack(logits, dim=1) return logits
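# --- Shape walk-through (derived from the inline comments above, assuming
# batch=16, seq_length=512, reserved_token_size=44, program_length=30) ---
#   sequence_output [16, 512, 768] concat op_embeddings [16, 44, 768]
#     -> initial_option_embeddings [16, 44+512=556, 768]
#   per decode step: option_logits [16, 556]; after 30 steps,
#     logits = torch.stack(logits, dim=1) -> [16, 30, 556]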
[ "torch.nn.Dropout", "torch.argmax", "torch.nn.Embedding", "torch.cat", "torch.arange", "torch.nn.utils.rnn.pad_packed_sequence", "torch.nn.utils.rnn.pack_padded_sequence", "transformers.LongformerModel.from_pretrained", "torch.ones", "torch.repeat_interleave", "torch.gather", "torch.squeeze", "torch.nn.LayerNorm", "transformers.RobertaModel.from_pretrained", "torch.nn.Linear", "torch.zeros", "transformers.BertModel.from_pretrained", "torch.nn.LSTM", "torch.where", "torch.split", "torch.unsqueeze", "torch.stack", "torch.nn.functional.softmax", "torch.transpose" ]
[((3980, 4026), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (3989, 4026), False, 'from torch import nn\n'), ((4054, 4078), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (4064, 4078), False, 'from torch import nn\n'), ((4103, 4149), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (4112, 4149), False, 'from torch import nn\n'), ((4177, 4201), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (4187, 4201), False, 'from torch import nn\n'), ((4243, 4294), 'torch.nn.Embedding', 'nn.Embedding', (['self.reserved_token_size', 'hidden_size'], {}), '(self.reserved_token_size, hidden_size)\n', (4255, 4294), False, 'from torch import nn\n'), ((4343, 4390), 'torch.nn.Embedding', 'nn.Embedding', (['self.num_char_length', 'num_emb_dim'], {}), '(self.num_char_length, num_emb_dim)\n', (4355, 4390), False, 'from torch import nn\n'), ((4452, 4498), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (4461, 4498), False, 'from torch import nn\n'), ((4556, 4580), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (4566, 4580), False, 'from torch import nn\n'), ((4615, 4661), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (4624, 4661), False, 'from torch import nn\n'), ((4699, 4723), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (4709, 4723), False, 'from torch import nn\n'), ((4766, 4812), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (4775, 4812), False, 'from torch import nn\n'), ((4871, 4895), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (4881, 4895), False, 'from torch import nn\n'), ((5196, 5226), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['[1, hidden_size]'], {}), '([1, hidden_size])\n', (5208, 5226), False, 'from torch import nn\n'), ((5265, 5315), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'hidden_size'], {'bias': '(True)'}), '(hidden_size * 2, hidden_size, bias=True)\n', (5274, 5315), False, 'from torch import nn\n'), ((5370, 5491), 'torch.nn.LSTM', 'torch.nn.LSTM', ([], {'input_size': 'hidden_size', 'hidden_size': 'hidden_size', 'num_layers': 'conf.num_decoder_layers', 'batch_first': '(True)'}), '(input_size=hidden_size, hidden_size=hidden_size, num_layers=\n conf.num_decoder_layers, batch_first=True)\n', (5383, 5491), False, 'import torch\n'), ((5583, 5710), 'torch.nn.LSTM', 'torch.nn.LSTM', ([], {'input_size': 'num_emb_dim', 'hidden_size': '(hidden_size // 2)', 'num_layers': 'conf.num_encoder_layers', 'bidirectional': '(True)'}), '(input_size=num_emb_dim, hidden_size=hidden_size // 2,\n num_layers=conf.num_encoder_layers, bidirectional=True)\n', (5596, 5710), False, 'import torch\n'), ((5775, 5821), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, bias=True)\n', (5784, 5821), False, 'from torch import nn\n'), ((5854, 5878), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (5864, 5878), False, 'from torch import nn\n'), ((5932, 5978), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'hidden_size'], {'bias': '(True)'}), '(hidden_size, hidden_size, 
bias=True)\n', (5941, 5978), False, 'from torch import nn\n'), ((6011, 6035), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (6021, 6035), False, 'from torch import nn\n'), ((6085, 6135), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'hidden_size'], {'bias': '(True)'}), '(hidden_size * 2, hidden_size, bias=True)\n', (6094, 6135), False, 'from torch import nn\n'), ((6166, 6190), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (6176, 6190), False, 'from torch import nn\n'), ((6256, 6306), 'torch.nn.Linear', 'nn.Linear', (['(3 * hidden_size)', 'hidden_size'], {'bias': '(True)'}), '(3 * hidden_size, hidden_size, bias=True)\n', (6265, 6306), False, 'from torch import nn\n'), ((6359, 6383), 'torch.nn.Dropout', 'nn.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (6369, 6383), False, 'from torch import nn\n'), ((6414, 6464), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'hidden_size'], {'bias': '(True)'}), '(hidden_size * 2, hidden_size, bias=True)\n', (6423, 6464), False, 'from torch import nn\n'), ((12318, 12368), 'torch.cat', 'torch.cat', (['[op_embeddings, sequence_output]'], {'dim': '(1)'}), '([op_embeddings, sequence_output], dim=1)\n', (12327, 12368), False, 'import torch\n'), ((12994, 13053), 'torch.zeros', 'torch.zeros', (['(1)', 'batch_size', 'self.hidden_size'], {'device': 'device'}), '(1, batch_size, self.hidden_size, device=device)\n', (13005, 13053), False, 'import torch\n'), ((13080, 13139), 'torch.zeros', 'torch.zeros', (['(1)', 'batch_size', 'self.hidden_size'], {'device': 'device'}), '(1, batch_size, self.hidden_size, device=device)\n', (13091, 13139), False, 'import torch\n'), ((13259, 13293), 'torch.split', 'torch.split', (['program_ids', '(1)'], {'dim': '(1)'}), '(program_ids, 1, dim=1)\n', (13270, 13293), False, 'import torch\n'), ((13481, 13522), 'torch.unsqueeze', 'torch.unsqueeze', (['float_input_mask'], {'dim': '(-1)'}), '(float_input_mask, dim=-1)\n', (13496, 13522), False, 'import torch\n'), ((20909, 20935), 'torch.stack', 'torch.stack', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (20920, 20935), False, 'import torch\n'), ((1295, 1336), 'torch.arange', 'torch.arange', (['(0)', 'self.reserved_token_size'], {}), '(0, self.reserved_token_size)\n', (1307, 1336), False, 'import torch\n'), ((1718, 1747), 'torch.ones', 'torch.ones', (['self.op_list_size'], {}), '(self.op_list_size)\n', (1728, 1747), False, 'import torch\n'), ((1815, 1845), 'torch.zeros', 'torch.zeros', (['self.op_list_size'], {}), '(self.op_list_size)\n', (1826, 1845), False, 'import torch\n'), ((1915, 1962), 'torch.ones', 'torch.ones', (['(input_length + self.const_list_size)'], {}), '(input_length + self.const_list_size)\n', (1925, 1962), False, 'import torch\n'), ((2033, 2081), 'torch.zeros', 'torch.zeros', (['(input_length + self.const_list_size)'], {}), '(input_length + self.const_list_size)\n', (2044, 2081), False, 'import torch\n'), ((2171, 2207), 'torch.cat', 'torch.cat', (['(op_ones, other_zeros)', '(0)'], {}), '((op_ones, other_zeros), 0)\n', (2180, 2207), False, 'import torch\n'), ((2285, 2321), 'torch.cat', 'torch.cat', (['(op_zeros, other_ones)', '(0)'], {}), '((op_zeros, other_ones), 0)\n', (2294, 2321), False, 'import torch\n'), ((2653, 2667), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (2664, 2667), False, 'import torch\n'), ((2728, 2788), 'torch.cat', 'torch.cat', (['(para_before_ones, para_zero, para_after_ones)', '(0)'], {}), '((para_before_ones, para_zero, para_after_ones), 0)\n', (2737, 
2788), False, 'import torch\n'), ((2978, 3049), 'torch.zeros', 'torch.zeros', (['conf.max_step_ind', '(input_length + self.reserved_token_size)'], {}), '(conf.max_step_ind, input_length + self.reserved_token_size)\n', (2989, 3049), False, 'import torch\n'), ((3381, 3449), 'transformers.BertModel.from_pretrained', 'BertModel.from_pretrained', (['conf.model_size'], {'cache_dir': 'conf.cache_dir'}), '(conf.model_size, cache_dir=conf.cache_dir)\n', (3406, 3449), False, 'from transformers import BertModel\n'), ((4968, 5018), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 3)', 'hidden_size'], {'bias': '(True)'}), '(hidden_size * 3, hidden_size, bias=True)\n', (4977, 5018), False, 'from torch import nn\n'), ((5088, 5138), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'hidden_size'], {'bias': '(True)'}), '(hidden_size * 2, hidden_size, bias=True)\n', (5097, 5138), False, 'from torch import nn\n'), ((9151, 9287), 'torch.nn.utils.rnn.pack_padded_sequence', 'torch.nn.utils.rnn.pack_padded_sequence', ([], {'input': 'num_char_embeddings', 'lengths': 'num_char_length', 'batch_first': '(True)', 'enforce_sorted': '(False)'}), '(input=num_char_embeddings, lengths=\n num_char_length, batch_first=True, enforce_sorted=False)\n', (9190, 9287), False, 'import torch\n'), ((9404, 9458), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['num_char_hidden'], {'batch_first': '(True)'}), '(num_char_hidden, batch_first=True)\n', (9423, 9458), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n'), ((11198, 11226), 'torch.nn.functional.softmax', 'F.softmax', (['num_attn_w'], {'dim': '(1)'}), '(num_attn_w, dim=1)\n', (11207, 11226), True, 'import torch.nn.functional as F\n'), ((11573, 11622), 'torch.cat', 'torch.cat', (['[sequence_output, num_ctx_out]'], {'dim': '(-1)'}), '([sequence_output, num_ctx_out], dim=-1)\n', (11582, 11622), False, 'import torch\n'), ((12869, 12907), 'torch.unsqueeze', 'torch.unsqueeze', (['pooled_output'], {'dim': '(-1)'}), '(pooled_output, dim=-1)\n', (12884, 12907), False, 'import torch\n'), ((14350, 14390), 'torch.nn.functional.softmax', 'F.softmax', (['decoder_history_attn_w'], {'dim': '(1)'}), '(decoder_history_attn_w, dim=1)\n', (14359, 14390), True, 'import torch.nn.functional as F\n'), ((16024, 16060), 'torch.nn.functional.softmax', 'F.softmax', (['question_summary_w'], {'dim': '(1)'}), '(question_summary_w, dim=1)\n', (16033, 16060), True, 'import torch.nn.functional as F\n'), ((17354, 17416), 'torch.cat', 'torch.cat', (['[this_step_new_op_emb, question_option_vec]'], {'dim': '(-1)'}), '([this_step_new_op_emb, question_option_vec], dim=-1)\n', (17363, 17416), False, 'import torch\n'), ((17891, 17926), 'torch.squeeze', 'torch.squeeze', (['option_logits'], {'dim': '(2)'}), '(option_logits, dim=2)\n', (17904, 17926), False, 'import torch\n'), ((20398, 20461), 'torch.repeat_interleave', 'torch.repeat_interleave', (['program_index', 'self.hidden_size'], {'dim': '(2)'}), '(program_index, self.hidden_size, dim=2)\n', (20421, 20461), False, 'import torch\n'), ((20523, 20582), 'torch.gather', 'torch.gather', (['option_embeddings'], {'dim': '(1)', 'index': 'program_index'}), '(option_embeddings, dim=1, index=program_index)\n', (20535, 20582), False, 'import torch\n'), ((20779, 20840), 'torch.cat', 'torch.cat', (['[decoder_history, input_program_embeddings]'], {'dim': '(1)'}), '([decoder_history, input_program_embeddings], dim=1)\n', (20788, 20840), False, 'import torch\n'), ((3540, 3611), 'transformers.RobertaModel.from_pretrained', 
'RobertaModel.from_pretrained', (['conf.model_size'], {'cache_dir': 'conf.cache_dir'}), '(conf.model_size, cache_dir=conf.cache_dir)\n', (3568, 3611), False, 'from transformers import RobertaModel\n'), ((10844, 10879), 'torch.transpose', 'torch.transpose', (['num_attn_vec', '(1)', '(2)'], {}), '(num_attn_vec, 1, 2)\n', (10859, 10879), False, 'import torch\n'), ((11364, 11397), 'torch.transpose', 'torch.transpose', (['num_attn_w', '(1)', '(2)'], {}), '(num_attn_w, 1, 2)\n', (11379, 11397), False, 'import torch\n'), ((14236, 14283), 'torch.transpose', 'torch.transpose', (['decoder_history_attn_vec', '(1)', '(2)'], {}), '(decoder_history_attn_vec, 1, 2)\n', (14251, 14283), False, 'import torch\n'), ((14494, 14539), 'torch.transpose', 'torch.transpose', (['decoder_history_attn_w', '(1)', '(2)'], {}), '(decoder_history_attn_w, 1, 2)\n', (14509, 14539), False, 'import torch\n'), ((15214, 15247), 'torch.nn.functional.softmax', 'F.softmax', (['question_attn_w'], {'dim': '(1)'}), '(question_attn_w, dim=1)\n', (15223, 15247), True, 'import torch.nn.functional as F\n'), ((15846, 15889), 'torch.transpose', 'torch.transpose', (['question_summary_vec', '(1)', '(2)'], {}), '(question_summary_vec, 1, 2)\n', (15861, 15889), False, 'import torch\n'), ((16174, 16215), 'torch.transpose', 'torch.transpose', (['question_summary_w', '(1)', '(2)'], {}), '(question_summary_w, 1, 2)\n', (16189, 16215), False, 'import torch\n'), ((16461, 16557), 'torch.cat', 'torch.cat', (['[decoder_history_ctx_embeddings, question_ctx_embeddings, decoder_output]'], {'dim': '(-1)'}), '([decoder_history_ctx_embeddings, question_ctx_embeddings,\n decoder_output], dim=-1)\n', (16470, 16557), False, 'import torch\n'), ((16722, 16789), 'torch.cat', 'torch.cat', (['[decoder_history_ctx_embeddings, decoder_output]'], {'dim': '(-1)'}), '([decoder_history_ctx_embeddings, decoder_output], dim=-1)\n', (16731, 16789), False, 'import torch\n'), ((17784, 17823), 'torch.transpose', 'torch.transpose', (['input_embeddings', '(1)', '(2)'], {}), '(input_embeddings, 1, 2)\n', (17799, 17823), False, 'import torch\n'), ((18206, 18257), 'torch.unsqueeze', 'torch.unsqueeze', (['split_program_ids[cur_step]'], {'dim': '(1)'}), '(split_program_ids[cur_step], dim=1)\n', (18221, 18257), False, 'import torch\n'), ((18846, 18896), 'torch.argmax', 'torch.argmax', (['option_logits'], {'axis': '(-1)', 'keepdim': '(True)'}), '(option_logits, axis=-1, keepdim=True)\n', (18858, 18896), False, 'import torch\n'), ((18929, 18966), 'torch.unsqueeze', 'torch.unsqueeze', (['program_index'], {'dim': '(1)'}), '(program_index, dim=1)\n', (18944, 18966), False, 'import torch\n'), ((19579, 19610), 'torch.squeeze', 'torch.squeeze', (['decoder_step_vec'], {}), '(decoder_step_vec)\n', (19592, 19610), False, 'import torch\n'), ((19750, 19787), 'torch.unsqueeze', 'torch.unsqueeze', (['this_step_new_emb', '(1)'], {}), '(this_step_new_emb, 1)\n', (19765, 19787), False, 'import torch\n'), ((19980, 20014), 'torch.unsqueeze', 'torch.unsqueeze', (['this_step_mask', '(0)'], {}), '(this_step_mask, 0)\n', (19995, 20014), False, 'import torch\n'), ((20063, 20097), 'torch.unsqueeze', 'torch.unsqueeze', (['this_step_mask', '(2)'], {}), '(this_step_mask, 2)\n', (20078, 20097), False, 'import torch\n'), ((20270, 20347), 'torch.where', 'torch.where', (['(this_step_mask > 0)', 'this_step_new_emb', 'initial_option_embeddings'], {}), '(this_step_mask > 0, this_step_new_emb, initial_option_embeddings)\n', (20281, 20347), False, 'import torch\n'), ((3702, 3770), 'transformers.BertModel.from_pretrained', 
'BertModel.from_pretrained', (['conf.model_size'], {'cache_dir': 'conf.cache_dir'}), '(conf.model_size, cache_dir=conf.cache_dir)\n', (3727, 3770), False, 'from transformers import BertModel\n'), ((15038, 15078), 'torch.transpose', 'torch.transpose', (['question_attn_vec', '(1)', '(2)'], {}), '(question_attn_vec, 1, 2)\n', (15053, 15078), False, 'import torch\n'), ((15364, 15402), 'torch.transpose', 'torch.transpose', (['question_attn_w', '(1)', '(2)'], {}), '(question_attn_w, 1, 2)\n', (15379, 15402), False, 'import torch\n'), ((3864, 3938), 'transformers.LongformerModel.from_pretrained', 'LongformerModel.from_pretrained', (['conf.model_size'], {'cache_dir': 'conf.cache_dir'}), '(conf.model_size, cache_dir=conf.cache_dir)\n', (3895, 3938), False, 'from transformers import LongformerModel\n')]
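The tuples above come from an encoder-decoder program-generation model whose backbone is chosen among BERT, RoBERTa, and Longformer based on `conf.model_size`, each loaded with `from_pretrained(..., cache_dir=conf.cache_dir)`. A minimal sketch of that dispatch; the substring checks are an assumption, only the three `from_pretrained` calls are attested in the extraction:

from transformers import BertModel, RobertaModel, LongformerModel

def build_encoder(conf):
    # Select the pretrained backbone by model name; cache_dir pins the
    # download location so repeated runs reuse the same weights.
    if "roberta" in conf.model_size:
        return RobertaModel.from_pretrained(conf.model_size, cache_dir=conf.cache_dir)
    if "longformer" in conf.model_size:
        return LongformerModel.from_pretrained(conf.model_size, cache_dir=conf.cache_dir)
    return BertModel.from_pretrained(conf.model_size, cache_dir=conf.cache_dir)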
#!/usr/bin/env python """Test gracefully exiting if no study genes are in assc or population.""" import os # from goatools.rpt.goea_nt_xfrm import MgrNtGOEAs # get_goea_nts_all from goatools.test_data.genes_NCBI_10090_ProteinCoding import GENEID2NT as GeneID2nt_mus from goatools.test_data.nature3102_goea import get_geneid2symbol, get_goeaobj __copyright__ = "Copyright (C) 2016-2017, <NAME>, <NAME>, All rights reserved." REPO = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../") def test_example(): """Test GOEnrichmentStudy::print_results.""" # -------------------------------------------------------------------- # -------------------------------------------------------------------- # Gene Ontology Enrichment Analysis (GOEA) # -------------------------------------------------------------------- # -------------------------------------------------------------------- taxid = 10090 # Mouse study # Load ontologies, associations, and population ids geneids_pop = GeneID2nt_mus.keys() geneids2symbol_study = get_geneid2symbol("nbt.3102-S4_GeneIDs.xlsx") goeaobj = get_goeaobj("fdr_bh", geneids_pop, taxid) # No study genes at all geneids_study_none = set() goea_results_all = goeaobj.run_study(geneids_study_none) assert not goea_results_all, 'NO STUDY GENES TEST FAILED: {R}'.format(R=goea_results_all) # No study genes in population or association geneids_study_bad = set(['BADVAL']) goea_results_all = goeaobj.run_study(geneids_study_bad) # goea_results_sig = [r for r in goea_results_all if r.p_fdr_bh < 0.05] assert not goea_results_all, 'NO VALID STUDY GENES TEST FAILED: {R}'.format(R=goea_results_all) # goea_results_all = goeaobj.run_study(geneids_study) goeaobj.print_results(goea_results_all, pval=None) goeaobj.print_date() if __name__ == '__main__': test_example() # Copyright (C) 2016-2017, <NAME>, <NAME>, All rights reserved.
[ "goatools.test_data.nature3102_goea.get_geneid2symbol", "os.path.abspath", "goatools.test_data.nature3102_goea.get_goeaobj", "goatools.test_data.genes_NCBI_10090_ProteinCoding.GENEID2NT.keys" ]
[((1023, 1043), 'goatools.test_data.genes_NCBI_10090_ProteinCoding.GENEID2NT.keys', 'GeneID2nt_mus.keys', ([], {}), '()\n', (1041, 1043), True, 'from goatools.test_data.genes_NCBI_10090_ProteinCoding import GENEID2NT as GeneID2nt_mus\n'), ((1071, 1116), 'goatools.test_data.nature3102_goea.get_geneid2symbol', 'get_geneid2symbol', (['"""nbt.3102-S4_GeneIDs.xlsx"""'], {}), "('nbt.3102-S4_GeneIDs.xlsx')\n", (1088, 1116), False, 'from goatools.test_data.nature3102_goea import get_geneid2symbol, get_goeaobj\n'), ((1131, 1172), 'goatools.test_data.nature3102_goea.get_goeaobj', 'get_goeaobj', (['"""fdr_bh"""', 'geneids_pop', 'taxid'], {}), "('fdr_bh', geneids_pop, taxid)\n", (1142, 1172), False, 'from goatools.test_data.nature3102_goea import get_geneid2symbol, get_goeaobj\n'), ((464, 489), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (479, 489), False, 'import os\n')]
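The test above only exercises the degenerate study sets; the commented-out `run_study(geneids_study)` line points at the normal flow. A sketch of that positive path, reusing the same helpers (the 0.05 significance cutoff is the conventional choice, assumed here):

from goatools.test_data.genes_NCBI_10090_ProteinCoding import GENEID2NT as GeneID2nt_mus
from goatools.test_data.nature3102_goea import get_geneid2symbol, get_goeaobj

geneids_pop = GeneID2nt_mus.keys()
geneids2symbol_study = get_geneid2symbol("nbt.3102-S4_GeneIDs.xlsx")
goeaobj = get_goeaobj("fdr_bh", geneids_pop, 10090)

# Run the enrichment on the study genes from the spreadsheet.
geneids_study = geneids2symbol_study.keys()
goea_results_all = goeaobj.run_study(geneids_study)
goea_results_sig = [r for r in goea_results_all if r.p_fdr_bh < 0.05]
goeaobj.print_results(goea_results_sig, pval=None)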
# -*- coding: utf8 -*- import pandas as pd from fix_spanish_title_case import fix_spanish_title_case from classification import ( Hierarchy, parent_code_table_to_parent_id_table, Classification, ) if __name__ == "__main__": df = pd.read_csv( "in/ubigeo-data-titlecased.csv", encoding="utf-8", dtype={"inei": str} ) df.columns = ["reniec_code", "code", "name", "complete_name"] df = df[["code", "name"]] df = df[~df.code.isnull()] df.name = df.name.map(fix_spanish_title_case, na_action="ignore") df["name_es"] = df.name df["name_en"] = df.name df["name_short_es"] = df.name df["name_short_en"] = df.name # This adds a highest level element that represents the whole country peru = pd.Series( { "code": "000000", "name": u"Peru", "name_es": u"Perú", "name_short_es": u"Perú", "name_en": u"Peru", "name_short_en": u"Peru", } ) df = pd.concat([pd.DataFrame(peru).T, df]) def fix_levels(row): if row.code == "000000": row["level"] = "country" row["parent_code"] = pd.np.nan elif row.code.endswith("0000"): row["level"] = "department" row["parent_code"] = "000000" elif row.code.endswith("00"): row["level"] = "province" row["parent_code"] = row["code"][:2] + "0000" else: row["level"] = "district" row["parent_code"] = row["code"][:4] + "00" return row df = df.apply(fix_levels, axis=1) h = Hierarchy(["country", "department", "province", "district"]) df.level = df.level.astype("category", categories=h, ordered=True) df = df.sort_values(by=["level", "code"]) df.level = df.level.astype(str) df = df.reset_index(drop=True) parent_id_table = parent_code_table_to_parent_id_table(df, h) # TODO: This isn't the official classification level name but this makes # compatibility between colombia and mexico way easier # parent_code_table.loc[parent_code_table.level == "state", "level"] = "department" # Drop the "locality" level since we don't use it # parent_code_table = parent_code_table[parent_code_table.level != "locality"] parent_id_table = parent_id_table[ [ "code", "name", "level", "name_es", "name_en", "name_short_es", "name_short_en", "parent_id", ] ] c = Classification(parent_id_table, h) c.to_csv("out/locations_peru_inei.csv") c.to_stata("out/locations_peru_inei.dta")
[ "pandas.DataFrame", "pandas.read_csv", "classification.Classification", "pandas.Series", "classification.parent_code_table_to_parent_id_table", "classification.Hierarchy" ]
[((249, 337), 'pandas.read_csv', 'pd.read_csv', (['"""in/ubigeo-data-titlecased.csv"""'], {'encoding': '"""utf-8"""', 'dtype': "{'inei': str}"}), "('in/ubigeo-data-titlecased.csv', encoding='utf-8', dtype={\n 'inei': str})\n", (260, 337), True, 'import pandas as pd\n'), ((756, 898), 'pandas.Series', 'pd.Series', (["{'code': '000000', 'name': u'Peru', 'name_es': u'Perú', 'name_short_es':\n u'Perú', 'name_en': u'Peru', 'name_short_en': u'Peru'}"], {}), "({'code': '000000', 'name': u'Peru', 'name_es': u'Perú',\n 'name_short_es': u'Perú', 'name_en': u'Peru', 'name_short_en': u'Peru'})\n", (765, 898), True, 'import pandas as pd\n'), ((1609, 1669), 'classification.Hierarchy', 'Hierarchy', (["['country', 'department', 'province', 'district']"], {}), "(['country', 'department', 'province', 'district'])\n", (1618, 1669), False, 'from classification import Hierarchy, parent_code_table_to_parent_id_table, Classification\n'), ((1882, 1925), 'classification.parent_code_table_to_parent_id_table', 'parent_code_table_to_parent_id_table', (['df', 'h'], {}), '(df, h)\n', (1918, 1925), False, 'from classification import Hierarchy, parent_code_table_to_parent_id_table, Classification\n'), ((2554, 2588), 'classification.Classification', 'Classification', (['parent_id_table', 'h'], {}), '(parent_id_table, h)\n', (2568, 2588), False, 'from classification import Hierarchy, parent_code_table_to_parent_id_table, Classification\n'), ((1012, 1030), 'pandas.DataFrame', 'pd.DataFrame', (['peru'], {}), '(peru)\n', (1024, 1030), True, 'import pandas as pd\n')]
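`fix_levels` encodes the six-digit ubigeo convention: a trailing `0000` marks a department, a trailing `00` a province, anything else a district, and the parent code is obtained by zeroing the lower digits. A small standalone sketch of that walk, using `150101` as a hypothetical district code:

def parent_chain(code):
    # Walk a ubigeo code up to the country root, mirroring fix_levels.
    chain = [code]
    if not code.endswith("00"):                 # district -> province
        chain.append(code[:4] + "00")
    if not chain[-1].endswith("0000"):          # province -> department
        chain.append(chain[-1][:2] + "0000")
    if chain[-1] != "000000":                   # department -> country
        chain.append("000000")
    return chain

print(parent_chain("150101"))  # ['150101', '150100', '150000', '000000']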
# MIT License # # Copyright (c) 2019 <NAME> <https://github.com/delivrance> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import asyncio import logging import os import subprocess from pathlib import Path from random import Random from uuid import uuid4 from PIL import Image MIN_AMOUNT_IMAGE = 1 MAX_AMOUNT_IMAGE = 10 MIN_AMOUNT_VIDEO = 0 MAX_AMOUNT_VIDEO = 3 MIN_SEED = -2 ** 63 MAX_SEED = 2 ** 63 - 1 SOS = b"\xFF\xDA" # Start Of Scan EOI = b"\xFF\xD9" # End Of Image OUT_NAME_TEMPLATE = "{}_glitch.{}" log = logging.getLogger(__name__) def jpeg(photo: str, seed: int = None, min_amount: int = MIN_AMOUNT_IMAGE, max_amount: int = MAX_AMOUNT_IMAGE, inplace: bool = False) -> str: """Glitch a JPEG file. A new image will be saved in the current working directory with the string "_glitch" appended to the filename. E.g.: "monalisa.jpg" becomes "monalisa_glitch.jpg". Args: photo (str): JPEG photo file to glitch. Pass a file path as string to glitch a photo that exists on your local machine. seed (int, optional): Pseudo-random number generator seed. Using again the same seed on the original file will result in identical glitched images. Defaults to a random value. min_amount (int, optional): Minimum amount of bytes to corrupt. A negative value will result in min_amount = 0. A value higher than max_amount will result in max_amount = min_amount. The actual amount will be chosen randomly in range [min_amount, max_amount]. Defaults to 1. max_amount (int, optional): Maximum amount of bytes to corrupt. A negative value will result in max_amount = 1. A value lower than min_amount will result in max_amount = min_amount. The actual amount will be chosen randomly in range [min_amount, max_amount]. Defaults to 10. inplace (bool, optional): Pass True to glitch the image in-place and avoid creating a new JPEG file. This will overwrite the original image. Defaults to False. Returns: On success, the absolute path of the glitched image is returned. 
""" out = photo if inplace else OUT_NAME_TEMPLATE.format(Path(photo).stem, "jpg") prng = Random(seed) if min_amount < 0: min_amount = 0 if max_amount < 0: max_amount = 1 if min_amount > max_amount: max_amount = min_amount amount = prng.randint(min_amount, max_amount) with open(photo, "rb") as f: original = f.read() start = original.index(SOS) + len(SOS) + 10 end = original.rindex(EOI) data = bytearray(original[start:end]) glitched = set() for _ in range(amount): while True: index = prng.randrange(len(data)) if index not in glitched: if data[index] not in [0, 255]: glitched.add(index) break while True: value = prng.randint(1, 254) if data[index] != value: data[index] = value break with open(out, "wb") as f: f.write( original[:start] + data + original[end:] ) return Path(out).absolute() async def jpeg_async(*args, **kwargs): return jpeg(*args, **kwargs) def png(photo: str, seed: int = None, min_amount: int = MIN_AMOUNT_IMAGE, max_amount: int = MAX_AMOUNT_IMAGE, inplace: bool = False): out = photo if inplace else OUT_NAME_TEMPLATE.format(Path(photo).stem, "png") jpg_path = "{}.jpg".format(uuid4()) png = Image.open(photo).convert("RGBA") bg = Image.new("RGB", png.size, (255, 255, 255)) bg.paste(png, png) bg.save(jpg_path) jpeg(jpg_path, seed, min_amount, max_amount, True) Image.open(jpg_path).convert("RGBA").save(out) os.remove(jpg_path) return Path(out).absolute() async def png_async(*args, **kwargs): return png(*args, **kwargs) def webp(photo: str, seed: int = None, min_amount: int = MIN_AMOUNT_IMAGE, max_amount: int = MAX_AMOUNT_IMAGE, inplace: bool = False): out = photo if inplace else OUT_NAME_TEMPLATE.format(Path(photo).stem, "webp") png_path = "{}.png".format(uuid4()) webp = Image.open(photo) webp.save(png_path) png(png_path, seed, min_amount, max_amount, True) Image.open(png_path).save(out) os.remove(png_path) return Path(out).absolute() async def webp_async(*args, **kwargs): return webp(*args, **kwargs) def mp4(video: str, seed: int = None, min_amount: int = MIN_AMOUNT_VIDEO, max_amount: int = MAX_AMOUNT_VIDEO, inplace: bool = False): out = video if inplace else OUT_NAME_TEMPLATE.format(Path(video).stem, "mp4") uuid = uuid4() try: fps = subprocess.check_output( "ffprobe -v error -select_streams v -of " "default=noprint_wrappers=1:nokey=1 -show_entries stream=r_frame_rate {video}".format( video=video ), shell=True ).strip().decode() os.system( "ffmpeg -loglevel quiet -i {video} {uuid}_%8d.jpg".format( video=video, uuid=uuid ) ) prng = Random(seed) for p in sorted(Path().rglob(f"{uuid}_*.jpg")): jpeg(str(p), prng.getrandbits(2500), min_amount, max_amount, inplace=True) os.system( "ffmpeg -loglevel quiet -r {fps} -i {uuid}_%8d.jpg {out} -y".format( fps=fps, uuid=uuid, out=out ) ) except Exception as e: log.error(e) finally: for p in Path().rglob(f"{uuid}_*.jpg"): try: os.remove(str(p)) except OSError: pass return Path(out).absolute() async def mp4_async(video: str, seed: int = None, min_amount: int = MIN_AMOUNT_VIDEO, max_amount: int = MAX_AMOUNT_VIDEO, inplace: bool = False): out = video if inplace else OUT_NAME_TEMPLATE.format(Path(video).stem, "mp4") uuid = uuid4() try: fps = subprocess.check_output( "ffprobe -v error -select_streams v -of " "default=noprint_wrappers=1:nokey=1 -show_entries stream=r_frame_rate {video}".format( video=video ), shell=True ).strip().decode() process = await asyncio.create_subprocess_shell( "ffmpeg -loglevel quiet -i {video} {uuid}_%8d.jpg".format( video=video, uuid=uuid ) ) await process.wait() prng = Random(seed) for p in 
sorted(Path().rglob(f"{uuid}_*.jpg")): jpeg(str(p), prng.randint(MIN_SEED, MAX_SEED), min_amount, max_amount, inplace=True) process = await asyncio.create_subprocess_shell( "ffmpeg -loglevel quiet -r {fps} -i {uuid}_%8d.jpg {out} -y".format( fps=fps, uuid=uuid, out=out ) ) await process.wait() except Exception as e: log.error(e) finally: for p in Path().rglob("{uuid}_*.jpg".format(uuid=uuid)): try: os.remove(str(p)) except OSError: pass return Path(out).absolute()
[ "PIL.Image.new", "os.remove", "uuid.uuid4", "random.Random", "PIL.Image.open", "pathlib.Path", "logging.getLogger" ]
[((1533, 1560), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1550, 1560), False, 'import logging\n'), ((3400, 3412), 'random.Random', 'Random', (['seed'], {}), '(seed)\n', (3406, 3412), False, 'from random import Random\n'), ((4887, 4930), 'PIL.Image.new', 'Image.new', (['"""RGB"""', 'png.size', '(255, 255, 255)'], {}), "('RGB', png.size, (255, 255, 255))\n", (4896, 4930), False, 'from PIL import Image\n'), ((5089, 5108), 'os.remove', 'os.remove', (['jpg_path'], {}), '(jpg_path)\n', (5098, 5108), False, 'import os\n'), ((5522, 5539), 'PIL.Image.open', 'Image.open', (['photo'], {}), '(photo)\n', (5532, 5539), False, 'from PIL import Image\n'), ((5660, 5679), 'os.remove', 'os.remove', (['png_path'], {}), '(png_path)\n', (5669, 5679), False, 'import os\n'), ((6048, 6055), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (6053, 6055), False, 'from uuid import uuid4\n'), ((7458, 7465), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (7463, 7465), False, 'from uuid import uuid4\n'), ((4823, 4830), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (4828, 4830), False, 'from uuid import uuid4\n'), ((5501, 5508), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (5506, 5508), False, 'from uuid import uuid4\n'), ((6537, 6549), 'random.Random', 'Random', (['seed'], {}), '(seed)\n', (6543, 6549), False, 'from random import Random\n'), ((8014, 8026), 'random.Random', 'Random', (['seed'], {}), '(seed)\n', (8020, 8026), False, 'from random import Random\n'), ((4447, 4456), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (4451, 4456), False, 'from pathlib import Path\n'), ((4843, 4860), 'PIL.Image.open', 'Image.open', (['photo'], {}), '(photo)\n', (4853, 4860), False, 'from PIL import Image\n'), ((5121, 5130), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (5125, 5130), False, 'from pathlib import Path\n'), ((5624, 5644), 'PIL.Image.open', 'Image.open', (['png_path'], {}), '(png_path)\n', (5634, 5644), False, 'from PIL import Image\n'), ((5692, 5701), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (5696, 5701), False, 'from pathlib import Path\n'), ((7116, 7125), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (7120, 7125), False, 'from pathlib import Path\n'), ((8687, 8696), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (8691, 8696), False, 'from pathlib import Path\n'), ((3364, 3375), 'pathlib.Path', 'Path', (['photo'], {}), '(photo)\n', (3368, 3375), False, 'from pathlib import Path\n'), ((4767, 4778), 'pathlib.Path', 'Path', (['photo'], {}), '(photo)\n', (4771, 4778), False, 'from pathlib import Path\n'), ((5444, 5455), 'pathlib.Path', 'Path', (['photo'], {}), '(photo)\n', (5448, 5455), False, 'from pathlib import Path\n'), ((6012, 6023), 'pathlib.Path', 'Path', (['video'], {}), '(video)\n', (6016, 6023), False, 'from pathlib import Path\n'), ((6973, 6979), 'pathlib.Path', 'Path', ([], {}), '()\n', (6977, 6979), False, 'from pathlib import Path\n'), ((7422, 7433), 'pathlib.Path', 'Path', (['video'], {}), '(video)\n', (7426, 7433), False, 'from pathlib import Path\n'), ((8527, 8533), 'pathlib.Path', 'Path', ([], {}), '()\n', (8531, 8533), False, 'from pathlib import Path\n'), ((5037, 5057), 'PIL.Image.open', 'Image.open', (['jpg_path'], {}), '(jpg_path)\n', (5047, 5057), False, 'from PIL import Image\n'), ((6575, 6581), 'pathlib.Path', 'Path', ([], {}), '()\n', (6579, 6581), False, 'from pathlib import Path\n'), ((8052, 8058), 'pathlib.Path', 'Path', ([], {}), '()\n', (8056, 8058), False, 'from pathlib import Path\n')]
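A brief usage sketch for the module above; `monalisa.jpg` comes from the docstring, while `drawing.png` and `clip.mp4` are stand-in filenames:

# A fixed seed makes the corruption reproducible: rerunning with
# seed=1234 on the same input yields an identical glitch.
out = jpeg("monalisa.jpg", seed=1234)
print(out)  # absolute path ending in monalisa_glitch.jpg

# PNG and WebP inputs are round-tripped through JPEG internally.
png("drawing.png", seed=1234, min_amount=2, max_amount=8)

# Videos are split into frames with ffmpeg, glitched per frame, reassembled.
mp4("clip.mp4", seed=1234)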
from mininet.topo import Topo
from mininet.net import OVSKernelSwitch


class GedungSatu(Topo):
    def __init__(self, **opts):
        Topo.__init__(self, **opts)

        # add 15 client hosts (MACs in the 00:00:00:00:0a:xx range)
        h1 = self.addHost(name="h1", mac="00:00:00:00:0a:01", ip="192.168.100.10/27")
        h2 = self.addHost(name="h2", mac="00:00:00:00:0a:02", ip="192.168.100.11/27")
        h3 = self.addHost(name="h3", mac="00:00:00:00:0a:03", ip="192.168.100.12/27")
        h4 = self.addHost(name="h4", mac="00:00:00:00:0a:04", ip="192.168.100.13/27")
        h5 = self.addHost(name="h5", mac="00:00:00:00:0a:05", ip="192.168.100.14/27")
        h6 = self.addHost(name="h6", mac="00:00:00:00:0a:06", ip="192.168.100.15/27")
        h7 = self.addHost(name="h7", mac="00:00:00:00:0a:07", ip="192.168.100.16/27")
        h8 = self.addHost(name="h8", mac="00:00:00:00:0a:08", ip="192.168.100.17/27")
        h9 = self.addHost(name="h9", mac="00:00:00:00:0a:09", ip="192.168.100.18/27")
        h10 = self.addHost(name="h10", mac="00:00:00:00:0a:10", ip="192.168.100.19/27")
        h11 = self.addHost(name="h11", mac="00:00:00:00:0a:11", ip="192.168.100.20/27")
        h12 = self.addHost(name="h12", mac="00:00:00:00:0a:12", ip="192.168.100.21/27")
        h13 = self.addHost(name="h13", mac="00:00:00:00:0a:13", ip="192.168.100.22/27")
        h14 = self.addHost(name="h14", mac="00:00:00:00:0a:14", ip="192.168.100.23/27")
        h15 = self.addHost(name="h15", mac="00:00:00:00:0a:15", ip="192.168.100.24/27")

        # add 7 switches (MACs in the 00:00:00:00:0b:xx range)
        s1 = self.addSwitch(name="s1", cls=OVSKernelSwitch, mac="00:00:00:00:0b:01")
        s2 = self.addSwitch(name="s2", cls=OVSKernelSwitch, mac="00:00:00:00:0b:02")
        s3 = self.addSwitch(name="s3", cls=OVSKernelSwitch, mac="00:00:00:00:0b:03")
        s4 = self.addSwitch(name="s4", cls=OVSKernelSwitch, mac="00:00:00:00:0b:04")
        s5 = self.addSwitch(name="s5", cls=OVSKernelSwitch, mac="00:00:00:00:0b:05")
        s6 = self.addSwitch(name="s6", cls=OVSKernelSwitch, mac="00:00:00:00:0b:06")
        s7 = self.addSwitch(name="s7", cls=OVSKernelSwitch, mac="00:00:00:00:0b:07")

        # build the tree topology
        # connect the upper switches:
        # attach switches s2, s3, s4 to switch s1
        self.addLink(s2, s1)
        self.addLink(s3, s1)
        self.addLink(s4, s1)

        # join the upper and lower networks through the two core switches
        self.addLink(s5, s1)

        # connect the lower switches:
        # attach switches s6, s7 to switch s5
        self.addLink(s6, s5)
        self.addLink(s7, s5)

        # connect clients to each upper switch
        # clients on switch s2
        self.addLink(h1, s2)
        self.addLink(h2, s2)
        self.addLink(h3, s2)

        # clients on switch s3
        self.addLink(h4, s3)
        self.addLink(h5, s3)
        self.addLink(h6, s3)

        # clients on switch s4
        self.addLink(h7, s4)
        self.addLink(h8, s4)
        self.addLink(h9, s4)

        # connect clients to each lower switch
        # clients on switch s6
        self.addLink(h10, s6)
        self.addLink(h11, s6)
        self.addLink(h12, s6)

        # clients on switch s7
        self.addLink(h13, s7)
        self.addLink(h14, s7)
        self.addLink(h15, s7)


topos = {"g1": (lambda: GedungSatu())}
[ "mininet.topo.Topo.__init__" ]
[((135, 162), 'mininet.topo.Topo.__init__', 'Topo.__init__', (['self'], {}), '(self, **opts)\n', (148, 162), False, 'from mininet.topo import Topo\n')]
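Because the module ends with a `topos` registry, Mininet's CLI can load it directly (`sudo mn --custom <file>.py --topo g1`); the same topology can also be driven from the Python API, as in this sketch (assumes Mininet is installed and the script runs with root privileges):

from mininet.net import Mininet
from mininet.cli import CLI

net = Mininet(topo=GedungSatu())
net.start()
net.pingAll()   # verify reachability across all 15 hosts
CLI(net)        # drop into the interactive mininet> prompt
net.stop()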
""" Django settings for example project. """ import os from pathlib import Path # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = Path.cwd() # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "a_not_so_secret_key" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'channels', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'sockpuppet', 'tests.example', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'tests.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] ASGI_APPLICATION = 'sockpuppet.routing.application' WSGI_APPLICATION = 'tests.example.wsgi.application' CHANNEL_LAYERS = { "default": { "BACKEND": "channels.layers.InMemoryChannelLayer" } } # CHANNEL_LAYERS = { # "default": { # "BACKEND": "channels_redis.core.RedisChannelLayer", # "CONFIG": { # "hosts": [("127.0.0.1", 6379)], # }, # }, # } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'root': { 'handlers': ['console'], 'level': 'DEBUG' }, 'handlers': { 'console': { 'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'), 'class': 'logging.StreamHandler', 'formatter': 'simple' }, 'sockpuppet': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'simple' } }, 'formatters': { 'verbose': { 'format': "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", 'datefmt': "%d/%b/%Y %H:%M:%S" }, 'simple': { 'format': '%(levelname)s %(message)s' }, }, 'loggers': { 'django.db.backends': { # uncomment to see all queries # 'level': 'DEBUG', 'handlers': ['console'], }, 'sockpuppet': { 'level': 'DEBUG', 'handlers': ['sockpuppet'] } } } DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) STATIC_URL = '/static/' STATICFILES_DIRS = [ ('js', '{}/jsdist/js/'.format(BASE_DIR)), ]
[ "pathlib.Path.cwd", "os.path.join", "os.getenv" ]
[((165, 175), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (173, 175), False, 'from pathlib import Path\n'), ((3175, 3211), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (3187, 3211), False, 'import os\n'), ((1217, 1252), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1229, 1252), False, 'import os\n'), ((2245, 2282), 'os.getenv', 'os.getenv', (['"""DJANGO_LOG_LEVEL"""', '"""INFO"""'], {}), "('DJANGO_LOG_LEVEL', 'INFO')\n", (2254, 2282), False, 'import os\n')]
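The `CHANNEL_LAYERS` block above wires channels to the in-memory backend (the Redis variant is left commented out). A minimal sketch of pushing an event through that layer from synchronous code; the group and event names are made up for illustration:

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

channel_layer = get_channel_layer()  # resolves the "default" layer configured above

# Broadcast to a hypothetical "notifications" group; any consumer that has
# joined the group receives the event and dispatches it by its "type" key.
async_to_sync(channel_layer.group_send)(
    "notifications",
    {"type": "notify.message", "text": "hello"},
)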
import argparse import codecs import csv from utils.sparql_queries import load_dataset, clean_dataset from utils.sparql_queries import get_uri_suffix, exec_ner_query import filepath_config as RDFfiles CSV_DELIMITER = '|' REPLACE_DELIMITER = ' ' def results_to_csv(results, filename): with codecs.open(filename, "w") as fp: writer = csv.writer(fp, delimiter=CSV_DELIMITER) header = ["entity", "entity_local_name", "label", "language", "type", "type_local_name"] writer.writerow(header) while results.hasNext(): row = [] try: next_result = results.next() entity = next_result.get("?entity").toString().replace(CSV_DELIMITER, REPLACE_DELIMITER) row.append(entity) row.append(get_uri_suffix(entity)) label = next_result.get("?label") language = 'undefined' if label: label = label.toString().replace(CSV_DELIMITER, REPLACE_DELIMITER) label, language = split_string_lang(label) row.append(label) row.append(language) typ = next_result.get("?type").toString().replace(CSV_DELIMITER, REPLACE_DELIMITER) row.append(typ) row.append(get_uri_suffix(typ)) writer.writerow(row) except Exception: continue def split_string_lang(obj): splitted_obj = obj.replace('"', '').split('@') string = splitted_obj[0].replace('^^http://www.w3.org/2001/XMLSchema#string', '') language = 'undefined' if len(splitted_obj) > 1: language = splitted_obj[1] return string, language def dbpedia_ner_dataset(): dataset = load_dataset(RDFfiles.DBPEDIA_LABELS_EN) dataset = load_dataset(RDFfiles.DBPEDIA_TRANSITIVE_TYPES_EN) dataset = load_dataset(RDFfiles.DBPEDIA_LABELS_FR) dataset = load_dataset(RDFfiles.DBPEDIA_TRANSITIVE_TYPES_FR) results = exec_ner_query(dataset) results_to_csv(results, "ner_dbpedia.csv") def main(): argparse.ArgumentParser(prog='ner-dataset', description='Transform rdf dataset into a dataset for NER') clean_dataset() dbpedia_ner_dataset() clean_dataset() if __name__ == "__main__": main()
[ "utils.sparql_queries.get_uri_suffix", "codecs.open", "argparse.ArgumentParser", "csv.writer", "utils.sparql_queries.load_dataset", "utils.sparql_queries.exec_ner_query", "utils.sparql_queries.clean_dataset" ]
[((1762, 1802), 'utils.sparql_queries.load_dataset', 'load_dataset', (['RDFfiles.DBPEDIA_LABELS_EN'], {}), '(RDFfiles.DBPEDIA_LABELS_EN)\n', (1774, 1802), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((1817, 1867), 'utils.sparql_queries.load_dataset', 'load_dataset', (['RDFfiles.DBPEDIA_TRANSITIVE_TYPES_EN'], {}), '(RDFfiles.DBPEDIA_TRANSITIVE_TYPES_EN)\n', (1829, 1867), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((1882, 1922), 'utils.sparql_queries.load_dataset', 'load_dataset', (['RDFfiles.DBPEDIA_LABELS_FR'], {}), '(RDFfiles.DBPEDIA_LABELS_FR)\n', (1894, 1922), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((1937, 1987), 'utils.sparql_queries.load_dataset', 'load_dataset', (['RDFfiles.DBPEDIA_TRANSITIVE_TYPES_FR'], {}), '(RDFfiles.DBPEDIA_TRANSITIVE_TYPES_FR)\n', (1949, 1987), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((2002, 2025), 'utils.sparql_queries.exec_ner_query', 'exec_ner_query', (['dataset'], {}), '(dataset)\n', (2016, 2025), False, 'from utils.sparql_queries import get_uri_suffix, exec_ner_query\n'), ((2091, 2199), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""ner-dataset"""', 'description': '"""Transform rdf dataset into a dataset for NER"""'}), "(prog='ner-dataset', description=\n 'Transform rdf dataset into a dataset for NER')\n", (2114, 2199), False, 'import argparse\n'), ((2199, 2214), 'utils.sparql_queries.clean_dataset', 'clean_dataset', ([], {}), '()\n', (2212, 2214), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((2245, 2260), 'utils.sparql_queries.clean_dataset', 'clean_dataset', ([], {}), '()\n', (2258, 2260), False, 'from utils.sparql_queries import load_dataset, clean_dataset\n'), ((297, 323), 'codecs.open', 'codecs.open', (['filename', '"""w"""'], {}), "(filename, 'w')\n", (308, 323), False, 'import codecs\n'), ((348, 387), 'csv.writer', 'csv.writer', (['fp'], {'delimiter': 'CSV_DELIMITER'}), '(fp, delimiter=CSV_DELIMITER)\n', (358, 387), False, 'import csv\n'), ((800, 822), 'utils.sparql_queries.get_uri_suffix', 'get_uri_suffix', (['entity'], {}), '(entity)\n', (814, 822), False, 'from utils.sparql_queries import get_uri_suffix, exec_ner_query\n'), ((1319, 1338), 'utils.sparql_queries.get_uri_suffix', 'get_uri_suffix', (['typ'], {}), '(typ)\n', (1333, 1338), False, 'from utils.sparql_queries import get_uri_suffix, exec_ner_query\n')]
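`split_string_lang` strips the `xsd:string` datatype suffix and splits off an RDF language tag. Two worked inputs (hypothetical literals), evaluated against the function defined above:

# Language-tagged literal: the part after '@' becomes the language.
print(split_string_lang('"Berlin"@de'))
# ('Berlin', 'de')

# Typed string literal with no tag: the language stays 'undefined'.
print(split_string_lang('"Berlin"^^http://www.w3.org/2001/XMLSchema#string'))
# ('Berlin', 'undefined')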
''' Created on 01 Oct 2018 @author: <NAME> ''' import logging import pandas from glamod.parser.settings import (INPUT_ENCODING, INPUT_DELIMITER, VERBOSE_LOGGING, CHUNK_SIZE) logger = logging.getLogger(__name__) class FileParser(object): def __init__(self, fpath, delimiter=INPUT_DELIMITER, encoding=INPUT_ENCODING): self.fpath = fpath self.delimiter = delimiter self.encoding = encoding self._fh = open(fpath, 'r', encoding=self.encoding) self._col_names = self._parse_header() def rewind(self, to_line=0): "Sets the seek position at the start of the file." self._fh.seek(0) if to_line > 0: for _ in range(to_line): self._fh.readline() def _parse_header(self): assert(self._fh.tell() == 0) return self.readline() def readline(self): "Reads next line and splits on delimiter." return [_.strip() for _ in self._fh.readline().rstrip().split(self.delimiter)] def get_column_names(self): return self._col_names def get_subset_dataframe(self, convertors=None, columns=None): self.rewind() df = pandas.read_csv(self._fh, encoding=self.encoding, delimiter=self.delimiter, converters=convertors, usecols=columns, skipinitialspace=True, verbose=VERBOSE_LOGGING) return df def read_chunks(self, convertors=None): self.rewind() chunk_reader = pandas.read_csv(self._fh, encoding=self.encoding, delimiter=self.delimiter, converters=convertors, skipinitialspace=True, verbose=VERBOSE_LOGGING, chunksize=CHUNK_SIZE) for chunk in chunk_reader: yield chunk def close(self): self._fh.close()
[ "pandas.read_csv", "logging.getLogger" ]
[((193, 220), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (210, 220), False, 'import logging\n'), ((1193, 1365), 'pandas.read_csv', 'pandas.read_csv', (['self._fh'], {'encoding': 'self.encoding', 'delimiter': 'self.delimiter', 'converters': 'convertors', 'usecols': 'columns', 'skipinitialspace': '(True)', 'verbose': 'VERBOSE_LOGGING'}), '(self._fh, encoding=self.encoding, delimiter=self.delimiter,\n converters=convertors, usecols=columns, skipinitialspace=True, verbose=\n VERBOSE_LOGGING)\n', (1208, 1365), False, 'import pandas\n'), ((1563, 1739), 'pandas.read_csv', 'pandas.read_csv', (['self._fh'], {'encoding': 'self.encoding', 'delimiter': 'self.delimiter', 'converters': 'convertors', 'skipinitialspace': '(True)', 'verbose': 'VERBOSE_LOGGING', 'chunksize': 'CHUNK_SIZE'}), '(self._fh, encoding=self.encoding, delimiter=self.delimiter,\n converters=convertors, skipinitialspace=True, verbose=VERBOSE_LOGGING,\n chunksize=CHUNK_SIZE)\n', (1578, 1739), False, 'import pandas\n')]
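A short usage sketch for `FileParser`; `observations.psv` is a made-up filename, and the delimiter and encoding fall back to the parser settings:

parser = FileParser("observations.psv")
print(parser.get_column_names())   # header row, already split and stripped

# Stream the file as CHUNK_SIZE-row DataFrames instead of loading it whole.
for chunk in parser.read_chunks():
    print(len(chunk))

parser.close()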
# ***** BEGIN LICENSE BLOCK ***** # Version: MPL 1.1/GPL 2.0/LGPL 2.1 # # The contents of this file are subject to the Mozilla Public License Version # 1.1 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # The Original Code is Mozilla Sheriff Duty. # # The Initial Developer of the Original Code is Mozilla Corporation. # Portions created by the Initial Developer are Copyright (C) 2011 # the Initial Developer. All Rights Reserved. # # Contributor(s): # <NAME> <<EMAIL>> # # Alternatively, the contents of this file may be used under the terms of # either the GNU General Public License Version 2 or later (the "GPL"), or # the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), # in which case the provisions of the GPL or the LGPL are applicable instead # of those above. If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** import datetime from collections import defaultdict from django.shortcuts import redirect from django import http from django.core.urlresolvers import reverse from django.conf import settings from django.db.models import Max, Min from django.shortcuts import render from django.contrib.auth.models import User from roster.models import Slot def handler404(request): data = {} return render(request, '404.html', data, status=404) def home(request): """Main calendar view.""" # special shortcuts based on calendar filtering if request.GET.get('cal_today'): return redirect(reverse('cal.home')) elif request.GET.get('cal_month'): try: year, month = [int(x) for x in request.GET['cal_month'].split(',')] as_date = datetime.date(year, month, 1) except: raise http.Http404("Invalid month") return redirect(reverse('cal.home') + '?cal_y=%s&cal_m=%s' % (as_date.year, as_date.month)) elif request.GET.get('cal_m_diff'): m_diff = int(request.GET.get('cal_m_diff')) cal_m = request.GET.get('cal_m') if cal_m: cal_m = int(cal_m) else: cal_m = datetime.date.today().month cal_y = request.GET.get('cal_y') if cal_y: cal_y = int(cal_y) else: cal_y = datetime.date.today().year if m_diff < 0 and (cal_m + m_diff) <= 0: # trouble cal_m += m_diff + 12 cal_y -= 1 elif m_diff > 0 and (cal_m + m_diff) > 12: # trouble cal_m += m_diff - 12 cal_y += 1 else: cal_m += m_diff return redirect(reverse('cal.home') + '?cal_y=%s&cal_m=%s' % (cal_y, cal_m)) data = {} # You'd add data here that you're sending to the template. 
on_duty_next = [] today = datetime.date.today() def label(date): if date == today - datetime.timedelta(days=1): return 'Yesterday' if date == today: return 'Today' if date == (today + datetime.timedelta(days=1)): return 'Tomorrow' return date.strftime('%A') extra_i = 0 def is_weekend(date): return date.strftime('%A') in ('Saturday', 'Sunday') _days_back_and_forth = -1, 0, 1, 2, 3 # to reduce queries, first make a collection of all slots access these days _all_slots = {} _all_user_ids = set() _min_date = today + datetime.timedelta(days=_days_back_and_forth[0]) _max_date = today + datetime.timedelta(days=_days_back_and_forth[-1] + 1) for slot in (Slot.objects .filter(date__gte=_min_date, date__lt=_max_date)): _all_slots[slot.date] = slot _all_user_ids.add(slot.user_id) # to avoid having to do a JOIN in the query above, fetch all users once _all_users = {} for user in User.objects.filter(pk__in=_all_user_ids): _all_users[user.pk] = user for i in _days_back_and_forth: date = today + datetime.timedelta(days=i + extra_i) remarks = [] users = [] try: slot = _all_slots[date] pk = slot.pk slot.user = _all_users[slot.user_id] users.append(slot.user) if slot.swap_needed: remarks.append('swap-needed') if slot.user == request.user: remarks.append('self') except KeyError: pk = None if date >= today: if is_weekend(date): remarks.append('weekend') else: remarks.append('offer-needed') if date == today: remarks.append('today') elif date < today: remarks.append('past') on_duty_next.append({ 'label': label(date), 'users': users, 'remarks': remarks, 'pk': pk, 'date': date, }) my_duty_dates = [] if request.user.is_authenticated(): try: last_past = (Slot.objects .filter(user=request.user, date__lt=today) .order_by('-date'))[0] my_duty_dates.append({ 'pk': last_past.pk, 'date': last_past.date, 'remarks': ['last'] }) except IndexError: last_past = None next_slots = (Slot.objects .filter(user=request.user, date__gte=today) .order_by('date'))[:last_past and 4 or 5] _first_next = None for slot in next_slots: remarks = [] if _first_next is None: _first_next = slot.date remarks.append('next') my_duty_dates.append({ 'pk': slot.pk, 'date': slot.date, 'remarks': remarks }) data['my_duty_dates'] = my_duty_dates data['on_duty_next'] = on_duty_next data['date_format'] = settings.DEFAULT_DATE_FORMAT month, year = request.GET.get('cal_m'), request.GET.get('cal_y') if month: month = int(month) data['cal_m'] = month if year: year = int(year) data['cal_y'] = year week = None if request.GET.get('cal_w'): week = int(request.GET.get('cal_w')) data['cal_w'] = week data['weeks'] = _get_calendar_data(year, month, week, request.user, sunday_first=True, weeks=6) data['month_options'] = _get_month_options(year, month, week, weeks=6) return render(request, 'cal/home.html', data) class Dict(dict): def __getattr__(self, key): return self[key] def _get_month_options(year, month, week, weeks=5): min_date = Slot.objects.aggregate(Min('date'))['date__min'] max_date = Slot.objects.aggregate(Max('date'))['date__max'] if year is None or month is None: date = datetime.date.today() else: date = datetime.date(year, month, 1) if week: date += datetime.timedelta(days=week * 7) one_day = datetime.timedelta(days=1) current_months = [] first_on_calendar = date last_on_calendar = date + datetime.timedelta(days=7 * (weeks - 1)) while first_on_calendar < last_on_calendar: first_on_calendar += one_day if (first_on_calendar.month, first_on_calendar.year) not in current_months: current_months.append((first_on_calendar.month, first_on_calendar.year)) d = min_date 
months = [] done = [] while d < max_date: if (d.month, d.year) not in done: done.append((d.month, d.year)) current = today_month = False if (d.month, d.year) == (date.month, date.year): today_month = True if (d.month, d.year) in current_months: current = True month = { 'label': d.strftime('%Y %B'), 'current': current, 'today_month': today_month, 'value': d.strftime('%Y,%m'), } months.append(Dict(month)) d += one_day return months def _get_calendar_data(year, month, week, user, sunday_first=False, weeks=5): if year is None or month is None: date = datetime.date.today() else: date = datetime.date(year, month, 1) if week: date += datetime.timedelta(days=week * 7) no_weeks = weeks weeks = [] _months = [] _rowspans = {} _is_authenticated = user.is_authenticated() _today = datetime.date.today() # the code below (with no_weeks==5) causes 100+ SQL queries so instead # we're going to use a dict to save lookups date_range = ( date, date + datetime.timedelta(days=7 * no_weeks) ) all_slots = defaultdict(list) unclaimed = [] users = defaultdict(list) for slot in (Slot.objects .filter(date__range=date_range) .select_related('user')): all_slots[slot.date].append(slot) users[slot.date].append(slot.user) if slot.swap_needed: unclaimed.append(slot.date) while len(weeks) < no_weeks: days = [] for day_date in _get_day_dates(date, sunday_first=sunday_first): remarks = [] if day_date < _today: remarks.append('past') if day_date in unclaimed: remarks.append('unclaimed') elif day_date == _today: remarks.append('today') elif _is_authenticated and user in users.get(day_date, []): remarks.append('self') day = {'date': day_date, 'remarks': remarks, 'slots': all_slots[day_date]} days.append(Dict(day)) week = {'days': days} if not _months or (_months and date.month != _months[-1]): week['month'] = Dict({ 'label': date.strftime('%b'), 'month_number': date.month, 'rowspan': None, }) _rowspans[date.month] = 0 _months.append(date.month) else: _rowspans[date.month] += 1 week['month'] = None weeks.append(Dict(week)) date += datetime.timedelta(days=7) for week in weeks: if getattr(week, 'month', None): week.month['rowspan'] = _rowspans[week.month.month_number] + 1 return weeks def _get_day_dates(this_date, sunday_first=False): """return 7 date instances that cover this week for any given date. If this_date is a Wednesday, return [<Monday's date>, <Tuesday's date>, this_date, ..., <Sunday's date>] However, if @sunday_first==True return this: [<Sunday's date>, <Monday's date>, <Tuesday's date>, this_date, ..., <Saturday's date>] """ if sunday_first and this_date.strftime('%A') == 'Sunday': this_date += datetime.timedelta(days=1) this_week = this_date.strftime('%W') date = this_date - datetime.timedelta(days=7) dates = [] while len(dates) < 7: if date.strftime('%W') == this_week: dates.append(date) date += datetime.timedelta(days=1) if sunday_first: one_day = datetime.timedelta(days=1) dates = [x - one_day for x in dates] return dates
[ "django.db.models.Max", "django.core.urlresolvers.reverse", "django.db.models.Min", "django.contrib.auth.models.User.objects.filter", "datetime.date.today", "datetime.date", "collections.defaultdict", "datetime.timedelta", "django.http.Http404", "roster.models.Slot.objects.filter", "django.shortcuts.render" ]
[((2062, 2107), 'django.shortcuts.render', 'render', (['request', '"""404.html"""', 'data'], {'status': '(404)'}), "(request, '404.html', data, status=404)\n", (2068, 2107), False, 'from django.shortcuts import render\n'), ((3573, 3594), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3592, 3594), False, 'import datetime\n'), ((4320, 4380), 'roster.models.Slot.objects.filter', 'Slot.objects.filter', ([], {'date__gte': '_min_date', 'date__lt': '_max_date'}), '(date__gte=_min_date, date__lt=_max_date)\n', (4339, 4380), False, 'from roster.models import Slot\n'), ((4616, 4657), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'pk__in': '_all_user_ids'}), '(pk__in=_all_user_ids)\n', (4635, 4657), False, 'from django.contrib.auth.models import User\n'), ((7285, 7323), 'django.shortcuts.render', 'render', (['request', '"""cal/home.html"""', 'data'], {}), "(request, 'cal/home.html', data)\n", (7291, 7323), False, 'from django.shortcuts import render\n'), ((7791, 7817), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (7809, 7817), False, 'import datetime\n'), ((9304, 9325), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (9323, 9325), False, 'import datetime\n'), ((9554, 9571), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9565, 9571), False, 'from collections import defaultdict\n'), ((9603, 9620), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9614, 9620), False, 'from collections import defaultdict\n'), ((4176, 4224), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '_days_back_and_forth[0]'}), '(days=_days_back_and_forth[0])\n', (4194, 4224), False, 'import datetime\n'), ((4249, 4302), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(_days_back_and_forth[-1] + 1)'}), '(days=_days_back_and_forth[-1] + 1)\n', (4267, 4302), False, 'import datetime\n'), ((7637, 7658), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7656, 7658), False, 'import datetime\n'), ((7684, 7713), 'datetime.date', 'datetime.date', (['year', 'month', '(1)'], {}), '(year, month, 1)\n', (7697, 7713), False, 'import datetime\n'), ((7743, 7776), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(week * 7)'}), '(days=week * 7)\n', (7761, 7776), False, 'import datetime\n'), ((7901, 7941), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7 * (weeks - 1))'}), '(days=7 * (weeks - 1))\n', (7919, 7941), False, 'import datetime\n'), ((9031, 9052), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (9050, 9052), False, 'import datetime\n'), ((9078, 9107), 'datetime.date', 'datetime.date', (['year', 'month', '(1)'], {}), '(year, month, 1)\n', (9091, 9107), False, 'import datetime\n'), ((9137, 9170), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(week * 7)'}), '(days=week * 7)\n', (9155, 9170), False, 'import datetime\n'), ((11027, 11053), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7)'}), '(days=7)\n', (11045, 11053), False, 'import datetime\n'), ((11695, 11721), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (11713, 11721), False, 'import datetime\n'), ((11786, 11812), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7)'}), '(days=7)\n', (11804, 11812), False, 'import datetime\n'), ((11946, 11972), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (11964, 11972), False, 'import datetime\n'), ((12013, 12039), 
'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (12031, 12039), False, 'import datetime\n'), ((2273, 2292), 'django.core.urlresolvers.reverse', 'reverse', (['"""cal.home"""'], {}), "('cal.home')\n", (2280, 2292), False, 'from django.core.urlresolvers import reverse\n'), ((4753, 4789), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(i + extra_i)'}), '(days=i + extra_i)\n', (4771, 4789), False, 'import datetime\n'), ((7493, 7504), 'django.db.models.Min', 'Min', (['"""date"""'], {}), "('date')\n", (7496, 7504), False, 'from django.db.models import Max, Min\n'), ((7557, 7568), 'django.db.models.Max', 'Max', (['"""date"""'], {}), "('date')\n", (7560, 7568), False, 'from django.db.models import Max, Min\n'), ((9494, 9531), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7 * no_weeks)'}), '(days=7 * no_weeks)\n', (9512, 9531), False, 'import datetime\n'), ((9638, 9681), 'roster.models.Slot.objects.filter', 'Slot.objects.filter', ([], {'date__range': 'date_range'}), '(date__range=date_range)\n', (9657, 9681), False, 'from roster.models import Slot\n'), ((2462, 2491), 'datetime.date', 'datetime.date', (['year', 'month', '(1)'], {}), '(year, month, 1)\n', (2475, 2491), False, 'import datetime\n'), ((3644, 3670), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3662, 3670), False, 'import datetime\n'), ((3784, 3810), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3802, 3810), False, 'import datetime\n'), ((2526, 2555), 'django.http.Http404', 'http.Http404', (['"""Invalid month"""'], {}), "('Invalid month')\n", (2538, 2555), False, 'from django import http\n'), ((2580, 2599), 'django.core.urlresolvers.reverse', 'reverse', (['"""cal.home"""'], {}), "('cal.home')\n", (2587, 2599), False, 'from django.core.urlresolvers import reverse\n'), ((6124, 6179), 'roster.models.Slot.objects.filter', 'Slot.objects.filter', ([], {'user': 'request.user', 'date__gte': 'today'}), '(user=request.user, date__gte=today)\n', (6143, 6179), False, 'from roster.models import Slot\n'), ((2886, 2907), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2905, 2907), False, 'import datetime\n'), ((3038, 3059), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3057, 3059), False, 'import datetime\n'), ((3388, 3407), 'django.core.urlresolvers.reverse', 'reverse', (['"""cal.home"""'], {}), "('cal.home')\n", (3395, 3407), False, 'from django.core.urlresolvers import reverse\n'), ((5783, 5837), 'roster.models.Slot.objects.filter', 'Slot.objects.filter', ([], {'user': 'request.user', 'date__lt': 'today'}), '(user=request.user, date__lt=today)\n', (5802, 5837), False, 'from roster.models import Slot\n')]
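`_get_day_dates` is easiest to verify with a concrete date: 2011-10-05 was a Wednesday, so the default (Monday-first) week runs Oct 3-9 and `sunday_first=True` shifts everything back one day:

import datetime

wednesday = datetime.date(2011, 10, 5)
print(_get_day_dates(wednesday))
# [2011-10-03, 2011-10-04, ..., 2011-10-09]  (Monday through Sunday)
print(_get_day_dates(wednesday, sunday_first=True))
# [2011-10-02, 2011-10-03, ..., 2011-10-08]  (Sunday through Saturday)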
"""Low-level driver for communicating with PurpleDrop via serial messages """ import inspect import fnmatch import logging import queue import struct import serial import serial.tools.list_ports import sys import threading import time from typing import Any, AnyStr, Callable, Dict, List, Optional, Sequence from purpledrop.calibration import ElectrodeOffsetCalibration from purpledrop.electrode_board import Board import purpledrop.messages as messages import purpledrop.protobuf.messages_pb2 as messages_pb2 from .messages import PurpleDropMessage, ElectrodeEnableMsg, SetPwmMsg from .message_framer import MessageFramer, serialize from .move_drop import move_drop, MoveDropResult logger = logging.getLogger("purpledrop") # Versions of purpledrop software supported by this driver SUPPORTED_VERSIONS = [ "v0.5.*", ] # List of USB VID/PID pairs which will be recognized as a purpledrop PURPLEDROP_VIDPIDS = [ (0x02dd, 0x7da3), (0x1209, 0xCCAA), ] def pinlist2bool(pins): pin_state = [False] * N_PINS for p in pins: if(p >= N_PINS): raise ValueError(f"Pin {p} is invalid. Must be < {N_PINS}") pin_state[p] = True return pin_state def pinlist2mask(pins): mask = [0] * int(((N_PINS + 7) / 8)) for p in pins: word = int(p / 8) bit = p % 8 mask[word] |= (1<<bit) return mask def validate_version(v): for pattern in SUPPORTED_VERSIONS: if fnmatch.fnmatch(v, pattern): return True return False def resolve_msg_filter(filt): """If the filter provided is a message type, then create a filter which returns any message of that type. Otherwise, assume the filter is a lambda method. """ if inspect.isclass(filt): # and issubclass(filt, PurpleDropMessage): return lambda x: isinstance(x, filt) else: return filt def list_purpledrop_devices() -> List[serial.tools.list_ports_common.ListPortInfo]: """Get a list of detected purpledrop devices Returns: A list of `ListPortInfo` objects """ devices = serial.tools.list_ports.comports() selected_devices = [x for x in devices if (x.vid, x.pid) in PURPLEDROP_VIDPIDS] return selected_devices def get_pb_timestamp(): """Get a protobuf timestamp for the current system time """ time_f = time.time() ts = messages_pb2.Timestamp() ts.seconds = int(time_f) ts.nanos = int((time_f % 1) * 1e9) return ts class PurpleDropRxThread(object): def __init__(self, port: serial.Serial, callback: Callable[[PurpleDropMessage], None]=None): self._thread = threading.Thread(target=self.run, name="PurpleDrop Rx", daemon=True) self._ser = port self._framer = MessageFramer(PurpleDropMessage.predictSize) self._callback = callback self.running = True def start(self): self._thread.start() def stop(self): self.running = False def join(self): self._thread.join() def run(self): while self.running: rxBytes = None try: rxBytes = self._ser.read(64) except serial.serialutil.SerialException as e: logger.warn(f"Failed reading from port: {e}") self.running = False return if(len(rxBytes) > 0): for buf in self._framer.parse(rxBytes): if(self._callback): try: self._callback(PurpleDropMessage.from_bytes(buf)) except Exception as e: logger.exception(e) def set_callback(self, callback): self._callback = callback class SyncListener(object): class MsgDelegate(object): def __init__(self, filter_func, fifo): self.filter = filter_func self.fifo = fifo def __call__(self, msg: PurpleDropMessage): if self.filter is None or self.filter(msg): self.fifo.put(msg) def __init__(self, owner, msg_filter=None): self.owner = owner self.filter = resolve_msg_filter(msg_filter) self.fifo = queue.Queue() self.delegate = self.MsgDelegate(self.filter, self.fifo) 
def __del__(self): self.unregister() def unregister(self): self.owner.unregister_listener(self.delegate) def get_msg_handler(self): return self.delegate def wait(self, timeout: Optional[float]=None) -> Optional[PurpleDropMessage]: try: return self.fifo.get(timeout=timeout) except queue.Empty: return None class AsyncListener(object): class MsgDelegate(object): def __init__(self, filter_func, callback): self.filter = filter_func self.callback = callback def __call__(self, msg: PurpleDropMessage): if self.filter is None or self.filter(msg): self.callback(msg) def __init__(self, owner, callback, msg_filter=None): self.owner = owner self.callback = callback self.filter = resolve_msg_filter(msg_filter) self.delegate = self.MsgDelegate(self.filter, callback) def __del__(self): self.owner.unregister_listener(self.delegate) def get_msg_handler(self): return self.delegate class PurpleDropDevice(): """Low level messaging for controlling a PurpleDrop via a serial port Use `list_purpledrop_devices()` to find devices based on their USB VID/PID and serial number. Then provide the com port (e.g. `/dev/ttyACM0`) when instantiating a PurpleDropDevice. NOTE: This class provides low level control of the device. For most uses, you should be using PurpleDropControl which provides higher level functionality and matches the JSON-RPC methods provided by `pd-server`. """ def __init__(self, port=None): self._rx_thread = None self._ser = None self.lock = threading.Lock() self.listeners = [] self.__connected_callbacks: List[Callable] = [] self.__disconnected_callbacks: List[Callable] = [] if port is not None: self.open(port) def register_connected_callback(self, callback: Callable): self.__connected_callbacks.append(callback) def register_disconnected_callback(self, callback: Callable): self.__disconnected_callbacks.append(callback) def open(self, port): logger.debug(f"PurpleDropDevice: opening {port}") self._ser = serial.Serial(port, timeout=0.01, write_timeout=0.5) self._rx_thread = PurpleDropRxThread(self._ser, callback=self.message_callback) self._rx_thread.start() for cb in self.__connected_callbacks: cb() def close(self): logger.debug("Closing PurpleDropDevice") if self._rx_thread is not None: self._rx_thread.stop() self._rx_thread.join() if self._ser is not None: self._ser.close() for cb in self.__disconnected_callbacks: cb() def connected(self): return self._ser is not None and \ self._rx_thread is not None and \ self._rx_thread.running def unregister_listener(self, listener): with self.lock: self.listeners.remove(listener) def get_sync_listener(self, msg_filter=None) -> SyncListener: new_listener = SyncListener(owner=self, msg_filter=msg_filter) with self.lock: self.listeners.append(new_listener.get_msg_handler()) return new_listener def get_async_listener(self, callback, msg_filter=None) -> AsyncListener: new_listener = AsyncListener(owner=self, callback=callback, msg_filter=msg_filter) with self.lock: self.listeners.append(new_listener.get_msg_handler()) return new_listener def send_message(self, msg: PurpleDropMessage): tx_bytes = serialize(msg.to_bytes()) with self.lock: self._ser.write(tx_bytes) def message_callback(self, msg: PurpleDropMessage): with self.lock: for handler in self.listeners: handler(msg) class PersistentPurpleDropDevice(PurpleDropDevice): """A wrapper for PurpleDropDevice that transparently tries to connect/reconnect to a device. If a serial is provided, it will only connect to that serial number. 
Otherwise, it will connect to any purple drop detected (and may choose one arbitrarilty if there are multiple). """ def __init__(self, serial_number: Optional[str]=None): super().__init__() self.target_serial_number: Optional[str] = serial_number self.device_info: Optional[Any] = None self.__thread = threading.Thread( name="PersistentPurpleDropDevice Monitor", target=self.__thread_entry, daemon=True) self.__thread.start() def connected_serial_number(self) -> Optional[str]: """Returns the serial number of the connected device """ if self.device_info is None: return None else: return self.device_info.serial_number def __try_to_connect(self) -> bool: device_list = list_purpledrop_devices() selected_device = None if len(device_list) == 0: logger.debug("No purpledrop devices found to connect to") return False if self.target_serial_number: for device in device_list: if device.serial_number == self.target_serial_number: selected_device = device else: selected_device = device_list[0] if selected_device is None: serial_numbers = [d.serial_number for d in device_list] logger.warn(f"Found purpledrop, but not connecting because it has unexpected serial number ({serial_numbers}") return False self.device_info = selected_device self.open(selected_device.device) logger.warning(f"Connected to purpledrop {selected_device.serial_number} on {selected_device.device}") return True def __thread_entry(self): status = False while True: if not self.connected(): if status: logger.warning("Closing purpledrop device") self.close() self.device_info = None status = False logger.debug("Attempting to connect to purpledrop") status = self.__try_to_connect() time.sleep(5.0) N_PINS = 128 N_MASK_BYTES = 16 # Compute coefficients to convert integrated voltage to integrated charge # These values are nominal calculated values, not calibrated in any way # Divide by the voltage to get farads. # First stage gain GAIN1 = 2.0 # Integrator gain (Vout per integrated input V*s) GAIN2 = 25000.0 # Output stage gain GAIN3 = 22.36 # Sense resistances for high/low gain RLOW = 33.0 RHIGH = 220.0 CAPGAIN_HIGH = RHIGH * GAIN1 * GAIN2 * GAIN3 * 4096. / 3.3 CAPGAIN_LOW = RLOW * GAIN1 * GAIN2 * GAIN3 * 4096. 
/ 3.3 class PinState(object): """Data record to store the state of purpledrop pin setting, including active pins and capacitance scan groups """ N_DRIVE_GROUPS = 2 N_SCAN_GROUPS = 5 class PinGroup(object): def __init__(self, pin_mask: Sequence[int], setting: int): self.pin_mask = pin_mask self.setting = setting class DriveGroup(PinGroup): def __init__(self, pin_mask=None, duty_cycle=255): if pin_mask is None: pin_mask = pinlist2bool([]) super().__init__(pin_mask, duty_cycle) @property def duty_cycle(self): return self.setting @duty_cycle.setter def duty_cycle(self, dc): self.setting = dc def to_dict(self): return { 'pins': self.pin_mask, 'duty_cycle': self.duty_cycle, } class ScanGroup(PinGroup): def __init__(self, pin_mask=None, setting=0): if pin_mask is None: pin_mask = pinlist2bool([]) super().__init__(pin_mask, setting) def to_dict(self): return { 'pins': self.pin_mask, 'setting': self.setting, } def __init__(self): self.drive_groups = [self.DriveGroup() for _ in range(self.N_DRIVE_GROUPS)] self.scan_groups = [self.ScanGroup() for _ in range(self.N_SCAN_GROUPS)] def to_dict(self): return { 'drive_groups': [x.to_dict() for x in self.drive_groups], 'scan_groups': [x.to_dict() for x in self.scan_groups], } class PurpleDropController(object): # Define the method names which will be made available via RPC server RPC_METHODS = [ 'get_board_definition', 'get_parameter_definitions', 'get_parameter', 'set_parameter', 'get_bulk_capacitance', 'get_scan_capacitance', 'get_group_capacitance', 'get_active_capacitance', 'set_capacitance_group', 'set_electrode_pins', 'get_electrode_pins', 'set_feedback_command', 'move_drop', 'get_temperatures', 'set_pwm_duty_cycle', 'get_hv_supply_voltage', 'calibrate_capacitance_offset', 'get_device_info', 'read_gpio', 'write_gpio', 'set_scan_gains', 'get_scan_gains', 'set_electrode_calibration', ] def __init__(self, purpledrop, board_definition: Board, electrode_calibration: Optional[ElectrodeOffsetCalibration]=None): self.purpledrop = purpledrop self.board_definition = board_definition self.active_capacitance = 0.0 self.electrode_calibration = electrode_calibration self.raw_scan_capacitance: List[float] = [] self.calibrated_scan_capacitance: List[float] = [] self.raw_group_capacitance: List[float] = [] self.calibrated_group_capacitance: List[float] = [] self.scan_gains = [1.0] * N_PINS self.temperatures: Sequence[float] = [] self.duty_cycles: Dict[int, float] = {} self.hv_supply_voltage = 0.0 self.parameter_list: List[dict] = [] self.lock = threading.Lock() self.event_listeners: List[Callable] = [] self.active_capacitance_counter = 0 self.group_capacitance_counter = 0 self.duty_cycle_updated_counter = 0 self.hv_regulator_counter = 0 self.pin_state = PinState() def msg_filter(msg): desired_types = [ messages.ActiveCapacitanceMsg, messages.BulkCapacitanceMsg, messages.CommandAckMsg, messages.DutyCycleUpdatedMsg, messages.TemperatureMsg, messages.HvRegulatorMsg, ] for t in desired_types: if isinstance(msg, t): return True return False if self.purpledrop.connected(): self.__on_connected() self.purpledrop.register_connected_callback(self.__on_connected) self.purpledrop.register_disconnected_callback(self.__on_disconnected) self.listener = self.purpledrop.get_async_listener(self.__message_callback, msg_filter) def __on_connected(self): self.__set_scan_gains() self.__get_parameter_descriptors() software_version = self.get_software_version() if not validate_version(software_version): logger.error(f"Unsupported software version '{software_version}'. 
This driver may not" + \ "work correcly, and you should upgrade your purpledrop firmware to one of the following: " + \ f"{SUPPORTED_VERSIONS}") self.__send_device_info_event( True, self.purpledrop.connected_serial_number() or '', software_version or '' ) if self.electrode_calibration is not None: logger.info("Loading electrode calibration") self.set_electrode_calibration(self.electrode_calibration.voltage, self.electrode_calibration.offsets) def __on_disconnected(self): self.__send_device_info_event(False, '', '') def __send_device_info_event(self, connected: bool, serial_number: str, software_version: str): event = messages_pb2.PurpleDropEvent() event.device_info.connected = connected event.device_info.serial_number = serial_number event.device_info.software_version = software_version self.__fire_event(event) def __get_parameter_descriptors(self): """Request and receive the list of parameters from device """ listener = self.purpledrop.get_sync_listener(messages.ParameterDescriptorMsg) self.purpledrop.send_message(messages.ParameterDescriptorMsg()) descriptors = [] while True: msg = listener.wait(timeout=1.0) if msg is None: logger.error("Timed out waiting for parameter descriptors") break descriptors.append({ 'id': msg.param_id, 'name': msg.name, 'description': msg.description, 'type': msg.type, }) if msg.sequence_number == msg.sequence_total - 1: break self.parameter_list = descriptors def __set_scan_gains(self, gains: Sequence[bool]=None): """Setup gains used for capacitance scan If no gains are provided, the gains will be set based on the "oversized" electrodes defined in the active board definition. Any oversized electrodes are set to low gain, and the rest to high gain. Args: gains: A list of booleans. True indicates low gain should be used for the corresponding electrode """ if gains is None: gains = [False] * N_PINS for pin in self.board_definition.oversized_electrodes: gains[pin] = True # low gain self.scan_gains = list(map(lambda x: CAPGAIN_LOW if x else CAPGAIN_HIGH, gains)) msg = messages.SetGainMsg() msg.gains = list(map(lambda x: 1 if x else 0, gains)) listener = self.purpledrop.get_sync_listener(messages.CommandAckMsg) self.purpledrop.send_message(msg) ack = listener.wait(timeout=1.0) if ack is None: logger.error("Got no ACK for SetGains message") def __calibrate_capacitance(self, raw, gain): # Can't measure capacitance unless high voltage is on if self.hv_supply_voltage < 60.0: return 0.0 # Return as pF return raw * 1e12 / gain / self.hv_supply_voltage def __message_callback(self, msg): if isinstance(msg, messages.ActiveCapacitanceMsg): # TODO: I-sense resistor values are adjustable, and the # CAPGAIN_HIGH/CAPGAIN_LOW should be gotten from the device at some # point, rather than duplicated here capgain = CAPGAIN_LOW if (msg.settings & 1 == 1) else CAPGAIN_HIGH self.active_capacitance = self.__calibrate_capacitance(msg.measurement - msg.baseline, capgain) self.active_capacitance_counter += 1 # Throttle the events. 500Hz messages is a lot for the browser to process. 
# This also means logs don't have a full resolution, and it would be better # if clients could choose what they get if (self.active_capacitance_counter % 10) == 0: cap_event = messages_pb2.PurpleDropEvent() cap_event.active_capacitance.baseline = msg.baseline cap_event.active_capacitance.measurement = msg.measurement cap_event.active_capacitance.calibrated = float(self.active_capacitance) cap_event.active_capacitance.timestamp.CopyFrom(get_pb_timestamp()) self.__fire_event(cap_event) elif isinstance(msg, messages.BulkCapacitanceMsg): if(msg.group_scan != 0): self.group_capacitance_counter += 1 if (self.group_capacitance_counter % 10) == 0: self.raw_group_capacitance = msg.measurements self.calibrated_group_capacitance = [0.0] * len(self.raw_group_capacitance) for i in range(msg.count): if self.pin_state.scan_groups[i].setting == 0: gain = CAPGAIN_HIGH else: gain = CAPGAIN_LOW self.calibrated_group_capacitance[i] = self.__calibrate_capacitance(msg.measurements[i], gain) group_event = messages_pb2.PurpleDropEvent() group_event.group_capacitance.timestamp.CopyFrom(get_pb_timestamp()) group_event.group_capacitance.measurements[:] = self.calibrated_group_capacitance group_event.group_capacitance.raw_measurements[:] = self.raw_group_capacitance self.__fire_event(group_event) else: # Scan capacitance measurements are broken up into multiple messages if len(self.raw_scan_capacitance) < msg.start_index + msg.count: self.raw_scan_capacitance.extend([0] * (msg.start_index + msg.count - len(self.raw_scan_capacitance))) self.calibrated_scan_capacitance.extend([0] * (msg.start_index + msg.count - len(self.calibrated_scan_capacitance))) for i in range(msg.count): chan = msg.start_index + i gain = self.scan_gains[chan] self.raw_scan_capacitance[chan] = msg.measurements[i] self.calibrated_scan_capacitance[chan] = self.__calibrate_capacitance(msg.measurements[i], gain) # Fire event on the last group if msg.start_index + msg.count == 128: bulk_event = messages_pb2.PurpleDropEvent() def make_cap_measurement(raw, calibrated): m = messages_pb2.CapacitanceMeasurement() m.raw = float(raw) m.capacitance = float(calibrated) return m bulk_event.scan_capacitance.measurements.extend( [make_cap_measurement(raw, cal) for (raw, cal) in zip(self.raw_scan_capacitance, self.calibrated_scan_capacitance)] ) bulk_event.scan_capacitance.timestamp.CopyFrom(get_pb_timestamp()) self.__fire_event(bulk_event) elif isinstance(msg, messages.DutyCycleUpdatedMsg): self.duty_cycle_updated_counter += 1 if (self.duty_cycle_updated_counter%10) == 0: # Update local state of duty cycle self.pin_state.drive_groups[0].duty_cycle = msg.duty_cycle_A self.pin_state.drive_groups[1].duty_cycle = msg.duty_cycle_B # Publish event with new values duty_cycle_event = messages_pb2.PurpleDropEvent() duty_cycle_event.duty_cycle_updated.timestamp.CopyFrom(get_pb_timestamp()) duty_cycle_event.duty_cycle_updated.duty_cycles[:] = [msg.duty_cycle_A, msg.duty_cycle_B] self.__fire_event(duty_cycle_event) elif isinstance(msg, messages.HvRegulatorMsg): self.hv_regulator_counter += 1 if (self.hv_regulator_counter % 10) == 0: self.hv_supply_voltage = msg.voltage event = messages_pb2.PurpleDropEvent() event.hv_regulator.voltage = msg.voltage event.hv_regulator.v_target_out = msg.v_target_out event.hv_regulator.timestamp.CopyFrom(get_pb_timestamp()) self.__fire_event(event) elif isinstance(msg, messages.TemperatureMsg): self.temperatures = [float(x) / 100.0 for x in msg.measurements] event = messages_pb2.PurpleDropEvent() event.temperature_control.temperatures[:] = 
self.temperatures duty_cycles = [] for i in range(len(self.temperatures)): duty_cycles.append(self.duty_cycles.get(i, 0.0)) event.temperature_control.duty_cycles[:] = duty_cycles event.temperature_control.timestamp.CopyFrom(get_pb_timestamp()) self.__fire_event(event) def __fire_event(self, event): with self.lock: for listener in self.event_listeners: listener(event) def __get_parameter_definition(self, id): for p in self.parameter_list: if p['id'] == id: return p return None def __fire_pinstate_event(self): def create_electrode_group(x): eg = messages_pb2.ElectrodeGroup() eg.electrodes[:] = x.pin_mask eg.setting = x.setting return eg event = messages_pb2.PurpleDropEvent() for g in self.pin_state.drive_groups: event.electrode_state.drive_groups.add(electrodes=g.pin_mask, setting=g.setting) for g in self.pin_state.scan_groups: event.electrode_state.scan_groups.add(electrodes=g.pin_mask, setting=g.setting) self.__fire_event(event) def get_software_version(self) -> Optional[str]: listener = self.purpledrop.get_sync_listener(msg_filter=messages.DataBlobMsg) versionRequest = messages.DataBlobMsg() versionRequest.blob_id = messages.DataBlobMsg.SOFTWARE_VERSION_ID self.purpledrop.send_message(versionRequest) msg = listener.wait(0.5) if msg is None: software_version = None logger.warning("Timed out requesting software version") else: software_version = msg.payload.decode('utf-8') return software_version def register_event_listener(self, func): """Register a callback for state update events """ with self.lock: self.event_listeners.append(func) def get_parameter_definitions(self): """Get a list of all of the parameters supported by the PurpleDrop Arguments: - None """ logger.debug("Received get_parameter_definitions") return { "parameters": self.parameter_list, } def get_parameter(self, paramIdx): """Request the current value of a parameter from the device Arguments: - paramIdx: The ID of the parameter to request (from the list of parameters provided by 'get_parameter_definitions') """ req_msg = messages.SetParameterMsg() req_msg.set_param_idx(paramIdx) req_msg.set_param_value_int(0) req_msg.set_write_flag(0) def msg_filter(msg): return isinstance(msg, messages.SetParameterMsg) and msg.param_idx() == paramIdx listener = self.purpledrop.get_sync_listener(msg_filter=msg_filter) self.purpledrop.send_message(req_msg) resp = listener.wait(timeout=0.5) if resp is None: raise TimeoutError("No response from purpledrop") else: paramDesc = self.__get_parameter_definition(paramIdx) value = None if paramDesc is not None and paramDesc['type'] == 'float': value = resp.param_value_float() else: value = resp.param_value_int() logger.debug(f"get_parameter({paramIdx}) returning {value}") return value def set_parameter(self, paramIdx, value): """Set a config parameter A special paramIdx value of 0xFFFFFFFF is used to trigger the saving of all parameters to flash. 
Arguments: - paramIdx: The index of the parameter to set (from 'get_parameter_definitions') - value: A float or int (based on the definition) with the new value to assign """ logger.debug(f"Received set_parameter({paramIdx}, {value})") req_msg = messages.SetParameterMsg() req_msg.set_param_idx(paramIdx) paramDesc = self.__get_parameter_definition(paramIdx) if paramDesc is not None and paramDesc['type'] == 'float': req_msg.set_param_value_float(value) else: req_msg.set_param_value_int(value) req_msg.set_write_flag(1) def msg_filter(msg): return isinstance(msg, messages.SetParameterMsg) and msg.param_idx() == paramIdx listener = self.purpledrop.get_sync_listener(msg_filter=msg_filter) self.purpledrop.send_message(req_msg) resp = listener.wait(timeout=0.5) if resp is None: raise TimeoutError(f"No response from purpledrop to set parameter ({paramIdx})") def get_board_definition(self): """Get electrode board configuration object Arguments: None """ logger.debug("Received get_board_definition") return self.board_definition.as_dict() def get_bulk_capacitance(self) -> List[float]: """Get the most recent capacitance scan results DEPRECATED. Use get_scan_capacitance. Arguments: None """ logger.debug("Received get_bulk_capacitance") return self.calibrated_scan_capacitance def get_scan_capacitance(self) -> Dict[str, Any]: """Get the most recent capacitance scan results Arguments: None """ return { "raw": self.raw_scan_capacitance, "calibrated": self.calibrated_scan_capacitance } def get_group_capacitance(self) -> Dict[str, List[float]]: """Get the latest group scan capacitances Arguments: None """ return { "raw": self.raw_group_capacitance, "calibrated": self.calibrated_group_capacitance, } def get_active_capacitance(self) -> float: """Get the most recent active electrode capacitance Arguments: None """ logger.debug("Received get_active_capacitance") return self.active_capacitance def get_electrode_pins(self): """Get the current state of all electrodes Arguments: None Returns: List of booleans """ logger.debug("Received get_electrode_pins") return self.pin_state.to_dict() def set_capacitance_group(self, pins: Sequence[int], group_id: int, setting: int): """Set a capacitance scan group. Purpledrop supports 5 scan groups. Each group defines a set of electrodes which are measured together after each AC drive cycle. Arguments: - pins: A list of pins included in the group (may be empty to clear the group) - group_id: The group number to set (0-4) - setting: Gain setting for the group (0: high gain, 1: low gain) """ if group_id >= 5: raise ValueError("group_id must be < 5") # Send message to device to update msg = ElectrodeEnableMsg() msg.group_id = group_id + 100 msg.setting = setting msg.values = pinlist2mask(pins) self.purpledrop.send_message(msg) # Update local state self.pin_state.scan_groups[group_id] = PinState.ScanGroup(pinlist2bool(pins), setting) # Send event with new state self.__fire_pinstate_event() def set_electrode_pins(self, pins: Sequence[int], group_id: int=0, duty_cycle: int=255): """Set the currently enabled pins Specified electrodes will be activated, all others will be deactivated. Providing an empty array will deactivate all electrodes. Arguments: - pins: A list of pin numbers to activate - group_id: Which electrode enable group to be set (default: 0) 0: Drive group A 1: Drive group B - duty_cycle: Duty cycle for the group (0-255) """ logger.debug(f"Received set_electrode_pins({pins})") if group_id < 0 or group_id > 1: raise ValueError(f"group_id={group_id} is invalid. 
It must be 0 or 1.") # Send message to device to update msg = ElectrodeEnableMsg() msg.group_id = group_id msg.setting = duty_cycle msg.values = pinlist2mask(pins) self.purpledrop.send_message(msg) # Update local state self.pin_state.drive_groups[group_id] = PinState.DriveGroup(pinlist2bool(pins), duty_cycle) # Send event with new state self.__fire_pinstate_event() def set_feedback_command(self, target, mode, input_groups_p_mask, input_groups_n_mask, baseline): """Update feedback control settings When enabled, the purpledrop controller will adjust the duty cycle of electrode drive groups based on capacitance measurements. Arguments: - target: The controller target in counts - mode: - 0: Disabled - 1: Normal - 2: Differential - input_groups_p_mask: Bit mask indicating which capacitance groups to sum for positive input (e.g. for groups 0 and 2: 5) - input_groups_n_mask: Bit mask for negative input groups (used in differential mode) - baseline: The duty cycle to apply to both drive groups when no error signal is present (0-255) """ msg = messages.FeedbackCommandMsg() msg.target = target msg.mode = mode msg.input_groups_p_mask = input_groups_p_mask msg.input_groups_n_mask = input_groups_n_mask msg.baseline = baseline self.purpledrop.send_message(msg) def move_drop(self, start: Sequence[int], size: Sequence[int], direction: str) -> MoveDropResult: """Execute a drop move sequence Arguments: - start: A list -- [x, y] -- specifying the top-left corner of the current drop location - size: A list -- [width, height] -- specifying the size of the drop to be moved - direction: One of "Up", "Down", "Left", "Right" """ logger.debug(f"Received move_drop({start}, {size}, {direction})") return move_drop(self, start, size, direction) def get_temperatures(self) -> Sequence[float]: """Returns an array of all temperature sensor measurements in degrees C Arguments: None """ logger.debug("Received get_temperatures") return self.temperatures def set_pwm_duty_cycle(self, chan: int, duty_cycle: float): """Set the PWM output duty cycle for a single channel Arguments: - chan: An integer specifying the channel to set - duty_cycle: A float specifying the duty cycle in range [0, 1.0] """ logger.debug(f"Received set_pwm_duty_cycle({chan}, {duty_cycle})") self.duty_cycles[chan] = duty_cycle msg = SetPwmMsg() msg.chan = chan msg.duty_cycle = duty_cycle self.purpledrop.send_message(msg) def get_hv_supply_voltage(self): """Return the latest high voltage rail measurement Arguments: None Returns: A float, in volts """ logger.debug("Received get_hv_supply_voltage") return self.hv_supply_voltage def calibrate_capacitance_offset(self): """Request a calibration of the capacitance measurement zero offset Arguments: None Returns: None """ msg = messages.CalibrateCommandMsg() msg.command = messages.CalibrateCommandMsg.CAP_OFFSET_CMD self.purpledrop.send_message(msg) def get_device_info(self): """Gets information about the connected purpledrop device Arguments: None Returns: Object with the following fields: - connected: boolean indicating if a device is currently connected - serial_number: The serial number of the connected device - software_version: The software version string of the connected device """ serial_number = self.purpledrop.connected_serial_number() if serial_number is None: return { 'connected': False, 'serial_number': '', 'software_version': '' } else: software_version = self.get_software_version() return { 'connected': True, 'serial_number': serial_number, 'software_version': software_version } def read_gpio(self, 
gpio_num): """Reads the current input value of a GPIO pin Arguments: - gpio_num: The ID of the GPIO to read Returns: A bool """ msg = messages.GpioControlMsg() msg.pin = gpio_num msg.read = True listener = self.purpledrop.get_sync_listener(msg_filter=messages.GpioControlMsg) self.purpledrop.send_message(msg) rxmsg = listener.wait(0.5) if rxmsg is None: raise TimeoutError("No response from purpledrop to GPIO read request") else: return rxmsg.value def write_gpio(self, gpio_num, value, output_enable): """Set the output state of a GPIO pin Arguments: - gpio_num: The ID of the GPIO to set - value: The output value (boolean) - output_enable: Set the GPIO as an output (true) or input (false) Returns: - The value read on the GPIO (bool) """ msg = messages.GpioControlMsg() msg.pin = gpio_num msg.read = False msg.value = value msg.output_enable = output_enable listener = self.purpledrop.get_sync_listener(msg_filter=messages.GpioControlMsg) self.purpledrop.send_message(msg) rxmsg = listener.wait(0.5) if rxmsg is None: raise TimeoutError("No response from purpledrop to GPIO write request") else: return rxmsg.value def set_electrode_calibration(self, voltage: float, offsets: Sequence[int]): """Set the capacitance offset for each electrode Provides a table of values to be subtracted for each electrode to compensate for parasitic capacitance of the electrode. Values are measured at high gain, with no liquid on the device, at a certain voltage. These values will be adjusted for changes in voltage from the measured voltage, and for low gain when applied by the purpledrop. Arguments: - voltage: The voltage setting at which the offsets were measured - offsets: A list of 128 16-bit values to be subtracted Returns: None """ offsets = list(map(int, offsets)) table = struct.pack("<f128H", voltage, *offsets) tx_pos = 0 while tx_pos < len(table): tx_size = min(64, len(table) - tx_pos) msg = messages.DataBlobMsg() msg.blob_id = msg.OFFSET_CALIBRATION_ID msg.chunk_index = tx_pos msg.payload_size = tx_size msg.payload = table[tx_pos:tx_pos+tx_size] tx_pos += tx_size listener = self.purpledrop.get_sync_listener(messages.CommandAckMsg) self.purpledrop.send_message(msg) ack = listener.wait(timeout=0.5) if ack is None: raise TimeoutError("No ACK while setting electrode calibration") def set_scan_gains(self, gains: Optional[Sequence[bool]]=None): """Set the gains used for capacitance scan measurement If no gains argument is provided, scan gains will be set based on oversized electrodes defined in the board definition file. Arguments: - gains: A list of 128 booleans, true indicating that an electrode should be scanned with low gain """ if gains is not None: if len(gains) != 128: raise ValueError("Scan gains must have 128 values") # Make sure they are all convertible to bool gains = [bool(x) for x in gains] self.__set_scan_gains(gains) def get_scan_gains(self) -> List[bool]: """Return the current scan gain settings """ return [x == CAPGAIN_LOW for x in self.scan_gains]
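
# A minimal usage sketch of the low-level classes above (illustrative, not part
# of the driver). It assumes exactly one purpledrop is attached and that this
# module's names (list_purpledrop_devices, PurpleDropDevice, messages) are in
# scope. The request/reply exchange mirrors PurpleDropController.get_software_version().
devices = list_purpledrop_devices()
if not devices:
    raise SystemExit("No purpledrop device found")
dev = PurpleDropDevice(port=devices[0].device)
listener = dev.get_sync_listener(msg_filter=messages.DataBlobMsg)
request = messages.DataBlobMsg()
request.blob_id = messages.DataBlobMsg.SOFTWARE_VERSION_ID
dev.send_message(request)
reply = listener.wait(timeout=0.5)
print(reply.payload.decode('utf-8') if reply is not None else "timed out")
dev.close()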
[ "purpledrop.protobuf.messages_pb2.PurpleDropEvent", "purpledrop.messages.FeedbackCommandMsg", "serial.Serial", "purpledrop.messages.DataBlobMsg", "purpledrop.messages.ParameterDescriptorMsg", "serial.tools.list_ports.comports", "inspect.isclass", "struct.pack", "threading.Lock", "threading.Thread", "purpledrop.messages.GpioControlMsg", "time.sleep", "purpledrop.protobuf.messages_pb2.CapacitanceMeasurement", "purpledrop.protobuf.messages_pb2.ElectrodeGroup", "queue.Queue", "purpledrop.messages.SetParameterMsg", "logging.debug", "time.time", "purpledrop.messages.SetGainMsg", "purpledrop.messages.CalibrateCommandMsg", "purpledrop.protobuf.messages_pb2.Timestamp", "fnmatch.fnmatch", "logging.getLogger" ]
[((694, 725), 'logging.getLogger', 'logging.getLogger', (['"""purpledrop"""'], {}), "('purpledrop')\n", (711, 725), False, 'import logging\n'), ((1721, 1742), 'inspect.isclass', 'inspect.isclass', (['filt'], {}), '(filt)\n', (1736, 1742), False, 'import inspect\n'), ((2073, 2107), 'serial.tools.list_ports.comports', 'serial.tools.list_ports.comports', ([], {}), '()\n', (2105, 2107), False, 'import serial\n'), ((2326, 2337), 'time.time', 'time.time', ([], {}), '()\n', (2335, 2337), False, 'import time\n'), ((2347, 2371), 'purpledrop.protobuf.messages_pb2.Timestamp', 'messages_pb2.Timestamp', ([], {}), '()\n', (2369, 2371), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((1442, 1469), 'fnmatch.fnmatch', 'fnmatch.fnmatch', (['v', 'pattern'], {}), '(v, pattern)\n', (1457, 1469), False, 'import fnmatch\n'), ((2608, 2676), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.run', 'name': '"""PurpleDrop Rx"""', 'daemon': '(True)'}), "(target=self.run, name='PurpleDrop Rx', daemon=True)\n", (2624, 2676), False, 'import threading\n'), ((4173, 4186), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (4184, 4186), False, 'import queue\n'), ((5988, 6004), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (6002, 6004), False, 'import threading\n'), ((6549, 6601), 'serial.Serial', 'serial.Serial', (['port'], {'timeout': '(0.01)', 'write_timeout': '(0.5)'}), '(port, timeout=0.01, write_timeout=0.5)\n', (6562, 6601), False, 'import serial\n'), ((8804, 8909), 'threading.Thread', 'threading.Thread', ([], {'name': '"""PersistentPurpleDropDevice Monitor"""', 'target': 'self.__thread_entry', 'daemon': '(True)'}), "(name='PersistentPurpleDropDevice Monitor', target=self.\n __thread_entry, daemon=True)\n", (8820, 8909), False, 'import threading\n'), ((14425, 14441), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (14439, 14441), False, 'import threading\n'), ((16559, 16589), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (16587, 16589), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((18368, 18389), 'purpledrop.messages.SetGainMsg', 'messages.SetGainMsg', ([], {}), '()\n', (18387, 18389), True, 'import purpledrop.messages as messages\n'), ((25303, 25333), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (25331, 25333), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((25809, 25831), 'purpledrop.messages.DataBlobMsg', 'messages.DataBlobMsg', ([], {}), '()\n', (25829, 25831), True, 'import purpledrop.messages as messages\n'), ((27003, 27029), 'purpledrop.messages.SetParameterMsg', 'messages.SetParameterMsg', ([], {}), '()\n', (27027, 27029), True, 'import purpledrop.messages as messages\n'), ((28350, 28411), 'logging.debug', 'logging.debug', (['f"""Received set_parameter({paramIdx}, {value})"""'], {}), "(f'Received set_parameter({paramIdx}, {value})')\n", (28363, 28411), False, 'import logging\n'), ((28430, 28456), 'purpledrop.messages.SetParameterMsg', 'messages.SetParameterMsg', ([], {}), '()\n', (28454, 28456), True, 'import purpledrop.messages as messages\n'), ((29601, 29647), 'logging.debug', 'logging.debug', (['"""Received get_bulk_capacitance"""'], {}), "('Received get_bulk_capacitance')\n", (29614, 29647), False, 'import logging\n'), ((30415, 30463), 'logging.debug', 'logging.debug', (['"""Received get_active_capacitance"""'], {}), "('Received get_active_capacitance')\n", (30428, 30463), False, 'import logging\n'), 
((30669, 30713), 'logging.debug', 'logging.debug', (['"""Received get_electrode_pins"""'], {}), "('Received get_electrode_pins')\n", (30682, 30713), False, 'import logging\n'), ((32284, 32337), 'logging.debug', 'logging.debug', (['f"""Received set_electrode_pins({pins})"""'], {}), "(f'Received set_electrode_pins({pins})')\n", (32297, 32337), False, 'import logging\n'), ((33770, 33799), 'purpledrop.messages.FeedbackCommandMsg', 'messages.FeedbackCommandMsg', ([], {}), '()\n', (33797, 33799), True, 'import purpledrop.messages as messages\n'), ((34528, 34594), 'logging.debug', 'logging.debug', (['f"""Received move_drop({start}, {size}, {direction})"""'], {}), "(f'Received move_drop({start}, {size}, {direction})')\n", (34541, 34594), False, 'import logging\n'), ((34827, 34869), 'logging.debug', 'logging.debug', (['"""Received get_temperatures"""'], {}), "('Received get_temperatures')\n", (34840, 34869), False, 'import logging\n'), ((35209, 35276), 'logging.debug', 'logging.debug', (['f"""Received set_pwm_duty_cycle({chan}, {duty_cycle})"""'], {}), "(f'Received set_pwm_duty_cycle({chan}, {duty_cycle})')\n", (35222, 35276), False, 'import logging\n'), ((35627, 35674), 'logging.debug', 'logging.debug', (['"""Received get_hv_supply_voltage"""'], {}), "('Received get_hv_supply_voltage')\n", (35640, 35674), False, 'import logging\n'), ((35908, 35938), 'purpledrop.messages.CalibrateCommandMsg', 'messages.CalibrateCommandMsg', ([], {}), '()\n', (35936, 35938), True, 'import purpledrop.messages as messages\n'), ((37166, 37191), 'purpledrop.messages.GpioControlMsg', 'messages.GpioControlMsg', ([], {}), '()\n', (37189, 37191), True, 'import purpledrop.messages as messages\n'), ((37952, 37977), 'purpledrop.messages.GpioControlMsg', 'messages.GpioControlMsg', ([], {}), '()\n', (37975, 37977), True, 'import purpledrop.messages as messages\n'), ((39201, 39241), 'struct.pack', 'struct.pack', (['"""<f128H"""', 'voltage', '*offsets'], {}), "('<f128H', voltage, *offsets)\n", (39212, 39241), False, 'import struct\n'), ((10640, 10655), 'time.sleep', 'time.sleep', (['(5.0)'], {}), '(5.0)\n', (10650, 10655), False, 'import time\n'), ((17035, 17068), 'purpledrop.messages.ParameterDescriptorMsg', 'messages.ParameterDescriptorMsg', ([], {}), '()\n', (17066, 17068), True, 'import purpledrop.messages as messages\n'), ((25158, 25187), 'purpledrop.protobuf.messages_pb2.ElectrodeGroup', 'messages_pb2.ElectrodeGroup', ([], {}), '()\n', (25185, 25187), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((39366, 39388), 'purpledrop.messages.DataBlobMsg', 'messages.DataBlobMsg', ([], {}), '()\n', (39386, 39388), True, 'import purpledrop.messages as messages\n'), ((19810, 19840), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (19838, 19840), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((20973, 21003), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (21001, 21003), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((22254, 22284), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (22282, 22284), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((23397, 23427), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (23425, 23427), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((22376, 22413), 
'purpledrop.protobuf.messages_pb2.CapacitanceMeasurement', 'messages_pb2.CapacitanceMeasurement', ([], {}), '()\n', (22411, 22413), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((23907, 23937), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (23935, 23937), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n'), ((24330, 24360), 'purpledrop.protobuf.messages_pb2.PurpleDropEvent', 'messages_pb2.PurpleDropEvent', ([], {}), '()\n', (24358, 24360), True, 'import purpledrop.protobuf.messages_pb2 as messages_pb2\n')]
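
# Worked example (illustrative) of the conversion performed by
# PurpleDropController.__calibrate_capacitance() above, using the nominal,
# uncalibrated gain constants from the driver; the raw count and rail voltage
# below are made-up values.
GAIN1, GAIN2, GAIN3 = 2.0, 25000.0, 22.36
RHIGH = 220.0
CAPGAIN_HIGH = RHIGH * GAIN1 * GAIN2 * GAIN3 * 4096. / 3.3
raw_counts = 5000          # hypothetical integrator reading minus baseline
hv_supply_voltage = 120.0  # measured high-voltage rail, in volts
capacitance_pf = raw_counts * 1e12 / CAPGAIN_HIGH / hv_supply_voltage
print(f"{capacitance_pf:.1f} pF")  # ~136.5 pF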
import tensorflow as tf import numpy as np char_arr = [c for c in 'SEPabcdefghijklmnopqrstuvwxyz단어나무놀이소녀키스사랑'] num_dic = {n: i for i, n in enumerate(char_arr)} dic_len = len(num_dic) seq_data = [['word', '단어'], ['wood', '나무'], ['game', '놀이'], ['girl', '소녀'], ['kiss', '키스'], ['love', '사랑']] def make_batch(seq_data): input_batch = [] output_batch = [] target_batch = [] for seq in seq_data: input = [num_dic[n] for n in seq[0]] output = [num_dic[n] for n in ('S' + seq[1])] target = [num_dic[n] for n in (seq[1] + 'E')] input_batch.append(np.eye(dic_len)[input]) output_batch.append(np.eye(dic_len)[output]) target_batch.append(target) return input_batch, output_batch, target_batch # Hyperparameter settings learning_rate = 0.01 n_hidden = 128 total_epoch = 100 n_class = n_input = dic_len # Build the neural network model enc_input = tf.placeholder(tf.float32, [None, None, n_input]) dec_input = tf.placeholder(tf.float32, [None, None, n_input]) targets = tf.placeholder(tf.int64, [None, None]) with tf.variable_scope('encode'): enc_cell = tf.nn.rnn_cell.BasicRNNCell(n_hidden) enc_cell = tf.nn.rnn_cell.DropoutWrapper(enc_cell, output_keep_prob=0.5) outputs, enc_states = tf.nn.dynamic_rnn(enc_cell, enc_input, dtype=tf.float32) with tf.variable_scope('decode'): dec_cell = tf.nn.rnn_cell.BasicRNNCell(n_hidden) dec_cell = tf.nn.rnn_cell.DropoutWrapper(dec_cell, output_keep_prob=0.5) outputs, dec_states = tf.nn.dynamic_rnn(dec_cell, dec_input, initial_state=enc_states, dtype=tf.float32) model = tf.layers.dense(outputs, n_class, activation=None) cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=model, labels=targets)) optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost) # Train the neural network model sess = tf.Session() sess.run(tf.global_variables_initializer()) input_batch, output_batch, target_batch = make_batch(seq_data) for epoch in range(total_epoch): _, loss = sess.run([optimizer, cost], feed_dict={enc_input: input_batch, dec_input: output_batch, targets: target_batch}) print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.6}'.format(loss)) print('Optimization complete!') # Translation test def translate(word): seq_data = [word, 'P' * len(word)] input_batch, output_batch, target_batch = make_batch([seq_data]) prediction = tf.argmax(model, 2) result = sess.run(prediction, feed_dict={enc_input: input_batch, dec_input: output_batch, targets: target_batch}) decoded = [char_arr[i] for i in result[0]] end = decoded.index('E') if 'E' in decoded else len(decoded) translated = ''.join(decoded[:end]) return translated print('\n=== Translation test ===') print('word ->', translate('word')) print('wodr ->', translate('wodr')) print('love ->', translate('love')) print('loev ->', translate('loev')) print('abcd ->', translate('abcd'))
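
# Quick sanity check (illustrative) of what make_batch produces for one pair:
# the encoder input is one-hot over the 41-character vocabulary ('S', 'E', 'P',
# 26 letters, 12 Korean characters), the decoder input is prefixed with 'S',
# and the target is suffixed with 'E'.
inp, out, tgt = make_batch([['word', '단어']])
print(np.shape(inp))  # (1, 4, 41): 'w', 'o', 'r', 'd' one-hot encoded
print(np.shape(out))  # (1, 3, 41): 'S', '단', '어'
print(tgt)            # [[token ids for '단', '어', 'E']]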
[ "tensorflow.nn.rnn_cell.BasicRNNCell", "tensorflow.nn.dynamic_rnn", "tensorflow.global_variables_initializer", "tensorflow.argmax", "tensorflow.layers.dense", "tensorflow.Session", "tensorflow.variable_scope", "tensorflow.nn.rnn_cell.DropoutWrapper", "tensorflow.placeholder", "numpy.eye", "tensorflow.train.AdamOptimizer", "tensorflow.nn.sparse_softmax_cross_entropy_with_logits" ]
[((912, 961), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, None, n_input]'], {}), '(tf.float32, [None, None, n_input])\n', (926, 961), True, 'import tensorflow as tf\n'), ((974, 1023), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, None, n_input]'], {}), '(tf.float32, [None, None, n_input])\n', (988, 1023), True, 'import tensorflow as tf\n'), ((1034, 1072), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int64', '[None, None]'], {}), '(tf.int64, [None, None])\n', (1048, 1072), True, 'import tensorflow as tf\n'), ((1606, 1656), 'tensorflow.layers.dense', 'tf.layers.dense', (['outputs', 'n_class'], {'activation': 'None'}), '(outputs, n_class, activation=None)\n', (1621, 1656), True, 'import tensorflow as tf\n'), ((1842, 1854), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1852, 1854), True, 'import tensorflow as tf\n'), ((1079, 1106), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""encode"""'], {}), "('encode')\n", (1096, 1106), True, 'import tensorflow as tf\n'), ((1123, 1160), 'tensorflow.nn.rnn_cell.BasicRNNCell', 'tf.nn.rnn_cell.BasicRNNCell', (['n_hidden'], {}), '(n_hidden)\n', (1150, 1160), True, 'import tensorflow as tf\n'), ((1176, 1237), 'tensorflow.nn.rnn_cell.DropoutWrapper', 'tf.nn.rnn_cell.DropoutWrapper', (['enc_cell'], {'output_keep_prob': '(0.5)'}), '(enc_cell, output_keep_prob=0.5)\n', (1205, 1237), True, 'import tensorflow as tf\n'), ((1265, 1321), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['enc_cell', 'enc_input'], {'dtype': 'tf.float32'}), '(enc_cell, enc_input, dtype=tf.float32)\n', (1282, 1321), True, 'import tensorflow as tf\n'), ((1328, 1355), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""decode"""'], {}), "('decode')\n", (1345, 1355), True, 'import tensorflow as tf\n'), ((1372, 1409), 'tensorflow.nn.rnn_cell.BasicRNNCell', 'tf.nn.rnn_cell.BasicRNNCell', (['n_hidden'], {}), '(n_hidden)\n', (1399, 1409), True, 'import tensorflow as tf\n'), ((1425, 1486), 'tensorflow.nn.rnn_cell.DropoutWrapper', 'tf.nn.rnn_cell.DropoutWrapper', (['dec_cell'], {'output_keep_prob': '(0.5)'}), '(dec_cell, output_keep_prob=0.5)\n', (1454, 1486), True, 'import tensorflow as tf\n'), ((1514, 1601), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['dec_cell', 'dec_input'], {'initial_state': 'enc_states', 'dtype': 'tf.float32'}), '(dec_cell, dec_input, initial_state=enc_states, dtype=tf.\n float32)\n', (1531, 1601), True, 'import tensorflow as tf\n'), ((1679, 1755), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'logits': 'model', 'labels': 'targets'}), '(logits=model, labels=targets)\n', (1725, 1755), True, 'import tensorflow as tf\n'), ((1864, 1897), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1895, 1897), True, 'import tensorflow as tf\n'), ((2374, 2393), 'tensorflow.argmax', 'tf.argmax', (['model', '(2)'], {}), '(model, 2)\n', (2383, 2393), True, 'import tensorflow as tf\n'), ((1769, 1806), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['learning_rate'], {}), '(learning_rate)\n', (1791, 1806), True, 'import tensorflow as tf\n'), ((630, 645), 'numpy.eye', 'np.eye', (['dic_len'], {}), '(dic_len)\n', (636, 645), True, 'import numpy as np\n'), ((682, 697), 'numpy.eye', 'np.eye', (['dic_len'], {}), '(dic_len)\n', (688, 697), True, 'import numpy as np\n')]
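
# The translate() above decodes in one shot by feeding 'P' padding as the
# decoder input. Below is an illustrative sketch of the more conventional
# greedy loop, which feeds each predicted character back in as the next decoder
# input. Note that the DropoutWrapper above stays active at inference time, so
# predictions can vary from run to run.
def translate_greedy(word, max_len=10):
    input_batch, _, _ = make_batch([[word, 'P' * len(word)]])
    decoded = 'S'
    for _ in range(max_len):
        output_batch = [np.eye(dic_len)[[num_dic[c] for c in decoded]]]
        result = sess.run(tf.argmax(model, 2),
                          feed_dict={enc_input: input_batch,
                                     dec_input: output_batch})
        next_char = char_arr[result[0][-1]]
        if next_char == 'E':
            break
        decoded += next_char
    return decoded[1:]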
from datetime import date, datetime, timedelta from accountancy.helpers import AuditTransaction, get_all_historical_changes from cashbook.models import CashBook from contacts.models import Contact from controls.models import FinancialYear, Period from django.test import TestCase from nominals.models import Nominal from purchases.models import (PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier) from vat.models import Vat DATE_INPUT_FORMAT = '%d-%m-%Y' MODEL_DATE_INPUT_FORMAT = '%Y-%m-%d' class GetAllHistoricalChangesTest(TestCase): def test_create_only(self): contact = Contact.objects.create(code="1", name="11", email="111") historical_records = Contact.history.all().order_by("pk") self.assertEqual( len(historical_records), 1 ) changes = get_all_historical_changes(historical_records) self.assertEqual( len(changes), 1 ) creation_change = changes[0] self.assertEqual( creation_change["id"]["old"], "" ) self.assertEqual( creation_change["id"]["new"], str(contact.id) ) self.assertEqual( creation_change["code"]["old"], "" ) self.assertEqual( creation_change["code"]["new"], "1" ) self.assertEqual( creation_change["name"]["old"], "" ) self.assertEqual( creation_change["name"]["new"], "11" ) self.assertEqual( creation_change["meta"]["AUDIT_action"], "Create" ) def test_create_and_update(self): contact = Contact.objects.create(code="1", name="11", email="111") contact.name = "12" contact.save() historical_records = Contact.history.all().order_by("pk") self.assertEqual( len(historical_records), 2 ) changes = get_all_historical_changes(historical_records) self.assertEqual( len(changes), 2 ) creation_change = changes[0] update_change = changes[1] self.assertEqual( creation_change["id"]["old"], "" ) self.assertEqual( creation_change["id"]["new"], str(contact.id) ) self.assertEqual( creation_change["code"]["old"], "" ) self.assertEqual( creation_change["code"]["new"], "1" ) self.assertEqual( creation_change["name"]["old"], "" ) self.assertEqual( creation_change["name"]["new"], "11" ) self.assertEqual( creation_change["meta"]["AUDIT_action"], "Create" ) self.assertEqual( update_change["name"]["old"], "11" ) self.assertEqual( update_change["name"]["new"], "12" ) self.assertEqual( update_change["meta"]["AUDIT_action"], "Update" ) def test_create_and_update_and_delete(self): contact = Contact.objects.create(code="1", name="11", email="111") contact_dict = contact.__dict__.copy() contact.name = "12" contact.save() contact.delete() historical_records = Contact.history.all().order_by("pk") self.assertEqual( len(historical_records), 3 ) changes = get_all_historical_changes(historical_records) self.assertEqual( len(changes), 3 ) creation_change = changes[0] update_change = changes[1] deleted_change = changes[2] self.assertEqual( creation_change["id"]["old"], "" ) self.assertEqual( creation_change["id"]["new"], str(contact_dict["id"]) ) self.assertEqual( creation_change["code"]["old"], "" ) self.assertEqual( creation_change["code"]["new"], "1" ) self.assertEqual( creation_change["name"]["old"], "" ) self.assertEqual( creation_change["name"]["new"], "11" ) self.assertEqual( creation_change["meta"]["AUDIT_action"], "Create" ) self.assertEqual( update_change["name"]["old"], "11" ) self.assertEqual( update_change["name"]["new"], "12" ) self.assertEqual( update_change["meta"]["AUDIT_action"], "Update" ) self.assertEqual( deleted_change["id"]["old"], str(contact_dict["id"]) ) self.assertEqual( deleted_change["id"]["new"], "" ) self.assertEqual( deleted_change["code"]["old"], contact_dict["code"] ) 
self.assertEqual( deleted_change["code"]["new"], "" ) self.assertEqual( deleted_change["name"]["old"], "12" ) self.assertEqual( deleted_change["name"]["new"], "" ) self.assertEqual( deleted_change["email"]["old"], contact_dict["email"] ) self.assertEqual( deleted_change["email"]["new"], "" ) self.assertEqual( deleted_change["customer"]["old"], str(contact_dict["customer"]) ) self.assertEqual( deleted_change["customer"]["new"], "" ) self.assertEqual( deleted_change["supplier"]["old"], str(contact_dict["supplier"]) ) self.assertEqual( deleted_change["supplier"]["new"], "" ) self.assertEqual( deleted_change["meta"]["AUDIT_action"], "Delete" ) class AuditTransactionTest(TestCase): """ Test with PL header, line, matching """ @classmethod def setUpTestData(cls): cls.date = datetime.now().strftime(DATE_INPUT_FORMAT) cls.due_date = (datetime.now() + timedelta(days=31) ).strftime(DATE_INPUT_FORMAT) cls.model_date = datetime.now().strftime(MODEL_DATE_INPUT_FORMAT) cls.model_due_date = (datetime.now() + timedelta(days=31) ).strftime(MODEL_DATE_INPUT_FORMAT) fy = FinancialYear.objects.create(financial_year=2020) cls.fy = fy cls.period = Period.objects.create( fy=fy, period="01", fy_and_period="202001", month_start=date(2020, 1, 31)) def test_no_lines(self): cash_book = CashBook.objects.create( nominal=None, name="current" ) supplier = Supplier.objects.create( code="1", name="2", email="3" ) h = PurchaseHeader.objects.create( type="pp", # payment date=date.today(), goods=120, vat=0, total=120, ref="123", cash_book=cash_book, supplier=supplier, paid=0, due=0, period=self.period ) self.assertEqual( len(PurchaseHeader.history.all()), 1 ) h.ref = "1234" # update the header h.save() h.refresh_from_db() self.assertEqual( len(PurchaseHeader.history.all()), 2 ) audit_transaction = AuditTransaction( h, PurchaseHeader, PurchaseLine, PurchaseMatching) self.assertEqual( len(audit_transaction.audit_header_history), 2 ) self.assertEqual( len(audit_transaction.audit_lines_history), 0 ) self.assertEqual( len(audit_transaction.audit_matches_history), 0 ) all_changes = audit_transaction.get_historical_changes() self.assertEqual( len(all_changes), 2 ) self.assertTrue( all_changes[0]["meta"]["AUDIT_date"] < all_changes[1]["meta"]["AUDIT_date"] ) create = all_changes[0] self.assertEqual( create["id"]["old"], "", ) self.assertEqual( create["id"]["new"], str(h.id), ) self.assertEqual( create["ref"]["old"], "", ) self.assertEqual( create["ref"]["new"], "123", ) self.assertEqual( create["goods"]["old"], "", ) self.assertEqual( create["goods"]["new"], str(h.goods * -1), # payment ui value is positive ) self.assertEqual( create["vat"]["old"], "", ) self.assertEqual( create["vat"]["new"], str(h.vat), ) self.assertEqual( create["total"]["old"], "", ) self.assertEqual( create["total"]["new"], str(h.total * -1), # payment ui value is positive ) self.assertEqual( create["paid"]["old"], "", ) self.assertEqual( create["paid"]["new"], str(h.paid), ) self.assertEqual( create["due"]["old"], "", ) self.assertEqual( create["due"]["new"], str(h.due), ) self.assertEqual( create["date"]["old"], "", ) self.assertEqual( create["date"]["new"], str(h.date), ) self.assertEqual( create["due_date"]["old"], "", ) self.assertEqual( create["due_date"]["new"], str(h.due_date), ) self.assertEqual( create["period_id"]["old"], "", ) self.assertEqual( create["period_id"]["new"], str(self.period.pk), ) self.assertEqual( create["status"]["old"], "", ) self.assertEqual( create["status"]["new"], str(h.status), ) self.assertEqual( create["type"]["old"], "", ) 
self.assertEqual( create["type"]["new"], str(h.type), ) self.assertEqual( create["cash_book_id"]["old"], "", ) self.assertEqual( create["cash_book_id"]["new"], str(h.cash_book_id), ) self.assertEqual( create["supplier_id"]["old"], "", ) self.assertEqual( create["supplier_id"]["new"], str(h.supplier_id), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "header" ) update = all_changes[1] self.assertEqual( update["ref"]["old"], "123", ) self.assertEqual( update["ref"]["new"], h.ref, ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "header" ) def test_lines(self): # same as above except for change a line # above has no lines cash_book = CashBook.objects.create( nominal=None, name="current" ) supplier = Supplier.objects.create( code="1", name="2", email="3" ) h = PurchaseHeader.objects.create( type="pi", # payment date=date.today(), goods=100, vat=20, total=120, ref="123", cash_book=cash_book, supplier=supplier, paid=0, due=0, period=self.period ) nominal = Nominal.objects.create( name="something", parent=None ) vat_code = Vat.objects.create( code="1", name="2", rate=20 ) l = PurchaseLine.objects.create( nominal=nominal, goods=100, vat=20, vat_code=vat_code, description="123", line_no=1, header=h ) self.assertEqual( len(PurchaseHeader.history.all()), 1 ) h.ref = "1234" # update the header h.save() h.refresh_from_db() l.description = "12345" l.save() l.refresh_from_db() self.assertEqual( len(PurchaseHeader.history.all()), 2 ) audit_transaction = AuditTransaction( h, PurchaseHeader, PurchaseLine, PurchaseMatching) self.assertEqual( len(audit_transaction.audit_header_history), 2 ) self.assertEqual( len(audit_transaction.audit_lines_history), 2 ) self.assertEqual( len(audit_transaction.audit_matches_history), 0 ) all_changes = audit_transaction.get_historical_changes() self.assertEqual( len(all_changes), 4 ) self.assertTrue( all_changes[0]["meta"]["AUDIT_date"] < all_changes[1]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[1]["meta"]["AUDIT_date"] < all_changes[2]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[2]["meta"]["AUDIT_date"] < all_changes[3]["meta"]["AUDIT_date"] ) create = all_changes[0] self.assertEqual( create["id"]["old"], "", ) self.assertEqual( create["id"]["new"], str(h.id), ) self.assertEqual( create["ref"]["old"], "", ) self.assertEqual( create["ref"]["new"], "123", ) self.assertEqual( create["goods"]["old"], "", ) self.assertEqual( create["goods"]["new"], str(h.goods), ) self.assertEqual( create["vat"]["old"], "", ) self.assertEqual( create["vat"]["new"], str(h.vat), ) self.assertEqual( create["total"]["old"], "", ) self.assertEqual( create["total"]["new"], str(h.total), ) self.assertEqual( create["paid"]["old"], "", ) self.assertEqual( create["paid"]["new"], str(h.paid), ) self.assertEqual( create["due"]["old"], "", ) self.assertEqual( create["due"]["new"], str(h.due), ) self.assertEqual( create["date"]["old"], "", ) self.assertEqual( create["date"]["new"], str(h.date), ) self.assertEqual( create["due_date"]["old"], "", ) self.assertEqual( create["due_date"]["new"], str(h.due_date), ) self.assertEqual( create["period_id"]["old"], "", ) self.assertEqual( create["period_id"]["new"], str(self.period.pk), ) self.assertEqual( create["status"]["old"], "", ) self.assertEqual( create["status"]["new"], str(h.status), ) self.assertEqual( create["type"]["old"], "", ) self.assertEqual( create["type"]["new"], str(h.type), ) 
self.assertEqual( create["cash_book_id"]["old"], "", ) self.assertEqual( create["cash_book_id"]["new"], str(h.cash_book_id), ) self.assertEqual( create["supplier_id"]["old"], "", ) self.assertEqual( create["supplier_id"]["new"], str(h.supplier_id), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "header" ) update = all_changes[2] self.assertEqual( update["ref"]["old"], "123", ) self.assertEqual( update["ref"]["new"], h.ref, ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "header" ) # now for the line change create = all_changes[1] self.assertEqual( create["id"]["old"], "", ) self.assertEqual( create["id"]["new"], str(l.id), ) self.assertEqual( create["description"]["old"], "", ) self.assertEqual( create["description"]["new"], "123", ) self.assertEqual( create["goods"]["old"], "" ) self.assertEqual( create["goods"]["new"], str(l.goods), ) self.assertEqual( create["vat"]["old"], "", ) self.assertEqual( create["vat"]["new"], str(l.vat), ) self.assertEqual( create["line_no"]["old"], "", ) self.assertEqual( create["line_no"]["new"], str(l.line_no), ) self.assertEqual( create["nominal_id"]["old"], "", ) self.assertEqual( create["nominal_id"]["new"], str(l.nominal.pk), ) self.assertEqual( create["vat_code_id"]["old"], "", ) self.assertEqual( create["vat_code_id"]["new"], str(l.vat_code.pk), ) self.assertEqual( create["header_id"]["old"], "", ) self.assertEqual( create["header_id"]["new"], str(l.header.pk), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "line" ) update = all_changes[3] self.assertEqual( update["description"]["old"], "123", ) self.assertEqual( update["description"]["new"], l.description, ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "line" ) def test_matching(self): # same as above except for change a line # above has no lines cash_book = CashBook.objects.create( nominal=None, name="current" ) supplier = Supplier.objects.create( code="1", name="2", email="3" ) to_match_against = PurchaseHeader.objects.create( type="pi", # payment date=date.today(), goods=-100, vat=-20, total=-120, ref="123", cash_book=cash_book, supplier=supplier, paid=0, due=0, period=self.period ) h = PurchaseHeader.objects.create( type="pi", # payment date=date.today(), goods=100, vat=20, total=120, ref="123", cash_book=cash_book, supplier=supplier, paid=0, due=0, period=self.period ) nominal = Nominal.objects.create( name="something", parent=None ) vat_code = Vat.objects.create( code="1", name="2", rate=20 ) l = PurchaseLine.objects.create( nominal=nominal, goods=100, vat=20, vat_code=vat_code, description="123", line_no=1, header=h ) match = PurchaseMatching.objects.create( matched_by=h, matched_to=to_match_against, period=self.period, value=-100 ) self.assertEqual( len(PurchaseHeader.history.all()), 2 ) self.assertEqual( len(PurchaseMatching.history.all()), 1 ) h.ref = "1234" # update the header h.save() h.refresh_from_db() l.description = "12345" l.save() l.refresh_from_db() match.value = -120 match.save() match.refresh_from_db() audit_transaction = AuditTransaction( h, PurchaseHeader, PurchaseLine, PurchaseMatching) self.assertEqual( len(audit_transaction.audit_header_history), 2 ) self.assertEqual( len(audit_transaction.audit_lines_history), 2 ) self.assertEqual( len(audit_transaction.audit_matches_history), 2 ) all_changes 
= audit_transaction.get_historical_changes() self.assertEqual( len(all_changes), 6 ) self.assertTrue( all_changes[0]["meta"]["AUDIT_date"] <= all_changes[1]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[1]["meta"]["AUDIT_date"] <= all_changes[2]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[2]["meta"]["AUDIT_date"] <= all_changes[3]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[3]["meta"]["AUDIT_date"] <= all_changes[4]["meta"]["AUDIT_date"] ) self.assertTrue( all_changes[4]["meta"]["AUDIT_date"] <= all_changes[5]["meta"]["AUDIT_date"] ) create = all_changes[0] self.assertEqual( create["id"]["old"], "", ) self.assertEqual( create["id"]["new"], str(h.id), ) self.assertEqual( create["ref"]["old"], "", ) self.assertEqual( create["ref"]["new"], "123", ) self.assertEqual( create["goods"]["old"], "", ) self.assertEqual( create["goods"]["new"], str(h.goods), ) self.assertEqual( create["vat"]["old"], "", ) self.assertEqual( create["vat"]["new"], str(h.vat), ) self.assertEqual( create["total"]["old"], "", ) self.assertEqual( create["total"]["new"], str(h.total), ) self.assertEqual( create["paid"]["old"], "", ) self.assertEqual( create["paid"]["new"], str(h.paid), ) self.assertEqual( create["due"]["old"], "", ) self.assertEqual( create["due"]["new"], str(h.due), ) self.assertEqual( create["date"]["old"], "", ) self.assertEqual( create["date"]["new"], str(h.date), ) self.assertEqual( create["due_date"]["old"], "", ) self.assertEqual( create["due_date"]["new"], str(h.due_date), ) self.assertEqual( create["period_id"]["old"], "", ) self.assertEqual( create["period_id"]["new"], str(self.period.pk), ) self.assertEqual( create["status"]["old"], "", ) self.assertEqual( create["status"]["new"], str(h.status), ) self.assertEqual( create["type"]["old"], "", ) self.assertEqual( create["type"]["new"], str(h.type), ) self.assertEqual( create["cash_book_id"]["old"], "", ) self.assertEqual( create["cash_book_id"]["new"], str(h.cash_book_id), ) self.assertEqual( create["supplier_id"]["old"], "", ) self.assertEqual( create["supplier_id"]["new"], str(h.supplier_id), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "header" ) update = all_changes[3] self.assertEqual( update["ref"]["old"], "123", ) self.assertEqual( update["ref"]["new"], h.ref, ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "header" ) # now for the line change create = all_changes[1] self.assertEqual( create["id"]["old"], "", ) self.assertEqual( create["id"]["new"], str(l.id), ) self.assertEqual( create["description"]["old"], "", ) self.assertEqual( create["description"]["new"], "123", ) self.assertEqual( create["goods"]["old"], "" ) self.assertEqual( create["goods"]["new"], str(l.goods), ) self.assertEqual( create["vat"]["old"], "", ) self.assertEqual( create["vat"]["new"], str(l.vat), ) self.assertEqual( create["line_no"]["old"], "", ) self.assertEqual( create["line_no"]["new"], str(l.line_no), ) self.assertEqual( create["nominal_id"]["old"], "", ) self.assertEqual( create["nominal_id"]["new"], str(l.nominal.pk), ) self.assertEqual( create["vat_code_id"]["old"], "", ) self.assertEqual( create["vat_code_id"]["new"], str(l.vat_code.pk), ) self.assertEqual( create["header_id"]["old"], "", ) self.assertEqual( create["header_id"]["new"], str(l.header.pk), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "line" ) update = 
all_changes[4] self.assertEqual( update["description"]["old"], "123", ) self.assertEqual( update["description"]["new"], l.description, ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "line" ) create = all_changes[2] self.assertEqual( create["matched_by_id"]["old"], "", ) self.assertEqual( create["matched_by_id"]["new"], str(match.matched_by_id), ) self.assertEqual( create["matched_to_id"]["old"], "", ) self.assertEqual( create["matched_to_id"]["new"], str(match.matched_to_id), ) self.assertEqual( create["value"]["old"], "", ) self.assertEqual( create["value"]["new"], "-100.00", ) self.assertEqual( create["period_id"]["old"], "", ) self.assertEqual( create["period_id"]["new"], str(self.period.pk), ) self.assertEqual( create["meta"]["AUDIT_action"], "Create" ) self.assertEqual( create["meta"]["transaction_aspect"], "match" ) update = all_changes[5] self.assertEqual( update["value"]["old"], "-100.00" ) self.assertEqual( update["value"]["new"], "-120.00" ) self.assertEqual( update["meta"]["AUDIT_action"], "Update" ) self.assertEqual( update["meta"]["transaction_aspect"], "match" )
[ "nominals.models.Nominal.objects.create", "contacts.models.Contact.objects.create", "accountancy.helpers.AuditTransaction", "datetime.date", "datetime.date.today", "purchases.models.PurchaseHeader.history.all", "contacts.models.Contact.history.all", "datetime.timedelta", "controls.models.FinancialYear.objects.create", "vat.models.Vat.objects.create", "cashbook.models.CashBook.objects.create", "purchases.models.PurchaseMatching.objects.create", "accountancy.helpers.get_all_historical_changes", "purchases.models.PurchaseMatching.history.all", "datetime.datetime.now", "purchases.models.PurchaseLine.objects.create", "purchases.models.Supplier.objects.create" ]
[((629, 685), 'contacts.models.Contact.objects.create', 'Contact.objects.create', ([], {'code': '"""1"""', 'name': '"""11"""', 'email': '"""111"""'}), "(code='1', name='11', email='111')\n", (651, 685), False, 'from contacts.models import Contact\n'), ((857, 903), 'accountancy.helpers.get_all_historical_changes', 'get_all_historical_changes', (['historical_records'], {}), '(historical_records)\n', (883, 903), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((1766, 1822), 'contacts.models.Contact.objects.create', 'Contact.objects.create', ([], {'code': '"""1"""', 'name': '"""11"""', 'email': '"""111"""'}), "(code='1', name='11', email='111')\n", (1788, 1822), False, 'from contacts.models import Contact\n'), ((2045, 2091), 'accountancy.helpers.get_all_historical_changes', 'get_all_historical_changes', (['historical_records'], {}), '(historical_records)\n', (2071, 2091), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((3299, 3355), 'contacts.models.Contact.objects.create', 'Contact.objects.create', ([], {'code': '"""1"""', 'name': '"""11"""', 'email': '"""111"""'}), "(code='1', name='11', email='111')\n", (3321, 3355), False, 'from contacts.models import Contact\n'), ((3650, 3696), 'accountancy.helpers.get_all_historical_changes', 'get_all_historical_changes', (['historical_records'], {}), '(historical_records)\n', (3676, 3696), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((6783, 6832), 'controls.models.FinancialYear.objects.create', 'FinancialYear.objects.create', ([], {'financial_year': '(2020)'}), '(financial_year=2020)\n', (6811, 6832), False, 'from controls.models import FinancialYear, Period\n'), ((7034, 7087), 'cashbook.models.CashBook.objects.create', 'CashBook.objects.create', ([], {'nominal': 'None', 'name': '"""current"""'}), "(nominal=None, name='current')\n", (7057, 7087), False, 'from cashbook.models import CashBook\n'), ((7141, 7195), 'purchases.models.Supplier.objects.create', 'Supplier.objects.create', ([], {'code': '"""1"""', 'name': '"""2"""', 'email': '"""3"""'}), "(code='1', name='2', email='3')\n", (7164, 7195), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((7893, 7960), 'accountancy.helpers.AuditTransaction', 'AuditTransaction', (['h', 'PurchaseHeader', 'PurchaseLine', 'PurchaseMatching'], {}), '(h, PurchaseHeader, PurchaseLine, PurchaseMatching)\n', (7909, 7960), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((12047, 12100), 'cashbook.models.CashBook.objects.create', 'CashBook.objects.create', ([], {'nominal': 'None', 'name': '"""current"""'}), "(nominal=None, name='current')\n", (12070, 12100), False, 'from cashbook.models import CashBook\n'), ((12154, 12208), 'purchases.models.Supplier.objects.create', 'Supplier.objects.create', ([], {'code': '"""1"""', 'name': '"""2"""', 'email': '"""3"""'}), "(code='1', name='2', email='3')\n", (12177, 12208), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((12615, 12668), 'nominals.models.Nominal.objects.create', 'Nominal.objects.create', ([], {'name': '"""something"""', 'parent': 'None'}), "(name='something', parent=None)\n", (12637, 12668), False, 'from nominals.models import Nominal\n'), ((12722, 12769), 'vat.models.Vat.objects.create', 'Vat.objects.create', ([], {'code': '"""1"""', 'name': '"""2"""', 'rate': '(20)'}), "(code='1', name='2', 
rate=20)\n", (12740, 12769), False, 'from vat.models import Vat\n'), ((12828, 12955), 'purchases.models.PurchaseLine.objects.create', 'PurchaseLine.objects.create', ([], {'nominal': 'nominal', 'goods': '(100)', 'vat': '(20)', 'vat_code': 'vat_code', 'description': '"""123"""', 'line_no': '(1)', 'header': 'h'}), "(nominal=nominal, goods=100, vat=20, vat_code=\n vat_code, description='123', line_no=1, header=h)\n", (12855, 12955), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((13433, 13500), 'accountancy.helpers.AuditTransaction', 'AuditTransaction', (['h', 'PurchaseHeader', 'PurchaseLine', 'PurchaseMatching'], {}), '(h, PurchaseHeader, PurchaseLine, PurchaseMatching)\n', (13449, 13500), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((20008, 20061), 'cashbook.models.CashBook.objects.create', 'CashBook.objects.create', ([], {'nominal': 'None', 'name': '"""current"""'}), "(nominal=None, name='current')\n", (20031, 20061), False, 'from cashbook.models import CashBook\n'), ((20115, 20169), 'purchases.models.Supplier.objects.create', 'Supplier.objects.create', ([], {'code': '"""1"""', 'name': '"""2"""', 'email': '"""3"""'}), "(code='1', name='2', email='3')\n", (20138, 20169), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((20934, 20987), 'nominals.models.Nominal.objects.create', 'Nominal.objects.create', ([], {'name': '"""something"""', 'parent': 'None'}), "(name='something', parent=None)\n", (20956, 20987), False, 'from nominals.models import Nominal\n'), ((21041, 21088), 'vat.models.Vat.objects.create', 'Vat.objects.create', ([], {'code': '"""1"""', 'name': '"""2"""', 'rate': '(20)'}), "(code='1', name='2', rate=20)\n", (21059, 21088), False, 'from vat.models import Vat\n'), ((21147, 21274), 'purchases.models.PurchaseLine.objects.create', 'PurchaseLine.objects.create', ([], {'nominal': 'nominal', 'goods': '(100)', 'vat': '(20)', 'vat_code': 'vat_code', 'description': '"""123"""', 'line_no': '(1)', 'header': 'h'}), "(nominal=nominal, goods=100, vat=20, vat_code=\n vat_code, description='123', line_no=1, header=h)\n", (21174, 21274), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((21380, 21490), 'purchases.models.PurchaseMatching.objects.create', 'PurchaseMatching.objects.create', ([], {'matched_by': 'h', 'matched_to': 'to_match_against', 'period': 'self.period', 'value': '(-100)'}), '(matched_by=h, matched_to=to_match_against,\n period=self.period, value=-100)\n', (21411, 21490), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((22018, 22085), 'accountancy.helpers.AuditTransaction', 'AuditTransaction', (['h', 'PurchaseHeader', 'PurchaseLine', 'PurchaseMatching'], {}), '(h, PurchaseHeader, PurchaseLine, PurchaseMatching)\n', (22034, 22085), False, 'from accountancy.helpers import AuditTransaction, get_all_historical_changes\n'), ((715, 736), 'contacts.models.Contact.history.all', 'Contact.history.all', ([], {}), '()\n', (734, 736), False, 'from contacts.models import Contact\n'), ((1903, 1924), 'contacts.models.Contact.history.all', 'Contact.history.all', ([], {}), '()\n', (1922, 1924), False, 'from contacts.models import Contact\n'), ((3508, 3529), 'contacts.models.Contact.history.all', 'Contact.history.all', ([], {}), '()\n', (3527, 3529), False, 'from contacts.models import Contact\n'), ((6407, 6421), 'datetime.datetime.now', 'datetime.now', 
([], {}), '()\n', (6419, 6421), False, 'from datetime import date, datetime, timedelta\n'), ((6589, 6603), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6601, 6603), False, 'from datetime import date, datetime, timedelta\n'), ((6965, 6982), 'datetime.date', 'date', (['(2020)', '(1)', '(31)'], {}), '(2020, 1, 31)\n', (6969, 6982), False, 'from datetime import date, datetime, timedelta\n'), ((7336, 7348), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7346, 7348), False, 'from datetime import date, datetime, timedelta\n'), ((7624, 7652), 'purchases.models.PurchaseHeader.history.all', 'PurchaseHeader.history.all', ([], {}), '()\n', (7650, 7652), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((7810, 7838), 'purchases.models.PurchaseHeader.history.all', 'PurchaseHeader.history.all', ([], {}), '()\n', (7836, 7838), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((12349, 12361), 'datetime.date.today', 'date.today', ([], {}), '()\n', (12359, 12361), False, 'from datetime import date, datetime, timedelta\n'), ((13087, 13115), 'purchases.models.PurchaseHeader.history.all', 'PurchaseHeader.history.all', ([], {}), '()\n', (13113, 13115), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((13350, 13378), 'purchases.models.PurchaseHeader.history.all', 'PurchaseHeader.history.all', ([], {}), '()\n', (13376, 13378), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((20325, 20337), 'datetime.date.today', 'date.today', ([], {}), '()\n', (20335, 20337), False, 'from datetime import date, datetime, timedelta\n'), ((20669, 20681), 'datetime.date.today', 'date.today', ([], {}), '()\n', (20679, 20681), False, 'from datetime import date, datetime, timedelta\n'), ((21587, 21615), 'purchases.models.PurchaseHeader.history.all', 'PurchaseHeader.history.all', ([], {}), '()\n', (21613, 21615), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((21684, 21714), 'purchases.models.PurchaseMatching.history.all', 'PurchaseMatching.history.all', ([], {}), '()\n', (21712, 21714), False, 'from purchases.models import PurchaseHeader, PurchaseLine, PurchaseMatching, Supplier\n'), ((6474, 6488), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6486, 6488), False, 'from datetime import date, datetime, timedelta\n'), ((6491, 6509), 'datetime.timedelta', 'timedelta', ([], {'days': '(31)'}), '(days=31)\n', (6500, 6509), False, 'from datetime import date, datetime, timedelta\n'), ((6668, 6682), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6680, 6682), False, 'from datetime import date, datetime, timedelta\n'), ((6685, 6703), 'datetime.timedelta', 'timedelta', ([], {'days': '(31)'}), '(days=31)\n', (6694, 6703), False, 'from datetime import date, datetime, timedelta\n')]
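The assertions above imply that AuditTransaction.get_historical_changes() merges the header, line, and match histories into a single list ordered by AUDIT_date. A minimal sketch of that merge-and-sort step, independent of the accountancy helpers (the field names mirror the assertions; the data is invented):

from datetime import datetime, timedelta

now = datetime.now()
header_changes = [{"meta": {"AUDIT_date": now, "transaction_aspect": "header"}}]
line_changes = [{"meta": {"AUDIT_date": now + timedelta(seconds=1), "transaction_aspect": "line"}}]

# Merge the per-model histories, then order by audit date as the assertions expect
all_changes = sorted(header_changes + line_changes, key=lambda c: c["meta"]["AUDIT_date"])
assert all_changes[0]["meta"]["AUDIT_date"] <= all_changes[1]["meta"]["AUDIT_date"]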
#!/usr/bin/env python3 # # Copyright (c) 2018, NXP # # SPDX-License-Identifier: Apache-2.0 """Import files from an NXP MCUXpresso SDK archive into Zephyr The MCUXpresso SDK provides device header files and peripheral drivers for NXP Kinetis, LPC, and i.MX SoCs. Zephyr drivers for these SoCs are shims that adapt MCUXpresso SDK APIs to Zephyr APIs. This script automates updating Zephyr to a newer version of the MCUXpresso SDK. """ import argparse import os import re import shutil import sys import tempfile if "ZEPHYR_BASE" not in os.environ: sys.stderr.write("$ZEPHYR_BASE environment variable undefined.\n") exit(1) ZEPHYR_BASE = os.environ["ZEPHYR_BASE"] def get_soc_family(device): if device.startswith('MK'): return 'kinetis' elif device.startswith('LPC'): return 'lpc' elif device.startswith('MIMX'): return 'imx' def get_files(src, pattern): matches = [] nonmatches = [] if os.path.exists(src): for filename in os.listdir(src): path = os.path.join(src, filename) if re.search(pattern, filename): matches.append(path) else: nonmatches.append(path) return [matches, nonmatches] def copy_files(files, dst): if not files: return os.makedirs(dst, exist_ok=True) for f in files: shutil.copy2(f, dst) def import_sdk(directory): devices = os.listdir(os.path.join(directory, 'devices')) boards = os.listdir(os.path.join(directory, 'boards')) for device in devices: family = get_soc_family(device) shared_dst = os.path.join(ZEPHYR_BASE, 'ext/hal/nxp/mcux/drivers', family) device_dst = os.path.join(ZEPHYR_BASE, 'ext/hal/nxp/mcux/devices', device) device_src = os.path.join(directory, 'devices', device) device_pattern = "|".join([device, 'fsl_device_registers']) [device_headers, ignore] = get_files(device_src, device_pattern) drivers_src = os.path.join(directory, 'devices', device, 'drivers') drivers_pattern = "fsl_clock|fsl_iomuxc" [device_drivers, shared_drivers] = get_files(drivers_src, drivers_pattern) xip_boot_src = os.path.join(directory, 'devices', device, 'xip') xip_boot_pattern = ".*" [xip_boot, ignore] = get_files(xip_boot_src, xip_boot_pattern) print('Importing {} device headers to {}'.format(device, device_dst)) copy_files(device_headers, device_dst) print('Importing {} device-specific drivers to {}'.format(device, device_dst)) copy_files(device_drivers, device_dst) print('Importing {} family shared drivers to {}'.format(family, shared_dst)) copy_files(shared_drivers, shared_dst) print('Importing {} xip boot to {}'.format(device, shared_dst)) copy_files(xip_boot, shared_dst) for board in boards: board_src = os.path.join(directory, 'boards', board) board_dst = os.path.join(ZEPHYR_BASE, 'ext/hal/nxp/mcux/boards', board) xip_config_src = os.path.join(board_src, 'xip') xip_config_pattern = ".*" [xip_config, ignore] = get_files(xip_config_src, xip_config_pattern) print('Importing {} xip config to {}'.format(board, board_dst)) copy_files(xip_config, board_dst) def parse_args(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-f", "--file", required=True, help="MCUXpresso SDK archive file to import from") args = parser.parse_args() with tempfile.TemporaryDirectory() as d: print('Extracting MCUXpresso SDK into temporary directory {}'.format(d)) shutil.unpack_archive(args.file, d) import_sdk(d) def main(): parse_args() if __name__ == "__main__": main()
[ "shutil.unpack_archive", "tempfile.TemporaryDirectory", "os.makedirs", "argparse.ArgumentParser", "shutil.copy2", "os.path.exists", "re.search", "sys.stderr.write", "os.path.join", "os.listdir" ]
[((555, 621), 'sys.stderr.write', 'sys.stderr.write', (['"""$ZEPHYR_BASE environment variable undefined.\n"""'], {}), "('$ZEPHYR_BASE environment variable undefined.\\n')\n", (571, 621), False, 'import sys\n'), ((948, 967), 'os.path.exists', 'os.path.exists', (['src'], {}), '(src)\n', (962, 967), False, 'import os\n'), ((1297, 1328), 'os.makedirs', 'os.makedirs', (['dst'], {'exist_ok': '(True)'}), '(dst, exist_ok=True)\n', (1308, 1328), False, 'import os\n'), ((3342, 3386), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (3365, 3386), False, 'import argparse\n'), ((993, 1008), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (1003, 1008), False, 'import os\n'), ((1357, 1377), 'shutil.copy2', 'shutil.copy2', (['f', 'dst'], {}), '(f, dst)\n', (1369, 1377), False, 'import shutil\n'), ((1431, 1465), 'os.path.join', 'os.path.join', (['directory', '"""devices"""'], {}), "(directory, 'devices')\n", (1443, 1465), False, 'import os\n'), ((1491, 1524), 'os.path.join', 'os.path.join', (['directory', '"""boards"""'], {}), "(directory, 'boards')\n", (1503, 1524), False, 'import os\n'), ((1615, 1676), 'os.path.join', 'os.path.join', (['ZEPHYR_BASE', '"""ext/hal/nxp/mcux/drivers"""', 'family'], {}), "(ZEPHYR_BASE, 'ext/hal/nxp/mcux/drivers', family)\n", (1627, 1676), False, 'import os\n'), ((1698, 1759), 'os.path.join', 'os.path.join', (['ZEPHYR_BASE', '"""ext/hal/nxp/mcux/devices"""', 'device'], {}), "(ZEPHYR_BASE, 'ext/hal/nxp/mcux/devices', device)\n", (1710, 1759), False, 'import os\n'), ((1782, 1824), 'os.path.join', 'os.path.join', (['directory', '"""devices"""', 'device'], {}), "(directory, 'devices', device)\n", (1794, 1824), False, 'import os\n'), ((1989, 2042), 'os.path.join', 'os.path.join', (['directory', '"""devices"""', 'device', '"""drivers"""'], {}), "(directory, 'devices', device, 'drivers')\n", (2001, 2042), False, 'import os\n'), ((2199, 2248), 'os.path.join', 'os.path.join', (['directory', '"""devices"""', 'device', '"""xip"""'], {}), "(directory, 'devices', device, 'xip')\n", (2211, 2248), False, 'import os\n'), ((2906, 2946), 'os.path.join', 'os.path.join', (['directory', '"""boards"""', 'board'], {}), "(directory, 'boards', board)\n", (2918, 2946), False, 'import os\n'), ((2967, 3026), 'os.path.join', 'os.path.join', (['ZEPHYR_BASE', '"""ext/hal/nxp/mcux/boards"""', 'board'], {}), "(ZEPHYR_BASE, 'ext/hal/nxp/mcux/boards', board)\n", (2979, 3026), False, 'import os\n'), ((3053, 3083), 'os.path.join', 'os.path.join', (['board_src', '"""xip"""'], {}), "(board_src, 'xip')\n", (3065, 3083), False, 'import os\n'), ((3560, 3589), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3587, 3589), False, 'import tempfile\n'), ((3685, 3720), 'shutil.unpack_archive', 'shutil.unpack_archive', (['args.file', 'd'], {}), '(args.file, d)\n', (3706, 3720), False, 'import shutil\n'), ((1029, 1056), 'os.path.join', 'os.path.join', (['src', 'filename'], {}), '(src, filename)\n', (1041, 1056), False, 'import os\n'), ((1072, 1100), 're.search', 're.search', (['pattern', 'filename'], {}), '(pattern, filename)\n', (1081, 1100), False, 'import re\n')]
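The importer above unpacks the SDK archive into a temporary directory, copies what it needs, and lets the directory clean itself up. A minimal, self-contained sketch of that unpack-then-inspect pattern (the archive filename is a placeholder, not taken from the script):

import shutil
import tempfile
from pathlib import Path

def list_archive(archive_path):
    """Unpack an archive into a throwaway directory and return its relative paths."""
    with tempfile.TemporaryDirectory() as workdir:
        # unpack_archive infers the format (.zip, .tar.gz, ...) from the filename
        shutil.unpack_archive(archive_path, workdir)
        # Collect paths before the context manager deletes the directory
        return [str(p.relative_to(workdir)) for p in Path(workdir).rglob("*")]

print(list_archive("SDK_2.x_MCUXpresso.zip"))  # hypothetical archive name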
# Copyright (c) 2021 <NAME> # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from random import Random import pytest from tonto.card import Card from tonto.deck import Deck from tonto.exception import GameError from tonto.game import Game MESSAGE = { "WELCOME": ["Welcome."], "ROUND START": ["Round $current_round."], "ROUND END": ["$round_winner won the round."], "ROUND END TIE": ["Round was a tie."], "TURN START": ["$current_player_name turn."], "FALSE TURN END": [ ("Negative, $current_player_name drew $current_card " "bringing score to $current_player_score.") ], "TRUE TURN END": [ ("Positive, $current_player_name drew $current_card " "bringing score to $current_player_score.") ], "GAME OVER TIE": ["Game was a tie."], "GAME OVER": ["Game over, $game_winner won."], "EMPTY DECK": ["Deck empty."], } GAME_1_RESULTS = ("Welcome.\nRound 1.\nBerkelly turn.\n" "Positive, Berkelly drew 10 of Spades " "bringing score to 10.\nBerkelly won the round.\n" "Game over, Berkelly won.\n1: Berkelly (10)\n") GAME_2_RESULTS = ("Welcome.\nRound 1.\nBerkelly turn.\n" "Positive, Berkelly drew King of Spades bringing " "score to 13.\nBerkelly won the round.\nGame over, " "Berkelly won.\n1: Berkelly (13)\n") GAME_3_RESULTS = ("Welcome.\nRound 1.\nBerkelly turn.\n" "Positive, Berkelly drew 9 of Clubs bringing score to 36.\n" "Berkelly won the round.\nRound 2.\nBerkelly turn.\nDeck " "empty.\nPositive, Berkelly drew 10 of Spades bringing " "score to 46.\nBerkelly won the round.\nGame over, " "Berkelly won.\n1: Berkelly (46)\n") GAME_4_RESULTS = ("Welcome.\nRound 1.\nBerkelly turn.\nPositive, Berkelly " "drew 10 of Spades bringing score to 10.\nCez turn." "\nPositive, Cez drew Queen of Hearts bringing score to 36." "\nTonto turn.\nPositive, Tonto drew Jack of Clubs " "bringing score to 44.\nTonto won the round.\nRound 2.\n" "Berkelly turn.\nNegative, Berkelly drew 6 of Spades " "bringing score to 16.\nCez turn.\nPositive, Cez drew 5 " "of Diamonds bringing score to 46.\nTonto turn.\nPositive, " "Tonto drew 9 of Spades bringing score to 53.\nCez won the " "round.\nRound 3.\nBerkelly turn.\nNegative, Berkelly drew " "7 of Hearts bringing score to 37.\nCez turn.\nPositive, " "Cez drew 4 of Hearts bringing score to 58.\nTonto turn." "\nPositive, Tonto drew 6 of Hearts bringing score to 71." 
"\nBerkelly won the round.\nGame over, Tonto won.\n1: " "Tonto (71)\n2: Cez (58)\n3: Berkelly (37)\n") GAME_5_RESULTS = ("Welcome.\nRound 1.\nBerkelly turn.\nPositive, Berkelly drew" " 10 of Spades bringing score to 10.\nCez turn.\nNegative, " "Cez drew 3 of Diamonds bringing score to 6.\nTonto turn." "\nPositive, Tonto drew 5 of Diamonds bringing score to 10." "\nRound was a tie.\nRound 2.\nBerkelly turn.\nPositive, " "Berkelly drew King of Spades bringing score to 23.\nCez " "turn.\nPositive, Cez drew Ace of Clubs bringing score to " "62.\nTonto turn.\nNegative, Tonto drew 9 of Clubs " "bringing score to 46.\nCez won the round.\nRound 3." "\nBerkelly turn.\nNegative, Berkelly drew 3 of Diamonds " "bringing score to 29.\nCez turn.\nPositive, Cez drew 8 " "of Spades bringing score to 70.\nTonto turn.\nPositive, " "Tonto drew 6 of Clubs bringing score to 70.\nTonto won " "the round.\nGame was a tie.\n1: Cez (70)\n1: Tonto (70)" "\n2: Berkelly (29)\n") PLAYERS_1 = ["Berkelly"] PLAYERS_2 = ["Berkelly", "Cez", "Tonto"] __author__ = "<NAME>" __copyright__ = "Copyright 2020, Tonto's Card Game" __credits__ = ["<NAME>", "<NAME>"] __email__ = "<EMAIL>" __license__ = "MIT" __maintainer__ = "<NAME>" __status__ = "Production" __version__ = "1.0.0" def test_basic_game_1(monkeypatch): monkeypatch.setattr("builtins.input", lambda x: "") with pytest.raises(GameError): Game([]) with pytest.raises(GameError): Game(PLAYERS_1, max_rounds=0) game = Game(PLAYERS_1) assert game game.play() assert not game game.new_game() assert game def test_basic_game_2(capsys, monkeypatch): monkeypatch.setattr("builtins.input", lambda x: "") rand = Random() rand.seed(1) deck = Deck(random_instance=rand) deck.shuffle() game = Game(PLAYERS_1, deck=deck, message=MESSAGE, max_rounds=1) assert game game.play() assert not game captured = capsys.readouterr() assert captured.out == GAME_1_RESULTS game.new_game() assert game game.play() assert not game captured = capsys.readouterr() assert captured.out == GAME_2_RESULTS def test__basic_game_3(capsys, monkeypatch): monkeypatch.setattr("builtins.input", lambda x: "") rand = Random() rand.seed(1) deck = Deck(empty=True, random_instance=rand) deck.add_card(Card("Clubs", "9")) deck.shuffle() game = Game(PLAYERS_1, deck=deck, message=MESSAGE, max_rounds=2) game.play() captured = capsys.readouterr() assert captured.out == GAME_3_RESULTS def test_basic_game_4(capsys, monkeypatch): monkeypatch.setattr("builtins.input", lambda x: "") rand = Random() rand.seed(1) deck = Deck(random_instance=rand) deck.shuffle() game = Game(PLAYERS_2, deck=deck, message=MESSAGE) game.play() captured = capsys.readouterr() assert captured.out == GAME_4_RESULTS def test_basic_game_5(capsys, monkeypatch): monkeypatch.setattr("builtins.input", lambda x: "") cards = [ Card("Clubs", "6"), Card("Spades", "8"), Card("Diamonds", "3"), Card("Clubs", "9"), Card("Clubs", "Ace"), Card("Spades", "King"), Card("Diamonds", "5"), Card("Diamonds", "3"), Card("Spades", "10"), ] deck = Deck(empty=True) for card in cards: deck.add_card(card) game = Game(PLAYERS_2, deck=deck, message=MESSAGE) game.play() captured = capsys.readouterr() assert captured.out == GAME_5_RESULTS
[ "random.Random", "pytest.raises", "tonto.game.Game", "tonto.deck.Deck", "tonto.card.Card" ]
[((5587, 5602), 'tonto.game.Game', 'Game', (['PLAYERS_1'], {}), '(PLAYERS_1)\n', (5591, 5602), False, 'from tonto.game import Game\n'), ((5805, 5813), 'random.Random', 'Random', ([], {}), '()\n', (5811, 5813), False, 'from random import Random\n'), ((5842, 5868), 'tonto.deck.Deck', 'Deck', ([], {'random_instance': 'rand'}), '(random_instance=rand)\n', (5846, 5868), False, 'from tonto.deck import Deck\n'), ((5900, 5957), 'tonto.game.Game', 'Game', (['PLAYERS_1'], {'deck': 'deck', 'message': 'MESSAGE', 'max_rounds': '(1)'}), '(PLAYERS_1, deck=deck, message=MESSAGE, max_rounds=1)\n', (5904, 5957), False, 'from tonto.game import Game\n'), ((6352, 6360), 'random.Random', 'Random', ([], {}), '()\n', (6358, 6360), False, 'from random import Random\n'), ((6389, 6427), 'tonto.deck.Deck', 'Deck', ([], {'empty': '(True)', 'random_instance': 'rand'}), '(empty=True, random_instance=rand)\n', (6393, 6427), False, 'from tonto.deck import Deck\n'), ((6497, 6554), 'tonto.game.Game', 'Game', (['PLAYERS_1'], {'deck': 'deck', 'message': 'MESSAGE', 'max_rounds': '(2)'}), '(PLAYERS_1, deck=deck, message=MESSAGE, max_rounds=2)\n', (6501, 6554), False, 'from tonto.game import Game\n'), ((6762, 6770), 'random.Random', 'Random', ([], {}), '()\n', (6768, 6770), False, 'from random import Random\n'), ((6799, 6825), 'tonto.deck.Deck', 'Deck', ([], {'random_instance': 'rand'}), '(random_instance=rand)\n', (6803, 6825), False, 'from tonto.deck import Deck\n'), ((6857, 6900), 'tonto.game.Game', 'Game', (['PLAYERS_2'], {'deck': 'deck', 'message': 'MESSAGE'}), '(PLAYERS_2, deck=deck, message=MESSAGE)\n', (6861, 6900), False, 'from tonto.game import Game\n'), ((7398, 7414), 'tonto.deck.Deck', 'Deck', ([], {'empty': '(True)'}), '(empty=True)\n', (7402, 7414), False, 'from tonto.deck import Deck\n'), ((7477, 7520), 'tonto.game.Game', 'Game', (['PLAYERS_2'], {'deck': 'deck', 'message': 'MESSAGE'}), '(PLAYERS_2, deck=deck, message=MESSAGE)\n', (7481, 7520), False, 'from tonto.game import Game\n'), ((5460, 5484), 'pytest.raises', 'pytest.raises', (['GameError'], {}), '(GameError)\n', (5473, 5484), False, 'import pytest\n'), ((5494, 5502), 'tonto.game.Game', 'Game', (['[]'], {}), '([])\n', (5498, 5502), False, 'from tonto.game import Game\n'), ((5512, 5536), 'pytest.raises', 'pytest.raises', (['GameError'], {}), '(GameError)\n', (5525, 5536), False, 'import pytest\n'), ((5546, 5575), 'tonto.game.Game', 'Game', (['PLAYERS_1'], {'max_rounds': '(0)'}), '(PLAYERS_1, max_rounds=0)\n', (5550, 5575), False, 'from tonto.game import Game\n'), ((6446, 6464), 'tonto.card.Card', 'Card', (['"""Clubs"""', '"""9"""'], {}), "('Clubs', '9')\n", (6450, 6464), False, 'from tonto.card import Card\n'), ((7119, 7137), 'tonto.card.Card', 'Card', (['"""Clubs"""', '"""6"""'], {}), "('Clubs', '6')\n", (7123, 7137), False, 'from tonto.card import Card\n'), ((7147, 7166), 'tonto.card.Card', 'Card', (['"""Spades"""', '"""8"""'], {}), "('Spades', '8')\n", (7151, 7166), False, 'from tonto.card import Card\n'), ((7176, 7197), 'tonto.card.Card', 'Card', (['"""Diamonds"""', '"""3"""'], {}), "('Diamonds', '3')\n", (7180, 7197), False, 'from tonto.card import Card\n'), ((7207, 7225), 'tonto.card.Card', 'Card', (['"""Clubs"""', '"""9"""'], {}), "('Clubs', '9')\n", (7211, 7225), False, 'from tonto.card import Card\n'), ((7235, 7255), 'tonto.card.Card', 'Card', (['"""Clubs"""', '"""Ace"""'], {}), "('Clubs', 'Ace')\n", (7239, 7255), False, 'from tonto.card import Card\n'), ((7265, 7287), 'tonto.card.Card', 'Card', (['"""Spades"""', '"""King"""'], {}), "('Spades', 
'King')\n", (7269, 7287), False, 'from tonto.card import Card\n'), ((7297, 7318), 'tonto.card.Card', 'Card', (['"""Diamonds"""', '"""5"""'], {}), "('Diamonds', '5')\n", (7301, 7318), False, 'from tonto.card import Card\n'), ((7328, 7349), 'tonto.card.Card', 'Card', (['"""Diamonds"""', '"""3"""'], {}), "('Diamonds', '3')\n", (7332, 7349), False, 'from tonto.card import Card\n'), ((7359, 7379), 'tonto.card.Card', 'Card', (['"""Spades"""', '"""10"""'], {}), "('Spades', '10')\n", (7363, 7379), False, 'from tonto.card import Card\n')]
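The tests above get reproducible games by seeding a dedicated random.Random instance and injecting it into the deck. A small sketch of that injection pattern with a stand-in Deck class (not the tonto one):

from random import Random

class Deck:
    """Stand-in deck that shuffles with an injected random source."""
    def __init__(self, cards, random_instance=None):
        self.cards = list(cards)
        self._rand = random_instance or Random()

    def shuffle(self):
        self._rand.shuffle(self.cards)

rand = Random()
rand.seed(1)                              # fixed seed -> identical order on every run
deck = Deck(range(10), random_instance=rand)
deck.shuffle()
print(deck.cards)                         # deterministic, so safe to assert on in tests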
from schieber.suit import Suit from schieber.card import Card class Deck: def __init__(self): """ Initializes a deck of cards used for Jassen (from 6 to 10, Jack, Queen, King and Ace; each card in 4 suits) """ self.cards = [] for suit in Suit: self.cards += [Card(suit=suit, value=i) for i in range(6, 15)] def __str__(self): return str([str(card) for card in self.cards])
[ "schieber.card.Card" ]
[((317, 341), 'schieber.card.Card', 'Card', ([], {'suit': 'suit', 'value': 'i'}), '(suit=suit, value=i)\n', (321, 341), False, 'from schieber.card import Card\n')]
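The deck builds its 36 Jass cards by iterating a Suit enum against the rank range 6-14. Since schieber's Suit and Card types aren't shown here, a self-contained approximation with enum.Enum (the suit names are illustrative, not schieber's actual values):

from enum import Enum

class Suit(Enum):
    BELLS = 0
    HEARTS = 1
    ACORNS = 2
    SHIELDS = 3

# Iterating the Enum class yields its members, one per suit
cards = [(suit, value) for suit in Suit for value in range(6, 15)]
print(len(cards))  # 36: nine ranks (6..14) in each of four suits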
"""Perform actions related to building flash executables and directories""" # type annotations from __future__ import annotations # internal libraries from ...core.custom import DictApp # external libraries from cmdkit.app import ApplicationGroup from cmdkit.cli import Interface # commands from . import jobs, port, scaling, simulation COMMANDS: DictApp = { 'jobs': jobs.JobsBuildApp, 'port': port.PortBuildApp, 'scaling': scaling.ScalingBuildApp, 'simulation': simulation.SimulationBuildApp, } PROGRAM = f'flashkit build' USAGE = f"""\ usage: {PROGRAM} [-h] <command> [<args>...] {__doc__}\ """ HELP = f"""\ {USAGE} commands: jobs {jobs.__doc__} port {port.__doc__} scaling {scaling.__doc__} simulation {simulation.__doc__} options: -h, --help Show this message and exit. Use the -h/--help flag with the above commands to learn more about their usage.\ """ class BuildApp(ApplicationGroup): """Application class for build command group.""" interface = Interface(PROGRAM, USAGE, HELP) commands = COMMANDS interface.add_argument('command')
[ "cmdkit.cli.Interface" ]
[((1043, 1074), 'cmdkit.cli.Interface', 'Interface', (['PROGRAM', 'USAGE', 'HELP'], {}), '(PROGRAM, USAGE, HELP)\n', (1052, 1074), False, 'from cmdkit.cli import Interface\n')]
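ApplicationGroup dispatches the first positional argument to one of the registered command apps. Without depending on cmdkit's internals, the same dispatch shape can be sketched with plain argparse (the command name and handler are stand-ins):

import argparse

def build_jobs(args):
    # Stand-in for the real jobs sub-application
    print("building jobs with", args.rest)

COMMANDS = {"jobs": build_jobs}

parser = argparse.ArgumentParser(prog="flashkit build")
parser.add_argument("command", choices=COMMANDS)
parser.add_argument("rest", nargs=argparse.REMAINDER)

args = parser.parse_args(["jobs", "--dry-run"])  # example argv
COMMANDS[args.command](args)                     # look up and run the sub-command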
import zipfile import argparse from threading import Thread import time def extract_zip(zip_file, password): try: with open(password, 'r') as a: for i in a.readlines(): passw = i.strip('\n') time.sleep(0.3) try: with zipfile.ZipFile(zip_file) as myzipfile: print(f'[-] password : { passw }') a = myzipfile.extractall(pwd=passw.encode()) print(f'[+] password found : {passw} \n') break except Exception as e: pass except Exception as e: pass def main(): parser = argparse.ArgumentParser() parser.add_argument('-z', help='zip file path') parser.add_argument('-d', help='password list file path') args = parser.parse_args() zipname = args.z passname = args.d t = Thread(target=extract_zip, args=(zipname, passname)) t.start() if __name__ == "__main__": main()
[ "threading.Thread", "zipfile.ZipFile", "argparse.ArgumentParser", "time.sleep" ]
[((706, 731), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (729, 731), False, 'import argparse\n'), ((930, 982), 'threading.Thread', 'Thread', ([], {'target': 'extract_zip', 'args': '(zipname, passname)'}), '(target=extract_zip, args=(zipname, passname))\n', (936, 982), False, 'from threading import Thread\n'), ((249, 264), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (259, 264), False, 'import time\n'), ((312, 337), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file'], {}), '(zip_file)\n', (327, 337), False, 'import zipfile\n')]
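The cracker's core check is simply "does extraction succeed with this password". Reading a single member is a cheaper way to validate a candidate; a sketch assuming a ZipCrypto-protected archive (the path and password are placeholders):

import zipfile

def check_password(zip_path, password):
    """Return True if the password decrypts the first member of the archive."""
    with zipfile.ZipFile(zip_path) as zf:
        try:
            # Reading one member is enough to validate the candidate
            zf.read(zf.namelist()[0], pwd=password.encode())
            return True
        except (RuntimeError, zipfile.BadZipFile):
            # zipfile signals a wrong ZipCrypto password as RuntimeError
            return False

print(check_password("secret.zip", "hunter2"))  # hypothetical inputs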
"""This exposes the analytics API.""" from http import HTTPStatus import requests from flask import current_app class RedashAPIService: # pylint: disable=too-few-public-methods """This class manages all the Redash analytics service API calls.""" @staticmethod def get_request(url_path, page_no=None, limit=None): """This method makes the GET request to Redash API.""" if page_no is None: url = f"{current_app.config.get('ANALYTICS_API_URL')}/api/{url_path}" else: url = ( f"{current_app.config.get('ANALYTICS_API_URL')}" f"/api/{url_path}?page={page_no}&page_size={limit}" ) analytics_admin_token = current_app.config.get("ANALYTICS_API_KEY") headers = {"Authorization": analytics_admin_token} response = requests.get(url, headers=headers) if response.ok: return response.json() if response.status_code == HTTPStatus.NOT_FOUND: return "unauthorized" return None
[ "flask.current_app.config.get", "requests.get" ]
[((716, 759), 'flask.current_app.config.get', 'current_app.config.get', (['"""ANALYTICS_API_KEY"""'], {}), "('ANALYTICS_API_KEY')\n", (738, 759), False, 'from flask import current_app\n'), ((838, 872), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (850, 872), False, 'import requests\n'), ((442, 485), 'flask.current_app.config.get', 'current_app.config.get', (['"""ANALYTICS_API_URL"""'], {}), "('ANALYTICS_API_URL')\n", (464, 485), False, 'from flask import current_app\n'), ((556, 599), 'flask.current_app.config.get', 'current_app.config.get', (['"""ANALYTICS_API_URL"""'], {}), "('ANALYTICS_API_URL')\n", (578, 599), False, 'from flask import current_app\n')]
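Stripped of the Redash-specific plumbing, the service above is a token-authenticated GET that returns parsed JSON on success. The essential call pattern (the URL and key are placeholders):

import requests

def get_json(url, api_key):
    """GET a JSON resource with a Redash-style Authorization header."""
    response = requests.get(url, headers={"Authorization": api_key})
    # response.ok is True for any status code below 400
    return response.json() if response.ok else None

data = get_json("https://analytics.example.com/api/queries", "Key abc123")  # hypothetical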
""" dariah.topics.utils ~~~~~~~~~~~~~~~~~~~ This module implements helper functions for topic modeling. """ from typing import Generator, List from pathlib import Path import cophi def read_mallet_topics(path: Path, num_words: int) -> Generator[List[str], None, None]: """Read a MALLET topics file. Args: path: Filepath to the topics file. num_words: Number of words for a topic. Yields: A list of tokens, i.e. a topic. """ with path.open("r", encoding="utf-8") as file: for row in file: sequence = row.split("\t")[2] yield list(cophi.text.utils.find_tokens(sequence))[:200]
[ "cophi.text.utils.find_tokens" ]
[((612, 650), 'cophi.text.utils.find_tokens', 'cophi.text.utils.find_tokens', (['sequence'], {}), '(sequence)\n', (640, 650), False, 'import cophi\n')]
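read_mallet_topics assumes each row is tab-separated with the token sequence in the third column, which matches MALLET's usual topic-keys layout (topic id, weight, words). A toy illustration of that parsing, with a plain split() standing in for cophi's tokenizer:

# One row of MALLET's topic keys output, assuming the usual layout:
# topic id <TAB> alpha weight <TAB> space-separated top words
row = "0\t2.5\tprince court king crown throne\n"
topic_id, weight, sequence = row.strip().split("\t")
tokens = sequence.split()   # crude stand-in for cophi's tokenizer
print(tokens[:3])           # ['prince', 'court', 'king'] -- truncated like num_words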
# coding: utf-8 """ Control-M Services Provides access to BMC Control-M Services # noqa: E501 OpenAPI spec version: 9.20.30 Contact: <EMAIL> Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from ctm_saas_client.configuration import Configuration class ZooKeeper(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'zookeeper_id': 'int', 'zookeeper_server_host': 'str', 'zookeeper_admin_server_port': 'int', 'zookeeper_client_port': 'int', 'zookeeper_leader_port': 'int', 'zookeeper_leader_election_port': 'int' } attribute_map = { 'zookeeper_id': 'zookeeperId', 'zookeeper_server_host': 'zookeeperServerHost', 'zookeeper_admin_server_port': 'zookeeperAdminServerPort', 'zookeeper_client_port': 'zookeeperClientPort', 'zookeeper_leader_port': 'zookeeperLeaderPort', 'zookeeper_leader_election_port': 'zookeeperLeaderElectionPort' } def __init__(self, zookeeper_id=None, zookeeper_server_host=None, zookeeper_admin_server_port=None, zookeeper_client_port=None, zookeeper_leader_port=None, zookeeper_leader_election_port=None, _configuration=None): # noqa: E501 """ZooKeeper - a model defined in Swagger""" # noqa: E501 if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._zookeeper_id = None self._zookeeper_server_host = None self._zookeeper_admin_server_port = None self._zookeeper_client_port = None self._zookeeper_leader_port = None self._zookeeper_leader_election_port = None self.discriminator = None if zookeeper_id is not None: self.zookeeper_id = zookeeper_id if zookeeper_server_host is not None: self.zookeeper_server_host = zookeeper_server_host if zookeeper_admin_server_port is not None: self.zookeeper_admin_server_port = zookeeper_admin_server_port if zookeeper_client_port is not None: self.zookeeper_client_port = zookeeper_client_port if zookeeper_leader_port is not None: self.zookeeper_leader_port = zookeeper_leader_port if zookeeper_leader_election_port is not None: self.zookeeper_leader_election_port = zookeeper_leader_election_port @property def zookeeper_id(self): """Gets the zookeeper_id of this ZooKeeper. # noqa: E501 zookeeper Id # noqa: E501 :return: The zookeeper_id of this ZooKeeper. # noqa: E501 :rtype: int """ return self._zookeeper_id @zookeeper_id.setter def zookeeper_id(self, zookeeper_id): """Sets the zookeeper_id of this ZooKeeper. zookeeper Id # noqa: E501 :param zookeeper_id: The zookeeper_id of this ZooKeeper. # noqa: E501 :type: int """ self._zookeeper_id = zookeeper_id @property def zookeeper_server_host(self): """Gets the zookeeper_server_host of this ZooKeeper. # noqa: E501 zookeeper Server Host # noqa: E501 :return: The zookeeper_server_host of this ZooKeeper. # noqa: E501 :rtype: str """ return self._zookeeper_server_host @zookeeper_server_host.setter def zookeeper_server_host(self, zookeeper_server_host): """Sets the zookeeper_server_host of this ZooKeeper. zookeeper Server Host # noqa: E501 :param zookeeper_server_host: The zookeeper_server_host of this ZooKeeper. # noqa: E501 :type: str """ self._zookeeper_server_host = zookeeper_server_host @property def zookeeper_admin_server_port(self): """Gets the zookeeper_admin_server_port of this ZooKeeper. 
# noqa: E501 zookeeper Admin Server Port # noqa: E501 :return: The zookeeper_admin_server_port of this ZooKeeper. # noqa: E501 :rtype: int """ return self._zookeeper_admin_server_port @zookeeper_admin_server_port.setter def zookeeper_admin_server_port(self, zookeeper_admin_server_port): """Sets the zookeeper_admin_server_port of this ZooKeeper. zookeeper Admin Server Port # noqa: E501 :param zookeeper_admin_server_port: The zookeeper_admin_server_port of this ZooKeeper. # noqa: E501 :type: int """ self._zookeeper_admin_server_port = zookeeper_admin_server_port @property def zookeeper_client_port(self): """Gets the zookeeper_client_port of this ZooKeeper. # noqa: E501 zookeeper Client Port # noqa: E501 :return: The zookeeper_client_port of this ZooKeeper. # noqa: E501 :rtype: int """ return self._zookeeper_client_port @zookeeper_client_port.setter def zookeeper_client_port(self, zookeeper_client_port): """Sets the zookeeper_client_port of this ZooKeeper. zookeeper Client Port # noqa: E501 :param zookeeper_client_port: The zookeeper_client_port of this ZooKeeper. # noqa: E501 :type: int """ self._zookeeper_client_port = zookeeper_client_port @property def zookeeper_leader_port(self): """Gets the zookeeper_leader_port of this ZooKeeper. # noqa: E501 zookeeper Leader Port # noqa: E501 :return: The zookeeper_leader_port of this ZooKeeper. # noqa: E501 :rtype: int """ return self._zookeeper_leader_port @zookeeper_leader_port.setter def zookeeper_leader_port(self, zookeeper_leader_port): """Sets the zookeeper_leader_port of this ZooKeeper. zookeeper Leader Port # noqa: E501 :param zookeeper_leader_port: The zookeeper_leader_port of this ZooKeeper. # noqa: E501 :type: int """ self._zookeeper_leader_port = zookeeper_leader_port @property def zookeeper_leader_election_port(self): """Gets the zookeeper_leader_election_port of this ZooKeeper. # noqa: E501 zookeeper Leader Election Port # noqa: E501 :return: The zookeeper_leader_election_port of this ZooKeeper. # noqa: E501 :rtype: int """ return self._zookeeper_leader_election_port @zookeeper_leader_election_port.setter def zookeeper_leader_election_port(self, zookeeper_leader_election_port): """Sets the zookeeper_leader_election_port of this ZooKeeper. zookeeper Leader Election Port # noqa: E501 :param zookeeper_leader_election_port: The zookeeper_leader_election_port of this ZooKeeper. 
# noqa: E501 :type: int """ self._zookeeper_leader_election_port = zookeeper_leader_election_port def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(ZooKeeper, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ZooKeeper): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, ZooKeeper): return True return self.to_dict() != other.to_dict()
[ "ctm_saas_client.configuration.Configuration", "six.iteritems" ]
[((7308, 7341), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (7321, 7341), False, 'import six\n'), ((1780, 1795), 'ctm_saas_client.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1793, 1795), False, 'from ctm_saas_client.configuration import Configuration\n')]
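Nearly all of the generated model above is boilerplate around a swagger_types-driven to_dict(). The reflection trick at its core, isolated into a toy class (the attribute names are invented):

class Model:
    """Toy model mirroring the generated swagger_types/to_dict pattern."""
    swagger_types = {"host": "str", "port": "int"}

    def __init__(self, host=None, port=None):
        self._host = host
        self._port = port

    @property
    def host(self):
        return self._host

    @property
    def port(self):
        return self._port

    def to_dict(self):
        # Walk the declared attribute names rather than __dict__, as the generated code does
        return {attr: getattr(self, attr) for attr in self.swagger_types}

print(Model("zk1", 2181).to_dict())   # {'host': 'zk1', 'port': 2181}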
from mock import MagicMock from baelfire.parrented import parrented class TestParrented(object): @property def parented(self): def method(self): return self return method def test_parrented_method_without_parent(self): """ parrented should return own method if no parent is set. """ obj = MagicMock() obj.parent = None method = parrented(self.parented) assert method(obj) == obj def test_parrented_method_with_parent(self): """ parrented should return parent method if parent is set. """ parent = MagicMock() obj = MagicMock() obj.parent = parent method = parrented(self.parented) assert method(obj) == parent.method.return_value parent.method.assert_called_once_with(obj) def test_parrented_method_property(self): """ parrented should return parent property. """ parent = MagicMock() parent.method = 15 obj = MagicMock() obj.parent = parent method = parrented(self.parented) assert method(obj) == parent.method
[ "mock.MagicMock", "baelfire.parrented.parrented" ]
[((364, 375), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (373, 375), False, 'from mock import MagicMock\n'), ((419, 443), 'baelfire.parrented.parrented', 'parrented', (['self.parented'], {}), '(self.parented)\n', (428, 443), False, 'from baelfire.parrented import parrented\n'), ((635, 646), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (644, 646), False, 'from mock import MagicMock\n'), ((661, 672), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (670, 672), False, 'from mock import MagicMock\n'), ((718, 742), 'baelfire.parrented.parrented', 'parrented', (['self.parented'], {}), '(self.parented)\n', (727, 742), False, 'from baelfire.parrented import parrented\n'), ((989, 1000), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (998, 1000), False, 'from mock import MagicMock\n'), ((1042, 1053), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1051, 1053), False, 'from mock import MagicMock\n'), ((1099, 1123), 'baelfire.parrented.parrented', 'parrented', (['self.parented'], {}), '(self.parented)\n', (1108, 1123), False, 'from baelfire.parrented import parrented\n')]
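The tests pin down parrented's contract: with no parent, call the wrapped method; otherwise defer to the parent's same-named attribute, calling it if it is callable. A reconstruction consistent with those assertions (not baelfire's actual source):

from functools import wraps

def parrented(method):
    @wraps(method)
    def wrapper(self):
        if self.parent is None:
            return method(self)
        parent_attr = getattr(self.parent, method.__name__)
        # Plain values on the parent are returned as-is; methods are called with self
        return parent_attr(self) if callable(parent_attr) else parent_attr
    return wrapper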
# Echo server program import socket import pickle HOST = '' # Symbolic name meaning the local host PORT = 50007 # Arbitrary non-privileged port s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind((HOST, PORT)) s.listen(1) while 1: conn, addr = s.accept() print('Connected by', addr) while 1: data = conn.recv(1024) if not data: break conn.send(data) conn.close()
[ "socket.socket" ]
[((178, 227), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (191, 227), False, 'import socket\n')]
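A client that exercises the echo server above looks like the usual socket counterpart; the host and port simply mirror the server's values, and '127.0.0.1' assumes the server runs locally:

import socket

HOST = '127.0.0.1'    # assumes the echo server runs on this machine
PORT = 50007          # must match the server's port

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.connect((HOST, PORT))
    s.send(b'Hello, world')
    data = s.recv(1024)
print('Received', repr(data))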
#!/usr/bin/env python3 from flask import Flask from views import jobs from utils import Config from jobs_queue import queue def register_blueprints(app): app.register_blueprint(jobs.blueprint, url_prefix="/api/jobs") def make_app(cfg_path): app = Flask("AsyncPage") app.cfg = Config() app.cfg.load_from_yaml(cfg_path) # Assign queue to app app.queue = queue register_blueprints(app) return app def main(): app = make_app("config.yml") app.run(host=app.cfg.get("app.server.host"), port=app.cfg.get("app.server.port"), debug=app.cfg.get("app.server.debug")) if __name__ == "__main__": main()
[ "flask.Flask", "utils.Config" ]
[((260, 278), 'flask.Flask', 'Flask', (['"""AsyncPage"""'], {}), "('AsyncPage')\n", (265, 278), False, 'from flask import Flask\n'), ((293, 301), 'utils.Config', 'Config', ([], {}), '()\n', (299, 301), False, 'from utils import Config\n')]
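make_app expects a views.jobs module exposing a blueprint attribute, which isn't shown here. A hypothetical minimal version of that module (the route and payload are invented):

from flask import Blueprint, jsonify

# Hypothetical stand-in for views/jobs.py; the real module isn't shown above
blueprint = Blueprint("jobs", __name__)

@blueprint.route("/", methods=["GET"])
def list_jobs():
    # Served at /api/jobs/ once register_blueprints applies the url_prefix
    return jsonify(jobs=[])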
import time import RPi.GPIO as GPIO GPIO.setmode(GPIO.BCM) class Led(object): """RGB Led control module.""" color = (0, 0, 0) __gpio_module__ = GPIO def __init__(self, red_pin, green_pin, blue_pin): """Module constructor.""" self.gpio = self.__gpio_module__ self.red_pin = red_pin self.green_pin = green_pin self.blue_pin = blue_pin self.gpio.setup(red_pin, self.gpio.OUT) self.gpio.setup(green_pin, self.gpio.OUT) self.gpio.setup(blue_pin, self.gpio.OUT) self.red = self.gpio.PWM(red_pin, 100) self.green = self.gpio.PWM(green_pin, 100) self.blue = self.gpio.PWM(blue_pin, 100) self.red.start(0) self.green.start(0) self.blue.start(0) def __del__(self): """Cleaning.""" self.red.stop() self.green.stop() self.blue.stop() def set_color(self, color): """Set RGB color. color - tuple: (R, G, B) R, G, B: 0-255 """ self.color = color def on(self): """Turn led on.""" self.red.ChangeDutyCycle(self.prepare_data(self.reverse(self.color[0]))) self.green.ChangeDutyCycle(self.prepare_data(self.reverse(self.color[1]))) self.blue.ChangeDutyCycle(self.prepare_data(self.reverse(self.color[2]))) def off(self): """Turn led off.""" self.red.ChangeDutyCycle(self.reverse(0)) self.green.ChangeDutyCycle(self.reverse(0)) self.blue.ChangeDutyCycle(self.reverse(0)) @staticmethod def reverse(color): """Reverse values for katoda led type.""" return 255 - color @staticmethod def prepare_data(val): """Translate 0-255 value to 0-100.""" return round((100 * val)/255) class Motor(object): """Motor module class.""" __gpio_module__ = GPIO def __init__(self, enable_pin, input1_pin, input2_pin, correction=1.0): """Motor constructor.""" self.gpio = self.__gpio_module__ self.enable_pin = enable_pin self.input1_pin = input1_pin self.input2_pin = input2_pin self.gpio.setup(enable_pin, self.gpio.OUT) self.gpio.setup(input1_pin, self.gpio.OUT) self.gpio.setup(input2_pin, self.gpio.OUT) self.enable = self.gpio.PWM(enable_pin, 100) self.enable.start(0) self.correction = correction def forward(self, speed): """Run motor forward. speed: motor speed 0-100 """ if speed < 0 or speed > 100: raise TypeError("Speed must be between 0 and 100") self.gpio.output(self.input1_pin, 1) self.gpio.output(self.input2_pin, 0) self.enable.ChangeDutyCycle(speed * self.correction) def backward(self, speed=None): """Move motor backward.""" if speed < 0 or speed > 100: raise TypeError("Speed must be between 0 and 100") self.gpio.output(self.input1_pin, 0) self.gpio.output(self.input2_pin, 1) self.enable.ChangeDutyCycle(speed * self.correction) def stop(self): """Stop motor.""" self.enable.ChangeDutyCycle(0) class Button(object): """Button module.""" __gpio_module__ = GPIO time_set_status = None hold_time = 3 def __init__(self, pin): """Button constructor.""" self.gpio = self.__gpio_module__ self.pin = pin self.gpio.setup(pin, self.gpio.IN) self.status = 0 def is_pressed(self): """Check if button is pressed.""" new_status = self.gpio.input(self.pin) if self.status != new_status: self.status = new_status return self.status else: return 0 def is_hold(self): """Check if button is holded by x seconds.""" status = self.gpio.input(self.pin) if status == 1: if not self.time_set_status: self.time_set_status = time.time() if time.time() - self.time_set_status > self.hold_time: self.time_set_status = time.time() return 1 else: self.time_set_status = None return 0
[ "RPi.GPIO.setmode", "time.time" ]
[((37, 59), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (49, 59), True, 'import RPi.GPIO as GPIO\n'), ((3985, 3996), 'time.time', 'time.time', ([], {}), '()\n', (3994, 3996), False, 'import time\n'), ((4104, 4115), 'time.time', 'time.time', ([], {}), '()\n', (4113, 4115), False, 'import time\n'), ((4012, 4023), 'time.time', 'time.time', ([], {}), '()\n', (4021, 4023), False, 'import time\n')]
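Putting the three classes above together would look roughly like this; the module name and BCM pin numbers are placeholders, and the snippet only runs on a Raspberry Pi with RPi.GPIO installed:

# Requires a Raspberry Pi; pin numbers below are placeholders.
from gpio_devices import Led, Motor, Button   # hypothetical name for the module above

led = Led(red_pin=17, green_pin=27, blue_pin=22)
led.set_color((255, 0, 128))    # (R, G, B), each 0-255
led.on()

motor = Motor(enable_pin=18, input1_pin=23, input2_pin=24, correction=0.9)
motor.forward(speed=60)         # speed must be 0-100
motor.stop()

button = Button(pin=25)
if button.is_pressed():
    led.off()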
from matplotlib import pyplot as plt years = [1950, 1960, 1970, 1980, 1990, 2000, 2010] gdp = [300.2, 543.3, 1075.9, 2862.5, 5979.6, 10289.7, 14958.3] # Create a line chart, years on the x axis and gdp on the y axis plt.plot(years, gdp, color='green', marker='o', linestyle='solid') # Add a title plt.title("Nominal GDP") # Add axis labels plt.ylabel("Billions of dollars") plt.show()
[ "matplotlib.pyplot.title", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.show", "matplotlib.pyplot.plot" ]
[((218, 284), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'gdp'], {'color': '"""green"""', 'marker': '"""o"""', 'linestyle': '"""solid"""'}), "(years, gdp, color='green', marker='o', linestyle='solid')\n", (226, 284), True, 'from matplotlib import pyplot as plt\n'), ((305, 329), 'matplotlib.pyplot.title', 'plt.title', (['"""Nominal GDP"""'], {}), "('Nominal GDP')\n", (314, 329), True, 'from matplotlib import pyplot as plt\n'), ((351, 383), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Billions of dollars"""'], {}), "('Billions of dollars')\n", (361, 383), True, 'from matplotlib import pyplot as plt\n'), ((384, 394), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (392, 394), True, 'from matplotlib import pyplot as plt\n')]
from collections import Counter, defaultdict from utils import * from copy import copy with open("input.txt", "r") as file: data = file.read() @test(1588) def part1(data): template, instruction_lines = data.strip().split("\n\n") # data = list(map(int, data)) instructions = {} for line in instruction_lines.strip().split('\n'): join, insert = line.strip().split(' -> ') instructions[join] = join[0] +insert template = template.strip() for _ in range(10): new_template = '' for i, char_a in enumerate(template): char_b = template[i+1] if len(template) > i+1 else None if char_b is None: new_template += char_a continue if char_a+char_b in instructions: new_template += instructions[char_a+char_b] else: new_template += char_a template = new_template count = { char: template.count(char) for char in template } print({a+b: template.count(a+b) for a, b in zip(template, template[1:])}) return count[max(count, key=lambda x: count[x])] - count[min(count, key=lambda x: count[x])] @test(2188189693529) def part2(data): template_str, instruction_lines = data.strip().split("\n\n") # data = list(map(int, data)) instructions = {} for line in instruction_lines.strip().split('\n'): join, insert = line.strip().split(' -> ') instructions[join] = (join[0] + insert, insert + join[1]) template = defaultdict(int) for a, b in zip(template_str.strip(), template_str.strip()[1:]): template[a+b] += 1 template = dict(template) for step in range(40): new_template = defaultdict(int) for key, value in template.items(): if key in instructions: keys = instructions[key] for key in keys: new_template[key] += value else: new_template[key] = value template = dict(new_template) print(template) counts = defaultdict(int) for key, value in template.items(): counts[key[0]] += value counts[template_str.strip()[-1]] += 1 sorts = sorted(counts, key=lambda x: counts[x]) print(sorts[-1], counts[sorts[-1]], sorts[0], counts[sorts[0]]) return counts[sorts[-1]] - counts[sorts[0]] print("Part 1:", "\u001b[36;1m", part1(data), "\u001b[0m") print("Part 2:", "\u001b[36;1m", part2(data), "\u001b[0m")
[ "collections.defaultdict" ]
[((1541, 1557), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1552, 1557), False, 'from collections import Counter, defaultdict\n'), ((2084, 2100), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2095, 2100), False, 'from collections import Counter, defaultdict\n'), ((1734, 1750), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1745, 1750), False, 'from collections import Counter, defaultdict\n')]
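part2 avoids materializing the exponentially growing polymer by tracking pair counts and splitting each pair through its insertion rule. The core update step in isolation (the rule and counts are toy values, not puzzle input):

from collections import Counter

rules = {"AB": "C"}                    # AB -> insert C, producing pairs AC and CB
pairs = Counter({"AB": 2})

step = Counter()
for pair, n in pairs.items():
    if pair in rules:
        mid = rules[pair]
        step[pair[0] + mid] += n       # left half of the split pair
        step[mid + pair[1]] += n       # right half
    else:
        step[pair] += n                # unmatched pairs carry over unchanged
print(step)                            # Counter({'AC': 2, 'CB': 2})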
# coding=utf-8
"""This module, useful_file_operations, simply contains lots of functions for file + directory operations."""
# Needed for copying files.
from shutil import copyfile
# Needed for running regular expressions.
import re
# Needed for system level operations.
import os
# Has super useful file + directory operations.
from pathlib import Path
# Used for advanced IDE typing.
from typing import List
# Used for recursively traversing directories.
import glob
# Needed for calculating the md5_checksum of files.
import hashlib
# Needed for running shell commands.
from libraries.universal_code.system_abstraction import bash_interactive as bash
# Needed for utility debugging calls. Such as termination on error with exception thrown.
from libraries.universal_code import debugging as dbg
# Used for copying files and other operations such as deleting directories.
import shutil
# Needed for compression.
from PIL import Image

# GLOBAL TODO (s):
# Add more safety checks on all functions.
# Create automated QA tests for all these functions!
# Automatically log in DB the results + time-taken for all QA tests!


# ------------------------------------------------ F U N C T I O N S ------------------------------------------------

def _is_valid_path_parameter(path: str) -> bool:
    if path is not None and type(path) is str and path != '':
        return True
    # if os.path.exists(path):
    #     return True
    dbg.raise_exception('The provided path {' + str(path) + '} of type {' + str(type(path)) + '} is not valid!')
    return False


# -------------------------------------------- D I R E C T O R I E S --------------------------------------------

# ------------------------------------------------ O P E R A T I O N S ------------------------------------------------

def directory_op_create(path: str) -> None:
    """Creates the directory at the provided path."""
    os.makedirs(path)


def directory_op_delete(path: str) -> None:
    """Deletes the directory at the provided path."""
    shutil.rmtree(path)


def directory_op_copy(path_source: str, path_destination: str) -> None:
    """Copies the specified directory to the provided path."""
    shutil.copytree(path_source, path_destination)


# --------------------------------------------------- S E T T E R S ---------------------------------------------------

# --------------------------------------------------- G E T T E R S ---------------------------------------------------

def directory_get_does_exist(path: str) -> bool:
    """Returns a boolean indicating if the directory exists or not."""
    if os.path.exists(path):
        return os.path.isdir(path)
    return False


def directory_get_is_directory(path: str) -> bool:
    """Determines if the path provided points to a directory or not."""
    if _is_valid_path_parameter(path):
        return os.path.isdir(path)
    return False


def directory_get_basename(path: str) -> str:
    """Returns the last directory in a path."""
    p = path
    if p.endswith('/'):
        p = p[:-1]
    elif '.' in p:
        p = p[:p.rfind('/')]
    return p[p.rfind('/') + 1:]


def directory_get_all_internal_directory_paths(path: str, recursively=False) -> list:
    """Returns all the directory paths from the directory path provided."""
    directory_paths = []
    for full_path in glob.glob(path + '/**', recursive=recursively):
        # Ignore files, only look at directories.
        if not file_get_is_file(full_path):
            directory_paths.append(full_path)
    if path in directory_paths:
        directory_paths.remove(path)
    return directory_paths


def directory_get_all_internal_file_paths(path: str, recursively=False) -> List[str]:
    """Returns a list of all file paths found inside the provided directory."""
    if _is_valid_path_parameter(path):
        file_paths = []
        for full_path in glob.glob(path + '/**', recursive=recursively):
            if not directory_get_is_directory(full_path):
                file_paths.append(full_path)
        return file_paths
    return []


# --------------------------------------------------- F I L E S ---------------------------------------------------

# ------------------------------------------------ O P E R A T I O N S ------------------------------------------------

def file_op_delete(path: str) -> None:
    """Deletes the file."""
    os.remove(path)


def file_op_copy(path_source: str, path_destination: str) -> None:
    """Copies the source file to the destination."""
    copyfile(path_source, path_destination)


def file_op_convert_image_to_webp(path_source: str, path_destination: str) -> None:
    """Converts the provided PNG or JPG file to a compressed WebP format."""
    is_png = '.png' in path_source
    image = Image.open(path_source)
    # PNG sources may carry transparency, so keep the alpha channel for them.
    if is_png:
        image = image.convert('RGBA')
    image.save(path_destination, 'WEBP', quality=85)


def file_op_convert_png_to_compressed_jpg(path_source: str, path_destination=None) -> None:
    """Generates a compressed JPG file from the provided PNG file."""
    jpg = path_source.replace('.png', '.jpg')
    Image.open(path_source).convert('RGB').save(jpg)
    Image.open(jpg).save(path_destination, quality=85, optimize=True, progressive=False)


def file_op_compress_image(path_source: str, path_destination=None) -> None:
    """Compresses the provided JPG or PNG image."""
    is_png = '.png' in path_source
    image = Image.open(path_source)
    if is_png:
        image.save(path_destination, quality=85, optimize=True, compress_level=9)
    else:
        image.save(path_destination, quality=85, optimize=True, progressive=False)


def file_op_append_files_content(source_path: str, append_file_path: str) -> None:
    """Appends the 'append_file_path' file's content to the 'source_path' file."""
    content = file_get_contents_as_string(append_file_path)
    with open(source_path, 'a') as f:
        f.write(content)


def file_op_replace_text(path: str, text_to_find, text_to_replace_with) -> None:
    """Replaces 'text_to_find' instances with 'text_to_replace_with'."""
    with open(path) as f:
        s = f.read()
    with open(path, 'w') as f:
        s = s.replace(text_to_find, text_to_replace_with)
        f.write(s)


def file_op_create_or_override(path: str, file_text) -> None:
    """Creates the file with the specified file text at the specified file path."""
    raw_text = file_text
    if type(file_text) == list:
        raw_text = ''
        for l in file_text:
            raw_text += l
    with open(path, 'w+') as file_handler:
        file_handler.write(raw_text)


def file_op_replace_line_from_text_match(path: str, text_to_match, line_to_replace_with) -> None:
    """Replaces lines that contain the 'text_to_match' text and replaces those lines with the 'line_to_replace_with'."""
    file_lines = []
    with open(path) as f:
        for l in f:
            file_lines.append(l)
    for i, l in enumerate(file_lines):
        if text_to_match in l:
            file_lines[i] = line_to_replace_with
            if not file_lines[i].endswith('\n'):
                file_lines[i] += '\n'
    with open(path, 'w') as f:
        for l in file_lines:
            f.write(l)


# --------------------------------------------------- S E T T E R S ---------------------------------------------------

# --------------------------------------------------- G E T T E R S ---------------------------------------------------

def file_get_size_in_bytes(path: str) -> int:
    """Return the size of the file in bytes."""
    return int(os.stat(path).st_size)


def file_get_is_file(path: str) -> bool:
    """Determines if the path provided points to a file or not."""
    if _is_valid_path_parameter(path):
        return os.path.isfile(path)
    return False


def file_get_extensions(path: str) -> List[str]:
    """Extracts all the file extensions from the provided path (if any exist, returns [] otherwise)."""
    if _is_valid_path_parameter(path):
        return Path(path).suffixes
    return []


def file_get_basename(path: str) -> str:
    """Extracts the basename of the provided path."""
    # Thanks to stackoverflow for showing how to get_file_basename : https://stackoverflow.com/questions/8384737/extract-file-name-from-path-no-matter-what-the-os-path-format
    basename = re.search(r'[^\\/]+(?=[\\/]?$)', path)
    if basename:
        return basename.group(0)
    return ''


def file_get_contents_as_lines(path: str) -> list:
    """Returns a list of strings containing the file content."""
    lines = []
    with open(path, 'r') as file_handler:
        for line in file_handler:
            lines.append(line)
    return lines


def file_get_contents_as_string(path: str) -> str:
    """Returns the file content as a single string."""
    lines = []
    with open(path, 'r') as file_handler:
        for line in file_handler:
            lines.append(line)
    text = ''
    for l in lines:
        text += l
    return text


def file_get_sha256_checksum(path, block_size=65536):
    """Returns sha256 for a given file."""
    # From : https://gist.github.com/rji/b38c7238128edf53a181
    sha256 = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            sha256.update(block)
    return sha256.hexdigest()


def file_get_md5_checksum(path, block_size=2 ** 20):
    """Returns the MD5 checksum for a given file."""
    # Function source originally from : https://gist.github.com/juusimaa/5846242.
    md5 = hashlib.md5()
    try:
        file = open(path, 'rb')
        while True:
            data = file.read(block_size)
            if not data:
                break
            md5.update(data)
    except IOError:
        print('File \'' + path + '\' not found!')
        return None
    except:
        return None
    return md5.hexdigest()


################################################################################
# Temporary on hold.
'''
# "This module makes available standard errno system symbols. The value of each symbol is the corresponding integer value. The names and descriptions are borrowed from linux/include/errno.h, which should be pretty all-inclusive."
# List of error definitions : https://docs.python.org/3.1/library/errno.html
#import errno


# TODO : Create unit tests (but deleting unit tests will be required as well).
def create_file_if_it_does_not_exist(path: str, list_of_text) -> bool:
    """Creates the file with the provided path and list of strings that makeup the file. Returns a boolean indicating if successful.
    :param path         : The full path of the file.
    :param list_of_text : A list of lines to compose the file of OR a string to be decomposed into a list of strings split by the '\n' character."""
    if not _is_valid_path_parameter(path):
        return False
    # TODO : Add error checking to make sure that list_of_text is either string or a list of strings. But the debugging module needs to be completed first.
    # if
    # Thanks to stackoverflow post : https://stackoverflow.com/questions/10978869/safely-create-a-file-if-and-only-if-it-does-not-exist-with-python
    flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    try:
        file_handler = os.open(path, flags)
    except OSError as e:
        if e.errno == errno.EEXIST:
            # Failed as the file already exists.
            pass
        else:
            # Something unexpected went wrong so re-raise the exception.
            raise
    else:
        # No exception, so the file must have been created successfully.
        with os.fdopen(file_handler, 'w') as file_object:
            # Using 'os.fdopen' converts the handle to an object that acts like a regular Python file object, and the 'with' context manager means the file will be automatically closed when we're done with it.
            for line in list_of_text:
'''

# TODOS:!!!

'''
def get_file_last_extension(path: str) -> str:
    """Extracts the last extension from the provided path (if it exists, returns '' otherwise)."""
    if _is_valid_path_parameter(path):
        return Path(path).suffix
    return ''


def download_file_from_url_to_path(url, save_path):
    """This will download a file from the provided URL into the provided save path.
    :param url       : The URL to download the file from.
    :param save_path : The location to place the file.
    :return: Void."""
    make_any_missing_directories(save_path)
    urllib.request.urlretrieve(url, save_path)


def get_file_as_binary_data(file_path):
    """Returns the provided file as binary data.
    :param file_path : The full path to the file to get binary data from.
    :return: The raw binary of the provided file."""
    return open(file_path, 'rb')


def does_file_exist(file_path):
    """Checks if the file exists at the provided file path.
    :param file_path : The provided file path to check for a file for.
    :return : Boolean indicating if a file exists at the provided file path or not."""
    if os.path.exists(file_path):
        return os.path.isfile(file_path)
    return False


def create_text_file(file_path, content):
    file = open(file_path, 'w')
    for line in content.split('\n'):
        file.write(line + '\n')
    file.close()


def create_csv_file(file_path, content):
    """Creates the CSV file."""
    lines = []
    if type(content) == str:
        lines = content.split('\n')
        content = lines
    with open(file_path, 'w+') as file_handler:
        for l in content:
            file_handler.write(l + '\n')


# DIRECTORY STUFF BELOW!

def get_all_directory_paths_from_directory(directory_path):
    """Returns all the directory paths from the directory path provided.
    :param directory_path : The directory path to get all the directory paths from.
    :return : A list of strings that each map to a full directory path for all directories in this directory."""
    directory_paths = []
    for full_path in glob.glob(directory_path + '/**', recursive=True):
        # Ignore files, only look at directories.
        if not is_file(full_path):
            directory_paths.append(full_path)
    return directory_paths


def get_all_sub_directory_paths_from_directory(directory_path):
    """Returns all the directory paths from the directory path provided.
    :param directory_path : The directory path to get all the directory paths from.
    :return : A list of strings that each map to a full directory path for all directories in this directory."""
    directory_paths = []
    for full_path in glob.glob(directory_path + '/**', recursive=False):
        # Ignore files, only look at directories.
        if not is_file(full_path):
            directory_paths.append(full_path)
    return directory_paths


# Inspiration/source for this function : https://stackoverflow.com/questions/4187564/recursive-dircmp-compare-two-directories-to-ensure-they-have-the-same-files-and
def are_two_directories_the_same(directory_path_0, directory_path_1):
    """Compares two directories for equality. Will compare file contents.
    :param directory_path_0 : The first directory to compare against.
    :param directory_path_1 : The second directory to compare against.
    :return: Boolean indicating if the two directories are the same or not."""
    compared = filecmp.dircmp(directory_path_0, directory_path_1)
    if compared.left_only or compared.right_only or compared.diff_files or compared.funny_files:
        return False
    for subdir in compared.common_dirs:
        if not are_two_directories_the_same(os.path.join(directory_path_0, subdir), os.path.join(directory_path_1, subdir)):
            return False
    return True


def delete_all_files_in_directory(directory_path):
    """Deletes all the files located in this directory. Including those in sub-directories."""
    all_files = get_all_file_paths_from_directory(directory_path)
    for f in all_files:
        delete_file(f)


def get_all_non_empty_directory_paths_from_directory(directory_path):
    """Returns all the directory paths that contain at least one entity from the directory path provided.
    :param directory_path : The root directory path to get all directory paths from.
    :return: A list of strings that each map to a directory path (for a non-empty directory) for all directories in this directory."""
    directory_paths = []
    for full_path in glob.glob(directory_path + '/**', recursive=True):
        # Ignore files, only look at directories.
        if not is_file(full_path):
            # Ignore empty directories.
            if len(os.listdir(full_path)) > 0:
                directory_paths.append(full_path)
    return directory_paths


def make_any_missing_directories(path):
    """Make any directories that do not exist for the provided file path.
    :param path : The path to ensure that all needed parent directories exist."""
    if '.' in path:
        ending = path.replace(os.path.dirname(path), '')
        if '.' in ending:
            pathlib.Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
    else:
        pathlib.Path(path).mkdir(parents=True, exist_ok=True)


def get_easy_to_read_size_of_directory(directory):
    """Returns the size of the directory converted to the most logical unit.
    :return: A string representation of the size of the provided directory."""
    return str(humanize.naturalsize(sum(os.path.getsize(x) for x in iglob(directory + '/**'))))


def get_raw_size_as_bytes_of_directory(directory):
    """Returns the "RAWWWWWWWW"-(Gordan Ramsey) size of the directory.
    :return: A string representation of the size of the provided directory."""
    return str(sum(os.path.getsize(x) for x in iglob(directory + '/**')))


# ZIP stuff
def get_all_file_names_in_zip_directory(zip_file_path) -> List[str]:
    """Returns a list of all the file names inside of a zipped directory (without unzipping the directory)."""
    file_names = []
    output = bash_runner.run_bash_command_and_get_output(['unzip', '-l', zip_file_path])
    zip_file_name = get_file_basename(zip_file_path).replace('.zip', '')
    for l in output.split('\n'):
        if zip_file_name in l:
            sections = l.split()
            if len(sections) == 4:
                if sections[0] != '0' and 'Thumbs.db' not in sections[3]:
                    file_names.append(sections[3].replace(zip_file_name + '/', ''))
    return file_names
'''
[ "os.remove", "hashlib.md5", "os.makedirs", "shutil.copytree", "os.stat", "os.path.isdir", "os.path.exists", "PIL.Image.open", "hashlib.sha256", "os.path.isfile", "pathlib.Path", "glob.glob", "shutil.copyfile", "shutil.rmtree", "re.search" ]
[((2082, 2099), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2093, 2099), False, 'import os\n'), ((2198, 2217), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2211, 2217), False, 'import shutil\n'), ((2353, 2399), 'shutil.copytree', 'shutil.copytree', (['path_source', 'path_destination'], {}), '(path_source, path_destination)\n', (2368, 2399), False, 'import shutil\n'), ((2766, 2786), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2780, 2786), False, 'import os\n'), ((3438, 3484), 'glob.glob', 'glob.glob', (["(path + '/**')"], {'recursive': 'recursively'}), "(path + '/**', recursive=recursively)\n", (3447, 3484), False, 'import glob\n'), ((4446, 4461), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (4455, 4461), False, 'import os\n'), ((4582, 4621), 'shutil.copyfile', 'copyfile', (['path_source', 'path_destination'], {}), '(path_source, path_destination)\n', (4590, 4621), False, 'from shutil import copyfile\n'), ((5325, 5348), 'PIL.Image.open', 'Image.open', (['path_source'], {}), '(path_source)\n', (5335, 5348), False, 'from PIL import Image\n'), ((7997, 8038), 're.search', 're.search', (['"""[^\\\\\\\\/]+(?=[\\\\\\\\/]?$)"""', 'path'], {}), "('[^\\\\\\\\/]+(?=[\\\\\\\\/]?$)', path)\n", (8006, 8038), False, 'import re\n'), ((8752, 8768), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (8766, 8768), False, 'import hashlib\n'), ((9089, 9102), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (9100, 9102), False, 'import hashlib\n'), ((2797, 2816), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2810, 2816), False, 'import os\n'), ((2998, 3017), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (3011, 3017), False, 'import os\n'), ((3927, 3973), 'glob.glob', 'glob.glob', (["(path + '/**')"], {'recursive': 'recursively'}), "(path + '/**', recursive=recursively)\n", (3936, 3973), False, 'import glob\n'), ((7456, 7476), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (7470, 7476), False, 'import os\n'), ((5071, 5086), 'PIL.Image.open', 'Image.open', (['jpg'], {}), '(jpg)\n', (5081, 5086), False, 'from PIL import Image\n'), ((7281, 7294), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (7288, 7294), False, 'import os\n'), ((7688, 7698), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (7692, 7698), False, 'from pathlib import Path\n'), ((5021, 5044), 'PIL.Image.open', 'Image.open', (['path_source'], {}), '(path_source)\n', (5031, 5044), False, 'from PIL import Image\n')]
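A brief usage sketch for the module above, assuming it is importable in the current scope; the file name and contents are illustrative:

# Hypothetical round-trip through a few of the helpers above.
file_op_create_or_override('example.txt', 'hello world\n')
print(file_get_size_in_bytes('example.txt'))   # 12
print(file_get_md5_checksum('example.txt'))    # hex digest string
print(file_get_extensions('example.txt'))      # ['.txt']
file_op_delete('example.txt')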
import ipywidgets as widgets
import cartopy.crs as ccrs
import geoviews as gv
import holoviews as hv
import numpy as np
import panel as pn
import param
from shapely.geometry import Polygon as sPolygon, LineString as sLineString

from .interface import EDRInterface
from .lookup import CRS_LOOKUP


class EDRExplorer(param.Parameterized):
    """
    A `Panel` dashboard from which you can explore the data presented by an EDR Server.
    """

    # Metadata widgets.
    coll_uri = widgets.Text(placeholder='Specify an EDR Server...', description='Server')
    coll = widgets.Dropdown(options=[], description='Collections', disabled=True)
    locations = widgets.Dropdown(options=[], description='Locations', disabled=True)
    datasets = widgets.SelectMultiple(options=[], description="Datasets", disabled=True)
    start_time = widgets.Dropdown(options=[], description='Start Date', disabled=True)
    end_time = widgets.Dropdown(options=[], description='End Date', disabled=True)
    start_z = widgets.Dropdown(options=[], description='Z Lower', disabled=True)
    end_z = widgets.Dropdown(options=[], description='Z Upper', disabled=True)

    # Error display widgets.
    connect_error_box = widgets.HTML("", layout=widgets.Layout(display="none"))
    data_error_box = widgets.HTML("", layout=widgets.Layout(display="none"))

    # Plot control widgets.
    pc_times = widgets.SelectionSlider(options=[""], description="Timestep", disabled=True)
    pc_zs = widgets.SelectionSlider(options=[""], description="Z Level", disabled=True)
    pc_params = widgets.Dropdown(options=[], description="Parameter", disabled=True)
    use_colours = pn.widgets.Checkbox(name="Use supplied colours", disabled=True)
    use_levels = pn.widgets.Checkbox(name="Use supplied levels", disabled=True)

    # Parameters for triggering plot updates.
    _data_key = param.String("")
    _colours = param.Boolean(use_colours.value)
    _levels = param.Boolean(use_levels.value)
    cmap = param.String("viridis")
    alpha = param.Magnitude(0.85)

    # Buttons.
    connect_button = widgets.Button(description="Connect")
    submit_button = widgets.Button(description="Submit", disabled=True)
    dataset_button = widgets.Button(
        description="Get Dataset", disabled=True, layout=widgets.Layout(top="-0.5rem")
    )

    # Lists and boxes aggregating multiple widgets.
    wlist = [coll, locations, datasets, start_time, end_time, start_z, end_z]  # Metadata widgets.
    pwlist = [pc_times, pc_zs, pc_params]  # Plot widgets.
    pchecklist = [use_colours, use_levels]
    wbox = widgets.VBox(wlist)
    pwbox = pn.Row(pn.Column(*pwlist[:2]), pwlist[-1], pn.Column(*pchecklist))

    # Map projection code
    web_mercator_epsg = "EPSG:3857"

    def __init__(self, server_address=None):
        """
        Set up a new `Panel` dashboard to use to explore the data presented by an EDR Server.

        This constructs an instance of `.interface.EDRInterface` to submit requests to the
        EDR Server on the dashboard's behalf and displays results from these requests in
        the dashboard.

        Optionally pass the hostname of an EDR server via `server_address`. If specified,
        this value will pre-populate the `Server` field of the interface.
        """
        self.server_address = server_address
        if self.server_address is not None:
            self.coll_uri.value = self.server_address

        super().__init__()

        # Class properties.
        self._edr_interface = None
        self._dataset = None
        self._no_t = "No t values in collection"
        self._no_z = "No z values in collection"

        # Plot.
        self.plot = gv.DynamicMap(self.make_plot)

        # Button click bindings.
        self.connect_button.on_click(self._load_collections)
        self.submit_button.on_click(self._request_plot_data)
        self.dataset_button.on_click(self._get_dataset)

        # Watches on widgets.
        self.coll.observe(self._populate_contents_callback, names='value')
        self.start_time.observe(self._filter_end_time, names='value')
        self.start_z.observe(self._filter_end_z, names='value')
        self.pc_times.observe(self._plot_change, names='value')
        self.pc_zs.observe(self._plot_change, names='value')
        self.pc_params.observe(self._plot_change, names='value')
        self.use_colours.param.watch(self._checkbox_change, "value", onlychanged=True)
        self.use_levels.param.watch(self._checkbox_change, "value", onlychanged=True)

        # Items for geometry-based queries.
        self._area_poly = None
        self._corridor_path = None
        self._area_stream = None
        self._corridor_stream = None
        self._query_tools()

    @property
    def edr_interface(self):
        """The instance of `.interface.EDRInterface` used to handle requests to the EDR Server."""
        return self._edr_interface

    @edr_interface.setter
    def edr_interface(self, value):
        """Set the instance of `.interface.EDRInterface` used to handle requests to the EDR Server."""
        self._edr_interface = value

    @property
    def dataset(self):
        """
        A well-known Python data object containing all the data represented by the
        current state of select widgets on the dashboard.
        """
        return self._dataset

    @dataset.setter
    def dataset(self, value):
        self._dataset = value

    @property
    def layout(self):
        """
        Construct a layout of `Panel` objects to produce the EDR explorer dashboard.

        To view the dashboard:
            explorer = EDRExplorer()
            explorer.layout

        The layout is composed of two main elements:
          * a set of selector widgets in a column on the left that define the values
            passed in queries to the EDR Server via the `.interface.EDRInterface` instance
          * a plot on the right that displays graphical results from queries submitted
            to the EDR Server via the `.interface.EDRInterface` instance

        There are some extra elements too:
          * the widgets column on the left contains three buttons:
            * one to connect to the server at the URI specified in the `Server` text
              field widget,
            * one to submit a query to the EDR Server via the `.interface.EDRInterface`
              instance based on the values set in the selector widgets, and
            * one to request and return to the user all the data referenced by the
              current state of the dashboard's select widgets as a well-known Python
              data object (such as an Iris cube).
          * the widgets column on the left also contains two fields for displaying
            error messages when connecting to or retrieving data from the EDR Server.
            These are hidden by default and are made visible when there is a relevant
            error message to display. Once the error has been resolved the field will
            become hidden again.
          * the plot area on the right contains two plot control widgets to select
            specific data from queries submitted to the EDR Server to show on the plot.
          * the plot area on the right also contains two checkboxes to select whether
            or not to show data on the plot rendered using colours and levels supplied
            in the query response.
        """
        connect_row = pn.Row(
            pn.Column(self.coll_uri, self.connect_error_box),
            self.connect_button
        )
        control_widgets = pn.Column(self.wbox, self.data_error_box)
        buttons = pn.Column(self.submit_button, self.dataset_button)
        control_row = pn.Row(control_widgets, buttons, align=("end", "start"))
        control_col = pn.Column(connect_row, control_row)
        tiles = gv.tile_sources.Wikipedia.opts(width=800, height=600)
        plot = tiles * self.plot
        plot_col = pn.Column(plot, self.pwbox)
        return pn.Row(control_col, plot_col).servable()

    def _populate_error_box(self, error_box_ref, errors):
        error_box = getattr(self, error_box_ref)
        good_layout = widgets.Layout(
            display="none",
            visibility="hidden",
            border="none",
        )
        bad_layout = widgets.Layout(
            border="2px solid #dc3545",
            padding="0.05rem 0.5rem",
            margin="0 0.25rem 0 5.625rem",
            width="70%",
            overflow="auto",
            display="flex",
        )
        error_box.value = errors
        error_box.layout = good_layout if errors == "" else bad_layout

    def _load_collections(self, event):
        """
        Callback when the `connect` button is clicked.

        Set up the EDR interface instance and connect to the server's collections.
        """
        self._clear_controls()
        server_loc = self.coll_uri.value
        self.edr_interface = EDRInterface(server_loc)

        error_box = "connect_error_box"
        if self.edr_interface.errors is None:
            # Independent check to see if we can clear the error box.
            self._populate_error_box(error_box, "")
        if self.edr_interface.json is not None and self.edr_interface.errors is None:
            # The only state in which the controls can be populated and enabled.
            self.coll.options = [(ct, cid) for (cid, ct) in zip(self.edr_interface.collection_ids, self.edr_interface.collection_titles)]
            self.coll.value = self.edr_interface.collection_ids[0]
            self._enable_controls()
        elif self.edr_interface.errors is not None:
            # We have known errors to show.
            self._populate_error_box(error_box, self.edr_interface.errors)
        else:
            # Something else has gone wrong, which we need to show.
            self._populate_error_box(error_box, "UnspecifiedError")

    def _enable_controls(self):
        """Enable query control widgets in the left column."""
        for widget in self.wlist:
            widget.disabled = False
        self.submit_button.disabled = False

    def _clear_controls(self):
        """Clear state of all control and error display widgets and disable them."""
        for widget in self.wlist + self.pwlist:
            widget.disabled = True
            if isinstance(widget, widgets.SelectMultiple):
                widget.options = ("",)
                widget.value = ("",)
            elif isinstance(widget, widgets.SelectionSlider):
                widget.options = ("",)
                widget.value = ""
            else:
                widget.options = []
                widget.value = None
        for box in self.pchecklist:
            box.value = False
            box.disabled = True
        self.submit_button.disabled = True
        self.dataset_button.disabled = True
        self._populate_error_box("connect_error_box", "")
        self._populate_error_box("data_error_box", "")

    def _check_enable_checkboxes(self):
        """
        Check if we can enable the checkboxes to specify the plot should use colours
        and levels specified in the data JSON. This is only possible if this
        information is present in the data JSON.
        """
        box_disabled = self.edr_interface.data_handler.get_colours(self.pc_params.value) is None
        for box in self.pchecklist:
            box.disabled = box_disabled

    def _checkbox_change(self, event):
        """
        Bind a change in a checkbox to the relevant param object to trigger a plot update.
        """
        name = event.obj.name
        if "colour" in name:
            self._colours = event.new
        elif "level" in name:
            self._levels = event.new

    def _enable_plot_controls(self):
        """Enable plot control widgets for updating the specific data shown on the plot."""
        for widget in self.pwlist:
            widget.disabled = False
        self.dataset_button.disabled = False
        self._check_enable_checkboxes()

    def _populate_contents_callback(self, change):
        """
        Populate the options and values attributes of all the left column query control
        widgets when a collection provided by the EDR Server is specified.
        """
        collection_id = change["new"]
        if collection_id is not None:
            # Parameters and locations.
            self._populate_params(collection_id)
            locs = self.edr_interface.get_locations(collection_id)
            self.locations.options = locs

            # Times.
            if self.edr_interface.has_temporal_extent(collection_id):
                times = self.edr_interface.get_temporal_extent(collection_id)
            else:
                times = [self._no_t]
            self.start_time.options = times
            self.end_time.options = times

            # Vertical levels.
            if self.edr_interface.has_vertical_extent(collection_id):
                zs = self.edr_interface.get_vertical_extent(collection_id)
            else:
                zs = [self._no_z]
            self.start_z.options = zs
            self.end_z.options = zs

    def _populate_params(self, collection_id):
        """
        Populate the `Datasets` widget with a descriptive list (names and units) of
        the parameters provided by the selected collection.
        """
        params_dict = self.edr_interface.get_collection_parameters(collection_id)
        options = []
        for k, v in params_dict.items():
            choice = f'{v["label"].replace("_", " ").title()} ({v["units"]})'
            options.append((choice, k))
        self.datasets.options = options

    def _filter_end_time(self, change):
        """
        Only show end datetimes in the `End Date` widget that are later than the value
        selected in the `Start Date` widget.
        """
        start_time_selected = change["new"]
        if start_time_selected is not None:
            # Avoid errors when clearing widget state.
            times = self.start_time.options
            sel_idx = times.index(start_time_selected)
            self.end_time.options = times[sel_idx:]

    def _filter_end_z(self, change):
        """
        Only show end vertical values in the `End Z` widget that are greater than the
        value selected in the `Start Z` widget.
        """
        start_z_selected = change["new"]
        if start_z_selected is not None:
            # Avoid errors when clearing widget state.
            zs = self.start_z.options
            sel_idx = zs.index(start_z_selected)
            self.end_z.options = zs[sel_idx:]

    def _get_dataset(self, _):
        """
        Callback when the `get dataset` button is clicked.

        Request from the EDR Server all data represented by the current states of the
        select widgets and provide this data as a well-known Python data object
        (such as an Iris Cube).
        """
        # XXX somewhere we should check if the server supports `Cube` queries,
        #     and preferentially use that if available.
        from .dataset import make_dataset
        collection_id = self.coll.value
        params = self.edr_interface.get_collection_parameters(collection_id)
        keys = self.datasets.value
        names_dict = {k: v["label"] for k, v in params.items() if k in keys}
        dataset = make_dataset(self.edr_interface.data_handler, names_dict)
        self.dataset = dataset

    def _geometry_stream_data(self, query_name):
        """
        Return the data attribute of the holoviews stream referenced by `query_name`.
        """
        ref = f"_{query_name}_stream"
        geom_stream = getattr(self, ref)
        return geom_stream.data

    def _geometry_query_is_defined(self, query_name):
        """
        Determine whether a geometry specified by `query_name` has been defined.

        We determine this by checking if all the values in its x and y coords are 0 -
        if they are, we assume it's in its default state and thus undefined.
        """
        data = self._geometry_stream_data(query_name)
        return all(data["xs"][0]) and all(data["ys"][0])

    def _hv_stream_to_wkt(self, query_name):
        """
        Convert the data points in the geometry specified by `query_name` to the
        appropriate Shapely geometry, and return the WKT string representation of
        the geometry.
        """
        constructor = sPolygon if query_name == "area" else sLineString
        data = self._geometry_stream_data(query_name)
        xpoints, ypoints = np.array(data["xs"][0]), np.array(data["ys"][0])
        wgs84_points = ccrs.PlateCarree().transform_points(
            ccrs.Mercator(), xpoints, ypoints
        )
        result = None
        errors = None
        try:
            geom = constructor(wgs84_points)
        except ValueError:
            errors = f"Invalid {query_name!r} geometry provided"
        else:
            result = geom.wkt
        return result, errors

    def _request_plot_data(self, _):
        """
        Callback when the `submit` button is clicked.

        This makes a get data request to the EDR Server via the
        `.interface.EDRInterface` instance.
        """
        # Get selection widgets state for request.
        coll_id = self.coll.value
        param_names = self.datasets.value
        locations = self.locations.value
        start_date = self.start_time.value
        end_date = self.end_time.value
        start_z = self.start_z.value
        end_z = self.end_z.value

        # Define common query parameters.
        query_params = {"crs": "EPSG:4326"}
        if start_date != self._no_t:
            query_params["datetime"] = "/".join([start_date, end_date])
        if start_z != self._no_z:
            query_params["z"] = [start_z, end_z]

        # Set query type.
        query_type = None
        errors = None
        query_types = ["area", "corridor"]
        for qtype in query_types:
            if self._geometry_query_is_defined(qtype):
                print(f"Query type: {qtype}")
                query_type = qtype
                coords, errors = self._hv_stream_to_wkt(query_type)
                if coords is not None:
                    query_params["coords"] = coords
        if query_type is None:
            query_type = "locations"
            query_params["loc_id"] = locations

        # Request dataset.
        self.edr_interface.query(coll_id, query_type, param_names, **query_params)

        # Collect coords and query errors, if present.
        all_errors = []
        if errors is not None:
            all_errors.append(errors)
        if self.edr_interface.errors is not None:
            all_errors.append(self.edr_interface.errors)
        if len(all_errors):
            self.edr_interface.errors = "\n".join(all_errors)

        error_box = "data_error_box"
        if self.edr_interface.errors is None:
            # Independent check to see if we can clear the error box.
            self._populate_error_box(error_box, "")
        if self.edr_interface.data_handler is not None and self.edr_interface.errors is None:
            # Generate and enable the plot controls.
            if self.edr_interface.has_temporal_extent(coll_id):
                plot_control_times = list(self.edr_interface.data_handler.coords["t"])
            else:
                plot_control_times = [self._no_t]
            self.pc_times.options = plot_control_times
            self.pc_times.value = plot_control_times[0]
            if self.edr_interface.has_vertical_extent(coll_id):
                plot_control_zs = list(self.edr_interface.data_handler.coords["z"])
            else:
                plot_control_zs = [self._no_z]
            self.pc_zs.options = plot_control_zs
            self.pc_zs.value = plot_control_zs[0]
            plot_control_params = list(param_names)
            self.pc_params.options = list(filter(lambda o: o[1] in plot_control_params, self.datasets.options))
            self.pc_params.value = plot_control_params[0]
            self._enable_plot_controls()
        elif self.edr_interface.errors is not None:
            self._populate_error_box(error_box, self.edr_interface.errors)
        else:
            self._populate_error_box(error_box, "Uncaught error (data retrieval)")

    def _plot_change(self, _):
        """
        Helper function to capture changes from either plot control widget and trigger
        an update of the plot.
        """
        param = self.pc_params.value
        t = self.pc_times.value
        z = self.pc_zs.value
        can_request_data = False
        self._check_enable_checkboxes()
        value_dict = {}
        if t not in (None, "", self._no_t):
            value_dict.update({"t": t})
            can_request_data = True
        if z not in (None, "", self._no_z):
            value_dict.update({"z": z})
            can_request_data = True
        if param is not None and can_request_data:
            self._data_key = self.edr_interface.data_handler.make_key(param, value_dict)

    def _query_tools(self):
        self._area_poly = hv.Polygons(
            [[(0, 0), (0, 0)]]
        ).opts(
            line_color="gray",
            line_width=1.5,
            line_alpha=0.75,
            fill_color="gray",
            fill_alpha=0.3,
        )
        self._corridor_path = hv.Path(
            [[(0, 0), (0, 0)]]
        ).opts(
            color="gray",
            line_width=2,
            line_alpha=0.75,
        )
        self._area_stream = hv.streams.PolyDraw(
            source=self._area_poly, num_objects=1, tooltip="Area Query Tool"
        )
        self._corridor_stream = hv.streams.PolyDraw(
            source=self._corridor_path, num_objects=1, tooltip="Corridor Query Tool"
        )

    @param.depends('_data_key', '_colours', '_levels', 'cmap', 'alpha')
    def make_plot(self):
        """Show data from a data request to the EDR Server on the plot."""
        showable = gv.Image(
            ([-8, -1], [53, 58], [[0, 0], [0, 0]]),  # Approximate UK extent.
            crs=CRS_LOOKUP["WGS_1984"],
        ).opts(alpha=0.0)
        if self._data_key != "":
            dataset = self.edr_interface.data_handler[self._data_key]
            opts = {"cmap": self.cmap, "alpha": self.alpha, "colorbar": True}
            colours = self.edr_interface.data_handler.get_colours(self.pc_params.value)
            if colours is not None:
                opts.update({"clim": (colours["vmin"], colours["vmax"])})
                if self.use_colours.value:
                    opts["cmap"] = colours["colours"]
                if self.use_levels.value:
                    opts["color_levels"] = colours["values"]
            error_box = "data_error_box"
            if self.edr_interface.data_handler.errors is None:
                # Independent check to see if we can clear the data error box.
                self._populate_error_box(error_box, "")
            if dataset is not None and self.edr_interface.data_handler.errors is None:
                showable = dataset.to(gv.Image, ['longitude', 'latitude']).opts(**opts)
            elif self.edr_interface.data_handler.errors is not None:
                self._populate_error_box(
                    error_box, self.edr_interface.data_handler.errors
                )
            else:
                self._populate_error_box(
                    error_box, "Unspecified error (plotting)"
                )
        return showable * self._area_poly * self._corridor_path
[ "param.Magnitude", "cartopy.crs.Mercator", "holoviews.Polygons", "geoviews.Image", "ipywidgets.Text", "geoviews.tile_sources.Wikipedia.opts", "panel.widgets.Checkbox", "panel.Row", "param.String", "ipywidgets.SelectionSlider", "ipywidgets.Button", "geoviews.DynamicMap", "param.Boolean", "ipywidgets.Dropdown", "ipywidgets.SelectMultiple", "panel.Column", "ipywidgets.Layout", "holoviews.Path", "ipywidgets.VBox", "param.depends", "numpy.array", "cartopy.crs.PlateCarree", "holoviews.streams.PolyDraw" ]
[((483, 557), 'ipywidgets.Text', 'widgets.Text', ([], {'placeholder': '"""Specify an EDR Server..."""', 'description': '"""Server"""'}), "(placeholder='Specify an EDR Server...', description='Server')\n", (495, 557), True, 'import ipywidgets as widgets\n'), ((569, 639), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Collections"""', 'disabled': '(True)'}), "(options=[], description='Collections', disabled=True)\n", (585, 639), True, 'import ipywidgets as widgets\n'), ((656, 724), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Locations"""', 'disabled': '(True)'}), "(options=[], description='Locations', disabled=True)\n", (672, 724), True, 'import ipywidgets as widgets\n'), ((740, 813), 'ipywidgets.SelectMultiple', 'widgets.SelectMultiple', ([], {'options': '[]', 'description': '"""Datasets"""', 'disabled': '(True)'}), "(options=[], description='Datasets', disabled=True)\n", (762, 813), True, 'import ipywidgets as widgets\n'), ((831, 900), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Start Date"""', 'disabled': '(True)'}), "(options=[], description='Start Date', disabled=True)\n", (847, 900), True, 'import ipywidgets as widgets\n'), ((916, 983), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""End Date"""', 'disabled': '(True)'}), "(options=[], description='End Date', disabled=True)\n", (932, 983), True, 'import ipywidgets as widgets\n'), ((998, 1064), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Z Lower"""', 'disabled': '(True)'}), "(options=[], description='Z Lower', disabled=True)\n", (1014, 1064), True, 'import ipywidgets as widgets\n'), ((1077, 1143), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Z Upper"""', 'disabled': '(True)'}), "(options=[], description='Z Upper', disabled=True)\n", (1093, 1143), True, 'import ipywidgets as widgets\n'), ((1375, 1451), 'ipywidgets.SelectionSlider', 'widgets.SelectionSlider', ([], {'options': "['']", 'description': '"""Timestep"""', 'disabled': '(True)'}), "(options=[''], description='Timestep', disabled=True)\n", (1398, 1451), True, 'import ipywidgets as widgets\n'), ((1464, 1539), 'ipywidgets.SelectionSlider', 'widgets.SelectionSlider', ([], {'options': "['']", 'description': '"""Z Level"""', 'disabled': '(True)'}), "(options=[''], description='Z Level', disabled=True)\n", (1487, 1539), True, 'import ipywidgets as widgets\n'), ((1556, 1624), 'ipywidgets.Dropdown', 'widgets.Dropdown', ([], {'options': '[]', 'description': '"""Parameter"""', 'disabled': '(True)'}), "(options=[], description='Parameter', disabled=True)\n", (1572, 1624), True, 'import ipywidgets as widgets\n'), ((1643, 1706), 'panel.widgets.Checkbox', 'pn.widgets.Checkbox', ([], {'name': '"""Use supplied colours"""', 'disabled': '(True)'}), "(name='Use supplied colours', disabled=True)\n", (1662, 1706), True, 'import panel as pn\n'), ((1724, 1786), 'panel.widgets.Checkbox', 'pn.widgets.Checkbox', ([], {'name': '"""Use supplied levels"""', 'disabled': '(True)'}), "(name='Use supplied levels', disabled=True)\n", (1743, 1786), True, 'import panel as pn\n'), ((1850, 1866), 'param.String', 'param.String', (['""""""'], {}), "('')\n", (1862, 1866), False, 'import param\n'), ((1882, 1914), 'param.Boolean', 'param.Boolean', (['use_colours.value'], {}), '(use_colours.value)\n', (1895, 1914), False, 'import param\n'), ((1929, 1960), 'param.Boolean', 'param.Boolean', 
(['use_levels.value'], {}), '(use_levels.value)\n', (1942, 1960), False, 'import param\n'), ((1972, 1995), 'param.String', 'param.String', (['"""viridis"""'], {}), "('viridis')\n", (1984, 1995), False, 'import param\n'), ((2008, 2029), 'param.Magnitude', 'param.Magnitude', (['(0.85)'], {}), '(0.85)\n', (2023, 2029), False, 'import param\n'), ((2067, 2104), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Connect"""'}), "(description='Connect')\n", (2081, 2104), True, 'import ipywidgets as widgets\n'), ((2125, 2176), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Submit"""', 'disabled': '(True)'}), "(description='Submit', disabled=True)\n", (2139, 2176), True, 'import ipywidgets as widgets\n'), ((2588, 2607), 'ipywidgets.VBox', 'widgets.VBox', (['wlist'], {}), '(wlist)\n', (2600, 2607), True, 'import ipywidgets as widgets\n'), ((21809, 21875), 'param.depends', 'param.depends', (['"""_data_key"""', '"""_colours"""', '"""_levels"""', '"""cmap"""', '"""alpha"""'], {}), "('_data_key', '_colours', '_levels', 'cmap', 'alpha')\n", (21822, 21875), False, 'import param\n'), ((2627, 2649), 'panel.Column', 'pn.Column', (['*pwlist[:2]'], {}), '(*pwlist[:2])\n', (2636, 2649), True, 'import panel as pn\n'), ((2663, 2685), 'panel.Column', 'pn.Column', (['*pchecklist'], {}), '(*pchecklist)\n', (2672, 2685), True, 'import panel as pn\n'), ((3682, 3711), 'geoviews.DynamicMap', 'gv.DynamicMap', (['self.make_plot'], {}), '(self.make_plot)\n', (3695, 3711), True, 'import geoviews as gv\n'), ((7541, 7582), 'panel.Column', 'pn.Column', (['self.wbox', 'self.data_error_box'], {}), '(self.wbox, self.data_error_box)\n', (7550, 7582), True, 'import panel as pn\n'), ((7601, 7651), 'panel.Column', 'pn.Column', (['self.submit_button', 'self.dataset_button'], {}), '(self.submit_button, self.dataset_button)\n', (7610, 7651), True, 'import panel as pn\n'), ((7674, 7730), 'panel.Row', 'pn.Row', (['control_widgets', 'buttons'], {'align': "('end', 'start')"}), "(control_widgets, buttons, align=('end', 'start'))\n", (7680, 7730), True, 'import panel as pn\n'), ((7753, 7788), 'panel.Column', 'pn.Column', (['connect_row', 'control_row'], {}), '(connect_row, control_row)\n', (7762, 7788), True, 'import panel as pn\n'), ((7806, 7859), 'geoviews.tile_sources.Wikipedia.opts', 'gv.tile_sources.Wikipedia.opts', ([], {'width': '(800)', 'height': '(600)'}), '(width=800, height=600)\n', (7836, 7859), True, 'import geoviews as gv\n'), ((7912, 7939), 'panel.Column', 'pn.Column', (['plot', 'self.pwbox'], {}), '(plot, self.pwbox)\n', (7921, 7939), True, 'import panel as pn\n'), ((8126, 8192), 'ipywidgets.Layout', 'widgets.Layout', ([], {'display': '"""none"""', 'visibility': '"""hidden"""', 'border': '"""none"""'}), "(display='none', visibility='hidden', border='none')\n", (8140, 8192), True, 'import ipywidgets as widgets\n'), ((8261, 8411), 'ipywidgets.Layout', 'widgets.Layout', ([], {'border': '"""2px solid #dc3545"""', 'padding': '"""0.05rem 0.5rem"""', 'margin': '"""0 0.25rem 0 5.625rem"""', 'width': '"""70%"""', 'overflow': '"""auto"""', 'display': '"""flex"""'}), "(border='2px solid #dc3545', padding='0.05rem 0.5rem', margin\n ='0 0.25rem 0 5.625rem', width='70%', overflow='auto', display='flex')\n", (8275, 8411), True, 'import ipywidgets as widgets\n'), ((21499, 21589), 'holoviews.streams.PolyDraw', 'hv.streams.PolyDraw', ([], {'source': 'self._area_poly', 'num_objects': '(1)', 'tooltip': '"""Area Query Tool"""'}), "(source=self._area_poly, num_objects=1, tooltip=\n 'Area Query Tool')\n", (21518, 21589), 
True, 'import holoviews as hv\n'), ((21663, 21761), 'holoviews.streams.PolyDraw', 'hv.streams.PolyDraw', ([], {'source': 'self._corridor_path', 'num_objects': '(1)', 'tooltip': '"""Corridor Query Tool"""'}), "(source=self._corridor_path, num_objects=1, tooltip=\n 'Corridor Query Tool')\n", (21682, 21761), True, 'import holoviews as hv\n'), ((1222, 1252), 'ipywidgets.Layout', 'widgets.Layout', ([], {'display': '"""none"""'}), "(display='none')\n", (1236, 1252), True, 'import ipywidgets as widgets\n'), ((1299, 1329), 'ipywidgets.Layout', 'widgets.Layout', ([], {'display': '"""none"""'}), "(display='none')\n", (1313, 1329), True, 'import ipywidgets as widgets\n'), ((2287, 2316), 'ipywidgets.Layout', 'widgets.Layout', ([], {'top': '"""-0.5rem"""'}), "(top='-0.5rem')\n", (2301, 2316), True, 'import ipywidgets as widgets\n'), ((7423, 7471), 'panel.Column', 'pn.Column', (['self.coll_uri', 'self.connect_error_box'], {}), '(self.coll_uri, self.connect_error_box)\n', (7432, 7471), True, 'import panel as pn\n'), ((16568, 16591), 'numpy.array', 'np.array', (["data['xs'][0]"], {}), "(data['xs'][0])\n", (16576, 16591), True, 'import numpy as np\n'), ((16593, 16616), 'numpy.array', 'np.array', (["data['ys'][0]"], {}), "(data['ys'][0])\n", (16601, 16616), True, 'import numpy as np\n'), ((16689, 16704), 'cartopy.crs.Mercator', 'ccrs.Mercator', ([], {}), '()\n', (16702, 16704), True, 'import cartopy.crs as ccrs\n'), ((7955, 7984), 'panel.Row', 'pn.Row', (['control_col', 'plot_col'], {}), '(control_col, plot_col)\n', (7961, 7984), True, 'import panel as pn\n'), ((16640, 16658), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (16656, 16658), True, 'import cartopy.crs as ccrs\n'), ((21137, 21168), 'holoviews.Polygons', 'hv.Polygons', (['[[(0, 0), (0, 0)]]'], {}), '([[(0, 0), (0, 0)]])\n', (21148, 21168), True, 'import holoviews as hv\n'), ((21348, 21375), 'holoviews.Path', 'hv.Path', (['[[(0, 0), (0, 0)]]'], {}), '([[(0, 0), (0, 0)]])\n', (21355, 21375), True, 'import holoviews as hv\n'), ((21995, 22071), 'geoviews.Image', 'gv.Image', (['([-8, -1], [53, 58], [[0, 0], [0, 0]])'], {'crs': "CRS_LOOKUP['WGS_1984']"}), "(([-8, -1], [53, 58], [[0, 0], [0, 0]]), crs=CRS_LOOKUP['WGS_1984'])\n", (22003, 22071), True, 'import geoviews as gv\n')]
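A hedged usage sketch for the dashboard above; the import path and server URL are illustrative, not from the original:

from edr_explorer.explorer import EDRExplorer  # hypothetical import path

explorer = EDRExplorer(server_address="http://localhost:8000")  # illustrative URL
explorer.layout  # renders the dashboard in a notebook, or serve it with Panel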
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 27 10:35:53 2021

@author: Peace4Lv
"""
from pyecharts.components import Image
from pyecharts.options import ComponentTitleOpts
from os import path
import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime, timedelta

plt.rcParams['font.sans-serif'] = ['KaiTi']
plt.rcParams['axes.unicode_minus'] = False


def DrawImage(imgUrl="../html/pic/horizontalLine.png", **kw):
    image = Image()
    # check file exists
    if not path.isfile(imgUrl):
        imgUrl = r"https://gitee.com/RiskyJR/pic-bed/raw/master/comm-timeline-graphic-1024x380.png"
    image.add(
        src=imgUrl,
        # image align center should modify outside style
        style_opts={
            "style": "margin-top: 20px;text-align: center;width:1800px;height:900px;"},
    )
    image.set_global_opts(
        title_opts=ComponentTitleOpts(title="Time Line")
    )
    image.render("../html/imageTest.html")
    print("horizontal line image finished...\n")
    return image


def UpdateTimeLineImage(startTick_x=['2021-08-09 09:00:00', '2021-08-09 09:45:00',
                                    '2021-08-09 11:11:00', '2021-08-09 14:30:00',
                                    '2021-08-09 15:18:00', '2021-08-09 16:40:00',
                                    '2021-08-09 17:19:00'],
                        eventName_x=['开会', '发票', 'visual-code', '舆情分析',
                                     'AOA-Paper', 'AOA-Paper', 'visual-code'],
                        eventLast_x=[30, 78, 33, 47, 69, 39, 15], *k, **kw):
    colors = ['#E5562D', '#E0A459', '#CFBE65', '#A8CF65', '#6FD67D', '#68D5AE',
              '#6FD0DB', '#5294D0', '#595CD0', '#9E59D0', '#D05994']
    # datetime-str→datetime→baseline→gap
    # Create the base bar from 5am to 1am
    startTick_t = [datetime.strptime(x, "%Y-%m-%d %H:%M:%S") for x in startTick_x]
    zeroTick_t = datetime.strptime(datetime.strftime(
        startTick_t[1], "%Y-%m-%d")+" 05:00:00", "%Y-%m-%d %H:%M:%S")
    endTick_t = zeroTick_t+timedelta(hours=19)
    eventName = eventName_x
    eventLast = eventLast_x
    levels = np.array([-5, 5, -3, 3, -1, 1])
    fig, ax = plt.subplots(figsize=(36, 36*0.5625), facecolor='#D6D7C5', dpi=500)
    baseGapMin = (endTick_t-zeroTick_t).total_seconds()/60
    ax.set(facecolor="#D6D7C5")
    ax.broken_barh(
        [(0, baseGapMin)], (-1/2, 1), alpha=.5, facecolors='#ace9e8',
        edgecolors='white', lw=4, capstyle='round')
    ax.set_ylim(-8, 8)
    # set as page background image no need title
    # ax.set_title('Daily Time Line', fontsize=60, color='white')
    for ii, (iname, itick, ieventLast) in enumerate(zip(eventName, startTick_t, eventLast)):
        barhColor = colors[ii % 4]
        level = levels[ii % 6]
        vert = 'top' if level < 0 else 'bottom'
        # tickTemp = datetime.strptime(itick, "%Y-%m-%d %H:%M:%S")
        curPointX = (itick-zeroTick_t).total_seconds()/60
        curPointX_M = curPointX + ieventLast/2
        ax.scatter(curPointX_M, 0, s=100, facecolor='w',
                   edgecolor=barhColor, zorder=9999)
        # a line up to the text
        ax.plot((curPointX_M, curPointX_M), (0, level), c='white', alpha=.5)
        # text
        itickStr = datetime.strftime(itick, "%m-%d %H:%M")
        itext = iname+"\n"+itickStr+"|"+str(ieventLast)
        textInstance = ax.text(
            curPointX_M, level, itext,
            horizontalalignment='center', verticalalignment=vert,
            fontsize=20, fontfamily='Microsoft YaHei')
        textInstance.set_bbox(
            dict(boxstyle="round", alpha=0.5, color='#C3EAE9'))
        # broken_bar
        ax.broken_barh([(curPointX, ieventLast)], (-1/2, 1),
                       facecolors=barhColor, edgecolors='white', lw=4)
    # Remove components for a cleaner look
    plt.setp((ax.get_yticklabels() + ax.get_yticklines() +
              list(ax.spines.values())), visible=False)
    plt.setp((ax.get_xticklabels() + ax.get_xticklines() +
              list(ax.spines.values())), visible=False)
    plt.xlabel(startTick_t[int(len(startTick_t)/2)].strftime("%Y-%m-%d")+' Time Line',
               loc='left', fontsize=30, fontfamily='Microsoft YaHei', color='white')
    plt.ylabel('Update:'+datetime.now().strftime("%Y-%m-%d"),
               loc='bottom', fontsize=30, fontfamily='Microsoft YaHei', color='white')
    if True:
        imageFile = r'../html/pic/timeline.jpg'
        plt.savefig(imageFile, dpi=400, bbox_inches='tight')
        print('image generated', imageFile)
        return imageFile
    else:
        plt.show()


if __name__ == "__main__":
    UpdateTimeLineImage()
    # DrawImage()
[ "datetime.datetime.strftime", "pyecharts.components.Image", "matplotlib.pyplot.show", "pyecharts.options.ComponentTitleOpts", "datetime.datetime.now", "datetime.datetime.strptime", "os.path.isfile", "numpy.array", "datetime.timedelta", "matplotlib.pyplot.subplots", "matplotlib.pyplot.savefig" ]
[((452, 459), 'pyecharts.components.Image', 'Image', ([], {}), '()\n', (457, 459), False, 'from pyecharts.components import Image\n'), ((2129, 2160), 'numpy.array', 'np.array', (['[-5, 5, -3, 3, -1, 1]'], {}), '([-5, 5, -3, 3, -1, 1])\n', (2137, 2160), True, 'import numpy as np\n'), ((2175, 2244), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(36, 36 * 0.5625)', 'facecolor': '"""#D6D7C5"""', 'dpi': '(500)'}), "(figsize=(36, 36 * 0.5625), facecolor='#D6D7C5', dpi=500)\n", (2187, 2244), True, 'import matplotlib.pyplot as plt\n'), ((495, 514), 'os.path.isfile', 'path.isfile', (['imgUrl'], {}), '(imgUrl)\n', (506, 514), False, 'from os import path\n'), ((1806, 1847), 'datetime.datetime.strptime', 'datetime.strptime', (['x', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(x, '%Y-%m-%d %H:%M:%S')\n", (1823, 1847), False, 'from datetime import datetime, timedelta\n'), ((2040, 2059), 'datetime.timedelta', 'timedelta', ([], {'hours': '(19)'}), '(hours=19)\n', (2049, 2059), False, 'from datetime import datetime, timedelta\n'), ((3274, 3313), 'datetime.datetime.strftime', 'datetime.strftime', (['itick', '"""%m-%d %H:%M"""'], {}), "(itick, '%m-%d %H:%M')\n", (3291, 3313), False, 'from datetime import datetime, timedelta\n'), ((4474, 4526), 'matplotlib.pyplot.savefig', 'plt.savefig', (['imageFile'], {'dpi': '(400)', 'bbox_inches': '"""tight"""'}), "(imageFile, dpi=400, bbox_inches='tight')\n", (4485, 4526), True, 'import matplotlib.pyplot as plt\n'), ((4613, 4623), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4621, 4623), True, 'import matplotlib.pyplot as plt\n'), ((863, 900), 'pyecharts.options.ComponentTitleOpts', 'ComponentTitleOpts', ([], {'title': '"""Time Line"""'}), "(title='Time Line')\n", (881, 900), False, 'from pyecharts.options import ComponentTitleOpts\n'), ((1924, 1969), 'datetime.datetime.strftime', 'datetime.strftime', (['startTick_t[1]', '"""%Y-%m-%d"""'], {}), "(startTick_t[1], '%Y-%m-%d')\n", (1941, 1969), False, 'from datetime import datetime, timedelta\n'), ((4281, 4295), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4293, 4295), False, 'from datetime import datetime, timedelta\n')]
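A minimal call sketch for `UpdateTimeLineImage` with a custom schedule; the timestamps, event names, and durations are illustrative:

UpdateTimeLineImage(
    startTick_x=['2021-08-10 09:00:00', '2021-08-10 13:30:00'],
    eventName_x=['standup', 'code review'],
    eventLast_x=[15, 45],  # durations in minutes
)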
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-18 22:52
from __future__ import unicode_literals

import about.models
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.PositiveIntegerField(db_index=True, editable=False)),
                ('name', models.CharField(max_length=140)),
                ('slug', models.SlugField(editable=False)),
                ('text', models.TextField()),
                ('dateCreated', models.DateField(auto_now_add=True)),
                ('dateUpdated', models.DateField(auto_now=True)),
                ('notes', models.TextField(blank=True)),
            ],
            options={
                'ordering': ['order', 'name'],
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.PositiveIntegerField(db_index=True, editable=False)),
                ('publish', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=140)),
                ('caption', models.CharField(blank=True, max_length=140)),
                ('image', models.ImageField(upload_to=about.models.imageLocation)),
                ('dateCreated', models.DateField(auto_now_add=True)),
                ('entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='about.Entry')),
            ],
            options={
                'ordering': ['order', 'dateCreated'],
            },
        ),
    ]
[ "django.db.models.TextField", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.PositiveIntegerField", "django.db.models.SlugField", "django.db.models.AutoField", "django.db.models.BooleanField", "django.db.models.ImageField", "django.db.models.DateField" ]
[((418, 511), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (434, 511), False, 'from django.db import migrations, models\n'), ((536, 594), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'db_index': '(True)', 'editable': '(False)'}), '(db_index=True, editable=False)\n', (563, 594), False, 'from django.db import migrations, models\n'), ((622, 654), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (638, 654), False, 'from django.db import migrations, models\n'), ((682, 714), 'django.db.models.SlugField', 'models.SlugField', ([], {'editable': '(False)'}), '(editable=False)\n', (698, 714), False, 'from django.db import migrations, models\n'), ((742, 760), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (758, 760), False, 'from django.db import migrations, models\n'), ((795, 830), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (811, 830), False, 'from django.db import migrations, models\n'), ((865, 896), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (881, 896), False, 'from django.db import migrations, models\n'), ((925, 953), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (941, 953), False, 'from django.db import migrations, models\n'), ((1168, 1261), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1184, 1261), False, 'from django.db import migrations, models\n'), ((1286, 1344), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'db_index': '(True)', 'editable': '(False)'}), '(db_index=True, editable=False)\n', (1313, 1344), False, 'from django.db import migrations, models\n'), ((1375, 1409), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1394, 1409), False, 'from django.db import migrations, models\n'), ((1437, 1469), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (1453, 1469), False, 'from django.db import migrations, models\n'), ((1500, 1544), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(140)'}), '(blank=True, max_length=140)\n', (1516, 1544), False, 'from django.db import migrations, models\n'), ((1573, 1628), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'about.models.imageLocation'}), '(upload_to=about.models.imageLocation)\n', (1590, 1628), False, 'from django.db import migrations, models\n'), ((1663, 1698), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1679, 1698), False, 'from django.db import migrations, models\n'), ((1727, 1812), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""about.Entry"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='about.Entry'\n )\n", (1744, 1812), False, 'from django.db import migrations, models\n')]
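Once this initial migration has been applied, records can be created as usual. A hedged sketch assuming the app's models mirror the fields above; `order` and `slug` are non-editable, so the model presumably fills them in or accepts them programmatically:

from about.models import Entry, Image  # assumes these models exist as migrated

entry = Entry.objects.create(name="About me", text="Hello there.", order=1)
photo = Image.objects.create(name="Portrait", entry=entry, order=1,
                             image="portrait.jpg")  # illustrative file reference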
import re
import statistics
import sys
from collections import defaultdict
from sys import getsizeof
from timeit import default_timer
from types import BuiltinFunctionType, FunctionType, MethodType
from typing import Any, Dict, Iterable, Optional, Tuple, Union

from penchmark._defs import (
    AnyCallee, AnyInData, ByDataReport, CallableAny, Report, ReportItem,
    Summary, SummaryItem,
)


class Estimator:

    def __call__(self, callee: CallableAny, data: Any, count_of_call: int,
                 expected: Any = None) -> float:
        with Estimator.Elapsed() as elapsed:
            for _ in range(count_of_call):
                ret = callee(data)
                if expected is not None:
                    assert ret == expected
        return elapsed()

    class Elapsed:
        __slots__ = '_start', 'dx'
        FLOAT_FMT = '.3f'

        def __init__(self):
            self._start = 0
            self.dx = 0

        def __enter__(self):
            self._start = default_timer()
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.dx = default_timer() - self._start

        def __call__(self, fmt=None) -> Union[float, str]:
            return self.dx if fmt is None else format(self.dx, fmt)


class ByDataSummary:

    def __init__(self):
        self.by_data_ratios = defaultdict(list)
        self._with_errors = set()

    def __call__(self, by_data_report: ByDataReport):
        for x in by_data_report:
            if x.callee_name not in self._with_errors:
                if x.valid:
                    self.by_data_ratios[x.callee_name].append(x.ratio)
                else:
                    self._with_errors.add(x.callee_name)
                    if x.callee_name in self.by_data_ratios:
                        del self.by_data_ratios[x.callee_name]

    def calc_summary(self) -> Summary:
        ret = []
        for callee_name, ratios in self.by_data_ratios.items():
            ret.append(SummaryItem(
                callee_name=callee_name,
                mean=statistics.mean(ratios),
                median=statistics.median(ratios)
            ))
        ret.sort(key=lambda x: x.median)
        return ret


class NameGenerator:

    def __init__(self, module_as_prefix=True):
        self.module_as_prefix = module_as_prefix
        self._name_counters = defaultdict(int)
        self._cache = {}  # type: Dict[object, str]

    def __call__(self, obj: CallableAny):
        ret = self._cache.get(obj, None)
        if ret is not None:
            return ret
        ret = self.scan_name(obj)
        if not ret:
            ret = 'callable'
        if ret in self._name_counters:
            count = self._name_counters[ret] + 1
            ret = ret + '-' + str(count)
            self._name_counters[ret] = count
        else:
            self._name_counters[ret] = 1
        self._cache[obj] = ret
        return ret

    _REGEXS = [
        re.compile('<bound method (.+) of <.*>>'),
        re.compile('<function (.+) at .*'),
        re.compile('<built-in function (.+)>'),
    ]

    @classmethod
    def scan_name(cls, x: object):
        ret = None
        if isinstance(x, (BuiltinFunctionType, FunctionType, MethodType)):
            ret = str(x)
            for regex in cls._REGEXS:
                m = regex.match(ret)
                if m:
                    ret = m[1]
                    break
            if '<lambda>' in ret:
                ret = 'lambda'
            if '<locals>.' in ret:
                ret = ret[ret.find('<locals>.') + 9:]
        elif hasattr(x, '__name__'):
            ret = x.__name__  # type: ignore
        if not ret:
            # special cases
            # - functools.partial
            s = repr(x)
            if s.startswith('functools'):
                ret = s[:s.find('(')]
        if not ret and hasattr(x, '__class__'):
            ret = x.__class__.__name__  # type: ignore
        if ret and hasattr(x, '__module__') and x.__module__ and x.__module__ != '__main__':
            ret = x.__module__ + '.' + ret
        return ret


def benchmark(callees: Iterable[AnyCallee],
              dataset: Iterable[AnyInData],
              *,
              count_factor=1.0,
              estimator=None,
              summary=None,
              name_generator=None,
              verbose=True) -> Tuple[Report, Optional[Union[Summary, Any]]]:
    """
    Run every callee against every dataset entry and report timings.

    :param callees: callables or (name, callable) pairs to benchmark
    :param dataset: iterable of (data_name, data, count_of_call[, expected]) entries
    :param count_factor: multiplier applied to each entry's count_of_call
    :param estimator: Default is Estimator()
    :param summary: None, False or summary object, default is ByDataSummary()
    :param name_generator: Default is NameGenerator()
    :param verbose: print progress to stdout
    :return: (report keyed by data name, summary or None)
    """
    # pylint: disable=too-many-branches, too-many-arguments, too-many-locals
    if not estimator:
        estimator = Estimator()
    if summary is None:
        summary = ByDataSummary()
    if not name_generator:
        name_generator = NameGenerator()
    ret = {}
    for data_name, data, count_of_call, *data_expected in dataset:
        expected = data_expected[0] if data_expected else None
        count_of_call = round(count_of_call * count_factor)
        if count_of_call <= 0:
            continue
        if verbose:
            print(data_name, 'count of call:', count_of_call,
                  'size of data:', getsizeof(data))
        group = []
        for callee_data in callees:
            if not callable(callee_data):
                callee_name, callee = callee_data
            else:
                callee_name, callee = name_generator(callee_data), callee_data
            if verbose:
                print(' -', callee_name)
            try:
                elapsed = estimator(callee, data, count_of_call, expected)
                ri = ReportItem(callee_name=callee_name, elapsed=elapsed)
            except Exception:  # pylint: disable=broad-except
                ri = ReportItem(callee_name=callee_name)
            group.append(ri)
        group.sort(key=lambda x: x.elapsed if x.valid else sys.maxsize)
        first = group[0]
        if first.valid:
            for item in group:
                if item == first:
                    item.ratio = 1.0
                elif item.valid:
                    item.ratio = item.elapsed / first.elapsed
        if summary:
            summary(group)
        ret[data_name] = group
        if verbose:
            print()
    return ret, summary.calc_summary() if summary else None
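# --- Usage sketch (an addition, not part of the original module) ---
# Minimal end-to-end run of benchmark(): callees may be plain callables or
# (name, callable) pairs, and each dataset entry is
# (data_name, data, count_of_call[, expected]). The names below are
# illustrative only.
if __name__ == '__main__':
    report, summary = benchmark(
        callees=[sum, ('gen-sum', lambda seq: sum(x for x in seq))],
        dataset=[('ten-ints', list(range(10)), 1000, 45)],
        verbose=False)
    for item in summary:
        print(item.callee_name, item.mean, item.median)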
[ "statistics.median", "timeit.default_timer", "collections.defaultdict", "statistics.mean", "penchmark._defs.ReportItem", "sys.getsizeof", "re.compile" ]
[((1417, 1434), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1428, 1434), False, 'from collections import defaultdict\n'), ((2432, 2448), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2443, 2448), False, 'from collections import defaultdict\n'), ((3026, 3067), 're.compile', 're.compile', (['"""<bound method (.+) of <.*>>"""'], {}), "('<bound method (.+) of <.*>>')\n", (3036, 3067), False, 'import re\n'), ((3077, 3111), 're.compile', 're.compile', (['"""<function (.+) at .*"""'], {}), "('<function (.+) at .*')\n", (3087, 3111), False, 'import re\n'), ((3121, 3159), 're.compile', 're.compile', (['"""<built-in function (.+)>"""'], {}), "('<built-in function (.+)>')\n", (3131, 3159), False, 'import re\n'), ((1063, 1078), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (1076, 1078), False, 'from timeit import default_timer\n'), ((1181, 1196), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (1194, 1196), False, 'from timeit import default_timer\n'), ((5364, 5379), 'sys.getsizeof', 'getsizeof', (['data'], {}), '(data)\n', (5373, 5379), False, 'from sys import getsizeof\n'), ((5806, 5858), 'penchmark._defs.ReportItem', 'ReportItem', ([], {'callee_name': 'callee_name', 'elapsed': 'elapsed'}), '(callee_name=callee_name, elapsed=elapsed)\n', (5816, 5858), False, 'from penchmark._defs import AnyCallee, AnyInData, ByDataReport, CallableAny, Report, ReportItem, Summary, SummaryItem\n'), ((5942, 5977), 'penchmark._defs.ReportItem', 'ReportItem', ([], {'callee_name': 'callee_name'}), '(callee_name=callee_name)\n', (5952, 5977), False, 'from penchmark._defs import AnyCallee, AnyInData, ByDataReport, CallableAny, Report, ReportItem, Summary, SummaryItem\n'), ((2133, 2156), 'statistics.mean', 'statistics.mean', (['ratios'], {}), '(ratios)\n', (2148, 2156), False, 'import statistics\n'), ((2181, 2206), 'statistics.median', 'statistics.median', (['ratios'], {}), '(ratios)\n', (2198, 2206), False, 'import statistics\n')]
""" PagerMaid module to handle jd command. """ from pagermaid import version from pagermaid.listener import listener from pagermaid.utils import lang, alias_command, obtain_message, client @listener(is_plugin=False, outgoing=True, command=alias_command("jd_cmd"), description="解析 JD 口令", parameters="<JD 口令>") async def jd_cmd(context): try: text = await obtain_message(context) except ValueError: return await context.edit("[jd_cmd] " + lang("msg_ValueError")) try: data = (await client.post("https://api.jds.codes/jCommand", json={"code": text})).json() except: return await context.edit("[jd_cmd] 网络错误!") if data["code"] != 200: return await context.edit("[jd_cmd] 未找到 JD 口令!") try: data = data["data"] await context.edit(f"【jd_cmd】 [【{data['title']}】 - {data['userName']}]({data['jumpUrl']})") except KeyError: return await context.edit("[jd_cmd] 数据错误!")
[ "pagermaid.utils.client.post", "pagermaid.utils.obtain_message", "pagermaid.utils.lang", "pagermaid.utils.alias_command" ]
[((242, 265), 'pagermaid.utils.alias_command', 'alias_command', (['"""jd_cmd"""'], {}), "('jd_cmd')\n", (255, 265), False, 'from pagermaid.utils import lang, alias_command, obtain_message, client\n'), ((390, 413), 'pagermaid.utils.obtain_message', 'obtain_message', (['context'], {}), '(context)\n', (404, 413), False, 'from pagermaid.utils import lang, alias_command, obtain_message, client\n'), ((540, 606), 'pagermaid.utils.client.post', 'client.post', (['"""https://api.jds.codes/jCommand"""'], {'json': "{'code': text}"}), "('https://api.jds.codes/jCommand', json={'code': text})\n", (551, 606), False, 'from pagermaid.utils import lang, alias_command, obtain_message, client\n'), ((485, 507), 'pagermaid.utils.lang', 'lang', (['"""msg_ValueError"""'], {}), "('msg_ValueError')\n", (489, 507), False, 'from pagermaid.utils import lang, alias_command, obtain_message, client\n')]
import sys
import argparse
import asyncio
import gc
import os

import fastthreadpool

from socket import socket, AF_UNIX, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, TCP_NODELAY

PRINT = 0


def pool_echo_server(address, unix, threads, size):
    if unix:
        sock = socket(AF_UNIX, SOCK_STREAM)
    else:
        sock = socket(AF_INET, SOCK_STREAM)
    sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    sock.bind(address)
    sock.listen(threads)
    if PRINT:
        print('Server listening at', address)
    with sock:
        while True:
            client, addr = sock.accept()
            if PRINT:
                print('Connection from', addr)
            pool.submit(pool_echo_client, client, size)


def pool_echo_client(client, size):
    try:
        client.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    b = bytearray(size)
    bl = [b]
    with client:
        try:
            while True:
                client.recvmsg_into(bl)
                client.sendall(b)
        except Exception:
            pass
    if PRINT:
        print('Connection closed')


async def echo_server(loop, address, unix):
    if unix:
        sock = socket(AF_UNIX, SOCK_STREAM)
    else:
        sock = socket(AF_INET, SOCK_STREAM)
    sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    sock.bind(address)
    sock.listen(16)
    sock.setblocking(False)
    if PRINT:
        print('Server listening at', address)
    with sock:
        while True:
            client, addr = await loop.sock_accept(sock)
            if PRINT:
                print('Connection from', addr)
            loop.create_task(echo_client(loop, client))


async def echo_client(loop, client):
    try:
        client.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    with client:
        while True:
            data = await loop.sock_recv(client, 4096)
            if not data:
                break
            await loop.sock_sendall(client, data)
    if PRINT:
        print('Connection closed')


async def echo_client_streams(reader, writer):
    sock = writer.get_extra_info('socket')
    try:
        sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    if PRINT:
        print('Connection from', sock.getpeername())
    while True:
        data = await reader.read(4096)
        if not data:
            break
        writer.write(data)
    if PRINT:
        print('Connection closed')
    writer.close()


class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport):
        self.transport = transport

    def connection_lost(self, exc):
        self.transport = None

    def data_received(self, data):
        self.transport.write(data)


async def print_debug(loop):
    while True:
        print(chr(27) + "[2J")  # clear screen
        loop.print_debug_info()
        await asyncio.sleep(0.5, loop=loop)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--uvloop', default=False, action='store_true',
                        help='use uvloop instead of asyncio')
    parser.add_argument('--streams', default=False, action='store_true',
                        help='use asyncio/uvloop streams')
    parser.add_argument('--proto', default=False, action='store_true',
                        help='use asyncio/uvloop protocol')
    parser.add_argument('--pool', default=False, action='store_true',
                        help='use thread pool instead of asyncio/uvloop')
    parser.add_argument('--threads', default=os.cpu_count(), type=int,
                        help='number of parallel threads in case of thread pool')
    parser.add_argument('--bufsize', default=4096, type=int)
    parser.add_argument('--addr', default='127.0.0.1:25000', type=str)
    parser.add_argument('--print', default=False, action='store_true')
    args = parser.parse_args()

    unix = False
    if args.addr.startswith('file:'):
        unix = True
        addr = args.addr[5:]
        if os.path.exists(addr):
            os.remove(addr)
    else:
        addr = args.addr.split(':')
        addr[1] = int(addr[1])
        addr = tuple(addr)

    print('serving on: {}'.format(addr))

    if args.pool:
        print(f"creating thread pool with {args.threads} threads")
        print(f"buffer size is {args.bufsize} bytes")
        pool = fastthreadpool.Pool(args.threads)
        pool.submit(pool_echo_server, addr, unix, args.threads, args.bufsize)
        pool.join()
        sys.exit(0)

    if args.uvloop:
        import uvloop
        loop = uvloop.new_event_loop()
        print('using uvloop')
    else:
        loop = asyncio.new_event_loop()
        print('using asyncio loop')

    asyncio.set_event_loop(loop)
    loop.set_debug(False)

    if args.print:
        PRINT = 1

    if hasattr(loop, 'print_debug_info'):
        loop.create_task(print_debug(loop))
        PRINT = 0

    if args.streams:
        if args.proto:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        print('using streams')
        if unix:
            coro = asyncio.start_unix_server(echo_client_streams, addr, loop=loop)
        else:
            coro = asyncio.start_server(echo_client_streams, *addr, loop=loop)
        srv = loop.run_until_complete(coro)
    elif args.proto:
        if args.streams:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        print('using simple protocol')
        if unix:
            coro = loop.create_unix_server(EchoProtocol, addr)
        else:
            coro = loop.create_server(EchoProtocol, *addr)
        srv = loop.run_until_complete(coro)
    else:
        print('using sock_recv/sock_sendall')
        loop.create_task(echo_server(loop, addr, unix))

    try:
        loop.run_forever()
    finally:
        if hasattr(loop, 'print_debug_info'):
            gc.collect()
            print(chr(27) + "[2J")
            loop.print_debug_info()
        loop.close()
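# --- Companion client sketch (an addition, not part of the original file) ---
# Run from a separate process against a running server; the address assumes
# the default --addr of 127.0.0.1:25000.
#
#     from socket import socket, AF_INET, SOCK_STREAM
#
#     def echo_once(payload=b'ping', addr=('127.0.0.1', 25000)):
#         with socket(AF_INET, SOCK_STREAM) as s:
#             s.connect(addr)
#             s.sendall(payload)
#             return s.recv(4096)  # the server should echo the payload back
#
#     assert echo_once() == b'ping'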
[ "asyncio.start_server", "os.remove", "argparse.ArgumentParser", "asyncio.new_event_loop", "asyncio.sleep", "asyncio.set_event_loop", "socket.socket", "os.path.exists", "fastthreadpool.Pool", "os.cpu_count", "gc.collect", "uvloop.new_event_loop", "asyncio.start_unix_server", "sys.exit" ]
[((2996, 3021), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3019, 3021), False, 'import argparse\n'), ((4772, 4800), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (4794, 4800), False, 'import asyncio\n'), ((288, 316), 'socket.socket', 'socket', (['AF_UNIX', 'SOCK_STREAM'], {}), '(AF_UNIX, SOCK_STREAM)\n', (294, 316), False, 'from socket import socket, AF_UNIX, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, TCP_NODELAY\n'), ((342, 370), 'socket.socket', 'socket', (['AF_INET', 'SOCK_STREAM'], {}), '(AF_INET, SOCK_STREAM)\n', (348, 370), False, 'from socket import socket, AF_UNIX, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, TCP_NODELAY\n'), ((1202, 1230), 'socket.socket', 'socket', (['AF_UNIX', 'SOCK_STREAM'], {}), '(AF_UNIX, SOCK_STREAM)\n', (1208, 1230), False, 'from socket import socket, AF_UNIX, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, TCP_NODELAY\n'), ((1256, 1284), 'socket.socket', 'socket', (['AF_INET', 'SOCK_STREAM'], {}), '(AF_INET, SOCK_STREAM)\n', (1262, 1284), False, 'from socket import socket, AF_UNIX, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, IPPROTO_TCP, TCP_NODELAY\n'), ((4066, 4086), 'os.path.exists', 'os.path.exists', (['addr'], {}), '(addr)\n', (4080, 4086), False, 'import os\n'), ((4417, 4450), 'fastthreadpool.Pool', 'fastthreadpool.Pool', (['args.threads'], {}), '(args.threads)\n', (4436, 4450), False, 'import fastthreadpool\n'), ((4557, 4568), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4565, 4568), False, 'import sys\n'), ((4627, 4650), 'uvloop.new_event_loop', 'uvloop.new_event_loop', ([], {}), '()\n', (4648, 4650), False, 'import uvloop\n'), ((4706, 4730), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (4728, 4730), False, 'import asyncio\n'), ((2924, 2953), 'asyncio.sleep', 'asyncio.sleep', (['(0.5)'], {'loop': 'loop'}), '(0.5, loop=loop)\n', (2937, 2953), False, 'import asyncio\n'), ((3608, 3622), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (3620, 3622), False, 'import os\n'), ((4100, 4115), 'os.remove', 'os.remove', (['addr'], {}), '(addr)\n', (4109, 4115), False, 'import os\n'), ((5170, 5233), 'asyncio.start_unix_server', 'asyncio.start_unix_server', (['echo_client_streams', 'addr'], {'loop': 'loop'}), '(echo_client_streams, addr, loop=loop)\n', (5195, 5233), False, 'import asyncio\n'), ((5312, 5371), 'asyncio.start_server', 'asyncio.start_server', (['echo_client_streams', '*addr'], {'loop': 'loop'}), '(echo_client_streams, *addr, loop=loop)\n', (5332, 5371), False, 'import asyncio\n'), ((6045, 6057), 'gc.collect', 'gc.collect', ([], {}), '()\n', (6055, 6057), False, 'import gc\n')]
import numpy as np
import unittest

import ray
from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.test_utils import check


class TestPostprocessing(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        ray.init()

    @classmethod
    def tearDownClass(cls) -> None:
        ray.shutdown()

    def test_n_step_3(self):
        """Tests, whether n-step adjustments of trajectories work."""
        # n-step = 3
        gamma = 0.9
        obs = [1, 2, 3, 4, 5, 6, 7]
        actions = ["ac1", "ac2", "ac1", "ac1", "ac1", "ac2", "ac1"]
        rewards = [10.0, 0.0, 100.0, 100.0, 100.0, 100.0, 100.0]
        dones = [0, 0, 0, 0, 0, 0, 1]
        next_obs = [2, 3, 4, 5, 6, 7, 8]
        batch = SampleBatch(
            {
                SampleBatch.OBS: obs,
                SampleBatch.ACTIONS: actions,
                SampleBatch.REWARDS: rewards,
                SampleBatch.DONES: dones,
                SampleBatch.NEXT_OBS: next_obs,
            }
        )
        adjust_nstep(3, gamma, batch)
        check(batch[SampleBatch.OBS], [1, 2, 3, 4, 5, 6, 7])
        check(
            batch[SampleBatch.ACTIONS],
            ["ac1", "ac2", "ac1", "ac1", "ac1", "ac2", "ac1"],
        )
        check(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 8, 8, 8])
        check(batch[SampleBatch.DONES], [0, 0, 0, 0, 1, 1, 1])
        check(
            batch[SampleBatch.REWARDS], [91.0, 171.0, 271.0, 271.0, 271.0, 190.0, 100.0]
        )

    def test_n_step_4(self):
        """Tests, whether n-step adjustments of trajectories work."""
        # n-step = 4
        gamma = 0.99
        obs = np.arange(0, 7)
        actions = np.random.randint(-1, 3, size=(7,))
        check_actions = actions.copy()
        rewards = [10.0, 0.0, 100.0, 50.0, 60.0, 10.0, 100.0]
        dones = [False, False, False, False, False, False, True]
        next_obs = np.arange(1, 8)
        batch = SampleBatch(
            {
                SampleBatch.OBS: obs,
                SampleBatch.ACTIONS: actions,
                SampleBatch.REWARDS: rewards,
                SampleBatch.DONES: dones,
                SampleBatch.NEXT_OBS: next_obs,
            }
        )
        adjust_nstep(4, gamma, batch)
        check(batch[SampleBatch.OBS], [0, 1, 2, 3, 4, 5, 6])
        check(batch[SampleBatch.ACTIONS], check_actions)
        check(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 7, 7, 7])
        check(batch[SampleBatch.DONES], [False, False, False, True, True, True, True])
        check(
            batch[SampleBatch.REWARDS],
            [
                discount_cumsum(np.array(rewards[0:4]), gamma)[0],
                discount_cumsum(np.array(rewards[1:5]), gamma)[0],
                discount_cumsum(np.array(rewards[2:6]), gamma)[0],
                discount_cumsum(np.array(rewards[3:7]), gamma)[0],
                discount_cumsum(np.array(rewards[4:]), gamma)[0],
                discount_cumsum(np.array(rewards[5:]), gamma)[0],
                discount_cumsum(np.array(rewards[6:]), gamma)[0],
            ],
        )

    def test_n_step_malformed_dones(self):
        # Test bad input (trajectory has dones in middle).
        # Re-use same batch, but change dones.
        gamma = 1.0
        obs = np.arange(0, 7)
        actions = np.random.randint(-1, 3, size=(7,))
        rewards = [10.0, 0.0, 100.0, 50.0, 60.0, 10.0, 100.0]
        next_obs = np.arange(1, 8)
        batch = SampleBatch(
            {
                SampleBatch.OBS: obs,
                SampleBatch.ACTIONS: actions,
                SampleBatch.REWARDS: rewards,
                SampleBatch.DONES: [False, False, True, False, False, False, True],
                SampleBatch.NEXT_OBS: next_obs,
            }
        )
        self.assertRaisesRegex(
            AssertionError,
            "Unexpected done in middle",
            lambda: adjust_nstep(5, gamma, batch),
        )

    def test_n_step_very_short_trajectory(self):
        """Tests, whether n-step also works for very small trajectories."""
        gamma = 1.0
        obs = np.arange(0, 2)
        actions = np.random.randint(-100, 300, size=(2,))
        check_actions = actions.copy()
        rewards = [10.0, 100.0]
        next_obs = np.arange(1, 3)
        batch = SampleBatch(
            {
                SampleBatch.OBS: obs,
                SampleBatch.ACTIONS: actions,
                SampleBatch.REWARDS: rewards,
                SampleBatch.DONES: [False, False],
                SampleBatch.NEXT_OBS: next_obs,
            }
        )
        adjust_nstep(3, gamma, batch)
        check(batch[SampleBatch.OBS], [0, 1])
        check(batch[SampleBatch.ACTIONS], check_actions)
        check(batch[SampleBatch.DONES], [False, False])
        check(batch[SampleBatch.REWARDS], [10.0 + gamma * 100.0, 100.0])
        check(batch[SampleBatch.NEXT_OBS], [2, 2])

    def test_n_step_from_same_obs_source_array(self):
        """Tests, whether n-step also works on a shared obs/new-obs array."""
        gamma = 0.99
        # The underlying observation data. Both obs and next_obs will
        # be references into that same np.array.
        underlying_obs = np.arange(0, 8)
        obs = underlying_obs[:7]
        next_obs = underlying_obs[1:]
        actions = np.random.randint(-1, 3, size=(7,))
        check_actions = actions.copy()
        rewards = [10.0, 0.0, 100.0, 50.0, 60.0, 10.0, 100.0]
        dones = [False, False, False, False, False, False, True]
        batch = SampleBatch(
            {
                SampleBatch.OBS: obs,
                SampleBatch.ACTIONS: actions,
                SampleBatch.REWARDS: rewards,
                SampleBatch.DONES: dones,
                SampleBatch.NEXT_OBS: next_obs,
            }
        )
        adjust_nstep(4, gamma, batch)
        check(batch[SampleBatch.OBS], [0, 1, 2, 3, 4, 5, 6])
        check(batch[SampleBatch.ACTIONS], check_actions)
        check(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 7, 7, 7])
        check(batch[SampleBatch.DONES], [False, False, False, True, True, True, True])
        check(
            batch[SampleBatch.REWARDS],
            [
                discount_cumsum(np.array(rewards[0:4]), gamma)[0],
                discount_cumsum(np.array(rewards[1:5]), gamma)[0],
                discount_cumsum(np.array(rewards[2:6]), gamma)[0],
                discount_cumsum(np.array(rewards[3:7]), gamma)[0],
                discount_cumsum(np.array(rewards[4:]), gamma)[0],
                discount_cumsum(np.array(rewards[5:]), gamma)[0],
                discount_cumsum(np.array(rewards[6:]), gamma)[0],
            ],
        )


if __name__ == "__main__":
    import pytest
    import sys

    sys.exit(pytest.main(["-v", __file__]))
[ "ray.init", "ray.rllib.policy.sample_batch.SampleBatch", "ray.rllib.utils.test_utils.check", "pytest.main", "numpy.random.randint", "ray.shutdown", "numpy.arange", "ray.rllib.evaluation.postprocessing.adjust_nstep", "numpy.array" ]
[((329, 339), 'ray.init', 'ray.init', ([], {}), '()\n', (337, 339), False, 'import ray\n'), ((402, 416), 'ray.shutdown', 'ray.shutdown', ([], {}), '()\n', (414, 416), False, 'import ray\n'), ((822, 984), 'ray.rllib.policy.sample_batch.SampleBatch', 'SampleBatch', (['{SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions, SampleBatch.REWARDS:\n rewards, SampleBatch.DONES: dones, SampleBatch.NEXT_OBS: next_obs}'], {}), '({SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions,\n SampleBatch.REWARDS: rewards, SampleBatch.DONES: dones, SampleBatch.\n NEXT_OBS: next_obs})\n', (833, 984), False, 'from ray.rllib.policy.sample_batch import SampleBatch\n'), ((1101, 1130), 'ray.rllib.evaluation.postprocessing.adjust_nstep', 'adjust_nstep', (['(3)', 'gamma', 'batch'], {}), '(3, gamma, batch)\n', (1113, 1130), False, 'from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum\n'), ((1139, 1191), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.OBS]', '[1, 2, 3, 4, 5, 6, 7]'], {}), '(batch[SampleBatch.OBS], [1, 2, 3, 4, 5, 6, 7])\n', (1144, 1191), False, 'from ray.rllib.utils.test_utils import check\n'), ((1200, 1288), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.ACTIONS]', "['ac1', 'ac2', 'ac1', 'ac1', 'ac1', 'ac2', 'ac1']"], {}), "(batch[SampleBatch.ACTIONS], ['ac1', 'ac2', 'ac1', 'ac1', 'ac1', 'ac2',\n 'ac1'])\n", (1205, 1288), False, 'from ray.rllib.utils.test_utils import check\n'), ((1328, 1385), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.NEXT_OBS]', '[4, 5, 6, 7, 8, 8, 8]'], {}), '(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 8, 8, 8])\n', (1333, 1385), False, 'from ray.rllib.utils.test_utils import check\n'), ((1394, 1448), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.DONES]', '[0, 0, 0, 0, 1, 1, 1]'], {}), '(batch[SampleBatch.DONES], [0, 0, 0, 0, 1, 1, 1])\n', (1399, 1448), False, 'from ray.rllib.utils.test_utils import check\n'), ((1457, 1544), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.REWARDS]', '[91.0, 171.0, 271.0, 271.0, 271.0, 190.0, 100.0]'], {}), '(batch[SampleBatch.REWARDS], [91.0, 171.0, 271.0, 271.0, 271.0, 190.0,\n 100.0])\n', (1462, 1544), False, 'from ray.rllib.utils.test_utils import check\n'), ((1719, 1734), 'numpy.arange', 'np.arange', (['(0)', '(7)'], {}), '(0, 7)\n', (1728, 1734), True, 'import numpy as np\n'), ((1753, 1788), 'numpy.random.randint', 'np.random.randint', (['(-1)', '(3)'], {'size': '(7,)'}), '(-1, 3, size=(7,))\n', (1770, 1788), True, 'import numpy as np\n'), ((1974, 1989), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (1983, 1989), True, 'import numpy as np\n'), ((2006, 2168), 'ray.rllib.policy.sample_batch.SampleBatch', 'SampleBatch', (['{SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions, SampleBatch.REWARDS:\n rewards, SampleBatch.DONES: dones, SampleBatch.NEXT_OBS: next_obs}'], {}), '({SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions,\n SampleBatch.REWARDS: rewards, SampleBatch.DONES: dones, SampleBatch.\n NEXT_OBS: next_obs})\n', (2017, 2168), False, 'from ray.rllib.policy.sample_batch import SampleBatch\n'), ((2285, 2314), 'ray.rllib.evaluation.postprocessing.adjust_nstep', 'adjust_nstep', (['(4)', 'gamma', 'batch'], {}), '(4, gamma, batch)\n', (2297, 2314), False, 'from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum\n'), ((2323, 2375), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.OBS]', '[0, 1, 2, 3, 4, 5, 6]'], {}), '(batch[SampleBatch.OBS], [0, 1, 2, 3, 4, 5, 6])\n', 
(2328, 2375), False, 'from ray.rllib.utils.test_utils import check\n'), ((2384, 2432), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.ACTIONS]', 'check_actions'], {}), '(batch[SampleBatch.ACTIONS], check_actions)\n', (2389, 2432), False, 'from ray.rllib.utils.test_utils import check\n'), ((2441, 2498), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.NEXT_OBS]', '[4, 5, 6, 7, 7, 7, 7]'], {}), '(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 7, 7, 7])\n', (2446, 2498), False, 'from ray.rllib.utils.test_utils import check\n'), ((2507, 2585), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.DONES]', '[False, False, False, True, True, True, True]'], {}), '(batch[SampleBatch.DONES], [False, False, False, True, True, True, True])\n', (2512, 2585), False, 'from ray.rllib.utils.test_utils import check\n'), ((3330, 3345), 'numpy.arange', 'np.arange', (['(0)', '(7)'], {}), '(0, 7)\n', (3339, 3345), True, 'import numpy as np\n'), ((3364, 3399), 'numpy.random.randint', 'np.random.randint', (['(-1)', '(3)'], {'size': '(7,)'}), '(-1, 3, size=(7,))\n', (3381, 3399), True, 'import numpy as np\n'), ((3481, 3496), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (3490, 3496), True, 'import numpy as np\n'), ((3513, 3717), 'ray.rllib.policy.sample_batch.SampleBatch', 'SampleBatch', (['{SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions, SampleBatch.REWARDS:\n rewards, SampleBatch.DONES: [False, False, True, False, False, False, \n True], SampleBatch.NEXT_OBS: next_obs}'], {}), '({SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions,\n SampleBatch.REWARDS: rewards, SampleBatch.DONES: [False, False, True, \n False, False, False, True], SampleBatch.NEXT_OBS: next_obs})\n', (3524, 3717), False, 'from ray.rllib.policy.sample_batch import SampleBatch\n'), ((4148, 4163), 'numpy.arange', 'np.arange', (['(0)', '(2)'], {}), '(0, 2)\n', (4157, 4163), True, 'import numpy as np\n'), ((4182, 4221), 'numpy.random.randint', 'np.random.randint', (['(-100)', '(300)'], {'size': '(2,)'}), '(-100, 300, size=(2,))\n', (4199, 4221), True, 'import numpy as np\n'), ((4312, 4327), 'numpy.arange', 'np.arange', (['(1)', '(3)'], {}), '(1, 3)\n', (4321, 4327), True, 'import numpy as np\n'), ((4344, 4514), 'ray.rllib.policy.sample_batch.SampleBatch', 'SampleBatch', (['{SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions, SampleBatch.REWARDS:\n rewards, SampleBatch.DONES: [False, False], SampleBatch.NEXT_OBS: next_obs}'], {}), '({SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions,\n SampleBatch.REWARDS: rewards, SampleBatch.DONES: [False, False],\n SampleBatch.NEXT_OBS: next_obs})\n', (4355, 4514), False, 'from ray.rllib.policy.sample_batch import SampleBatch\n'), ((4632, 4661), 'ray.rllib.evaluation.postprocessing.adjust_nstep', 'adjust_nstep', (['(3)', 'gamma', 'batch'], {}), '(3, gamma, batch)\n', (4644, 4661), False, 'from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum\n'), ((4670, 4707), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.OBS]', '[0, 1]'], {}), '(batch[SampleBatch.OBS], [0, 1])\n', (4675, 4707), False, 'from ray.rllib.utils.test_utils import check\n'), ((4716, 4764), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.ACTIONS]', 'check_actions'], {}), '(batch[SampleBatch.ACTIONS], check_actions)\n', (4721, 4764), False, 'from ray.rllib.utils.test_utils import check\n'), ((4773, 4820), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.DONES]', '[False, False]'], {}), 
'(batch[SampleBatch.DONES], [False, False])\n', (4778, 4820), False, 'from ray.rllib.utils.test_utils import check\n'), ((4829, 4893), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.REWARDS]', '[10.0 + gamma * 100.0, 100.0]'], {}), '(batch[SampleBatch.REWARDS], [10.0 + gamma * 100.0, 100.0])\n', (4834, 4893), False, 'from ray.rllib.utils.test_utils import check\n'), ((4902, 4944), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.NEXT_OBS]', '[2, 2]'], {}), '(batch[SampleBatch.NEXT_OBS], [2, 2])\n', (4907, 4944), False, 'from ray.rllib.utils.test_utils import check\n'), ((5243, 5258), 'numpy.arange', 'np.arange', (['(0)', '(8)'], {}), '(0, 8)\n', (5252, 5258), True, 'import numpy as np\n'), ((5349, 5384), 'numpy.random.randint', 'np.random.randint', (['(-1)', '(3)'], {'size': '(7,)'}), '(-1, 3, size=(7,))\n', (5366, 5384), True, 'import numpy as np\n'), ((5568, 5730), 'ray.rllib.policy.sample_batch.SampleBatch', 'SampleBatch', (['{SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions, SampleBatch.REWARDS:\n rewards, SampleBatch.DONES: dones, SampleBatch.NEXT_OBS: next_obs}'], {}), '({SampleBatch.OBS: obs, SampleBatch.ACTIONS: actions,\n SampleBatch.REWARDS: rewards, SampleBatch.DONES: dones, SampleBatch.\n NEXT_OBS: next_obs})\n', (5579, 5730), False, 'from ray.rllib.policy.sample_batch import SampleBatch\n'), ((5847, 5876), 'ray.rllib.evaluation.postprocessing.adjust_nstep', 'adjust_nstep', (['(4)', 'gamma', 'batch'], {}), '(4, gamma, batch)\n', (5859, 5876), False, 'from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum\n'), ((5886, 5938), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.OBS]', '[0, 1, 2, 3, 4, 5, 6]'], {}), '(batch[SampleBatch.OBS], [0, 1, 2, 3, 4, 5, 6])\n', (5891, 5938), False, 'from ray.rllib.utils.test_utils import check\n'), ((5947, 5995), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.ACTIONS]', 'check_actions'], {}), '(batch[SampleBatch.ACTIONS], check_actions)\n', (5952, 5995), False, 'from ray.rllib.utils.test_utils import check\n'), ((6004, 6061), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.NEXT_OBS]', '[4, 5, 6, 7, 7, 7, 7]'], {}), '(batch[SampleBatch.NEXT_OBS], [4, 5, 6, 7, 7, 7, 7])\n', (6009, 6061), False, 'from ray.rllib.utils.test_utils import check\n'), ((6070, 6148), 'ray.rllib.utils.test_utils.check', 'check', (['batch[SampleBatch.DONES]', '[False, False, False, True, True, True, True]'], {}), '(batch[SampleBatch.DONES], [False, False, False, True, True, True, True])\n', (6075, 6148), False, 'from ray.rllib.utils.test_utils import check\n'), ((6785, 6814), 'pytest.main', 'pytest.main', (["['-v', __file__]"], {}), "(['-v', __file__])\n", (6796, 6814), False, 'import pytest\n'), ((3947, 3976), 'ray.rllib.evaluation.postprocessing.adjust_nstep', 'adjust_nstep', (['(5)', 'gamma', 'batch'], {}), '(5, gamma, batch)\n', (3959, 3976), False, 'from ray.rllib.evaluation.postprocessing import adjust_nstep, discount_cumsum\n'), ((2687, 2709), 'numpy.array', 'np.array', (['rewards[0:4]'], {}), '(rewards[0:4])\n', (2695, 2709), True, 'import numpy as np\n'), ((2754, 2776), 'numpy.array', 'np.array', (['rewards[1:5]'], {}), '(rewards[1:5])\n', (2762, 2776), True, 'import numpy as np\n'), ((2821, 2843), 'numpy.array', 'np.array', (['rewards[2:6]'], {}), '(rewards[2:6])\n', (2829, 2843), True, 'import numpy as np\n'), ((2888, 2910), 'numpy.array', 'np.array', (['rewards[3:7]'], {}), '(rewards[3:7])\n', (2896, 2910), True, 'import numpy as np\n'), ((2955, 
2976), 'numpy.array', 'np.array', (['rewards[4:]'], {}), '(rewards[4:])\n', (2963, 2976), True, 'import numpy as np\n'), ((3021, 3042), 'numpy.array', 'np.array', (['rewards[5:]'], {}), '(rewards[5:])\n', (3029, 3042), True, 'import numpy as np\n'), ((3087, 3108), 'numpy.array', 'np.array', (['rewards[6:]'], {}), '(rewards[6:])\n', (3095, 3108), True, 'import numpy as np\n'), ((6250, 6272), 'numpy.array', 'np.array', (['rewards[0:4]'], {}), '(rewards[0:4])\n', (6258, 6272), True, 'import numpy as np\n'), ((6317, 6339), 'numpy.array', 'np.array', (['rewards[1:5]'], {}), '(rewards[1:5])\n', (6325, 6339), True, 'import numpy as np\n'), ((6384, 6406), 'numpy.array', 'np.array', (['rewards[2:6]'], {}), '(rewards[2:6])\n', (6392, 6406), True, 'import numpy as np\n'), ((6451, 6473), 'numpy.array', 'np.array', (['rewards[3:7]'], {}), '(rewards[3:7])\n', (6459, 6473), True, 'import numpy as np\n'), ((6518, 6539), 'numpy.array', 'np.array', (['rewards[4:]'], {}), '(rewards[4:])\n', (6526, 6539), True, 'import numpy as np\n'), ((6584, 6605), 'numpy.array', 'np.array', (['rewards[5:]'], {}), '(rewards[5:])\n', (6592, 6605), True, 'import numpy as np\n'), ((6650, 6671), 'numpy.array', 'np.array', (['rewards[6:]'], {}), '(rewards[6:])\n', (6658, 6671), True, 'import numpy as np\n')]
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""Module for Algorand mnemonic utility classes."""

# Imports
from typing import List, Optional, Union
from bip_utils.algorand.mnemonic.algorand_mnemonic import AlgorandMnemonicConst
from bip_utils.utils.misc import CryptoUtils


class AlgorandMnemonicUtils:
    """Class container for Algorand mnemonic utility functions."""

    @staticmethod
    def ComputeChecksum(data_bytes: bytes) -> bytes:
        """
        Compute checksum.

        Args:
            data_bytes (bytes): Data bytes

        Returns:
            bytes: Computed checksum
        """
        return CryptoUtils.Sha512_256(data_bytes)[:AlgorandMnemonicConst.CHECKSUM_BYTE_LEN]

    @staticmethod
    def ComputeChecksumWordIndex(data_bytes: bytes) -> int:
        """
        Compute checksum word index.

        Args:
            data_bytes (bytes): Data bytes

        Returns:
            int: Computed checksum word index
        """
        # Compute checksum and convert it to 11-bit
        chksum = AlgorandMnemonicUtils.ComputeChecksum(data_bytes)
        chksum_11bit = AlgorandMnemonicUtils.ConvertBits(chksum, 8, 11)
        # Cannot be None by converting bytes from 8-bit to 11-bit
        assert chksum_11bit is not None

        return chksum_11bit[0]

    @staticmethod
    def ConvertBits(data: Union[bytes, List[int]],
                    from_bits: int,
                    to_bits: int) -> Optional[List[int]]:
        """
        Perform bit conversion.
        The function takes the input data (list of integers or byte sequence) and converts
        every value from the specified number of bits to the specified one.
        It returns a list of integers where every number is less than 2^to_bits.

        Args:
            data (list[int] or bytes): Data to be converted
            from_bits (int)          : Number of bits to start from
            to_bits (int)            : Number of bits to end with

        Returns:
            list[int]: List of converted values, None in case of errors
        """
        max_out_val = (1 << to_bits) - 1

        acc = 0
        bits = 0
        ret = []

        for value in data:
            # Value shall not be less than zero or greater than 2^from_bits
            if value < 0 or (value >> from_bits):
                return None
            # Continue accumulating until greater than to_bits
            acc |= value << bits
            bits += from_bits
            while bits >= to_bits:
                ret.append(acc & max_out_val)
                acc = acc >> to_bits
                bits -= to_bits

        if bits != 0:
            ret.append(acc & max_out_val)

        return ret
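if __name__ == "__main__":
    # Demo (an addition, not part of the original module): regroup 8-bit
    # bytes into the 11-bit values used as mnemonic word-list indices.
    # For bytes(range(4)) this yields [256, 64, 12], all below 2**11.
    word_indices = AlgorandMnemonicUtils.ConvertBits(bytes(range(4)), 8, 11)
    assert word_indices is not None and all(v < 2 ** 11 for v in word_indices)
    print(word_indices)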
[ "bip_utils.utils.misc.CryptoUtils.Sha512_256" ]
[((1663, 1697), 'bip_utils.utils.misc.CryptoUtils.Sha512_256', 'CryptoUtils.Sha512_256', (['data_bytes'], {}), '(data_bytes)\n', (1685, 1697), False, 'from bip_utils.utils.misc import CryptoUtils\n')]
from snake import Snake
from snake_curses_view import SnakeCursesView
from game import Game
import time
import curses


class SnakeCursesGame:
    def __init__(self):
        self.snake = Snake()
        self.game = Game(80, 24)
        self.game.add_snake(self.snake)
        self.game.start()
        self.w = curses.initscr()
        self.w.nodelay(True)
        self.w.keypad(True)
        # Hide the cursor; curs_set() is a module-level curses function,
        # not a method of this class or of the window object.
        curses.curs_set(0)
        self.view = SnakeCursesView(self.w, self.game)
        self.view.add_action_listener()

    def turn_action(self, direction):
        self.snake.turn(direction)

    def run(self):
        while True:
            self.view.draw()
            self.w.refresh()
            time.sleep(0.1)
            self.view.undraw()
            ch = self.w.getch()
            if ch in [curses.KEY_UP, curses.KEY_DOWN, curses.KEY_LEFT, curses.KEY_RIGHT]:
                self.view.get_key()
            elif ch != -1:
                break
            self.game.tick()


def main():
    try:
        game = SnakeCursesGame()
        game.run()
    finally:
        try:
            curses.endwin()
        except Exception:
            pass


if __name__ == '__main__':
    main()
[ "curses.initscr", "time.sleep", "curses.endwin", "snake_curses_view.SnakeCursesView", "snake.Snake", "game.Game" ]
[((188, 195), 'snake.Snake', 'Snake', ([], {}), '()\n', (193, 195), False, 'from snake import Snake\n'), ((216, 228), 'game.Game', 'Game', (['(80)', '(24)'], {}), '(80, 24)\n', (220, 228), False, 'from game import Game\n'), ((312, 328), 'curses.initscr', 'curses.initscr', ([], {}), '()\n', (326, 328), False, 'import curses\n'), ((432, 466), 'snake_curses_view.SnakeCursesView', 'SnakeCursesView', (['self.w', 'self.game'], {}), '(self.w, self.game)\n', (447, 466), False, 'from snake_curses_view import SnakeCursesView\n'), ((689, 704), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (699, 704), False, 'import time\n'), ((1089, 1104), 'curses.endwin', 'curses.endwin', ([], {}), '()\n', (1102, 1104), False, 'import curses\n')]
import torch.nn as nn
import torch
import numpy as np
import torch.nn.functional as F
from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv


class FlowNet3D(nn.Module):
    def __init__(self, args):
        super(FlowNet3D, self).__init__()
        """
        self.sa1 = PointNetSetAbstraction(npoint=1024, radius=0.5, nsample=16, in_channel=3, mlp=[32,32,64], group_all=False)
        self.sa2 = PointNetSetAbstraction(npoint=256, radius=1.0, nsample=16, in_channel=64, mlp=[64, 64, 128], group_all=False)
        self.sa3 = PointNetSetAbstraction(npoint=64, radius=2.0, nsample=8, in_channel=128, mlp=[128, 128, 256], group_all=False)
        self.sa4 = PointNetSetAbstraction(npoint=16, radius=4.0, nsample=8, in_channel=256, mlp=[256,256,512], group_all=False)
        """
        self.sa1 = PointNetSetAbstractionOrg(npoint=1024, radius=0.5, nsample=16, in_channel=3, mlp=[32, 64, 128], group_all=False)
        self.sa2 = PointNetSetAbstraction(npoint=256, radius=1.0, nsample=16, in_channel=128, mlp=[128, 128, 128], group_all=False)
        self.sa3 = PointNetSetAbstraction(npoint=64, radius=2.0, nsample=8, in_channel=128, mlp=[128, 128, 128], group_all=False)
        self.sa4 = PointNetSetAbstraction(npoint=16, radius=4.0, nsample=8, in_channel=128 + 3, mlp=[128, 128, 128], group_all=False)
        self.fe_layer = FlowEmbedding(radius=10.0, nsample=64, in_channel=128 + 3, mlp=[128, 128, 128], pooling='max', corr_func='concat')
        self.su1 = PointNetSetUpConv(nsample=8, radius=2.4, f1_channel=128 + 3, f2_channel=128 + 6, mlp=[], mlp2=[256, 256])
        self.su2 = PointNetSetUpConv(nsample=8, radius=1.2, f1_channel=128 + 128 + 3 + 3, f2_channel=256, mlp=[128, 128, 256], mlp2=[256])
        self.su3 = PointNetSetUpConv(nsample=8, radius=0.6, f1_channel=128 + 3, f2_channel=256, mlp=[128, 128, 256], mlp2=[256])
        self.fp = PointNetFeaturePropogation(in_channel=256 + 3, mlp=[256, 256])
        self.conv1 = nn.Conv1d(256, 128, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm1d(128)
        self.conv2 = nn.Conv1d(128, 3, kernel_size=1, bias=True)

    def forward(self, pc1, pc2, feature1, feature2):
        l1_pc1, l1_feature1 = self.sa1(pc1, feature1)  # l1_pc1 128, l1_f1 128
        l2_pc1, l2_feature1 = self.sa2(l1_pc1, l1_feature1)  # l2_pc1 128+3, l2_f1 128+3
        l1_pc2, l1_feature2 = self.sa1(pc2, feature2)  # l1_pc2 128, l1_f2 128
        l2_pc2, l2_feature2 = self.sa2(l1_pc2, l1_feature2)  # l2_pc2 128+3, l2_f2 128+3

        _, l2_feature1_new = self.fe_layer(l2_pc1, l2_pc2, l2_feature1, l2_feature2)  # l2_f1n 128

        l3_pc1, l3_feature1 = self.sa3(l2_pc1, l2_feature1_new)  # l3_pc1 l3_f1 128+6
        l4_pc1, l4_feature1 = self.sa4(l3_pc1, l3_feature1)  # l4_pc1 l4_f1 128+9

        l3_fnew1 = self.su1(l3_pc1, l4_pc1, l3_feature1, l4_feature1)  # l3_fn1 = 256
        l2_fnew1 = self.su2(l2_pc1, l3_pc1, torch.cat([l2_feature1, l2_feature1_new], dim=1), l3_fnew1)
        l1_fnew1 = self.su3(l1_pc1, l2_pc1, l1_feature1, l2_fnew1)
        l0_fnew1 = self.fp(pc1, l1_pc1, feature1, l1_fnew1)

        x = F.relu(self.bn1(self.conv1(l0_fnew1)))
        sf = self.conv2(x)
        return sf


if __name__ == '__main__':
    import os
    import torch
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    input = torch.randn((8, 3, 2048))
    label = torch.randn(8, 16)
    # `args` is accepted but unused by the constructor; forward() takes two
    # point clouds plus their per-point features, so pass the same random
    # tensor for each argument as a smoke test.
    model = FlowNet3D(args=None)
    output = model(input, input, input, input)
    print(output.size())
[ "util.PointNetSetAbstraction", "torch.nn.Conv1d", "torch.nn.BatchNorm1d", "torch.randn", "torch.cat", "util.PointNetSetUpConv", "util.PointNetFeaturePropogation", "util.PointNetSetAbstractionOrg", "util.FlowEmbedding" ]
[((3419, 3444), 'torch.randn', 'torch.randn', (['(8, 3, 2048)'], {}), '((8, 3, 2048))\n', (3430, 3444), False, 'import torch\n'), ((3455, 3473), 'torch.randn', 'torch.randn', (['(8)', '(16)'], {}), '(8, 16)\n', (3466, 3473), False, 'import torch\n'), ((869, 985), 'util.PointNetSetAbstractionOrg', 'PointNetSetAbstractionOrg', ([], {'npoint': '(1024)', 'radius': '(0.5)', 'nsample': '(16)', 'in_channel': '(3)', 'mlp': '[32, 64, 128]', 'group_all': '(False)'}), '(npoint=1024, radius=0.5, nsample=16, in_channel=3,\n mlp=[32, 64, 128], group_all=False)\n', (894, 985), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1001, 1117), 'util.PointNetSetAbstraction', 'PointNetSetAbstraction', ([], {'npoint': '(256)', 'radius': '(1.0)', 'nsample': '(16)', 'in_channel': '(128)', 'mlp': '[128, 128, 128]', 'group_all': '(False)'}), '(npoint=256, radius=1.0, nsample=16, in_channel=128,\n mlp=[128, 128, 128], group_all=False)\n', (1023, 1117), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1133, 1247), 'util.PointNetSetAbstraction', 'PointNetSetAbstraction', ([], {'npoint': '(64)', 'radius': '(2.0)', 'nsample': '(8)', 'in_channel': '(128)', 'mlp': '[128, 128, 128]', 'group_all': '(False)'}), '(npoint=64, radius=2.0, nsample=8, in_channel=128,\n mlp=[128, 128, 128], group_all=False)\n', (1155, 1247), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1263, 1381), 'util.PointNetSetAbstraction', 'PointNetSetAbstraction', ([], {'npoint': '(16)', 'radius': '(4.0)', 'nsample': '(8)', 'in_channel': '(128 + 3)', 'mlp': '[128, 128, 128]', 'group_all': '(False)'}), '(npoint=16, radius=4.0, nsample=8, in_channel=128 + 3,\n mlp=[128, 128, 128], group_all=False)\n', (1285, 1381), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1409, 1528), 'util.FlowEmbedding', 'FlowEmbedding', ([], {'radius': '(10.0)', 'nsample': '(64)', 'in_channel': '(128 + 3)', 'mlp': '[128, 128, 128]', 'pooling': '"""max"""', 'corr_func': '"""concat"""'}), "(radius=10.0, nsample=64, in_channel=128 + 3, mlp=[128, 128, \n 128], pooling='max', corr_func='concat')\n", (1422, 1528), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1552, 1661), 'util.PointNetSetUpConv', 'PointNetSetUpConv', ([], {'nsample': '(8)', 'radius': '(2.4)', 'f1_channel': '(128 + 3)', 'f2_channel': '(128 + 6)', 'mlp': '[]', 'mlp2': '[256, 256]'}), '(nsample=8, radius=2.4, f1_channel=128 + 3, f2_channel=128 +\n 6, mlp=[], mlp2=[256, 256])\n', (1569, 1661), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1677, 1800), 'util.PointNetSetUpConv', 'PointNetSetUpConv', ([], {'nsample': '(8)', 'radius': '(1.2)', 'f1_channel': '(128 + 128 + 3 + 3)', 'f2_channel': '(256)', 'mlp': '[128, 128, 256]', 'mlp2': '[256]'}), '(nsample=8, radius=1.2, f1_channel=128 + 128 + 3 + 3,\n f2_channel=256, mlp=[128, 128, 256], mlp2=[256])\n', (1694, 1800), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1814, 1927), 'util.PointNetSetUpConv', 
'PointNetSetUpConv', ([], {'nsample': '(8)', 'radius': '(0.6)', 'f1_channel': '(128 + 3)', 'f2_channel': '(256)', 'mlp': '[128, 128, 256]', 'mlp2': '[256]'}), '(nsample=8, radius=0.6, f1_channel=128 + 3, f2_channel=256,\n mlp=[128, 128, 256], mlp2=[256])\n', (1831, 1927), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((1944, 2006), 'util.PointNetFeaturePropogation', 'PointNetFeaturePropogation', ([], {'in_channel': '(256 + 3)', 'mlp': '[256, 256]'}), '(in_channel=256 + 3, mlp=[256, 256])\n', (1970, 2006), False, 'from util import PointNetSetAbstractionOrg, PointNetSetAbstraction, PointNetFeaturePropogation, FlowEmbedding, PointNetSetUpConv\n'), ((2039, 2085), 'torch.nn.Conv1d', 'nn.Conv1d', (['(256)', '(128)'], {'kernel_size': '(1)', 'bias': '(False)'}), '(256, 128, kernel_size=1, bias=False)\n', (2048, 2085), True, 'import torch.nn as nn\n'), ((2105, 2124), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {}), '(128)\n', (2119, 2124), True, 'import torch.nn as nn\n'), ((2144, 2187), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(3)'], {'kernel_size': '(1)', 'bias': '(True)'}), '(128, 3, kernel_size=1, bias=True)\n', (2153, 2187), True, 'import torch.nn as nn\n'), ((3003, 3051), 'torch.cat', 'torch.cat', (['[l2_feature1, l2_feature1_new]'], {'dim': '(1)'}), '([l2_feature1, l2_feature1_new], dim=1)\n', (3012, 3051), False, 'import torch\n')]
from arch._version import get_versions
from arch.univariate.mean import arch_model
from arch.utility import test

__version__ = get_versions()["version"]
del get_versions


def doc() -> None:
    import webbrowser

    webbrowser.open("http://arch.readthedocs.org/en/latest/")


__all__ = ["arch_model", "__version__", "doc", "test"]
[ "webbrowser.open", "arch._version.get_versions" ]
[((128, 142), 'arch._version.get_versions', 'get_versions', ([], {}), '()\n', (140, 142), False, 'from arch._version import get_versions\n'), ((219, 276), 'webbrowser.open', 'webbrowser.open', (['"""http://arch.readthedocs.org/en/latest/"""'], {}), "('http://arch.readthedocs.org/en/latest/')\n", (234, 276), False, 'import webbrowser\n')]
from __future__ import division

import os

import pandas as pd
from pyomo.environ import (
    ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param,
    RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp,
    log, sqrt, summation, value)

from .util import alphanum_sorted
from pyomo.environ import TerminationCondition as tc


def build_model():
    m = ConcreteModel()
    m.BigM = Suffix(direction=Suffix.LOCAL)
    m.periods_per_year = Param(initialize=4, doc="Quarters per year")
    m.project_life = Param(initialize=15, doc="Years")
    m.time = RangeSet(0, m.periods_per_year * m.project_life - 1, doc="Time periods")
    m.discount_rate = Param(initialize=0.08, doc="8%")
    m.learning_rate = Param(initialize=0.1, doc="Fraction discount for doubling of quantity")
    m.module_setup_time = Param(
        initialize=1, doc="1 quarter for module transfer")

    @m.Param(m.time)
    def discount_factor(m, t):
        return (1 + m.discount_rate / m.periods_per_year) ** (-t / m.periods_per_year)

    xlsx_data = pd.read_excel(os.path.join(os.path.dirname(__file__), "data.xlsx"), sheet_name=None)
    module_sheet = xlsx_data['modules'].set_index('Type')
    m.module_types = Set(initialize=module_sheet.columns.tolist(),)

    @m.Param(m.module_types)
    def module_base_cost(m, mtype):
        return float(module_sheet[mtype]['Capital Cost [MM$]'])

    @m.Param(m.module_types, doc="Natural gas consumption per module of this type [MMSCF/d]")
    def unit_gas_consumption(m, mtype):
        return float(module_sheet[mtype]['Nat Gas [MMSCF/d]'])

    @m.Param(m.module_types, doc="Gasoline production per module of this type [kBD]")
    def gasoline_production(m, mtype):
        return float(module_sheet[mtype]['Gasoline [kBD]'])

    @m.Param(m.module_types, doc="Overall conversion of natural gas into gasoline per module of this type [kB/MMSCF]")
    def module_conversion(m, mtype):
        return float(module_sheet[mtype]['Conversion [kB/MMSCF]'])

    site_sheet = xlsx_data['sites'].set_index('Potential site')
    m.potential_sites = Set(initialize=site_sheet.index.tolist())
    m.site_pairs = Set(
        doc="Pairs of potential sites",
        initialize=m.potential_sites * m.potential_sites,
        filter=lambda _, x, y: not x == y)

    @m.Param(m.potential_sites)
    def site_x(m, site):
        return float(site_sheet['x'][site])

    @m.Param(m.potential_sites)
    def site_y(m, site):
        return float(site_sheet['y'][site])

    well_sheet = xlsx_data['wells'].set_index('Well')
    m.well_clusters = Set(initialize=well_sheet.index.tolist())

    @m.Param(m.well_clusters)
    def well_x(m, well):
        return float(well_sheet['x'][well])

    @m.Param(m.well_clusters)
    def well_y(m, well):
        return float(well_sheet['y'][well])

    sched_sheet = xlsx_data['well-schedule']
    decay_curve = [1] + [3.69 * exp(-1.31 * (t + 1) ** 0.292)
                         for t in range(m.project_life * 12)]
    well_profiles = {well: [0 for _ in decay_curve] for well in m.well_clusters}
    for _, well_info in sched_sheet.iterrows():
        start_time = int(well_info['Month'])
        prod = [0] * start_time + decay_curve[:len(decay_curve) - start_time]
        prod = [x * float(well_info['max prod [MMSCF/d]']) for x in prod]
        current_profile = well_profiles[well_info['well-cluster']]
        well_profiles[well_info['well-cluster']] = [
            val + prod[i] for i, val in enumerate(current_profile)]

    @m.Param(m.well_clusters, m.time, doc="Supply of gas from well cluster [MMSCF/day]")
    def gas_supply(m, well, t):
        return sum(well_profiles[well][t * 3:t * 3 + 2]) / 3

    mkt_sheet = xlsx_data['markets'].set_index('Market')
    m.markets = Set(initialize=mkt_sheet.index.tolist())

    @m.Param(m.markets)
    def mkt_x(m, mkt):
        return float(mkt_sheet['x'][mkt])

    @m.Param(m.markets)
    def mkt_y(m, mkt):
        return float(mkt_sheet['y'][mkt])

    @m.Param(m.markets, doc="Gasoline demand [kBD]")
    def mkt_demand(m, mkt):
        return float(mkt_sheet['demand [kBD]'][mkt])

    m.sources = Set(initialize=m.well_clusters | m.potential_sites)
    m.destinations = Set(initialize=m.potential_sites | m.markets)

    @m.Param(m.sources, m.destinations, doc="Distance [mi]")
    def distance(m, src, dest):
        if src in m.well_clusters:
            src_x = m.well_x[src]
            src_y = m.well_y[src]
        else:
            src_x = m.site_x[src]
            src_y = m.site_y[src]
        if dest in m.markets:
            dest_x = m.mkt_x[dest]
            dest_y = m.mkt_y[dest]
        else:
            dest_x = m.site_x[dest]
            dest_y = m.site_y[dest]
        return sqrt((src_x - dest_x) ** 2 + (src_y - dest_y) ** 2)

    m.num_modules = Var(
        m.module_types, m.potential_sites, m.time,
        doc="Number of active modules of each type at a site in a period",
        domain=Integers, bounds=(0, 50), initialize=1)
    m.modules_transferred = Var(
        m.module_types, m.site_pairs, m.time,
        doc="Number of module transfers initiated from one site to another in a period.",
        domain=Integers, bounds=(0, 15), initialize=0)
    m.modules_purchased = Var(
        m.module_types, m.potential_sites, m.time,
        doc="Number of modules of each type purchased for a site in a period",
        domain=Integers, bounds=(0, 30), initialize=1)

    m.pipeline_unit_cost = Param(doc="MM$/mile", initialize=2)

    @m.Param(m.time, doc="Module transport cost per mile [M$/100 miles]")
    def module_transport_distance_cost(m, t):
        return 50 * m.discount_factor[t]

    @m.Param(m.time, doc="Module transport cost per unit [MM$/module]")
    def module_transport_unit_cost(m, t):
        return 3 * m.discount_factor[t]

    @m.Param(m.time, doc="Stranded gas price [$/MSCF]")
    def nat_gas_price(m, t):
        return 5 * m.discount_factor[t]

    @m.Param(m.time, doc="Gasoline price [$/gal]")
    def gasoline_price(m, t):
        return 2.5 * m.discount_factor[t]

    @m.Param(m.time, doc="Gasoline transport cost [$/gal/100 miles]")
    def gasoline_tranport_cost(m, t):
        return 0.045 * m.discount_factor[t]

    m.gal_per_bbl = Param(initialize=42, doc="Gallons per barrel")
    m.days_per_period = Param(initialize=90, doc="Days in a production period")

    m.learning_factor = Var(
        m.module_types,
        doc="Fraction of cost due to economies of mass production",
        domain=NonNegativeReals, bounds=(0, 1), initialize=1)

    @m.Disjunct(m.module_types)
    def mtype_exists(disj, mtype):
        disj.learning_factor_calc = Constraint(
            expr=m.learning_factor[mtype] == (1 - m.learning_rate) ** (
                log(sum(m.modules_purchased[mtype, :, :])) / log(2)))
        m.BigM[disj.learning_factor_calc] = 1
        disj.require_module_purchases = Constraint(
            expr=sum(m.modules_purchased[mtype, :, :]) >= 1)

    @m.Disjunct(m.module_types)
    def mtype_absent(disj, mtype):
        disj.constant_learning_factor = Constraint(
            expr=m.learning_factor[mtype] == 1)

    @m.Disjunction(m.module_types)
    def mtype_existence(m, mtype):
        return [m.mtype_exists[mtype], m.mtype_absent[mtype]]

    @m.Expression(m.module_types, m.time, doc="Module unit cost [MM$/module]")
    def module_unit_cost(m, mtype, t):
        return m.module_base_cost[mtype] * m.learning_factor[mtype] * m.discount_factor[t]

    m.production = Var(
        m.potential_sites, m.time,
        doc="Production of gasoline in a time period [kBD]",
        domain=NonNegativeReals, bounds=(0, 30), initialize=10)
    m.gas_consumption = Var(
        m.potential_sites, m.module_types, m.time,
        doc="Consumption of natural gas by each module type "
        "at each site in a time period [MMSCF/d]",
        domain=NonNegativeReals, bounds=(0, 250), initialize=50)
    m.gas_flows = Var(
        m.well_clusters, m.potential_sites, m.time,
        doc="Flow of gas from a well cluster to a site [MMSCF/d]",
        domain=NonNegativeReals, bounds=(0, 200), initialize=15)
    m.product_flows = Var(
        m.potential_sites, m.markets, m.time,
        doc="Product shipments from a site to a market in a period [kBD]",
        domain=NonNegativeReals, bounds=(0, 30), initialize=10)

    @m.Constraint(m.potential_sites, m.module_types, m.time)
    def consumption_capacity(m, site, mtype, t):
        return m.gas_consumption[site, mtype, t] <= (
            m.num_modules[mtype, site, t] * m.unit_gas_consumption[mtype])

    @m.Constraint(m.potential_sites, m.time)
    def production_limit(m, site, t):
        return m.production[site, t] <= sum(
            m.gas_consumption[site, mtype, t] * m.module_conversion[mtype]
            for mtype in m.module_types)

    @m.Expression(m.potential_sites, m.time)
    def capacity(m, site, t):
        return sum(
            m.num_modules[mtype, site, t] * m.unit_gas_consumption[mtype]
            * m.module_conversion[mtype] for mtype in m.module_types)

    @m.Constraint(m.potential_sites, m.time)
    def gas_supply_meets_consumption(m, site, t):
        return sum(m.gas_consumption[site, :, t]) == sum(m.gas_flows[:, site, t])

    @m.Constraint(m.well_clusters, m.time)
    def gas_supply_limit(m, well, t):
        return sum(
            m.gas_flows[well, site, t] for site in m.potential_sites
        ) <= m.gas_supply[well, t]

    @m.Constraint(m.potential_sites, m.time)
    def gasoline_production_requirement(m, site, t):
        return sum(m.product_flows[site, mkt, t] for mkt in m.markets) == m.production[site, t]

    @m.Constraint(m.potential_sites, m.module_types, m.time)
    def module_balance(m, site, mtype, t):
        if t >= m.module_setup_time:
            modules_added = m.modules_purchased[
                mtype, site, t - m.module_setup_time]
            modules_transferred_in = sum(
                m.modules_transferred[
                    mtype, from_site, to_site, t - m.module_setup_time]
                for from_site, to_site in m.site_pairs if to_site == site)
        else:
            modules_added = 0
            modules_transferred_in = 0
        if t >= 1:
            existing_modules = m.num_modules[mtype, site, t - 1]
        else:
            existing_modules = 0
        modules_transferred_out = sum(
            m.modules_transferred[mtype, from_site, to_site, t]
            for from_site, to_site in m.site_pairs if from_site == site)
        return m.num_modules[mtype, site, t] == (
            existing_modules + modules_added
            + modules_transferred_in - modules_transferred_out)

    @m.Disjunct(m.potential_sites)
    def site_active(disj, site):
        pass

    @m.Disjunct(m.potential_sites)
    def site_inactive(disj, site):
        disj.no_production = Constraint(
            expr=sum(m.production[site, :]) == 0)
        disj.no_gas_consumption = Constraint(
            expr=sum(m.gas_consumption[site, :, :]) == 0)
        disj.no_gas_flows = Constraint(
            expr=sum(m.gas_flows[:, site, :]) == 0)
        disj.no_product_flows = Constraint(
            expr=sum(m.product_flows[site, :, :]) == 0)
        disj.no_modules = Constraint(
            expr=sum(m.num_modules[:, site, :]) == 0)
        disj.no_modules_transferred = Constraint(
            expr=sum(
                m.modules_transferred[mtypes, from_site, to_site, t]
                for mtypes in m.module_types
                for from_site, to_site in m.site_pairs
                for t in m.time
                if from_site == site or to_site == site) == 0)
        disj.no_modules_purchased = Constraint(
            expr=sum(
                m.modules_purchased[mtype, site, t]
                for mtype in m.module_types for t in m.time) == 0)

    @m.Disjunction(m.potential_sites)
    def site_active_or_not(m, site):
        return [m.site_active[site], m.site_inactive[site]]

    @m.Disjunct(m.well_clusters, m.potential_sites)
    def pipeline_exists(disj, well, site):
        pass

    @m.Disjunct(m.well_clusters, m.potential_sites)
    def pipeline_absent(disj, well, site):
        disj.no_natural_gas_flow = Constraint(
            expr=sum(m.gas_flows[well, site, t] for t in m.time) == 0)

    @m.Disjunction(m.well_clusters, m.potential_sites)
    def pipeline_existence(m, well, site):
        return [m.pipeline_exists[well, site], m.pipeline_absent[well, site]]

    # Objective Function Construction
    @m.Expression(m.potential_sites, doc="MM$")
    def product_revenue(m, site):
        return sum(
            m.product_flows[site, mkt, t]  # kBD
            * 1000  # bbl/kB
            / 1E6  # $ to MM$
            * m.days_per_period
            * m.gasoline_price[t] * m.gal_per_bbl
            for mkt in m.markets for t in m.time)

    @m.Expression(m.potential_sites, doc="MM$")
    def raw_material_cost(m, site):
        return sum(
            m.gas_consumption[site, mtype, t] * m.days_per_period
            / 1E6  # $ to MM$
            * m.nat_gas_price[t]
            * 1000  # MMSCF to MSCF
            for mtype in m.module_types for t in m.time)

    @m.Expression(
        m.potential_sites, m.markets,
        doc="Aggregate cost to transport gasoline from a site to market [MM$]")
    def product_transport_cost(m, site, mkt):
        return sum(
            m.product_flows[site, mkt, t] * m.gal_per_bbl * 1000  # bbl/kB
            / 1E6  # $ to MM$
            * m.distance[site, mkt] / 100 * m.gasoline_tranport_cost[t]
            for t in m.time)

    @m.Expression(m.well_clusters, m.potential_sites, doc="MM$")
    def pipeline_construction_cost(m, well, site):
        return (m.pipeline_unit_cost * m.distance[well, site]
                * m.pipeline_exists[well, site].indicator_var)

    # Module transport cost
    @m.Expression(m.site_pairs, doc="MM$")
    def module_relocation_cost(m, from_site, to_site):
        return sum(
            m.modules_transferred[mtype, from_site, to_site, t]
            * m.distance[from_site, to_site] / 100
            * m.module_transport_distance_cost[t] / 1E3  # M$ to MM$
            + m.modules_transferred[mtype, from_site, to_site, t]
            * m.module_transport_unit_cost[t]
            for mtype in m.module_types for t in m.time)

    @m.Expression(m.potential_sites, doc="MM$")
    def module_purchase_cost(m, site):
        return sum(
            m.module_unit_cost[mtype, t] * m.modules_purchased[mtype, site, t]
            for mtype in m.module_types for t in m.time)

    @m.Expression(doc="MM$")
    def profit(m):
        return (
            summation(m.product_revenue)
            - summation(m.raw_material_cost)
            - summation(m.product_transport_cost)
            - summation(m.pipeline_construction_cost)
            - summation(m.module_relocation_cost)
            - summation(m.module_purchase_cost)
        )

    m.neg_profit = Objective(expr=-m.profit)

    # Tightening constraints
    @m.Constraint(doc="Limit total module purchases over project span.")
    def restrict_module_purchases(m):
        return sum(m.modules_purchased[...]) <= 5

    @m.Constraint(m.site_pairs, doc="Limit transfers between any two sites")
    def restrict_module_transfers(m, from_site, to_site):
        return sum(m.modules_transferred[:, from_site, to_site, :]) <= 5

    return m


if __name__ == "__main__":
    m = build_model()
    # Restrict number of module types; A, R, S, U
    # valid_modules = ['A500', 'A1000', 'A2000', 'A5000']
    # valid_modules = ['A500', 'R500', 'A5000', 'R5000']
    # valid_modules = ['U500', 'U5000']
    # valid_modules = ['U100', 'U250']
    # valid_modules = ['U1000']
    # valid_modules = ['U500']
    valid_modules = ['U250']
    # valid_modules = ['U100']
    for mtype in m.module_types - valid_modules:
        m.gas_consumption[:, mtype, :].fix(0)
        m.num_modules[mtype, :, :].fix(0)
        m.modules_transferred[mtype, :, :, :].fix(0)
        m.modules_purchased[mtype, :, :].fix(0)
        m.mtype_exists[mtype].deactivate()
        m.mtype_absent[mtype].indicator_var.fix(1)
[ "pyomo.environ.log", "pyomo.environ.Suffix", "pyomo.environ.Constraint", "pyomo.environ.RangeSet", "pyomo.environ.Var", "pyomo.environ.sqrt", "os.path.dirname", "pyomo.environ.Objective", "pyomo.environ.exp", "pyomo.environ.summation", "pyomo.environ.Param", "pyomo.environ.ConcreteModel", "pyomo.environ.Set" ]
[((395, 410), 'pyomo.environ.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (408, 410), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((424, 454), 'pyomo.environ.Suffix', 'Suffix', ([], {'direction': 'Suffix.LOCAL'}), '(direction=Suffix.LOCAL)\n', (430, 454), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((481, 525), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(4)', 'doc': '"""Quarters per year"""'}), "(initialize=4, doc='Quarters per year')\n", (486, 525), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((547, 580), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(15)', 'doc': '"""Years"""'}), "(initialize=15, doc='Years')\n", (552, 580), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((594, 666), 'pyomo.environ.RangeSet', 'RangeSet', (['(0)', '(m.periods_per_year * m.project_life - 1)'], {'doc': '"""Time periods"""'}), "(0, m.periods_per_year * m.project_life - 1, doc='Time periods')\n", (602, 666), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((711, 743), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(0.08)', 'doc': '"""8%"""'}), "(initialize=0.08, doc='8%')\n", (716, 743), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((766, 837), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(0.1)', 'doc': '"""Fraction discount for doubling of quantity"""'}), "(initialize=0.1, doc='Fraction discount for doubling of quantity')\n", (771, 837), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((865, 921), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(1)', 'doc': '"""1 quarter for module transfer"""'}), "(initialize=1, doc='1 quarter for module transfer')\n", (870, 921), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((2187, 2312), 'pyomo.environ.Set', 'Set', ([], {'doc': '"""Pairs of potential sites"""', 'initialize': '(m.potential_sites * m.potential_sites)', 'filter': '(lambda _, x, y: not x == y)'}), "(doc='Pairs of potential sites', initialize=m.potential_sites * m.\n potential_sites, filter=lambda _, x, y: not x == y)\n", (2190, 2312), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((4132, 4183), 
'pyomo.environ.Set', 'Set', ([], {'initialize': '(m.well_clusters | m.potential_sites)'}), '(initialize=m.well_clusters | m.potential_sites)\n', (4135, 4183), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((4205, 4250), 'pyomo.environ.Set', 'Set', ([], {'initialize': '(m.potential_sites | m.markets)'}), '(initialize=m.potential_sites | m.markets)\n', (4208, 4250), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((4804, 4974), 'pyomo.environ.Var', 'Var', (['m.module_types', 'm.potential_sites', 'm.time'], {'doc': '"""Number of active modules of each type at a site in a period"""', 'domain': 'Integers', 'bounds': '(0, 50)', 'initialize': '(1)'}), "(m.module_types, m.potential_sites, m.time, doc=\n 'Number of active modules of each type at a site in a period', domain=\n Integers, bounds=(0, 50), initialize=1)\n", (4807, 4974), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((5018, 5198), 'pyomo.environ.Var', 'Var', (['m.module_types', 'm.site_pairs', 'm.time'], {'doc': '"""Number of module transfers initiated from one site to another in a period."""', 'domain': 'Integers', 'bounds': '(0, 15)', 'initialize': '(0)'}), "(m.module_types, m.site_pairs, m.time, doc=\n 'Number of module transfers initiated from one site to another in a period.'\n , domain=Integers, bounds=(0, 15), initialize=0)\n", (5021, 5198), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((5240, 5413), 'pyomo.environ.Var', 'Var', (['m.module_types', 'm.potential_sites', 'm.time'], {'doc': '"""Number of modules of each type purchased for a site in a period"""', 'domain': 'Integers', 'bounds': '(0, 30)', 'initialize': '(1)'}), "(m.module_types, m.potential_sites, m.time, doc=\n 'Number of modules of each type purchased for a site in a period',\n domain=Integers, bounds=(0, 30), initialize=1)\n", (5243, 5413), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((5458, 5493), 'pyomo.environ.Param', 'Param', ([], {'doc': '"""MM$/mile"""', 'initialize': '(2)'}), "(doc='MM$/mile', initialize=2)\n", (5463, 5493), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((6235, 6281), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(42)', 'doc': '"""Gallons per barrel"""'}), "(initialize=42, doc='Gallons per barrel')\n", (6240, 6281), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((6306, 6361), 'pyomo.environ.Param', 'Param', ([], {'initialize': '(90)', 'doc': '"""Days in a production period"""'}), "(initialize=90, 
doc='Days in a production period')\n", (6311, 6361), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((6387, 6530), 'pyomo.environ.Var', 'Var', (['m.module_types'], {'doc': '"""Fraction of cost due to economies of mass production"""', 'domain': 'NonNegativeReals', 'bounds': '(0, 1)', 'initialize': '(1)'}), "(m.module_types, doc=\n 'Fraction of cost due to economies of mass production', domain=\n NonNegativeReals, bounds=(0, 1), initialize=1)\n", (6390, 6530), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((7494, 7643), 'pyomo.environ.Var', 'Var', (['m.potential_sites', 'm.time'], {'doc': '"""Production of gasoline in a time period [kBD]"""', 'domain': 'NonNegativeReals', 'bounds': '(0, 30)', 'initialize': '(10)'}), "(m.potential_sites, m.time, doc=\n 'Production of gasoline in a time period [kBD]', domain=\n NonNegativeReals, bounds=(0, 30), initialize=10)\n", (7497, 7643), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((7683, 7890), 'pyomo.environ.Var', 'Var', (['m.potential_sites', 'm.module_types', 'm.time'], {'doc': '"""Consumption of natural gas by each module type at each site in a time period [MMSCF/d]"""', 'domain': 'NonNegativeReals', 'bounds': '(0, 250)', 'initialize': '(50)'}), "(m.potential_sites, m.module_types, m.time, doc=\n 'Consumption of natural gas by each module type at each site in a time period [MMSCF/d]'\n , domain=NonNegativeReals, bounds=(0, 250), initialize=50)\n", (7686, 7890), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((7935, 8108), 'pyomo.environ.Var', 'Var', (['m.well_clusters', 'm.potential_sites', 'm.time'], {'doc': '"""Flow of gas from a well cluster to a site [MMSCF/d]"""', 'domain': 'NonNegativeReals', 'bounds': '(0, 200)', 'initialize': '(15)'}), "(m.well_clusters, m.potential_sites, m.time, doc=\n 'Flow of gas from a well cluster to a site [MMSCF/d]', domain=\n NonNegativeReals, bounds=(0, 200), initialize=15)\n", (7938, 8108), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((8146, 8320), 'pyomo.environ.Var', 'Var', (['m.potential_sites', 'm.markets', 'm.time'], {'doc': '"""Product shipments from a site to a market in a period [kBD]"""', 'domain': 'NonNegativeReals', 'bounds': '(0, 30)', 'initialize': '(10)'}), "(m.potential_sites, m.markets, m.time, doc=\n 'Product shipments from a site to a market in a period [kBD]', domain=\n NonNegativeReals, bounds=(0, 30), initialize=10)\n", (8149, 8320), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((15021, 15046), 'pyomo.environ.Objective', 'Objective', ([], {'expr': '(-m.profit)'}), '(expr=-m.profit)\n', (15030, 15046), False, 'from 
pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((4731, 4782), 'pyomo.environ.sqrt', 'sqrt', (['((src_x - dest_x) ** 2 + (src_y - dest_y) ** 2)'], {}), '((src_x - dest_x) ** 2 + (src_y - dest_y) ** 2)\n', (4735, 4782), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((7071, 7117), 'pyomo.environ.Constraint', 'Constraint', ([], {'expr': '(m.learning_factor[mtype] == 1)'}), '(expr=m.learning_factor[mtype] == 1)\n', (7081, 7117), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((1115, 1140), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1130, 1140), False, 'import os\n'), ((14957, 14990), 'pyomo.environ.summation', 'summation', (['m.module_purchase_cost'], {}), '(m.module_purchase_cost)\n', (14966, 14990), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((2934, 2963), 'pyomo.environ.exp', 'exp', (['(-1.31 * (t + 1) ** 0.292)'], {}), '(-1.31 * (t + 1) ** 0.292)\n', (2937, 2963), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((14907, 14942), 'pyomo.environ.summation', 'summation', (['m.module_relocation_cost'], {}), '(m.module_relocation_cost)\n', (14916, 14942), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((14853, 14892), 'pyomo.environ.summation', 'summation', (['m.pipeline_construction_cost'], {}), '(m.pipeline_construction_cost)\n', (14862, 14892), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((14803, 14838), 'pyomo.environ.summation', 'summation', (['m.product_transport_cost'], {}), '(m.product_transport_cost)\n', (14812, 14838), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((6795, 6801), 'pyomo.environ.log', 'log', (['(2)'], {}), '(2)\n', (6798, 6801), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((14715, 14743), 'pyomo.environ.summation', 'summation', (['m.product_revenue'], {}), '(m.product_revenue)\n', (14724, 14743), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n'), ((14758, 14788), 'pyomo.environ.summation', 'summation', (['m.raw_material_cost'], {}), 
'(m.raw_material_cost)\n', (14767, 14788), False, 'from pyomo.environ import ConcreteModel, Constraint, Integers, NonNegativeReals, Objective, Param, RangeSet, Set, SolverFactory, Suffix, TransformationFactory, Var, exp, log, sqrt, summation, value\n')]
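The model above relies on Pyomo's decorator notation, where a decorated function becomes the construction rule of a Param, Constraint, or Expression. A minimal self-contained sketch of that style (not the GTL model itself; the rate, bounds, and prices below are invented):

from pyomo.environ import (ConcreteModel, NonNegativeReals, Objective,
                           RangeSet, Var, summation)

m = ConcreteModel()
m.time = RangeSet(0, 3, doc="Four quarterly periods")

@m.Param(m.time, doc="Discount factor per period [fraction]")
def discount_factor(m, t):
    return (1 + 0.08 / 4) ** (-t)  # 8% annual rate, compounded quarterly

m.production = Var(m.time, domain=NonNegativeReals, bounds=(0, 30), initialize=10)

@m.Constraint(m.time, doc="Cap production in every period")
def production_limit(m, t):
    return m.production[t] <= 25

@m.Param(m.time, doc="Discounted unit revenue [MM$]")
def unit_revenue(m, t):
    return 2.5 * m.discount_factor[t]

# summation(p, x) expands to sum(p[t] * x[t] for t in m.time); negating it
# turns the default minimization into profit maximization, as in neg_profit.
m.neg_profit = Objective(expr=-summation(m.unit_revenue, m.production))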
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import os
import re
import shutil
import sys
import tempfile
import zipfile

import dex
import dex_jdk_libs
from util import build_utils
from util import diff_utils

_API_LEVEL_VERSION_CODE = [
    (21, 'L'),
    (22, 'LolliopoMR1'),
    (23, 'M'),
    (24, 'N'),
    (25, 'NMR1'),
    (26, 'O'),
    (27, 'OMR1'),
    (28, 'P'),
    (29, 'Q'),
    (30, 'R'),
]

_CHECKDISCARD_RE = re.compile(r'^\s*-checkdiscard[\s\S]*?}', re.MULTILINE)
_DIRECTIVE_RE = re.compile(r'^\s*-', re.MULTILINE)


def _ValidateAndFilterCheckDiscards(configs):
  """Check for invalid -checkdiscard rules and filter out -checkdiscards.

  -checkdiscard assertions often don't work for test APKs and are not
  actually helpful. Additionally, test APKs may pull in dependency proguard
  configs which makes filtering out these rules difficult in GN. Instead, we
  enforce that configs that use -checkdiscard do not contain any other rules
  so that we can filter out the undesired -checkdiscard rule files here.

  Args:
    configs: List of paths to proguard configuration files.

  Returns:
    A list of configs with -checkdiscard-containing-configs removed.
  """
  valid_configs = []
  for config_path in configs:
    with open(config_path) as f:
      contents = f.read()
      if _CHECKDISCARD_RE.search(contents):
        contents = _CHECKDISCARD_RE.sub('', contents)
        if _DIRECTIVE_RE.search(contents):
          raise Exception('Proguard configs containing -checkdiscards cannot '
                          'contain other directives so that they can be '
                          'disabled in test APKs ({}).'.format(config_path))
      else:
        valid_configs.append(config_path)
  return valid_configs


def _ParseOptions():
  args = build_utils.ExpandFileArgs(sys.argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--r8-path',
                      required=True,
                      help='Path to the R8.jar to use.')
  parser.add_argument('--desugar-jdk-libs-json',
                      help='Path to desugar_jdk_libs.json.')
  parser.add_argument('--input-paths',
                      action='append',
                      required=True,
                      help='GN-list of .jar files to optimize.')
  parser.add_argument('--desugar-jdk-libs-jar',
                      help='Path to desugar_jdk_libs.jar.')
  parser.add_argument('--desugar-jdk-libs-configuration-jar',
                      help='Path to desugar_jdk_libs_configuration.jar.')
  parser.add_argument('--output-path', help='Path to the generated .jar file.')
  parser.add_argument('--proguard-configs',
                      action='append',
                      required=True,
                      help='GN-list of configuration files.')
  parser.add_argument('--apply-mapping',
                      help='Path to ProGuard mapping to apply.')
  parser.add_argument('--mapping-output',
                      required=True,
                      help='Path for ProGuard to output mapping file to.')
  parser.add_argument('--extra-mapping-output-paths',
                      help='GN-list of additional paths to copy output '
                           'mapping file to.')
  parser.add_argument('--classpath',
                      action='append',
                      help='GN-list of .jar files to include as libraries.')
  parser.add_argument('--main-dex-rules-path',
                      action='append',
                      help='Path to main dex rules for multidex'
                           '- only works with R8.')
  parser.add_argument('--min-api',
                      help='Minimum Android API level compatibility.')
  parser.add_argument('--verbose', '-v',
                      action='store_true',
                      help='Print all ProGuard output')
  parser.add_argument('--repackage-classes',
                      help='Package all optimized classes are put in.')
  parser.add_argument('--disable-outlining',
                      action='store_true',
                      help='Disable the outlining optimization provided by '
                           'R8.')
  parser.add_argument('--disable-checkdiscard',
                      action='store_true',
                      help='Disable -checkdiscard directives')
  parser.add_argument('--sourcefile', help='Value for source file attribute')
  parser.add_argument('--force-enable-assertions',
                      action='store_true',
                      help='Forcefully enable javac generated assertion '
                           'code.')
  parser.add_argument('--feature-jars',
                      action='append',
                      help='GN list of path to jars which comprise the '
                           'corresponding feature.')
  parser.add_argument('--dex-dest',
                      action='append',
                      dest='dex_dests',
                      help='Destination for dex file of the corresponding '
                           'feature.')
  parser.add_argument('--feature-name',
                      action='append',
                      dest='feature_names',
                      help='The name of the feature module.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  parser.add_argument('--show-desugar-default-interface-warnings',
                      action='store_true',
                      help='Enable desugaring warnings.')
  parser.add_argument('--stamp',
                      help='File to touch upon success. Mutually exclusive '
                           'with --output-path')
  parser.add_argument('--desugared-library-keep-rule-output',
                      help='Path to desugared library keep rule output file.')

  diff_utils.AddCommandLineFlags(parser)
  options = parser.parse_args(args)

  if options.feature_names:
    if options.output_path:
      parser.error('Feature splits cannot specify an output in GN.')
    if not options.actual_file and not options.stamp:
      parser.error('Feature splits require a stamp file as output.')
  elif not options.output_path:
    parser.error('Output path required when feature splits aren\'t used')

  if options.main_dex_rules_path and not options.r8_path:
    parser.error('R8 must be enabled to pass main dex rules.')

  options.classpath = build_utils.ParseGnList(options.classpath)
  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
  options.input_paths = build_utils.ParseGnList(options.input_paths)
  options.extra_mapping_output_paths = build_utils.ParseGnList(
      options.extra_mapping_output_paths)

  if options.feature_names:
    if 'base' not in options.feature_names:
      parser.error('"base" feature required when feature arguments are used.')
    if len(options.feature_names) != len(options.feature_jars) or len(
        options.feature_names) != len(options.dex_dests):
      parser.error('Invalid feature argument lengths.')
    options.feature_jars = [
        build_utils.ParseGnList(x) for x in options.feature_jars
    ]

  return options


class _DexPathContext(object):
  def __init__(self, name, output_path, input_jars, work_dir):
    self.name = name
    self.input_paths = input_jars
    self._final_output_path = output_path
    self.staging_dir = os.path.join(work_dir, name)
    os.mkdir(self.staging_dir)

  def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
    found_files = build_utils.FindInDirectory(self.staging_dir)
    if not found_files:
      raise Exception('Missing dex outputs in {}'.format(self.staging_dir))

    if self._final_output_path.endswith('.dex'):
      if has_imported_lib:
        raise Exception(
            'Trying to create a single .dex file, but a dependency requires '
            'JDK Library Desugaring (which necessitates a second file).'
            'Refer to %s to see what desugaring was required' %
            keep_rule_output)
      if len(found_files) != 1:
        raise Exception('Expected exactly 1 dex file output, found: {}'.format(
            '\t'.join(found_files)))
      shutil.move(found_files[0], self._final_output_path)
      return

    # Add to .jar using Python rather than having R8 output to a .zip directly
    # in order to disable compression of the .jar, saving ~500ms.
    tmp_jar_output = self.staging_dir + '.jar'
    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
    shutil.move(tmp_jar_output, self._final_output_path)


def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    feature_contexts = []
    if options.feature_names:
      for name, dest_dex, input_paths in zip(options.feature_names,
                                             options.dex_dests,
                                             options.feature_jars):
        feature_context = _DexPathContext(name, dest_dex, input_paths,
                                          tmp_output)
        if name == 'base':
          base_dex_context = feature_context
        else:
          feature_contexts.append(feature_context)
    else:
      base_dex_context = _DexPathContext('base', options.output_path,
                                         options.input_paths, tmp_output)

    cmd = [
        build_utils.JAVA_PATH,
        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
    ]
    if options.disable_outlining:
      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
    cmd += [
        '-Xmx1G',
        '-cp',
        options.r8_path,
        'com.android.tools.r8.R8',
        '--no-data-resources',
        '--output',
        base_dex_context.staging_dir,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if options.desugar_jdk_libs_json:
      cmd += [
          '--desugared-lib',
          options.desugar_jdk_libs_json,
          '--desugared-lib-pg-conf-output',
          options.desugared_library_keep_rule_output,
      ]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    module_input_jars = set(base_dex_context.input_paths)
    for feature in feature_contexts:
      feature_input_jars = [
          p for p in feature.input_paths if p not in module_input_jars
      ]
      module_input_jars.update(feature_input_jars)
      for in_jar in feature_input_jars:
        cmd += ['--feature', in_jar, feature.staging_dir]

    cmd += base_dex_context.input_paths
    # Add any extra input jars to the base module (e.g. desugar runtime).
    extra_jars = set(options.input_paths) - module_input_jars
    cmd += sorted(extra_jars)

    try:
      stderr_filter = dex.CreateStderrFilter(
          options.show_desugar_default_interface_warnings)
      build_utils.CheckOutput(cmd,
                              print_stdout=print_stdout,
                              stderr_filter=stderr_filter,
                              fail_on_output=options.warnings_as_errors)
    except build_utils.CalledProcessError as err:
      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise build_utils.CalledProcessError(err.cwd, err.args,
                                           err.output + debugging_link)

    base_has_imported_lib = False
    if options.desugar_jdk_libs_json:
      existing_files = build_utils.FindInDirectory(
          base_dex_context.staging_dir)
      jdk_dex_output = os.path.join(base_dex_context.staging_dir,
                                    'classes%d.dex' % (len(existing_files) + 1))
      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
          options.desugar_jdk_libs_jar,
          options.desugar_jdk_libs_configuration_jar,
          options.desugared_library_keep_rule_output, jdk_dex_output,
          options.warnings_as_errors)

    base_dex_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
    for feature in feature_contexts:
      feature.CreateOutput()

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))


def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
  ret = []

  # Sort in this way so //clank versions of the same libraries will sort
  # to the same spot in the file.
  def sort_key(path):
    return tuple(reversed(path.split(os.path.sep)))

  for config in sorted(configs, key=sort_key):
    if exclude_generated and config.endswith('.resources.proguard.txt'):
      continue

    ret.append('# File: ' + config)
    with open(config) as config_file:
      contents = config_file.read().rstrip()

    # Fix up line endings (third_party configs can have windows endings).
    contents = contents.replace('\r', '')
    # Remove numbers from generated rule comments to make file more
    # diff'able.
    contents = re.sub(r' #generated:\d+', '', contents)
    ret.append(contents)
    ret.append('')

  if dynamic_config_data:
    ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
    ret.append(dynamic_config_data)
    ret.append('')
  return '\n'.join(ret)


def _CreateDynamicConfig(options):
  ret = []
  if not options.r8_path and options.min_api:
    # R8 adds this option automatically, and uses -assumenosideeffects instead
    # (which ProGuard doesn't support doing).
    ret.append("""\
-assumevalues class android.os.Build$VERSION {
  public static final int SDK_INT return %s..9999;
}""" % options.min_api)

  if options.sourcefile:
    ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
               options.sourcefile)

  if options.apply_mapping:
    ret.append("-applymapping '%s'" % os.path.abspath(options.apply_mapping))
  if options.repackage_classes:
    ret.append("-repackageclasses '%s'" % options.repackage_classes)

  _min_api = int(options.min_api) if options.min_api else 0
  for api_level, version_code in _API_LEVEL_VERSION_CODE:
    annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
    if api_level > _min_api:
      ret.append('-keep @interface %s' % annotation_name)
      ret.append("""\
-if @%s class * {
  *** *(...);
}
-keep,allowobfuscation class <1> {
  *** <2>(...);
}""" % annotation_name)
      ret.append("""\
-keepclassmembers,allowobfuscation class ** {
  @%s <methods>;
}""" % annotation_name)
  return '\n'.join(ret)


def _VerifyNoEmbeddedConfigs(jar_paths):
  failed = False
  for jar_path in jar_paths:
    with zipfile.ZipFile(jar_path) as z:
      for name in z.namelist():
        if name.startswith('META-INF/proguard/'):
          failed = True
          sys.stderr.write("""\
Found embedded proguard config within {}.
Embedded configs are not permitted (https://crbug.com/989505)
""".format(jar_path))
          break
  if failed:
    sys.exit(1)


def _ContainsDebuggingConfig(config_str):
  debugging_configs = ('-whyareyoukeeping', '-whyareyounotinlining')
  return any(config in config_str for config in debugging_configs)


def _MaybeWriteStampAndDepFile(options, inputs):
  output = options.output_path
  if options.stamp:
    build_utils.Touch(options.stamp)
    output = options.stamp
  if options.depfile:
    build_utils.WriteDepfile(options.depfile, output, inputs=inputs)


def main():
  options = _ParseOptions()

  libraries = []
  for p in options.classpath:
    # TODO(bjoyce): Remove filter once old android support libraries are gone.
    # Fix for having Library class extend program class dependency problem.
    if 'com_android_support' in p or 'android_support_test' in p:
      continue
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)

  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)

  proguard_configs = options.proguard_configs
  if options.disable_checkdiscard:
    proguard_configs = _ValidateAndFilterCheckDiscards(proguard_configs)

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge the flag-derived rules with the explicit config files.
  merged_configs = _CombineConfigs(
      proguard_configs, dynamic_config_data, exclude_generated=True)

  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose

  if options.expected_file:
    diff_utils.CheckExpectations(merged_configs, options)
    if options.only_verify_expectations:
      build_utils.WriteDepfile(options.depfile,
                               options.actual_file,
                               inputs=options.proguard_configs)
      return

  _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
                  print_stdout)

  # After ProGuard / R8 has run:
  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    inputs.append(options.apply_mapping)

  _MaybeWriteStampAndDepFile(options, inputs)


if __name__ == '__main__':
  main()
[ "os.mkdir", "util.build_utils.DoZip", "argparse.ArgumentParser", "util.build_utils.ExpandFileArgs", "os.path.join", "shutil.copy", "os.path.abspath", "util.build_utils.CalledProcessError", "util.build_utils.AddDepfileOption", "re.sub", "util.build_utils.Touch", "util.diff_utils.CheckExpectations", "util.diff_utils.AddCommandLineFlags", "dex.CreateStderrFilter", "dex_jdk_libs.DexJdkLibJar", "util.build_utils.TempDir", "util.build_utils.FindInDirectory", "sys.exit", "re.compile", "zipfile.ZipFile", "util.build_utils.CheckOutput", "util.build_utils.WriteDepfile", "shutil.move", "util.build_utils.ParseGnList" ]
[((584, 641), 're.compile', 're.compile', (['"""^\\\\s*-checkdiscard[\\\\s\\\\S]*?}"""', 're.MULTILINE'], {}), "('^\\\\s*-checkdiscard[\\\\s\\\\S]*?}', re.MULTILINE)\n", (594, 641), False, 'import re\n'), ((656, 690), 're.compile', 're.compile', (['"""^\\\\s*-"""', 're.MULTILINE'], {}), "('^\\\\s*-', re.MULTILINE)\n", (666, 690), False, 'import re\n'), ((1934, 1974), 'util.build_utils.ExpandFileArgs', 'build_utils.ExpandFileArgs', (['sys.argv[1:]'], {}), '(sys.argv[1:])\n', (1960, 1974), False, 'from util import build_utils\n'), ((1986, 2011), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2009, 2011), False, 'import argparse\n'), ((2014, 2050), 'util.build_utils.AddDepfileOption', 'build_utils.AddDepfileOption', (['parser'], {}), '(parser)\n', (2042, 2050), False, 'from util import build_utils\n'), ((5390, 5428), 'util.diff_utils.AddCommandLineFlags', 'diff_utils.AddCommandLineFlags', (['parser'], {}), '(parser)\n', (5420, 5428), False, 'from util import diff_utils\n'), ((5965, 6007), 'util.build_utils.ParseGnList', 'build_utils.ParseGnList', (['options.classpath'], {}), '(options.classpath)\n', (5988, 6007), False, 'from util import build_utils\n'), ((6037, 6086), 'util.build_utils.ParseGnList', 'build_utils.ParseGnList', (['options.proguard_configs'], {}), '(options.proguard_configs)\n', (6060, 6086), False, 'from util import build_utils\n'), ((6111, 6155), 'util.build_utils.ParseGnList', 'build_utils.ParseGnList', (['options.input_paths'], {}), '(options.input_paths)\n', (6134, 6155), False, 'from util import build_utils\n'), ((6195, 6254), 'util.build_utils.ParseGnList', 'build_utils.ParseGnList', (['options.extra_mapping_output_paths'], {}), '(options.extra_mapping_output_paths)\n', (6218, 6254), False, 'from util import build_utils\n'), ((6934, 6962), 'os.path.join', 'os.path.join', (['work_dir', 'name'], {}), '(work_dir, name)\n', (6946, 6962), False, 'import os\n'), ((6967, 6993), 'os.mkdir', 'os.mkdir', (['self.staging_dir'], {}), '(self.staging_dir)\n', (6975, 6993), False, 'import os\n'), ((7086, 7131), 'util.build_utils.FindInDirectory', 'build_utils.FindInDirectory', (['self.staging_dir'], {}), '(self.staging_dir)\n', (7113, 7131), False, 'from util import build_utils\n'), ((7997, 8070), 'util.build_utils.DoZip', 'build_utils.DoZip', (['found_files', 'tmp_jar_output'], {'base_dir': 'self.staging_dir'}), '(found_files, tmp_jar_output, base_dir=self.staging_dir)\n', (8014, 8070), False, 'from util import build_utils\n'), ((8075, 8127), 'shutil.move', 'shutil.move', (['tmp_jar_output', 'self._final_output_path'], {}), '(tmp_jar_output, self._final_output_path)\n', (8086, 8127), False, 'import shutil\n'), ((8313, 8334), 'util.build_utils.TempDir', 'build_utils.TempDir', ([], {}), '()\n', (8332, 8334), False, 'from util import build_utils\n'), ((8603, 8639), 'os.path.join', 'os.path.join', (['tmp_dir', '"""mapping.txt"""'], {}), "(tmp_dir, 'mapping.txt')\n", (8615, 8639), False, 'import os\n'), ((8741, 8776), 'util.build_utils.Touch', 'build_utils.Touch', (['tmp_mapping_path'], {}), '(tmp_mapping_path)\n', (8758, 8776), False, 'from util import build_utils\n'), ((8795, 8825), 'os.path.join', 'os.path.join', (['tmp_dir', '"""r8out"""'], {}), "(tmp_dir, 'r8out')\n", (8807, 8825), False, 'import os\n'), ((8830, 8850), 'os.mkdir', 'os.mkdir', (['tmp_output'], {}), '(tmp_output)\n', (8838, 8850), False, 'import os\n'), ((13757, 13797), 're.sub', 're.sub', (['""" #generated:\\\\d+"""', '""""""', 'contents'], {}), "(' #generated:\\\\d+', '', contents)\n", 
(13763, 13797), False, 'import re\n'), ((15711, 15722), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15719, 15722), False, 'import sys\n'), ((16009, 16041), 'util.build_utils.Touch', 'build_utils.Touch', (['options.stamp'], {}), '(options.stamp)\n', (16026, 16041), False, 'from util import build_utils\n'), ((16095, 16159), 'util.build_utils.WriteDepfile', 'build_utils.WriteDepfile', (['options.depfile', 'output'], {'inputs': 'inputs'}), '(options.depfile, output, inputs=inputs)\n', (16119, 16159), False, 'from util import build_utils\n'), ((17229, 17282), 'util.diff_utils.CheckExpectations', 'diff_utils.CheckExpectations', (['merged_configs', 'options'], {}), '(merged_configs, options)\n', (17257, 17282), False, 'from util import diff_utils\n'), ((17701, 17744), 'shutil.copy', 'shutil.copy', (['options.mapping_output', 'output'], {}), '(options.mapping_output, output)\n', (17712, 17744), False, 'import shutil\n'), ((6637, 6663), 'util.build_utils.ParseGnList', 'build_utils.ParseGnList', (['x'], {}), '(x)\n', (6660, 6663), False, 'from util import build_utils\n'), ((7734, 7786), 'shutil.move', 'shutil.move', (['found_files[0]', 'self._final_output_path'], {}), '(found_files[0], self._final_output_path)\n', (7745, 7786), False, 'import shutil\n'), ((8399, 8443), 'os.path.join', 'os.path.join', (['tmp_dir', '"""proguard_config.txt"""'], {}), "(tmp_dir, 'proguard_config.txt')\n", (8411, 8443), False, 'import os\n'), ((11156, 11227), 'dex.CreateStderrFilter', 'dex.CreateStderrFilter', (['options.show_desugar_default_interface_warnings'], {}), '(options.show_desugar_default_interface_warnings)\n', (11178, 11227), False, 'import dex\n'), ((11245, 11377), 'util.build_utils.CheckOutput', 'build_utils.CheckOutput', (['cmd'], {'print_stdout': 'print_stdout', 'stderr_filter': 'stderr_filter', 'fail_on_output': 'options.warnings_as_errors'}), '(cmd, print_stdout=print_stdout, stderr_filter=\n stderr_filter, fail_on_output=options.warnings_as_errors)\n', (11268, 11377), False, 'from util import build_utils\n'), ((11956, 12013), 'util.build_utils.FindInDirectory', 'build_utils.FindInDirectory', (['base_dex_context.staging_dir'], {}), '(base_dex_context.staging_dir)\n', (11983, 12013), False, 'from util import build_utils\n'), ((12191, 12463), 'dex_jdk_libs.DexJdkLibJar', 'dex_jdk_libs.DexJdkLibJar', (['options.r8_path', 'options.min_api', 'options.desugar_jdk_libs_json', 'options.desugar_jdk_libs_jar', 'options.desugar_jdk_libs_configuration_jar', 'options.desugared_library_keep_rule_output', 'jdk_dex_output', 'options.warnings_as_errors'], {}), '(options.r8_path, options.min_api, options.\n desugar_jdk_libs_json, options.desugar_jdk_libs_jar, options.\n desugar_jdk_libs_configuration_jar, options.\n desugared_library_keep_rule_output, jdk_dex_output, options.\n warnings_as_errors)\n', (12216, 12463), False, 'import dex_jdk_libs\n'), ((15382, 15407), 'zipfile.ZipFile', 'zipfile.ZipFile', (['jar_path'], {}), '(jar_path)\n', (15397, 15407), False, 'import zipfile\n'), ((17330, 17430), 'util.build_utils.WriteDepfile', 'build_utils.WriteDepfile', (['options.depfile', 'options.actual_file'], {'inputs': 'options.proguard_configs'}), '(options.depfile, options.actual_file, inputs=\n options.proguard_configs)\n', (17354, 17430), False, 'from util import build_utils\n'), ((11738, 11816), 'util.build_utils.CalledProcessError', 'build_utils.CalledProcessError', (['err.cwd', 'err.args', '(err.output + debugging_link)'], {}), '(err.cwd, err.args, err.output + debugging_link)\n', (11768, 11816), False, 'from util 
import build_utils\n'), ((14589, 14627), 'os.path.abspath', 'os.path.abspath', (['options.apply_mapping'], {}), '(options.apply_mapping)\n', (14604, 14627), False, 'import os\n')]
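A standalone sketch of the config-merging idea implemented by _CombineConfigs above, using only the standard library (the real script also depends on Chromium's build_utils helpers, which are not reproduced here; combine_configs is my name for the sketch, not the script's):

import os
import re

def combine_configs(config_paths):
    parts = []
    # Sorting by reversed path components makes mirrored checkouts of the
    # same library land in the same spot in the merged file.
    for path in sorted(config_paths,
                       key=lambda p: tuple(reversed(p.split(os.path.sep)))):
        with open(path) as f:
            contents = f.read().rstrip()
        contents = contents.replace('\r', '')  # normalize Windows line endings
        # Strip volatile line numbers from generated-rule comments.
        contents = re.sub(r' #generated:\d+', '', contents)
        parts.extend(['# File: ' + path, contents, ''])
    return '\n'.join(parts)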
import getpass

import keyring

KEYRING_SERVICE = "wantan"


def setup_args(args):
    """Add our args to a subgroup of the main script's."""
    auth_verbs = args.add_mutually_exclusive_group(required=True)
    auth_verbs.add_argument("--dump", action="store_true",
                            help="Dump the API key associated with a username")
    auth_verbs.add_argument("--store", action="store_true",
                            help="Store an API key associated with a username")
    args.add_argument("--key", help="The API key to store")
    args.add_argument("user", help="The username associated with the API key")
    args.set_defaults(func=main)


def get_key(user):
    return keyring.get_password(KEYRING_SERVICE, user)


def main(args):
    if args.dump:
        print(get_key(args.user))
    elif args.store:
        key = args.key
        if not key:
            key = getpass.getpass(prompt="API key: ")
        keyring.set_password(KEYRING_SERVICE, args.user, key)
[ "getpass.getpass", "keyring.get_password", "keyring.set_password" ]
[((668, 711), 'keyring.get_password', 'keyring.get_password', (['KEYRING_SERVICE', 'user'], {}), '(KEYRING_SERVICE, user)\n', (688, 711), False, 'import keyring\n'), ((912, 965), 'keyring.set_password', 'keyring.set_password', (['KEYRING_SERVICE', 'args.user', 'key'], {}), '(KEYRING_SERVICE, args.user, key)\n', (932, 965), False, 'import keyring\n'), ((868, 903), 'getpass.getpass', 'getpass.getpass', ([], {'prompt': '"""API key: """'}), "(prompt='API key: ')\n", (883, 903), False, 'import getpass\n')]
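A hypothetical driver showing how setup_args() above would be wired into a parent parser; the module name wantan_auth and the subcommand name "auth" are assumptions, not part of the sample:

import argparse

import wantan_auth  # assumed module name for the snippet above

parser = argparse.ArgumentParser(prog="wantan")
subparsers = parser.add_subparsers(required=True)
wantan_auth.setup_args(subparsers.add_parser("auth"))

args = parser.parse_args()
args.func(args)  # dispatches to main() via the set_defaults(func=...) hook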
# -*- coding: utf-8 -*-
import json
import os
import subprocess

if __name__ == "__main__":
    # Paths to be filled in: the ffmpeg executable, the output folder, and
    # the download cache root holding one folder per video.
    whereFfmpeg = ""
    toDir = ""
    beginPath = ""
    paths = os.listdir(beginPath)
    videoName = "\\video.m4s"
    audioName = "\\audio.m4s"
    for path in paths:
        secondPath = os.path.join(beginPath, path)
        liTaskPath = os.listdir(secondPath)
        dirName = os.path.join(toDir, path)
        if not os.path.exists(dirName):
            os.mkdir(dirName)
        for i in liTaskPath:
            thirdPath = os.path.join(secondPath, i)
            # entry.json carries the title used to name the output file.
            with open(thirdPath + "\\entry.json", encoding="utf-8") as file:
                finalName = json.load(file)["page_data"]["download_subtitle"].replace(" ", "")
            # The .m4s streams live in the first subdirectory of the task.
            finalPath = ""
            for j in os.listdir(thirdPath):
                finalPath = os.path.join(thirdPath, j)
                if os.path.isdir(finalPath):
                    break
            # Mux the separate video/audio streams into a single .mp4.
            finalString = whereFfmpeg + ' -i %s -i %s -c:v copy -c:a aac -strict experimental %s.mp4' \
                % (finalPath + videoName, finalPath + audioName, dirName + "\\" + finalName)
            print(finalString)
            p = subprocess.Popen(finalString)
            p.communicate()
[ "os.mkdir", "subprocess.Popen", "json.load", "os.path.isdir", "os.path.exists", "os.path.join", "os.listdir" ]
[((147, 168), 'os.listdir', 'os.listdir', (['beginPath'], {}), '(beginPath)\n', (157, 168), False, 'import os\n'), ((258, 287), 'os.path.join', 'os.path.join', (['beginPath', 'path'], {}), '(beginPath, path)\n', (270, 287), False, 'import os\n'), ((303, 325), 'os.listdir', 'os.listdir', (['secondPath'], {}), '(secondPath)\n', (313, 325), False, 'import os\n'), ((338, 363), 'os.path.join', 'os.path.join', (['toDir', 'path'], {}), '(toDir, path)\n', (350, 363), False, 'import os\n'), ((373, 396), 'os.path.exists', 'os.path.exists', (['dirName'], {}), '(dirName)\n', (387, 396), False, 'import os\n'), ((401, 418), 'os.mkdir', 'os.mkdir', (['dirName'], {}), '(dirName)\n', (409, 418), False, 'import os\n'), ((457, 484), 'os.path.join', 'os.path.join', (['secondPath', 'i'], {}), '(secondPath, i)\n', (469, 484), False, 'import os\n'), ((667, 688), 'os.listdir', 'os.listdir', (['thirdPath'], {}), '(thirdPath)\n', (677, 688), False, 'import os\n'), ((986, 1015), 'subprocess.Popen', 'subprocess.Popen', (['finalString'], {}), '(finalString)\n', (1002, 1015), False, 'import subprocess\n'), ((706, 732), 'os.path.join', 'os.path.join', (['thirdPath', 'j'], {}), '(thirdPath, j)\n', (718, 732), False, 'import os\n'), ((740, 764), 'os.path.isdir', 'os.path.isdir', (['finalPath'], {}), '(finalPath)\n', (753, 764), False, 'import os\n'), ((569, 584), 'json.load', 'json.load', (['file'], {}), '(file)\n', (578, 584), False, 'import json\n')]
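A safer variant of the ffmpeg invocation above, sketched with an argument list instead of a concatenated command string; a list passed to subprocess.run avoids shell-quoting problems when the output title contains unusual characters (the function and parameter names are mine, not the script's):

import subprocess

def mux(ffmpeg, video_m4s, audio_m4s, out_mp4):
    cmd = [
        ffmpeg,
        "-i", video_m4s,            # video-only stream from the download cache
        "-i", audio_m4s,            # matching audio-only stream
        "-c:v", "copy",             # keep the video track untouched
        "-c:a", "aac",              # re-encode audio to AAC
        "-strict", "experimental",
        out_mp4,
    ]
    subprocess.run(cmd, check=True)  # raise if ffmpeg exits non-zero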
#!/opt/homebrew/bin/python3.9
##
## Icebreaker and IceSugar RSMB5 project - RV32I for Lattice iCE40
## With complete open-source toolchain flow using:
## -> yosys
## -> icarus verilog
## -> icestorm project
##
## Tests are written in several languages
## -> Systemverilog Pure Testbench (Vivado)
## -> UVM testbench (Vivado)
## -> PyUvm (Icarus)
## -> Formal either using SVA and PSL (Vivado) or concurrent assertions with Yosys
##
## Copyright (c) 2021 <NAME> (<EMAIL>)
##
## Permission is hereby granted, free of charge, to any person obtaining a
## copy of this software and associated documentation files (the "Software"),
## to deal in the Software without restriction, including without limitation
## the rights to use, copy, modify, merge, publish, distribute, sublicense,
## and/or sell copies of the Software, and to permit persons to whom the
## Software is furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included
## in all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
## IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
## CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
## TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
## SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

############################################################################
#### import main packages
############################################################################
import json as j
import pandas as pd
import sys
import template as temp
from string import Template

############################################################################
############################################################################
#### Classes and functions
############################################################################
regfile_type = "regfile"
memory_type = "memory"


# Used to open the JSON file and get the dictionary back
def parse_json() -> dict:
    data = {}
    with open("./output_all/reg.json", "r") as f:
        data = j.load(f)
        f.close()
    return data


def gen_lists_and_csv(data):
    name = []
    t_reg = []
    address = []
    sub_data = data['children']
    sw_rd_mask = []
    hw_rd_mask = []
    sw_wr_mask = []
    hw_wr_mask = []
    reset_p = []
    res = {}
    res2 = {}
    global is_regfile
    global is_memory

    for reg in sub_data:
        # Check the register aggregation type
        if reg['type'] == regfile_type:
            is_regfile = True
            is_memory = False
        elif reg['type'] == memory_type:
            is_regfile = False
            is_memory = True

        # According to the result we create the parameters
        t_reg.append(reg['type'])
        ## check if Memory so that we can print the start and end
        if (not is_regfile) and is_memory:
            address.append(reg['memory_adress_start'])
            name.append("memory_adress_start")
        else:
            address.append(reg['absolute_adress'])
            name.append(reg['inst_name'])

        ## Look Inside for children
        for x in reg['children']:
            t_reg.append(x['type'])
            name.append(x['inst_name'])
            if (not is_memory) and is_regfile:
                ## Get the masks
                sw_rd_mask.append(x['sw_read_mask'])
                hw_rd_mask.append(x['hw_read_mask'])
                sw_wr_mask.append(x['sw_write_mask'])
                hw_wr_mask.append(x['hw_write_mask'])
                reset_p.append(x['global_reset_value'])
            if x['type'] != "field":
                address.append(x['address_offset'])

        if (not is_regfile) and is_memory:
            t_reg.append(memory_type)
            name.append("memory_adress_end")
            address.append(reg['memory_adress_end'])

    ## Generate the final dictionary
    res = dict(zip(name, address))
    res2 = dict(zip(name, t_reg))
    rest_dict = dict(zip(name, reset_p))
    hwwr_dict = dict(zip(name, hw_wr_mask))
    hwrd_dict = dict(zip(name, hw_rd_mask))
    swwr_dict = dict(zip(name, sw_wr_mask))
    swrd_dict = dict(zip(name, sw_rd_mask))

    df = pd.DataFrame(data={"TYPE": t_reg, "NAME": name, "ADDRESS": address})
    with open('./output_all/reg.csv', 'x') as f:
        df.to_csv("./output_all/reg.csv", sep=',', index=False)
        f.close()

    t = Template(temp.param_template + '\n')
    d = Template(temp.define_template + '\n')
    p = Template(temp.python_const_template + '\n')

    with open('./output_all/reg_param.svh', 'x') as f:
        ## Firstly write the header
        f.write(temp.header)
        ## Start with Params
        for x in res.keys():
            if res2[x] == regfile_type:
                a = t.substitute({'name': "{}_{}".format(res2[x], x), 'value': res[x].replace('0x', "32'h")})
            elif res2[x] == memory_type:
                a = t.substitute({'name': "{}".format(x), 'value': res[x].replace('0x', "32'h")})
            else:
                a = t.substitute({'name': "register_{}".format(x), 'value': res[x].replace('0x', "32'h")})
            f.write(a)
        ## Start with Defines
        for x in res.keys():
            if res2[x] == regfile_type:
                b = d.substitute({'name': "{}_{}".format(res2[x], x), 'value': res[x].replace('0x', "32'h")})
            elif res2[x] == memory_type:
                b = d.substitute({'name': "{}".format(x), 'value': res[x].replace('0x', "32'h")})
            else:
                b = d.substitute({'name': "register_{}".format(x), 'value': res[x].replace('0x', "32'h")})
            f.write(b)
        ## Start with the masks
        for x in hwwr_dict.keys():
            b = d.substitute({'name': "mask_hwwr_{}".format(x), 'value': hwwr_dict[x].replace('0x', "32'h")})
            f.write(b)
        for x in hwrd_dict.keys():
            b = d.substitute({'name': "mask_hwrd_{}".format(x), 'value': hwrd_dict[x].replace('0x', "32'h")})
            f.write(b)
        for x in swwr_dict.keys():
            b = d.substitute({'name': "mask_swwr_{}".format(x), 'value': swwr_dict[x].replace('0x', "32'h")})
            f.write(b)
        for x in swrd_dict.keys():
            b = d.substitute({'name': "mask_swrd_{}".format(x), 'value': swrd_dict[x].replace('0x', "32'h")})
            f.write(b)
        ## Start with the reset values
        for x in rest_dict.keys():
            b = d.substitute({'name': "{}_POR_VALUE".format(x), 'value': rest_dict[x].replace('0x', "32'h")})
            f.write(b)
        f.close()

    with open('./output_all/reg_python_const.py', 'x') as f:
        ## Firstly write the header
        f.write(temp.header_python)
        for x in res.keys():
            if res2[x] == regfile_type:
                c = p.substitute({'name': "{}_{}".format(res2[x], x), 'value': res[x]})
            elif res2[x] == memory_type:
                c = p.substitute({'name': "{}".format(x), 'value': res[x]})
            else:
                c = p.substitute({'name': "register_{}".format(x), 'value': res[x]})
            f.write(c)
        ## Start with the masks
        for x in hwwr_dict.keys():
            c = p.substitute({'name': "mask_hwwr_{}".format(x), 'value': hwwr_dict[x]})
            f.write(c)
        for x in hwrd_dict.keys():
            c = p.substitute({'name': "mask_hwrd_{}".format(x), 'value': hwrd_dict[x]})
            f.write(c)
        for x in swwr_dict.keys():
            c = p.substitute({'name': "mask_swwr_{}".format(x), 'value': swwr_dict[x]})
            f.write(c)
        for x in swrd_dict.keys():
            c = p.substitute({'name': "mask_swrd_{}".format(x), 'value': swrd_dict[x]})
            f.write(c)
        ## Start with the reset values
        for x in rest_dict.keys():
            c = p.substitute({'name': "{}_POR_VALUE".format(x), 'value': rest_dict[x]})
            f.write(c)
        f.close()


def main():
    data_f = parse_json()
    gen_lists_and_csv(data_f)


if __name__ == '__main__':
    main()
[ "pandas.DataFrame", "json.load", "string.Template" ]
[((4498, 4566), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{'TYPE': t_reg, 'NAME': name, 'ADDRESS': address}"}), "(data={'TYPE': t_reg, 'NAME': name, 'ADDRESS': address})\n", (4510, 4566), True, 'import pandas as pd\n'), ((4702, 4738), 'string.Template', 'Template', (["(temp.param_template + '\\n')"], {}), "(temp.param_template + '\\n')\n", (4710, 4738), False, 'from string import Template\n'), ((4745, 4782), 'string.Template', 'Template', (["(temp.define_template + '\\n')"], {}), "(temp.define_template + '\\n')\n", (4753, 4782), False, 'from string import Template\n'), ((4789, 4832), 'string.Template', 'Template', (["(temp.python_const_template + '\\n')"], {}), "(temp.python_const_template + '\\n')\n", (4797, 4832), False, 'from string import Template\n'), ((2317, 2326), 'json.load', 'j.load', (['f'], {}), '(f)\n', (2323, 2326), True, 'import json as j\n')]
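The generator above fills string.Template objects imported from the project's template module, whose actual contents are not shown. A minimal sketch of the same mechanism with an invented SystemVerilog localparam template:

from string import Template

param_template = "localparam [31:0] ${name} = ${value};"  # illustrative only
t = Template(param_template + "\n")

registers = {"register_misa": "32'h40000100", "register_mtvec": "32'h00000000"}
for name, value in registers.items():
    # substitute() fills $name/$value and raises KeyError on missing fields.
    print(t.substitute({"name": name, "value": value}), end="")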
import json

import requests

# POST
payload = {'userId': 6622, 'title': 'Something', 'body': 'some body'}
response = requests.post('https://jsonplaceholder.typicode.com/posts',
                         data=json.dumps(payload))
assert response.status_code == 201, f'POST: Received status code {response.status_code}'
print(response.text)

# GET
data = requests.get('https://jsonplaceholder.typicode.com/todos')
assert data.status_code == 200, f'GET: Received status code {data.status_code}'
json_data = json.loads(data.text)
print(json_data)
print(type(json_data))
[ "json.loads", "requests.get", "json.dumps" ]
[((328, 386), 'requests.get', 'requests.get', (['"""https://jsonplaceholder.typicode.com/todos"""'], {}), "('https://jsonplaceholder.typicode.com/todos')\n", (340, 386), False, 'import requests\n'), ((480, 501), 'json.loads', 'json.loads', (['data.text'], {}), '(data.text)\n', (490, 501), False, 'import json\n'), ((183, 202), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (193, 202), False, 'import json\n')]
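An equivalent sketch using the requests json= shortcut: requests serializes the payload and sets the Content-Type header itself, so the explicit json.dumps() above becomes unnecessary:

import requests

payload = {'userId': 6622, 'title': 'Something', 'body': 'some body'}
response = requests.post('https://jsonplaceholder.typicode.com/posts', json=payload)
response.raise_for_status()  # raises on 4xx/5xx instead of an assert
print(response.json())       # parsed body, same as json.loads(response.text)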
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from openstack import resource


class PortForwarding(resource.Resource):
    name_attribute = "floating_ip_port_forwarding"
    resource_name = "port forwarding"
    resource_key = 'port_forwarding'
    resources_key = 'port_forwardings'
    base_path = '/floatingips/%(floatingip_id)s/port_forwardings'
    _allow_unknown_attrs_in_body = True

    # capabilities
    allow_create = True
    allow_fetch = True
    allow_commit = True
    allow_delete = True
    allow_list = True

    _query_mapping = resource.QueryParameters(
        'internal_port_id', 'external_port', 'protocol'
    )

    # Properties
    #: The ID of Floating IP address
    floatingip_id = resource.URI('floatingip_id')
    #: The ID of internal port
    internal_port_id = resource.Body('internal_port_id')
    #: The internal IP address
    internal_ip_address = resource.Body('internal_ip_address')
    #: The internal TCP/UDP/other port number
    internal_port = resource.Body('internal_port', type=int)
    #: The external TCP/UDP/other port number
    external_port = resource.Body('external_port', type=int)
    #: The protocol
    protocol = resource.Body('protocol')
    #: The description
    description = resource.Body('description')
[ "openstack.resource.QueryParameters", "openstack.resource.URI", "openstack.resource.Body" ]
[((1051, 1124), 'openstack.resource.QueryParameters', 'resource.QueryParameters', (['"""internal_port_id"""', '"""external_port"""', '"""protocol"""'], {}), "('internal_port_id', 'external_port', 'protocol')\n", (1075, 1124), False, 'from openstack import resource\n'), ((1214, 1243), 'openstack.resource.URI', 'resource.URI', (['"""floatingip_id"""'], {}), "('floatingip_id')\n", (1226, 1243), False, 'from openstack import resource\n'), ((1298, 1331), 'openstack.resource.Body', 'resource.Body', (['"""internal_port_id"""'], {}), "('internal_port_id')\n", (1311, 1331), False, 'from openstack import resource\n'), ((1389, 1425), 'openstack.resource.Body', 'resource.Body', (['"""internal_ip_address"""'], {}), "('internal_ip_address')\n", (1402, 1425), False, 'from openstack import resource\n'), ((1492, 1532), 'openstack.resource.Body', 'resource.Body', (['"""internal_port"""'], {'type': 'int'}), "('internal_port', type=int)\n", (1505, 1532), False, 'from openstack import resource\n'), ((1599, 1639), 'openstack.resource.Body', 'resource.Body', (['"""external_port"""'], {'type': 'int'}), "('external_port', type=int)\n", (1612, 1639), False, 'from openstack import resource\n'), ((1675, 1700), 'openstack.resource.Body', 'resource.Body', (['"""protocol"""'], {}), "('protocol')\n", (1688, 1700), False, 'from openstack import resource\n'), ((1742, 1770), 'openstack.resource.Body', 'resource.Body', (['"""description"""'], {}), "('description')\n", (1755, 1770), False, 'from openstack import resource\n')]
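A hedged sketch of what the class attributes above control, restated on a made-up Widget resource: resource.URI fields fill the base_path template, resource.Body fields travel in the JSON payload, and the allow_* flags gate which CRUD operations the SDK exposes:

from openstack import resource

class Widget(resource.Resource):
    resource_key = 'widget'      # envelope key for a single object
    resources_key = 'widgets'    # envelope key for list responses
    base_path = '/containers/%(container_id)s/widgets'

    allow_create = True
    allow_fetch = True
    allow_delete = True
    allow_list = True

    #: Parent container ID, substituted into base_path.
    container_id = resource.URI('container_id')
    #: Human-readable name carried in the request/response body.
    name = resource.Body('name')
    #: Size in bytes.
    size = resource.Body('size', type=int)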
import glob import json import os import aiohttp from ruamel.yaml import YAML from . import with_loop, log from ..dictionary import loader @with_loop async def main(args, loop): if args.url.startswith("http"): log.critical("Downloading dictionary JSON...") async with aiohttp.ClientSession(loop=loop) as session: async with session.get(args.url) as resp: data = await resp.json() elif os.path.isfile(args.url): log.critical("Reading dictionary JSON...") with open(args.url) as f: data = json.load(f) else: log.critical("Reading dictionary YAML source...") data = {} yaml = YAML(typ="safe") yaml.allow_duplicate_keys = True for path in glob.glob(f"{args.url}/*.yaml"): with open(path) as f: data[os.path.basename(path)] = yaml.load(f) loader.load(data)
[ "json.load", "os.path.basename", "ruamel.yaml.YAML", "aiohttp.ClientSession", "os.path.isfile", "glob.glob" ]
[((442, 466), 'os.path.isfile', 'os.path.isfile', (['args.url'], {}), '(args.url)\n', (456, 466), False, 'import os\n'), ((292, 324), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'loop': 'loop'}), '(loop=loop)\n', (313, 324), False, 'import aiohttp\n'), ((687, 703), 'ruamel.yaml.YAML', 'YAML', ([], {'typ': '"""safe"""'}), "(typ='safe')\n", (691, 703), False, 'from ruamel.yaml import YAML\n'), ((765, 796), 'glob.glob', 'glob.glob', (['f"""{args.url}/*.yaml"""'], {}), "(f'{args.url}/*.yaml')\n", (774, 796), False, 'import glob\n'), ((572, 584), 'json.load', 'json.load', (['f'], {}), '(f)\n', (581, 584), False, 'import json\n'), ((853, 875), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (869, 875), False, 'import os\n')]
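# A self-contained sketch of the YAML branch above: ruamel.yaml's safe
# loader normally raises DuplicateKeyError on repeated mapping keys, and
# setting allow_duplicate_keys = True (as the loader script does) makes
# such documents load anyway. The document string is illustrative.
import io
from ruamel.yaml import YAML

yaml = YAML(typ="safe")
yaml.allow_duplicate_keys = True
doc = io.StringIO("word: first\nword: second\n")
print(yaml.load(doc))  # loads without raising DuplicateKeyError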
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 12 14:27:05 2020

@author: ricardoguimaraes
"""

import numpy as np
import pandas as pd
import geopandas as gpd
from gdf_heatmap import gdf_heatmap
from array_to_tiff import array_to_tiff

if __name__ == '__main__':
    from shapely.geometry import Point
    import matplotlib.pyplot as plt

    # Build a random point cloud with a z value to interpolate.
    df = pd.DataFrame({'x': np.random.normal(-45, 8, size=100),
                       'y': np.random.normal(-4, 8, size=100),
                       'z': np.random.normal(-40, 4, size=100)})
    df['geometry'] = df.apply(lambda x: Point(x['x'], x['y']), axis=1)
    gdf = gpd.GeoDataFrame(df)

    Result = gdf_heatmap(gdf, df_column='z', dx=0.5, dy=0.5, verbose=True,
                         smooth=0.3, function='gaussian')

    array_to_tiff(Result['array'], Result['x'], Result['y'],
                  Result['dx'], Result['dy'],
                  to_file=r'C:\Users\lealp\Downloads\Temp\My_tiff')

    input('Press any key to close')
    plt.close('all')

    del Result
    del gdf
[ "shapely.geometry.Point", "matplotlib.pyplot.close", "geopandas.GeoDataFrame", "numpy.random.normal", "gdf_heatmap.gdf_heatmap", "array_to_tiff.array_to_tiff" ]
[((665, 685), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', (['df'], {}), '(df)\n', (681, 685), True, 'import geopandas as gpd\n'), ((704, 802), 'gdf_heatmap.gdf_heatmap', 'gdf_heatmap', (['gdf'], {'df_column': '"""z"""', 'dx': '(0.5)', 'dy': '(0.5)', 'verbose': '(True)', 'smooth': '(0.3)', 'function': '"""gaussian"""'}), "(gdf, df_column='z', dx=0.5, dy=0.5, verbose=True, smooth=0.3,\n function='gaussian')\n", (715, 802), False, 'from gdf_heatmap import gdf_heatmap\n'), ((862, 1004), 'array_to_tiff.array_to_tiff', 'array_to_tiff', (["Result['array']", "Result['x']", "Result['y']", "Result['dx']", "Result['dy']"], {'to_file': '"""C:\\\\Users\\\\lealp\\\\Downloads\\\\Temp\\\\My_tiff"""'}), "(Result['array'], Result['x'], Result['y'], Result['dx'],\n Result['dy'], to_file='C:\\\\Users\\\\lealp\\\\Downloads\\\\Temp\\\\My_tiff')\n", (875, 1004), False, 'from array_to_tiff import array_to_tiff\n'), ((1068, 1084), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (1077, 1084), True, 'import matplotlib.pyplot as plt\n'), ((380, 414), 'numpy.random.normal', 'np.random.normal', (['(-45)', '(8)'], {'size': '(100)'}), '(-45, 8, size=100)\n', (396, 414), True, 'import numpy as np\n'), ((447, 480), 'numpy.random.normal', 'np.random.normal', (['(-4)', '(8)'], {'size': '(100)'}), '(-4, 8, size=100)\n', (463, 480), True, 'import numpy as np\n'), ((513, 547), 'numpy.random.normal', 'np.random.normal', (['(-40)', '(4)'], {'size': '(100)'}), '(-40, 4, size=100)\n', (529, 547), True, 'import numpy as np\n'), ((619, 640), 'shapely.geometry.Point', 'Point', (["x['x']", "x['y']"], {}), "(x['x'], x['y'])\n", (624, 640), False, 'from shapely.geometry import Point\n')]
from keras.layers import Input, Dense, Dropout, LSTM
from keras.models import Model, Sequential
from keras.optimizers import SGD
from keras import regularizers


def feedforward(layer_spec=[64], num_labels=5, activ='sigmoid',
                optim='adam', loss='categorical_crossentropy',
                droprate=None, loss_weights=None, reg_weight=0.01):
    model = Sequential()
    input_shape = (8,)
    for i, units in enumerate(layer_spec):
        if i == 0:
            # The first hidden layer carries the fixed 8-feature input shape.
            d_layer = Dense(units, activation=activ,
                            kernel_regularizer=regularizers.l1(reg_weight),
                            input_shape=input_shape)
        else:
            d_layer = Dense(units, activation=activ,
                            kernel_regularizer=regularizers.l1(reg_weight))
        model.add(d_layer)
        if droprate is not None:
            model.add(Dropout(droprate))
    model.add(Dense(num_labels, activation='softmax'))
    # sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=optim,
                  loss=loss,
                  loss_weights=loss_weights,
                  metrics=['categorical_accuracy'])
    return model


# TODO: Finish the function below (output_dim is not used yet)
def basic_rnn(timesteps, output_dim=4):
    model = Sequential()
    model.add(LSTM(10, input_shape=(timesteps, 8), unroll=True,
                   return_sequences=True))
    model.add(Dense(10))
    return model
[ "keras.layers.Dropout", "keras.layers.LSTM", "keras.layers.Dense", "keras.regularizers.l1", "keras.models.Sequential" ]
[((363, 375), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (373, 375), False, 'from keras.models import Model, Sequential\n'), ((1183, 1195), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1193, 1195), False, 'from keras.models import Model, Sequential\n'), ((812, 851), 'keras.layers.Dense', 'Dense', (['num_labels'], {'activation': '"""softmax"""'}), "(num_labels, activation='softmax')\n", (817, 851), False, 'from keras.layers import Input, Dense, Dropout, LSTM\n'), ((1211, 1283), 'keras.layers.LSTM', 'LSTM', (['(10)'], {'input_shape': '(timesteps, 8)', 'unroll': '(True)', 'return_sequences': '(True)'}), '(10, input_shape=(timesteps, 8), unroll=True, return_sequences=True)\n', (1215, 1283), False, 'from keras.layers import Input, Dense, Dropout, LSTM\n'), ((1300, 1309), 'keras.layers.Dense', 'Dense', (['(10)'], {}), '(10)\n', (1305, 1309), False, 'from keras.layers import Input, Dense, Dropout, LSTM\n'), ((778, 795), 'keras.layers.Dropout', 'Dropout', (['droprate'], {}), '(droprate)\n', (785, 795), False, 'from keras.layers import Input, Dense, Dropout, LSTM\n'), ((530, 557), 'keras.regularizers.l1', 'regularizers.l1', (['reg_weight'], {}), '(reg_weight)\n', (545, 557), False, 'from keras import regularizers\n'), ((667, 694), 'keras.regularizers.l1', 'regularizers.l1', (['reg_weight'], {}), '(reg_weight)\n', (682, 694), False, 'from keras import regularizers\n')]
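# A short usage sketch for the builder above: the 8-feature input shape is
# hard-coded inside feedforward(), so only the hidden layout, label count
# and dropout rate are chosen here; the layer sizes are illustrative.
model = feedforward(layer_spec=[64, 32], num_labels=5, droprate=0.25)
model.summary()  # two Dense+Dropout blocks followed by a 5-way softmax head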
import re

stages = {
    "Battlefield": re.compile(r"battle", flags=re.I),
    "Dream Land N64": re.compile(r"land", flags=re.I),
    "Final Destination": re.compile(r"final|fd", flags=re.I),
    "Fountain of Dreams": re.compile(r"fount|fod", flags=re.I),
    "Yoshi's Story": re.compile(r"yoshi", flags=re.I),
    "Pokemon Stadium": re.compile(r"pokemon|stadium|ps", flags=re.I),
}

characters = {
    "Bowser": re.compile(r"bowser", flags=re.I),
    "Capt<NAME>": re.compile(r"falcon|cf", flags=re.I),
    "<NAME>": re.compile(r"donkey|kong|dk", flags=re.I),
    "Dr. Mario": re.compile(r"doc|dr", flags=re.I),
    "Falco": re.compile(r"falco\b", flags=re.I),
    "Fox": re.compile(r"fox", flags=re.I),
    "Ganondorf": re.compile(r"ganon", flags=re.I),
    "Ice Climbers": re.compile(r"ic", flags=re.I),
    "Jigglypuff": re.compile(r"jig|puff", flags=re.I),
    "Kirby": re.compile(r"kirby", flags=re.I),
    # Negative lookbehind so "ylink" is left for the Young Link entry below.
    "Link": re.compile(r"(?<!y)link", flags=re.I),
    "Luigi": re.compile(r"luigi", flags=re.I),
    # Negative lookbehind so "dmario" is not claimed by plain Mario.
    "Mario": re.compile(r"(?<!d)mario", flags=re.I),
    "Marth": re.compile(r"marth", flags=re.I),
    "Mewtwo": re.compile(r"mew", flags=re.I),
    "Mr. Game & Watch": re.compile(r"game|&", flags=re.I),
    "Ness": re.compile(r"ness", flags=re.I),
    "Peach": re.compile(r"peach|daisy", flags=re.I),
    "Pichu": re.compile(r"pichu", flags=re.I),
    "Pikachu": re.compile(r"pika", flags=re.I),
    "Roy": re.compile(r"roy", flags=re.I),
    "Samus": re.compile(r"samus", flags=re.I),
    "Sheik": re.compile(r"sh", flags=re.I),
    "<NAME>": re.compile(r"y.*link", flags=re.I),
    "Yoshi": re.compile(r"yoshi", flags=re.I),
    "Zelda": re.compile(r"zelda", flags=re.I),
}

rounds = {
    "Winners Quarters": re.compile(r"winner.*quarter|wq", flags=re.I),
    "Winners Semis": re.compile(r"winner.*semi|ws", flags=re.I),
    "Winners Finals": re.compile(r"winner.*final|wf", flags=re.I),
    "Losers Eighths": re.compile(r"loser.*eight", flags=re.I),
    "Losers Quarters": re.compile(r"loser.*quarter|lq", flags=re.I),
    "Losers Semis": re.compile(r"loser.*semi|ls", flags=re.I),
    "Losers Finals": re.compile(r"loser.*final|lf", flags=re.I),
    "Grand Finals": re.compile(r"grand.*final|gf", flags=re.I),
}

sponsors = {
    "Team Liquid": re.compile(r"liquid|tl", flags=re.I),
    "Alliance": re.compile(r"\[A\]|alliance", flags=re.I),
    "Counter Logic Gaming": re.compile(r"clg|counter.*logic", flags=re.I),
    "Cloud 9": re.compile(r"c9|cloud", flags=re.I),
}


def guess_character(ch):
    for item, prog in characters.items():
        if prog.search(ch):
            return item
    return None


def guess_stage(s):
    for item, prog in stages.items():
        if prog.search(s):
            return item
    return None


def guess_round(r):
    for item, prog in rounds.items():
        if prog.search(r):
            return item
    return None


def guess_sponsor(s):
    for item, prog in sponsors.items():
        if prog.search(s):
            return item
    return None
[ "re.compile" ]
[((46, 78), 're.compile', 're.compile', (['"""battle"""'], {'flags': 're.I'}), "('battle', flags=re.I)\n", (56, 78), False, 'import re\n'), ((104, 134), 're.compile', 're.compile', (['"""land"""'], {'flags': 're.I'}), "('land', flags=re.I)\n", (114, 134), False, 'import re\n'), ((163, 197), 're.compile', 're.compile', (['"""final|fd"""'], {'flags': 're.I'}), "('final|fd', flags=re.I)\n", (173, 197), False, 'import re\n'), ((227, 262), 're.compile', 're.compile', (['"""fount|fod"""'], {'flags': 're.I'}), "('fount|fod', flags=re.I)\n", (237, 262), False, 'import re\n'), ((287, 318), 're.compile', 're.compile', (['"""yoshi"""'], {'flags': 're.I'}), "('yoshi', flags=re.I)\n", (297, 318), False, 'import re\n'), ((345, 389), 're.compile', 're.compile', (['"""pokemon|stadium|ps"""'], {'flags': 're.I'}), "('pokemon|stadium|ps', flags=re.I)\n", (355, 389), False, 'import re\n'), ((426, 458), 're.compile', 're.compile', (['"""bowser"""'], {'flags': 're.I'}), "('bowser', flags=re.I)\n", (436, 458), False, 'import re\n'), ((480, 515), 're.compile', 're.compile', (['"""falcon|cf"""'], {'flags': 're.I'}), "('falcon|cf', flags=re.I)\n", (490, 515), False, 'import re\n'), ((533, 573), 're.compile', 're.compile', (['"""donkey|kong|dk"""'], {'flags': 're.I'}), "('donkey|kong|dk', flags=re.I)\n", (543, 573), False, 'import re\n'), ((594, 626), 're.compile', 're.compile', (['"""doc|dr"""'], {'flags': 're.I'}), "('doc|dr', flags=re.I)\n", (604, 626), False, 'import re\n'), ((643, 677), 're.compile', 're.compile', (['"""falco\\\\b"""'], {'flags': 're.I'}), "('falco\\\\b', flags=re.I)\n", (653, 677), False, 'import re\n'), ((691, 720), 're.compile', 're.compile', (['"""fox"""'], {'flags': 're.I'}), "('fox', flags=re.I)\n", (701, 720), False, 'import re\n'), ((741, 772), 're.compile', 're.compile', (['"""ganon"""'], {'flags': 're.I'}), "('ganon', flags=re.I)\n", (751, 772), False, 'import re\n'), ((796, 824), 're.compile', 're.compile', (['"""ic"""'], {'flags': 're.I'}), "('ic', flags=re.I)\n", (806, 824), False, 'import re\n'), ((846, 880), 're.compile', 're.compile', (['"""jig|puff"""'], {'flags': 're.I'}), "('jig|puff', flags=re.I)\n", (856, 880), False, 'import re\n'), ((897, 928), 're.compile', 're.compile', (['"""kirby"""'], {'flags': 're.I'}), "('kirby', flags=re.I)\n", (907, 928), False, 'import re\n'), ((944, 979), 're.compile', 're.compile', (['"""(?!y)link"""'], {'flags': 're.I'}), "('(?!y)link', flags=re.I)\n", (954, 979), False, 'import re\n'), ((996, 1027), 're.compile', 're.compile', (['"""luigi"""'], {'flags': 're.I'}), "('luigi', flags=re.I)\n", (1006, 1027), False, 'import re\n'), ((1044, 1080), 're.compile', 're.compile', (['"""(?!d)mario"""'], {'flags': 're.I'}), "('(?!d)mario', flags=re.I)\n", (1054, 1080), False, 'import re\n'), ((1097, 1128), 're.compile', 're.compile', (['"""marth"""'], {'flags': 're.I'}), "('marth', flags=re.I)\n", (1107, 1128), False, 'import re\n'), ((1146, 1175), 're.compile', 're.compile', (['"""mew"""'], {'flags': 're.I'}), "('mew', flags=re.I)\n", (1156, 1175), False, 'import re\n'), ((1203, 1235), 're.compile', 're.compile', (['"""game|&"""'], {'flags': 're.I'}), "('game|&', flags=re.I)\n", (1213, 1235), False, 'import re\n'), ((1251, 1281), 're.compile', 're.compile', (['"""ness"""'], {'flags': 're.I'}), "('ness', flags=re.I)\n", (1261, 1281), False, 'import re\n'), ((1298, 1335), 're.compile', 're.compile', (['"""peach|daisy"""'], {'flags': 're.I'}), "('peach|daisy', flags=re.I)\n", (1308, 1335), False, 'import re\n'), ((1352, 1383), 're.compile', 're.compile', 
(['"""pichu"""'], {'flags': 're.I'}), "('pichu', flags=re.I)\n", (1362, 1383), False, 'import re\n'), ((1402, 1432), 're.compile', 're.compile', (['"""pika"""'], {'flags': 're.I'}), "('pika', flags=re.I)\n", (1412, 1432), False, 'import re\n'), ((1447, 1476), 're.compile', 're.compile', (['"""roy"""'], {'flags': 're.I'}), "('roy', flags=re.I)\n", (1457, 1476), False, 'import re\n'), ((1493, 1524), 're.compile', 're.compile', (['"""samus"""'], {'flags': 're.I'}), "('samus', flags=re.I)\n", (1503, 1524), False, 'import re\n'), ((1541, 1569), 're.compile', 're.compile', (['"""sh"""'], {'flags': 're.I'}), "('sh', flags=re.I)\n", (1551, 1569), False, 'import re\n'), ((1587, 1620), 're.compile', 're.compile', (['"""y.*link"""'], {'flags': 're.I'}), "('y.*link', flags=re.I)\n", (1597, 1620), False, 'import re\n'), ((1637, 1668), 're.compile', 're.compile', (['"""yoshi"""'], {'flags': 're.I'}), "('yoshi', flags=re.I)\n", (1647, 1668), False, 'import re\n'), ((1685, 1716), 're.compile', 're.compile', (['"""zelda"""'], {'flags': 're.I'}), "('zelda', flags=re.I)\n", (1695, 1716), False, 'import re\n'), ((1759, 1803), 're.compile', 're.compile', (['"""winner.*quarter|wq"""'], {'flags': 're.I'}), "('winner.*quarter|wq', flags=re.I)\n", (1769, 1803), False, 'import re\n'), ((1828, 1869), 're.compile', 're.compile', (['"""winner.*semi|ws"""'], {'flags': 're.I'}), "('winner.*semi|ws', flags=re.I)\n", (1838, 1869), False, 'import re\n'), ((1895, 1937), 're.compile', 're.compile', (['"""winner.*final|wf"""'], {'flags': 're.I'}), "('winner.*final|wf', flags=re.I)\n", (1905, 1937), False, 'import re\n'), ((1963, 2001), 're.compile', 're.compile', (['"""loser.*eight"""'], {'flags': 're.I'}), "('loser.*eight', flags=re.I)\n", (1973, 2001), False, 'import re\n'), ((2028, 2071), 're.compile', 're.compile', (['"""loser.*quarter|lq"""'], {'flags': 're.I'}), "('loser.*quarter|lq', flags=re.I)\n", (2038, 2071), False, 'import re\n'), ((2095, 2135), 're.compile', 're.compile', (['"""loser.*semi|ls"""'], {'flags': 're.I'}), "('loser.*semi|ls', flags=re.I)\n", (2105, 2135), False, 'import re\n'), ((2160, 2201), 're.compile', 're.compile', (['"""loser.*final|lf"""'], {'flags': 're.I'}), "('loser.*final|lf', flags=re.I)\n", (2170, 2201), False, 'import re\n'), ((2225, 2266), 're.compile', 're.compile', (['"""grand.*final|gf"""'], {'flags': 're.I'}), "('grand.*final|gf', flags=re.I)\n", (2235, 2266), False, 'import re\n'), ((2306, 2341), 're.compile', 're.compile', (['"""liquid|tl"""'], {'flags': 're.I'}), "('liquid|tl', flags=re.I)\n", (2316, 2341), False, 'import re\n'), ((2361, 2403), 're.compile', 're.compile', (['"""\\\\[A\\\\]|alliance"""'], {'flags': 're.I'}), "('\\\\[A\\\\]|alliance', flags=re.I)\n", (2371, 2403), False, 'import re\n'), ((2433, 2477), 're.compile', 're.compile', (['"""clg|counter.*logic"""'], {'flags': 're.I'}), "('clg|counter.*logic', flags=re.I)\n", (2443, 2477), False, 'import re\n'), ((2496, 2530), 're.compile', 're.compile', (['"""c9|cloud"""'], {'flags': 're.I'}), "('c9|cloud', flags=re.I)\n", (2506, 2530), False, 'import re\n')]
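# Quick usage checks for the lookup helpers above: each guesser scans its
# table in insertion order and returns the first canonical name whose
# pattern matches.
print(guess_stage("fd"))        # Final Destination
print(guess_character("puff"))  # Jigglypuff
print(guess_round("GF"))        # Grand Finals
print(guess_sponsor("C9"))      # Cloud 9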
import tensorflow as tf from . import utils from . import base def l2(params=None): if params is None: params = base.find_variables(weight=True) return utils.smart_sum([tf.nn.l2_loss(x) for x in params])
[ "tensorflow.nn.l2_loss" ]
[((188, 204), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['x'], {}), '(x)\n', (201, 204), True, 'import tensorflow as tf\n')]
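# A self-contained sketch of what l2() computes when parameters are passed
# in explicitly: the summed tf.nn.l2_loss of every tensor (sum of squares
# divided by two). base.find_variables and utils.smart_sum are
# project-local helpers, so this sketch bypasses them and assumes smart_sum
# behaves like a plain sum over the per-tensor losses.
import tensorflow as tf

params = [tf.constant([3.0, 4.0]), tf.constant([1.0])]
penalty = sum(tf.nn.l2_loss(p) for p in params)
print(float(penalty))  # (9 + 16) / 2 + 1 / 2 = 13.0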
# -*- coding: utf-8 -*- import sys import os from datetime import datetime sys.path.append(os.path.abspath('..')) sys.path.append(os.path.abspath('.')) extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', ] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'Sphinx Bulma Theme' year = datetime.now().year copyright = u'%d <NAME>' % year exclude_patterns = ['_build'] html_logo = 'static/logo.png' html_favicon = 'static/favicon.ico' html_theme = 'sphinx-bulma' html_theme_options = { 'display_git': True, 'git_host': 'github.com', 'git_user': 'oAGoulart', 'git_repo': 'sphinx-bulma', 'git_version': 'master/docs/', 'git_icon': 'github-circled', 'git_desc': 'Edit on GitHub' } html_theme_path = ["../src"]
[ "os.path.abspath", "datetime.datetime.now" ]
[((92, 113), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (107, 113), False, 'import os\n'), ((131, 151), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (146, 151), False, 'import os\n'), ((360, 374), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (372, 374), False, 'from datetime import datetime\n')]
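# A hedged illustration (not taken from sphinx-bulma's actual templates) of
# how the git_* options above could be combined into an "Edit on GitHub"
# link; the edit-path scheme is an assumption for demonstration only.
opts = html_theme_options
edit_url = "https://{}/{}/{}/edit/{}index.rst".format(
    opts["git_host"], opts["git_user"], opts["git_repo"], opts["git_version"])
print(opts["git_desc"], "->", edit_url)
# Edit on GitHub -> https://github.com/oAGoulart/sphinx-bulma/edit/master/docs/index.rst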
from typing import Any

from hltv.api.match.request import MatchRequest
from hltv.models.team.lib import Team
from hltv.libs.helper import *


class Match:
    def __init__(self, match_id: int):
        self.body: Any = MatchRequest(match_id)()
        self.results: dict = dict()
        self.match_id = match_id
        self._canonicalize_body(self.body)

    @staticmethod
    def get(match_id: int):
        return Match(match_id)

    def _canonicalize_body(self, body: Any):
        head = get_head_data(body)
        self.results.update(head)
        self.results['team_one'], self.results['team_two'] = self._get_teams()

    def _get_teams(self):
        ret = list()
        for div in self.body.find_all("div", attrs={"class": "standard-box teamsBox"}):
            for a in div.find_all("a"):
                if "/team/" in a['href']:
                    ret.append(Team.get(get_id_from_link(a['href'])))
        return tuple(ret)
[ "hltv.api.match.request.MatchRequest" ]
[((196, 218), 'hltv.api.match.request.MatchRequest', 'MatchRequest', (['match_id'], {}), '(match_id)\n', (208, 218), False, 'from hltv.api.match.request import MatchRequest\n')]
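# A hedged usage sketch: Match.get() issues a live HLTV request through
# MatchRequest, so this needs network access, and the match id below is a
# hypothetical placeholder.
match = Match.get(2345678)  # hypothetical match id
print(match.results.get("team_one"), "vs", match.results.get("team_two"))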
import numpy as np
from scipy.integrate import quad
import pandas as pd

# calculate the k-correction in erg s^-1 cm^-2:
def NE(E,Epeak,alpha,beita):
    if (alpha-beita)*Epeak/(2+alpha)>=E:
        NE=(E/100)**alpha*np.exp(-E*(2+alpha)/Epeak)
        return NE
    elif (alpha-beita)*Epeak/(2+alpha)<=E:
        NE=(((alpha-beita)*Epeak/(100*(2+alpha)))**(alpha-beita)*np.exp(beita-alpha)*(E/100)**beita)
        return NE

def k(Epeak,Z,alpha,beita,bandmin,bandmax):
    a1=quad(lambda E:E*NE(E,Epeak,alpha,beita),1/(1+Z),10**4/(1+Z))
    a2=quad(lambda E:E*NE(E,Epeak,alpha,beita),bandmin,bandmax)
    k=a1[0]/a2[0]
    return k

# calculate the k-correction in photons s^-1 cm^-2:
def nk(Epeak,Z,alpha,beita,bandmin,bandmax):
    a1=quad(lambda E:E*NE(E,Epeak,alpha,beita),1/(1+Z),10**4/(1+Z))
    a2=quad(lambda E:NE(E,Epeak,alpha,beita),bandmin,bandmax)
    k=a1[0]/a2[0]
    # return k
    return k*1.6*10**(-9)  # convert keV to erg

# calculate the luminosity distance
omegal=0.734
omegam=0.266
h=0.71
H0=1/(3.09*10**17)
H0yr=1/(9.78*10**9)
# H0=70*10**5
c=2.99792458*10**8

def dl(Z):
    integrateportion=quad(lambda x:1/np.sqrt(omegam*(1+x)**3+omegal),0,Z)
    dl=c*(1+Z)/(h*H0)*integrateportion[0]
    # dl =c/H0*integrateportion[0]
    return dl*10**2  # convert m to cm

# Calculate the opening angle
def seita(z,ep,s,alpha,beita,bandmin,bandmax):
    eiso=4*np.pi*dl(z)**2*s*k(ep,z,alpha,beita,bandmin,bandmax)/(1+z)
    Egama=(ep*(1+z)/10**2.57)**(1/0.61)*3.8*10**50
    seitaradian=np.arccos(1-Egama/eiso)
    seita=seitaradian/(2*np.pi)*360
    return seita

# calculate seita for photons s^-1 cm^-2
def pseita(z,ep,s,alpha,beita,bandmin,bandmax):
    eiso=4*np.pi*dl(z)**2*s*nk(ep,z,alpha,beita,bandmin,bandmax)/(1+z)
    Egama=(ep*(1+z)/10**2.57)**(1/0.61)*3.8*10**50
    seitaradian=np.arccos(1-Egama/eiso)
    seita=seitaradian/(2*np.pi)*360
    return seita

# Calculate Egamma
def egamma(z,ep):
    Egama = (ep * (1 + z) / 10 ** 2.57) ** (1 / 0.61) * 3.8 * 10 ** 50
    return Egama

# Calculate Eiso
def eiso(z,ep,s,alpha,beita,bandmin,bandmax):
    eiso=4*np.pi*dl(z)**2*s*k(ep,z,alpha,beita,bandmin,bandmax)/(1+z)
    return eiso

# Define a new spectrum calculation method @2018.6.20 [for cases that only contain 'alpha']
def alphaNE(E,Epeak,alpha):
    NE=(E/100)**alpha*np.exp(-(2+alpha)*E/Epeak)
    return NE

def alphaek(Epeak,alpha,Z,bandmin,bandmax):
    a1=quad(lambda E:E*alphaNE(E,Epeak,alpha),1/(1+Z),10**4/(1+Z))
    a2=quad(lambda E:E*alphaNE(E,Epeak,alpha),bandmin, bandmax)
    k=a1[0]/a2[0]
    return k

def alphapk(Epeak,alpha,Z,bandmin,bandmax):
    a1=quad(lambda E:E*alphaNE(E,Epeak,alpha),1/(1+Z),10**4/(1+Z))
    a2=quad(lambda E:alphaNE(E,Epeak,alpha),bandmin,bandmax)
    k=a1[0]/a2[0]
    return k*1.6*10**(-9)

def seitaerg6_20(z,ep,s,alpha,bandmin,bandmax):
    eiso=4*np.pi*dl(z)**2*s*alphaek(ep,alpha,z,bandmin,bandmax)/(1+z)
    Egama=(ep*(1+z)/10**2.57)**(1/0.61)*3.8*10**50
    seitaradian=np.arccos(1-Egama/eiso)
    seita=seitaradian/(2*np.pi)*360
    # k = alphaek(ep,alpha,z,bandmin, bandmax)
    return seita,z,Egama

def seitaphoton6_20(z,ep,s,alpha,bandmin,bandmax):
    eiso=4*np.pi*dl(z)**2*s*alphapk(ep,alpha,z,bandmin,bandmax)/(1+z)
    Egama=(ep*(1+z)/10**2.57)**(1/0.61)*3.8*10**50
    seitaradian=np.arccos(1-Egama/eiso)
    seita=seitaradian/(2*np.pi)*360
    # k=alphapk(ep,alpha,z,bandmin,bandmax)*(1/1.6)*10**(9)
    return seita,z,Egama

# refer to the 6.11 work:
def erg6_11():
    df = pd.read_excel("/Users/dingding/Desktop/calculate/6.9/erg.xlsx")
    ebandmin = df['bandmin']
    ebandmax=df['bandmax']
    egrbname=df['GRB']
    ez=df['z']
    eep=df['ep']
    ealpha=df['alpha']
    ebeta=df['beta']
    efluence=df['fluence']
    i=0
    seita1=[]
    eegamma=[]
    for i in range(len(egrbname)):
        seita1=np.append(seita1,seita(ez[i],eep[i],efluence[i],ealpha[i],ebeta[i],ebandmin[i],ebandmax[i]))
        eegamma=np.append(eegamma,egamma(ez[i],eep[i]))
    return seita1,ez,eegamma

def photon6_11():
    dp = pd.read_excel("/Users/dingding/Desktop/calculate/6.9/photons.xlsx")
    pbandmin = dp['bandmin']
    pbandmax=dp['bandmax']
    pgrbname=dp['GRB']
    pz=dp['z']
    pep=dp['ep']
    palpha=dp['alpha']
    pbeta=dp['beta']
    pfluence=dp['fluence']
    i=0
    seita2=[]
    pegamma=[]
    for i in range(len(pgrbname)):
        seita2=np.append(seita2,pseita(pz[i],pep[i],pfluence[i],palpha[i],pbeta[i],pbandmin[i],pbandmax[i]))
        pegamma=np.append(pegamma,egamma(pz[i],pep[i]))
    return seita2,pz,pegamma

# Calculate the linear regression equation:
def linearregressionEQ(series1,series2):
    up=[]
    down=[]
    xmean=np.mean(series1)
    ymean=np.mean(series2)
    for i in range(len(series1)):
        up=np.append(up,series1[i]*series2[i]-len(series1)*xmean*ymean)
        down=np.append(down,series1[i]**2-len(series1)*xmean**2)
    u=np.sum(up)
    d=np.sum(down)
    b=u/d
    a=ymean-b*xmean
    return a,b

def linearnew(series1,series2):
    up1=[]
    up2=[]
    up3=[]
    up4=[]
    down1=[]
    down2=[]
    for i in range(len(series1)):
        up1=np.append(up1,series1[i]**2)
        up2=np.append(up2,series2[i])
        up3=np.append(up3,series1[i])
        up4=np.append(up4,series1[i]*series2[i])
        down1=np.append(down1,series1[i]**2)
        down2=np.append(down2,series1[i])
    up1=np.sum(up1)
    up2=np.sum(up2)
    up3=np.sum(up3)
    up4=np.sum(up4)
    down1=np.sum(down1)
    down2=np.sum(down2)
    up=up1*up2-up3*up4
    down=down1*len(series1)-down2**2
    a0=up/down
    up=len(series1)*up4-up3*up2
    down=len(series1)*down1-down2**2
    a1=up/down
    return a0,a1

# 8.31
# Define a model to describe the distribution of GRBs with redshift z
# define the complete gamma function:
def comGammaFunc(v):
    gamma=quad(lambda t:t**(v-1)*np.e**(-t),0,float("inf"))
    return gamma[0]

# define the incomplete gamma function:
def incomGammaFunc(v,z):
    sgamma=quad(lambda u:u**(v-1)*np.e**(-u),0,z)[0]
    bgamma=quad(lambda u:u**(v-1)*np.e**(-u),z,float('inf'))[0]
    return bgamma,sgamma

# and define the Seita function:
def SeitaFunc(eps,z,alpha,beta):
    Seita1=incomGammaFunc(alpha+2,eps**beta*10**(0.15*beta*z))[1]
    Seita2=comGammaFunc(alpha+2)
    Seita=Seita1/Seita2
    return Seita

# define the piecewise star formation rate function:
def RSFR(z):
    zpeak=1
    if z<=zpeak:
        Rsfr=(1+z)**(3.44)
        return Rsfr
    elif z>=zpeak:
        Rsfr=(1+zpeak)**(3.44)
        return Rsfr

# define the GRB rate function:
def RGRB(z,eps,alpha,beta,rho):
    A=1/(33.30270146296203)
    RGRB=A*rho*RSFR(z)*SeitaFunc(eps,z,alpha,beta)
    return RGRB

# define a number-count function without duration T
def N(z,eps,alpha,beta,rho,zmax):
    convertfactor=c*3600*24*365*10**2*3.26164*10**9
    dlgpc=dl(z)/convertfactor
    E=np.sqrt(omegam*(1+z)**3+omegal)
    n=RGRB(z,eps,alpha,beta,rho)/(1+z)*4*np.pi*c*dlgpc**2/(H0yr*(1+z)**2*E)
    N=quad(lambda z:n,z,zmax)
    return N[0]

import matplotlib.pyplot as plt
import matplotlib
import random

# 9.6
# Here, in the definition, the normalization constant A_{L} is omitted:
def Luminosityfunction(L_gamma):
    L_critical=10**(49.69)  # unit is erg
    sigma_L=0.4
    A_L=1/(1.7235434382660358e+50)
    luminosityfunc=A_L*np.exp(-(np.log10(L_gamma)-np.log10(L_critical))**2/(2*sigma_L**2))/(np.sqrt(2*np.pi)*sigma_L)
    return luminosityfunc

# Define the angle distribution as a log-normal distribution:
def thetalogdistri(theta_jet):
    theta_critical=10**(-1.27)
    sigema_theta=0.6
    A_theta=1/0.32112249370542306
    Psi=A_theta*np.exp(-(np.log10(theta_jet)-np.log10(theta_critical))**2/(2*sigema_theta**2))/(np.sqrt(2*np.pi)*sigema_theta)
    return Psi-0.22039824379156006-0.688381515339374   #-0.22039824379156006

# def Ntheta(thetamin,thetamax):
#     N=quad(lambda theta_jet:thetalogdistri(theta_jet),thetamin,thetamax)
#     return N[0]

# Define peak flux P:
def P(z,L_gamma,theta_jet):
    L=L_gamma/(1-np.cos(theta_jet/180*np.pi))
    C=random.uniform(0.1,1)
    ep=200*(L/10**52)**0.5/C/(1+z)
    P=L/(4*np.pi*dl(z)**2*nk(ep,z,-1.1,-2.2,15,150))  # 15-150 keV band of Swift/BAT
    return P

# BAT trigger probability:
def eta_t(P):
    if P<0.45:
        eta_t=P**2
        return eta_t/0.67  # normalize the detection probability
    elif P>=0.45:
        eta_t=0.67*(1.0-0.4/P)**0.52
        return eta_t/0.67  # normalize the detection probability

# weak dependence of probability on the observed peak flux:
def eta_z(P):
    eta_z=0.26+0.032*np.e**(1.61*np.log10(P))
    return eta_z

# the probability of alignment for a GRB with jet opening angle theta_{j}:
def eta_a(theta_jet):
    # eta_a=1.4*(1-np.cos(theta_jet))/(4*np.pi)  # where 1.4 sr is the instrument solid angle
    normal=1-np.cos(theta_jet)
    return normal

# def Nluminus(z,theta_jet,Luminusmin,Luminusmax):
#     N=quad(lambda L_gamma:eta_a(theta_jet)*eta_t(P(z,L_gamma,theta_jet)
#     )*eta_z(P(z,L_gamma,theta_jet))*Luminosityfunction(L_gamma),
#     Luminusmin,Luminusmax)
#     return N[0]

def luminosity(z,s,t90):
    l=4*np.pi*dl(z)**2*s*k(80,z,-1,-2.5,15,150)*(1+z)/t90
    return l

def P_obseved(z,s,t90):
    l=luminosity(z,s,t90)
    p=l/(4*np.pi*dl(z)**2*nk(80,z,-1,-2.5,15,150))
    return p

def pdflog(series,down,up,num):
    step=(up-down)/num
    pdf=[]
    for i in range(num):
        counter=0
        for j in range(len(series)):
            if 10**(down+i*step)<series[j]<10**(down+(i+1)*step):
                counter=counter+1
        pdf=np.append(pdf,counter)
    pdf=pdf/np.sum(pdf)
    return pdf

# # Define an operation to delete the 'nan' elements:
# def deletenan(series1,series2):
#     series=np.append(series1,series2)
#     a=series[:len(series):3]
#     b=series[1:len(series):3]
#     c=series[2:len(series):3]
#     a=np.nan_to_num(a)
#     itemindex=np.argwhere(a==0)
#     a=np.delete(a,itemindex,axis=0)
#     b=np.delete(b,itemindex,axis=0)
#     c=np.delete(c,itemindex,axis=0)
#     return a,b,c
[ "numpy.sum", "scipy.integrate.quad", "random.uniform", "pandas.read_excel", "numpy.append", "numpy.mean", "numpy.exp", "numpy.cos", "numpy.log10", "numpy.arccos", "numpy.sqrt" ]
[((1513, 1540), 'numpy.arccos', 'np.arccos', (['(1 - Egama / eiso)'], {}), '(1 - Egama / eiso)\n', (1522, 1540), True, 'import numpy as np\n'), ((1816, 1843), 'numpy.arccos', 'np.arccos', (['(1 - Egama / eiso)'], {}), '(1 - Egama / eiso)\n', (1825, 1843), True, 'import numpy as np\n'), ((2965, 2992), 'numpy.arccos', 'np.arccos', (['(1 - Egama / eiso)'], {}), '(1 - Egama / eiso)\n', (2974, 2992), True, 'import numpy as np\n'), ((3286, 3313), 'numpy.arccos', 'np.arccos', (['(1 - Egama / eiso)'], {}), '(1 - Egama / eiso)\n', (3295, 3313), True, 'import numpy as np\n'), ((3478, 3541), 'pandas.read_excel', 'pd.read_excel', (['"""/Users/dingding/Desktop/calculate/6.9/erg.xlsx"""'], {}), "('/Users/dingding/Desktop/calculate/6.9/erg.xlsx')\n", (3491, 3541), True, 'import pandas as pd\n'), ((4017, 4084), 'pandas.read_excel', 'pd.read_excel', (['"""/Users/dingding/Desktop/calculate/6.9/photons.xlsx"""'], {}), "('/Users/dingding/Desktop/calculate/6.9/photons.xlsx')\n", (4030, 4084), True, 'import pandas as pd\n'), ((4651, 4667), 'numpy.mean', 'np.mean', (['series1'], {}), '(series1)\n', (4658, 4667), True, 'import numpy as np\n'), ((4678, 4694), 'numpy.mean', 'np.mean', (['series2'], {}), '(series2)\n', (4685, 4694), True, 'import numpy as np\n'), ((4872, 4882), 'numpy.sum', 'np.sum', (['up'], {}), '(up)\n', (4878, 4882), True, 'import numpy as np\n'), ((4889, 4901), 'numpy.sum', 'np.sum', (['down'], {}), '(down)\n', (4895, 4901), True, 'import numpy as np\n'), ((5345, 5356), 'numpy.sum', 'np.sum', (['up1'], {}), '(up1)\n', (5351, 5356), True, 'import numpy as np\n'), ((5365, 5376), 'numpy.sum', 'np.sum', (['up2'], {}), '(up2)\n', (5371, 5376), True, 'import numpy as np\n'), ((5385, 5396), 'numpy.sum', 'np.sum', (['up3'], {}), '(up3)\n', (5391, 5396), True, 'import numpy as np\n'), ((5405, 5416), 'numpy.sum', 'np.sum', (['up4'], {}), '(up4)\n', (5411, 5416), True, 'import numpy as np\n'), ((5427, 5440), 'numpy.sum', 'np.sum', (['down1'], {}), '(down1)\n', (5433, 5440), True, 'import numpy as np\n'), ((5451, 5464), 'numpy.sum', 'np.sum', (['down2'], {}), '(down2)\n', (5457, 5464), True, 'import numpy as np\n'), ((6845, 6884), 'numpy.sqrt', 'np.sqrt', (['(omegam * (1 + z) ** 3 + omegal)'], {}), '(omegam * (1 + z) ** 3 + omegal)\n', (6852, 6884), True, 'import numpy as np\n'), ((6959, 6985), 'scipy.integrate.quad', 'quad', (['(lambda z: n)', 'z', 'zmax'], {}), '(lambda z: n, z, zmax)\n', (6963, 6985), False, 'from scipy.integrate import quad\n'), ((8051, 8073), 'random.uniform', 'random.uniform', (['(0.1)', '(1)'], {}), '(0.1, 1)\n', (8065, 8073), False, 'import random\n'), ((2314, 2346), 'numpy.exp', 'np.exp', (['(-(2 + alpha) * E / Epeak)'], {}), '(-(2 + alpha) * E / Epeak)\n', (2320, 2346), True, 'import numpy as np\n'), ((5096, 5127), 'numpy.append', 'np.append', (['up1', '(series1[i] ** 2)'], {}), '(up1, series1[i] ** 2)\n', (5105, 5127), True, 'import numpy as np\n'), ((5137, 5163), 'numpy.append', 'np.append', (['up2', 'series2[i]'], {}), '(up2, series2[i])\n', (5146, 5163), True, 'import numpy as np\n'), ((5175, 5201), 'numpy.append', 'np.append', (['up3', 'series1[i]'], {}), '(up3, series1[i])\n', (5184, 5201), True, 'import numpy as np\n'), ((5213, 5252), 'numpy.append', 'np.append', (['up4', '(series1[i] * series2[i])'], {}), '(up4, series1[i] * series2[i])\n', (5222, 5252), True, 'import numpy as np\n'), ((5264, 5297), 'numpy.append', 'np.append', (['down1', '(series1[i] ** 2)'], {}), '(down1, series1[i] ** 2)\n', (5273, 5297), True, 'import numpy as np\n'), ((5309, 5337), 'numpy.append', 
'np.append', (['down2', 'series1[i]'], {}), '(down2, series1[i])\n', (5318, 5337), True, 'import numpy as np\n'), ((5947, 5994), 'scipy.integrate.quad', 'quad', (['(lambda u: u ** (v - 1) * np.e ** -u)', '(0)', 'z'], {}), '(lambda u: u ** (v - 1) * np.e ** -u, 0, z)\n', (5951, 5994), False, 'from scipy.integrate import quad\n'), ((8833, 8850), 'numpy.cos', 'np.cos', (['theta_jet'], {}), '(theta_jet)\n', (8839, 8850), True, 'import numpy as np\n'), ((9602, 9625), 'numpy.append', 'np.append', (['pdf', 'counter'], {}), '(pdf, counter)\n', (9611, 9625), True, 'import numpy as np\n'), ((9637, 9648), 'numpy.sum', 'np.sum', (['pdf'], {}), '(pdf)\n', (9643, 9648), True, 'import numpy as np\n'), ((216, 248), 'numpy.exp', 'np.exp', (['(-E * (2 + alpha) / Epeak)'], {}), '(-E * (2 + alpha) / Epeak)\n', (222, 248), True, 'import numpy as np\n'), ((7376, 7394), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (7383, 7394), True, 'import numpy as np\n'), ((7717, 7735), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (7724, 7735), True, 'import numpy as np\n'), ((8016, 8047), 'numpy.cos', 'np.cos', (['(theta_jet / 180 * np.pi)'], {}), '(theta_jet / 180 * np.pi)\n', (8022, 8047), True, 'import numpy as np\n'), ((1138, 1177), 'numpy.sqrt', 'np.sqrt', (['(omegam * (1 + x) ** 3 + omegal)'], {}), '(omegam * (1 + x) ** 3 + omegal)\n', (1145, 1177), True, 'import numpy as np\n'), ((369, 390), 'numpy.exp', 'np.exp', (['(beita - alpha)'], {}), '(beita - alpha)\n', (375, 390), True, 'import numpy as np\n'), ((8600, 8611), 'numpy.log10', 'np.log10', (['P'], {}), '(P)\n', (8608, 8611), True, 'import numpy as np\n'), ((7307, 7324), 'numpy.log10', 'np.log10', (['L_gamma'], {}), '(L_gamma)\n', (7315, 7324), True, 'import numpy as np\n'), ((7325, 7345), 'numpy.log10', 'np.log10', (['L_critical'], {}), '(L_critical)\n', (7333, 7345), True, 'import numpy as np\n'), ((7630, 7649), 'numpy.log10', 'np.log10', (['theta_jet'], {}), '(theta_jet)\n', (7638, 7649), True, 'import numpy as np\n'), ((7650, 7674), 'numpy.log10', 'np.log10', (['theta_critical'], {}), '(theta_critical)\n', (7658, 7674), True, 'import numpy as np\n')]
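# A small worked example with the functions above, using illustrative GRB
# parameters (z = 1, Epeak = 200 keV, alpha = -1, beta = -2.3, and the
# Swift/BAT 15-150 keV band); the values are placeholders, not a published
# burst, and serve only to show the calling convention.
z0, ep0, alpha0, beta0 = 1.0, 200.0, -1.0, -2.3
print("d_L(z=1) in cm:", dl(z0))
print("energy k-correction:", k(ep0, z0, alpha0, beta0, 15, 150))
print("photon k-correction:", nk(ep0, z0, alpha0, beta0, 15, 150))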
'''
Created on Jun 1, 2011

@author: Peter
'''
import matplotlib.pyplot as plt

import pca

# Reduce the 2-D test set to one principal component and plot the
# original points against their reconstruction from that component.
dataMat = pca.loadDataSet('testSet.txt')
lowDMat, reconMat = pca.pca(dataMat, 1)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(dataMat[:,0], dataMat[:,1], marker='^', s=90)
ax.scatter(reconMat[:,0], reconMat[:,1], marker='o', s=50, c='red')
plt.show()
[ "matplotlib.pyplot.figure", "pca.pca", "pca.loadDataSet", "matplotlib.pyplot.show" ]
[((149, 179), 'pca.loadDataSet', 'pca.loadDataSet', (['"""testSet.txt"""'], {}), "('testSet.txt')\n", (164, 179), False, 'import pca\n'), ((201, 220), 'pca.pca', 'pca.pca', (['dataMat', '(1)'], {}), '(dataMat, 1)\n', (208, 220), False, 'import pca\n'), ((230, 242), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (240, 242), True, 'import matplotlib.pyplot as plt\n'), ((398, 408), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (406, 408), True, 'import matplotlib.pyplot as plt\n')]
""" Fixed Maximum Cost (FMC) baseline """ import logging from collections import defaultdict from typing import Tuple, List import time import numpy as np from pup.algorithms import privacy_helper from pup.algorithms.uniform_prior import cal_prob_dists_num_users_for_grid from pup.algorithms.util import get_linear_profit_fixed_cost from pup.common.datatypes import CheckinDataset from pup.common.enums import MethodType from pup.common.grid import Grid from pup.config import Config from pup.experiment import exp_util from pup.io import dataio logger = logging.getLogger(__name__) def exe_fixed_maximum_cost(data: CheckinDataset, grid: Grid) -> Tuple[List[List], float, np.ndarray, float]: """ Execute Fixed Maximum Cost method Parameters ---------- data check-in dataset grid the grid for experiment evaluation Returns ------- typing.List[typing.List] the matrix of probability distributions of the number of users for each grid cell total_cost: float total cost spent on buying data costs: numpy.ndarray costs of each region exe_time: float execution time """ s_time = time.time() logger.info('Starting FIXED MAXIMUM COST method') # Load config price_from_noise_rate = Config.price_from_noise_func_rate std_from_noise_initial_value = Config.standard_deviation_from_noise_func_initial_value std_from_noise_rate = Config.standard_deviation_from_noise_func_rate final_probs_filter_type = Config.final_probs_filter_type budget_per_region = get_fmc_budget() # START FMC --------------------- logger.info('Budget = {}'.format(budget_per_region)) noisy_data, remain_budget_per_region = buy_data_with_budget( budget_per_region, data, price_from_noise_rate, std_from_noise_initial_value, std_from_noise_rate) logger.info('Prepare {} noisy data point with normal random variables'.format(len(noisy_data))) num_regions = np.prod(grid.get_shape()) cost = budget_per_region - remain_budget_per_region costs = np.zeros(grid.get_shape()) costs.fill(cost) total_cost = cost * num_regions logger.info('Total cost spent on buying data = {}'.format(total_cost)) # Run experiment on the entire grid. One can run on single region by using 1x1 grid # Calculate the probability distributions of the number of each grid cell dists_of_num_users = cal_prob_dists_num_users_for_grid(grid, noisy_data, final_probs_filter_type) exe_time = time.time() - s_time return dists_of_num_users, total_cost, costs, exe_time # END FMC --------------------- def get_fmc_budget() -> float: """ Get budget for FMC - First, get based on given percentage - Second, get based on probing costs if percentage is not given - Third, get based on a fixed budget if others are not available Returns ------- float budget """ fmc_budget_from_cost_percentage = Config.fmc_budget_from_cost_percentage if fmc_budget_from_cost_percentage <= 0: # we will not get budget from percentage of the fixed cost fmc_budget_from_probing = Config.fmc_budget_from_probing if fmc_budget_from_probing: # we get budget from costs of SIP costs = dataio.read_costs(MethodType.PROBING) budget = int(np.average(costs)) + 1 else: # we used a fixed budget budget = Config.budget # prepare budget else: # get budget from the percentage of the fixed cost budget = get_linear_profit_fixed_cost() * fmc_budget_from_cost_percentage / 100.0 return budget def buy_data_with_budget(budget: float, data: CheckinDataset, price_from_noise_rate: float, std_from_noise_initial_value: float, std_from_noise_rate: float) -> Tuple[CheckinDataset, float]: """ Buy data points with a given total budget. 
Each data point would be given the same amount of budget. For a particular data point, the budget may be more than enough to buy it without perturbation. So there can be some budget left. This budget is not used for other data points. Parameters ---------- budget maximum budget data the dataset to buy data from price_from_noise_rate rate of price from noise exponential function std_from_noise_initial_value initial value of standard deviation from noise exponential function, i.e. when input values is approx 0 std_from_noise_rate rate of standard deviation from noise exponential function Returns ------- noisy_data: CheckinDataset noisy data bought remain_budget: float remain budget """ # calculate the price to pay for each data point num_data_points = exp_util.cal_num_data_points(data) price_per_data_point = budget / float(num_data_points) logger.info('Price per data point = {}'.format(price_per_data_point)) # buy noisy data remain_budget = 0 noisy_data = defaultdict(defaultdict) for user, checkins in data.items(): for c_id, c in checkins.items(): noisy_c = privacy_helper.buy_data_at_price( c, price_per_data_point, price_from_noise_rate, std_from_noise_initial_value, std_from_noise_rate) noisy_data[user][c_id] = noisy_c if c.combined_privacy_value < price_per_data_point: remain_budget += price_per_data_point - c.combined_privacy_value logger.info('Remain budget for region = {}'.format(remain_budget)) return noisy_data, remain_budget
[ "numpy.average", "pup.io.dataio.read_costs", "pup.algorithms.util.get_linear_profit_fixed_cost", "pup.algorithms.privacy_helper.buy_data_at_price", "pup.algorithms.uniform_prior.cal_prob_dists_num_users_for_grid", "time.time", "pup.experiment.exp_util.cal_num_data_points", "collections.defaultdict", "logging.getLogger" ]
[((559, 586), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (576, 586), False, 'import logging\n'), ((1185, 1196), 'time.time', 'time.time', ([], {}), '()\n', (1194, 1196), False, 'import time\n'), ((2441, 2517), 'pup.algorithms.uniform_prior.cal_prob_dists_num_users_for_grid', 'cal_prob_dists_num_users_for_grid', (['grid', 'noisy_data', 'final_probs_filter_type'], {}), '(grid, noisy_data, final_probs_filter_type)\n', (2474, 2517), False, 'from pup.algorithms.uniform_prior import cal_prob_dists_num_users_for_grid\n'), ((4879, 4913), 'pup.experiment.exp_util.cal_num_data_points', 'exp_util.cal_num_data_points', (['data'], {}), '(data)\n', (4907, 4913), False, 'from pup.experiment import exp_util\n'), ((5108, 5132), 'collections.defaultdict', 'defaultdict', (['defaultdict'], {}), '(defaultdict)\n', (5119, 5132), False, 'from collections import defaultdict\n'), ((2534, 2545), 'time.time', 'time.time', ([], {}), '()\n', (2543, 2545), False, 'import time\n'), ((3307, 3344), 'pup.io.dataio.read_costs', 'dataio.read_costs', (['MethodType.PROBING'], {}), '(MethodType.PROBING)\n', (3324, 3344), False, 'from pup.io import dataio\n'), ((5236, 5371), 'pup.algorithms.privacy_helper.buy_data_at_price', 'privacy_helper.buy_data_at_price', (['c', 'price_per_data_point', 'price_from_noise_rate', 'std_from_noise_initial_value', 'std_from_noise_rate'], {}), '(c, price_per_data_point,\n price_from_noise_rate, std_from_noise_initial_value, std_from_noise_rate)\n', (5268, 5371), False, 'from pup.algorithms import privacy_helper\n'), ((3583, 3613), 'pup.algorithms.util.get_linear_profit_fixed_cost', 'get_linear_profit_fixed_cost', ([], {}), '()\n', (3611, 3613), False, 'from pup.algorithms.util import get_linear_profit_fixed_cost\n'), ((3370, 3387), 'numpy.average', 'np.average', (['costs'], {}), '(costs)\n', (3380, 3387), True, 'import numpy as np\n')]
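# A toy numeric illustration of the per-point budgeting described in
# buy_data_with_budget(): every point receives budget / num_points, and any
# surplus over a point's privacy price is left unused rather than
# redistributed. The prices are illustrative.
budget, prices = 10.0, [1.5, 4.0, 2.0]
per_point = budget / len(prices)                         # ~3.33 per point
leftover = sum(max(0.0, per_point - p) for p in prices)  # ~1.83 + 0 + 1.33
print(per_point, leftover)                               # ~3.33 ~3.17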
import requests from pprint import pprint from requests.auth import HTTPBasicAuth from getpass import getpass from urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) if __name__ == "__main__": username = "k19ran" password = getpass() url = "https://api.github.com/user" #http_headers = {"accept":"application/vnd.github.v3+json"} #response = requests.get(url,headers=http_headers,auth=(username,password),verify=False) response = requests.get(url,auth=(username,password)) response = response.json() print() print(response) print()
[ "requests.packages.urllib3.disable_warnings", "requests.get", "getpass.getpass" ]
[((166, 241), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {'category': 'InsecureRequestWarning'}), '(category=InsecureRequestWarning)\n', (208, 241), False, 'import requests\n'), ((311, 320), 'getpass.getpass', 'getpass', ([], {}), '()\n', (318, 320), False, 'from getpass import getpass\n'), ((533, 577), 'requests.get', 'requests.get', (['url'], {'auth': '(username, password)'}), '(url, auth=(username, password))\n', (545, 577), False, 'import requests\n')]
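# Note: GitHub removed username/password authentication for its REST API in
# November 2020, so the script above now needs a personal access token in
# place of the account password. A minimal hedged variant (the token value
# is a placeholder; never hard-code real credentials):
import requests

token = "<personal-access-token>"
resp = requests.get("https://api.github.com/user", auth=("k19ran", token))
print(resp.status_code, resp.json().get("login"))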