code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from loris.parameters.api import AbstractParameter
from unittest.mock import Mock
import pytest
class ProperImpl(AbstractParameter):
def __init__(self, uri_slice, enabled_features):
super(ProperImpl, self).__init__(uri_slice, enabled_features)
@property
def canonical(self):
return "canonical version"
class TestAbstractParameter(object):
def test_canonical_required(self):
class WithoutCanonical(AbstractParameter):
def __init__(self, uri_slice, enabled_features):
super(WithoutCanonical, self).__init__(uri_slice, enabled_features)
with pytest.raises(TypeError) as type_error:
w = WithoutCanonical("abc", (), Mock())
assert "Can't instantiate abstract class" in str(type_error.value)
def test_init_required(self):
class WithoutInit(AbstractParameter):
@property
def canonical(self):
return "canonical version"
with pytest.raises(TypeError) as type_error:
w = WithoutInit("abc", (), Mock())
assert "Can't instantiate abstract class" in str(type_error.value)
def test_init_sig_required(self):
class WrongInitSig(AbstractParameter):
def __init__(self):
super(WrongInitSig, self).__init__()
@property
def canonical(self):
return "canonical version"
with pytest.raises(TypeError) as type_error:
WrongInitSig()
assert "__init__() missing 2 required positional" in str(type_error.value)
def test_proper_impl(self):
ProperImpl("foo", ())
def test_stuff_is_defined(self):
p = ProperImpl("foo", ())
assert p.uri_slice == "foo"
assert p.enabled_features == ()
| [
"pytest.raises",
"unittest.mock.Mock"
] | [((621, 645), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (634, 645), False, 'import pytest\n'), ((981, 1005), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (994, 1005), False, 'import pytest\n'), ((1427, 1451), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1440, 1451), False, 'import pytest\n'), ((705, 711), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (709, 711), False, 'from unittest.mock import Mock\n'), ((1060, 1066), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1064, 1066), False, 'from unittest.mock import Mock\n')] |
from rdr_server.common.enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText
from rdr_server.model.base_model import BaseModel, ModelMixin, ModelEnum
class Site(ModelMixin, BaseModel):
__tablename__ = 'site'
siteId = Column('site_id', Integer, unique=True)
siteName = Column('site_name', String(255), nullable=False)
# The Google group for the site; this is a unique key used externally.
googleGroup = Column('google_group', String(255), nullable=False, unique=True)
mayolinkClientNumber = Column('mayolink_client_number', Integer)
organizationId = Column('organization_id', Integer,
ForeignKey('organization.organization_id'))
# Deprecated; this is being replaced by organizationId.
hpoId = Column('hpo_id', Integer, ForeignKey('hpo.hpo_id'))
siteStatus = Column('site_status', ModelEnum(SiteStatus))
enrollingStatus = Column('enrolling_status', ModelEnum(EnrollingStatus))
digitalSchedulingStatus = Column('digital_scheduling_status', ModelEnum(DigitalSchedulingStatus))
scheduleInstructions = Column('schedule_instructions', String(2048))
scheduleInstructions_ES = Column('schedule_instructions_es', String(2048))
launchDate = Column('launch_date', Date)
notes = Column('notes', UnicodeText)
notes_ES = Column('notes_es', UnicodeText)
latitude = Column('latitude', Float)
longitude = Column('longitude', Float)
timeZoneId = Column('time_zone_id', String(1024))
directions = Column('directions', UnicodeText)
physicalLocationName = Column('physical_location_name', String(1024))
address1 = Column('address_1', String(1024))
address2 = Column('address_2', String(1024))
city = Column('city', String(255))
state = Column('state', String(2))
zipCode = Column('zip_code', String(10))
phoneNumber = Column('phone_number', String(80))
adminEmails = Column('admin_emails', String(4096))
link = Column('link', String(255))
isObsolete = Column('is_obsolete', ModelEnum(ObsoleteStatus))
| [
"sqlalchemy.String",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column",
"rdr_server.model.base_model.ModelEnum"
] | [((342, 381), 'sqlalchemy.Column', 'Column', (['"""site_id"""', 'Integer'], {'unique': '(True)'}), "('site_id', Integer, unique=True)\n", (348, 381), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((631, 672), 'sqlalchemy.Column', 'Column', (['"""mayolink_client_number"""', 'Integer'], {}), "('mayolink_client_number', Integer)\n", (637, 672), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1336, 1363), 'sqlalchemy.Column', 'Column', (['"""launch_date"""', 'Date'], {}), "('launch_date', Date)\n", (1342, 1363), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1376, 1404), 'sqlalchemy.Column', 'Column', (['"""notes"""', 'UnicodeText'], {}), "('notes', UnicodeText)\n", (1382, 1404), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1420, 1451), 'sqlalchemy.Column', 'Column', (['"""notes_es"""', 'UnicodeText'], {}), "('notes_es', UnicodeText)\n", (1426, 1451), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1467, 1492), 'sqlalchemy.Column', 'Column', (['"""latitude"""', 'Float'], {}), "('latitude', Float)\n", (1473, 1492), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1509, 1535), 'sqlalchemy.Column', 'Column', (['"""longitude"""', 'Float'], {}), "('longitude', Float)\n", (1515, 1535), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1607, 1640), 'sqlalchemy.Column', 'Column', (['"""directions"""', 'UnicodeText'], {}), "('directions', UnicodeText)\n", (1613, 1640), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((417, 428), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (423, 428), False, 'from sqlalchemy import Column, Integer, String, Date, Float, 
ForeignKey, UnicodeText\n'), ((562, 573), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (568, 573), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((757, 799), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""organization.organization_id"""'], {}), "('organization.organization_id')\n", (767, 799), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((899, 923), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""hpo.hpo_id"""'], {}), "('hpo.hpo_id')\n", (909, 923), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((965, 986), 'rdr_server.model.base_model.ModelEnum', 'ModelEnum', (['SiteStatus'], {}), '(SiteStatus)\n', (974, 986), False, 'from rdr_server.model.base_model import BaseModel, ModelMixin, ModelEnum\n'), ((1037, 1063), 'rdr_server.model.base_model.ModelEnum', 'ModelEnum', (['EnrollingStatus'], {}), '(EnrollingStatus)\n', (1046, 1063), False, 'from rdr_server.model.base_model import BaseModel, ModelMixin, ModelEnum\n'), ((1131, 1165), 'rdr_server.model.base_model.ModelEnum', 'ModelEnum', (['DigitalSchedulingStatus'], {}), '(DigitalSchedulingStatus)\n', (1140, 1165), False, 'from rdr_server.model.base_model import BaseModel, ModelMixin, ModelEnum\n'), ((1226, 1238), 'sqlalchemy.String', 'String', (['(2048)'], {}), '(2048)\n', (1232, 1238), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1305, 1317), 'sqlalchemy.String', 'String', (['(2048)'], {}), '(2048)\n', (1311, 1317), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1576, 1588), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (1582, 1588), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1701, 1713), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (1707, 1713), False, 'from 
sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1750, 1762), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (1756, 1762), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1799, 1811), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (1805, 1811), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1839, 1850), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (1845, 1850), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1880, 1889), 'sqlalchemy.String', 'String', (['(2)'], {}), '(2)\n', (1886, 1889), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1924, 1934), 'sqlalchemy.String', 'String', (['(10)'], {}), '(10)\n', (1930, 1934), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((1977, 1987), 'sqlalchemy.String', 'String', (['(80)'], {}), '(80)\n', (1983, 1987), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((2030, 2042), 'sqlalchemy.String', 'String', (['(4096)'], {}), '(4096)\n', (2036, 2042), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((2070, 2081), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (2076, 2081), False, 'from sqlalchemy import Column, Integer, String, Date, Float, ForeignKey, UnicodeText\n'), ((2122, 2147), 'rdr_server.model.base_model.ModelEnum', 'ModelEnum', (['ObsoleteStatus'], {}), '(ObsoleteStatus)\n', (2131, 2147), False, 'from rdr_server.model.base_model import BaseModel, ModelMixin, ModelEnum\n')] |
# _*_ coding: utf-8 _*_
"""
password-validate.utils
-----------------------
This module provides utility functions that are used within password_validate
that are also useful for external consumption.
"""
import hashlib
from os.path import abspath, dirname, join
DICTIONARY_LOC = "dictionary_files"
DICTIONARY = "dictionary.txt"
PHPBB = "phpbb.txt"
ROCKYOU = "rockyou.txt"
DICTS = [
DICTIONARY,
PHPBB,
]
def hashit(password):
"""
Hashes any string sent to it with sha512.
:param password: String to hash
:return: String with a hexdigest of the hashed string.
"""
hash_object = hashlib.sha512()
hash_object.update(password.encode("utf-8"))
return hash_object.hexdigest()
def not_in_dict(password):
"""
Parses several dictionary files to see if the provided password is included
within them.
If the dictionary file contains any words that are under five characters in
length, they are skipped. If the string is found, this is considered to be
a failed check and therefore not a valid password.
:param password: String to check
:return: Boolean, True if not found, False if it is
"""
for passwd_file in DICTS:
dict_words = read_file(passwd_file)
for word in dict_words:
if "dictionary.txt" in passwd_file and len(word) < 5:
# skip common words under 5 characters long
continue
if password == word:
return False
return True
def read_file(filename):
"""
Helper function that simple iterates over the dictionary files.
:param filename: String with the path and filename of the dictionary
:return: String generator with each line of the dictionary
"""
file_loc = dirname(abspath(__file__))
data_loc = join(file_loc, DICTIONARY_LOC, filename)
with open(data_loc, "rb") as file:
for line in file:
try:
yield line.decode("utf-8").rstrip()
except UnicodeDecodeError:
# LOL, like my hack around this one??
continue
| [
"os.path.abspath",
"os.path.join",
"hashlib.sha512"
] | [((613, 629), 'hashlib.sha512', 'hashlib.sha512', ([], {}), '()\n', (627, 629), False, 'import hashlib\n'), ((1803, 1843), 'os.path.join', 'join', (['file_loc', 'DICTIONARY_LOC', 'filename'], {}), '(file_loc, DICTIONARY_LOC, filename)\n', (1807, 1843), False, 'from os.path import abspath, dirname, join\n'), ((1769, 1786), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (1776, 1786), False, 'from os.path import abspath, dirname, join\n')] |
"""
Complex Valued Neural Layers From Scratch
Programmed by <NAME>
* MIT Licence
* 2022-02-15 Last Update
"""
from torch import nn
import torch
##__________________________________Complex Linear Layer __________________________________________
class CLinear(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(CLinear, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.re_linear = nn.Linear(self.in_channels, self.out_channels, **kwargs)
self.im_linear = nn.Linear(self.in_channels, self.out_channels, **kwargs)
nn.init.xavier_uniform_(self.re_linear.weight)
nn.init.xavier_uniform_(self.im_linear.weight)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.re_linear(x_re) - self.im_linear(x_im)
out_im = self.re_linear(x_im) + self.im_linear(x_re)
out = torch.stack([out_re, out_im], -1)
return out
##______________________________________Complex Convolution 2d_____________________________________________
class CConv2d(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(CConv2d, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.re_conv = nn.Conv2d(self.in_channels, self.out_channels, **kwargs)
self.im_conv = nn.Conv2d(self.in_channels, self.out_channels, **kwargs)
nn.init.xavier_uniform_(self.re_conv.weight)
nn.init.xavier_uniform_(self.im_conv.weight)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.re_conv(x_re) - self.im_conv(x_im)
out_im = self.re_conv(x_im) + self.im_conv(x_re)
out = torch.stack([out_re, out_im], -1)
return out
##___________________________________Complex Convolution Transpose 2d_______________________________________________
class CConvTrans2d(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(CConvTrans2d, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.re_Tconv = nn.ConvTranspose2d(self.in_channels, self.out_channels, **kwargs)
self.im_Tconv = nn.ConvTranspose2d(self.in_channels, self.out_channels, **kwargs)
nn.init.xavier_uniform_(self.re_Tconv.weight)
nn.init.xavier_uniform_(self.im_Tconv.weight)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.re_Tconv(x_re) - self.im_Tconv(x_im)
out_im = self.re_Tconv(x_im) + self.im_Tconv(x_re)
out = torch.stack([out_re, out_im], -1)
return out
##___________________________Complex BatchNorm Layer____________________________________
class CBatchnorm(nn.Module):
def __init__(self, in_channels):
super(CBatchnorm, self).__init__()
self.in_channels = in_channels
self.re_batch = nn.BatchNorm2d(in_channels)
self.im_batch = nn.BatchNorm2d(in_channels)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.re_batch(x_re)
out_im = self.re_batch(x_im)
out = torch.stack([out_re, out_im], -1)
return out
##_______________________Complex Convolutional Block_______________________________________
class CconvBlock(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(CconvBlock, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.CConv2d = CConv2d(self.in_channels, self.out_channels, **kwargs)
self.CBatchnorm = CBatchnorm(self.out_channels)
self.leaky_relu = nn.LeakyReLU()
def forward(self, x):
conved = self.CConv2d(x)
normed = self.CBatchnorm(conved)
activated = self.leaky_relu(normed)
return activated
##__________________________________Complex Convolutional Transpose Block________________________________________
class CConvTransBlock(nn.Module):
def __init__(self, in_channels, out_channels, last_layer=False, **kwargs):
super(CConvTransBlock, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.last_layer = last_layer
self.CConvTrans2d = CConvTrans2d(self.in_channels, self.out_channels, **kwargs)
self.CBatchnorm = CBatchnorm(self.out_channels)
self.leaky_relu = nn.LeakyReLU()
def forward(self, x):
conved = self.CConvTrans2d(x)
if not self.last_layer:
normed = self.CBatchnorm(conved)
activated = self.leaky_relu(normed)
return activated
else:
m_phase = conved/(torch.abs(conved)+1e-8)
m_mag = torch.tanh(torch.abs(conved))
out = m_phase * m_mag
return out
##______________________Complex LSTM Layer_________________________________________________
class CLSTM(nn.Module):
def __init__(self, in_channels, hidden_size, num_layers, **kwargs):
super(CLSTM, self).__init__()
self.in_channels = in_channels
self.hidden_size = hidden_size
self.num_layers = num_layers
self.re_LSTM = nn.LSTM(self.in_channels, self.hidden_size, self.num_layers , **kwargs)
self.im_LSTM = nn.LSTM(self.in_channels, self.hidden_size, self.num_layers, **kwargs)
def forward(self, x, h0, c0):
x_re = x[..., 0]
x_im = x[..., 1]
out_re1, (hn_re1, cn_re1) = self.re_LSTM(x_re, (h0[...,0], c0[...,0]))
out_re2, (hn_re2, cn_re2) = self.im_LSTM(x_im, (h0[...,1], c0[...,1]))
out_re = out_re1 - out_re2
hn_re = hn_re1 - hn_re2
cn_re = cn_re1 - cn_re2
out_im1, (hn_im1, cn_im1) = self.re_LSTM(x_re, (h0[...,1], c0[...,1]))
out_im2, (hn_im2, cn_im2) = self.im_LSTM(x_im, (h0[...,0], c0[...,0]))
out_im = out_im1 + out_im2
hn_im = hn_im1 + hn_im2
cn_im = cn_im1 + cn_im2
out = torch.stack([out_re, out_im], -1)
hn = torch.stack([hn_re, hn_im], -1)
cn = torch.stack([cn_re, cn_im], -1)
return out, (hn, cn)
##_______________________________Complex MaxPooling 2d Layer___________________
class CMaxPool2d(nn.Module):
def __init__(self, kernel_size, **kwargs):
super(CMaxPool2d, self).__init__()
self.kernel_size = kernel_size
self.CMax_re = nn.MaxPool2d(self.kernel_size, **kwargs)
self.CMax_im = nn.MaxPool2d(self.kernel_size, **kwargs)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.CMax_re(x_re)
out_im = self.CMax_im(x_im)
out = torch.stack([out_re, out_im], -1)
return out
##________________________________Complex Average Pooling 2d Layer_____________________________
class CAvgPool2d(nn.Module):
def __init__(self, kernel_size, **kwargs):
super(CAvgPool2d, self).__init__()
self.kernel_size = kernel_size
self.CMax_re = nn.AvgPool2d(self.kernel_size, **kwargs)
self.CMax_im = nn.AvgPool2d(self.kernel_size, **kwargs)
def forward(self, x):
x_re = x[..., 0]
x_im = x[..., 1]
out_re = self.CMax_re(x_re)
out_im = self.CMax_im(x_im)
out = torch.stack([out_re, out_im], -1)
return out
| [
"torch.nn.BatchNorm2d",
"torch.abs",
"torch.nn.LeakyReLU",
"torch.nn.init.xavier_uniform_",
"torch.nn.LSTM",
"torch.stack",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"torch.nn.Linear",
"torch.nn.AvgPool2d",
"torch.nn.ConvTranspose2d"
] | [((475, 531), 'torch.nn.Linear', 'nn.Linear', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (484, 531), False, 'from torch import nn\n'), ((553, 609), 'torch.nn.Linear', 'nn.Linear', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (562, 609), False, 'from torch import nn\n'), ((615, 661), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.re_linear.weight'], {}), '(self.re_linear.weight)\n', (638, 661), False, 'from torch import nn\n'), ((666, 712), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.im_linear.weight'], {}), '(self.im_linear.weight)\n', (689, 712), False, 'from torch import nn\n'), ((910, 943), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (921, 943), False, 'import torch\n'), ((1291, 1347), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (1300, 1347), False, 'from torch import nn\n'), ((1367, 1423), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (1376, 1423), False, 'from torch import nn\n'), ((1429, 1473), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.re_conv.weight'], {}), '(self.re_conv.weight)\n', (1452, 1473), False, 'from torch import nn\n'), ((1478, 1522), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.im_conv.weight'], {}), '(self.im_conv.weight)\n', (1501, 1522), False, 'from torch import nn\n'), ((1710, 1743), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (1721, 1743), False, 'import torch\n'), ((2115, 2180), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (2133, 2180), False, 'from 
torch import nn\n'), ((2201, 2266), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['self.in_channels', 'self.out_channels'], {}), '(self.in_channels, self.out_channels, **kwargs)\n', (2219, 2266), False, 'from torch import nn\n'), ((2272, 2317), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.re_Tconv.weight'], {}), '(self.re_Tconv.weight)\n', (2295, 2317), False, 'from torch import nn\n'), ((2322, 2367), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.im_Tconv.weight'], {}), '(self.im_Tconv.weight)\n', (2345, 2367), False, 'from torch import nn\n'), ((2560, 2593), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (2571, 2593), False, 'import torch\n'), ((2883, 2910), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channels'], {}), '(in_channels)\n', (2897, 2910), False, 'from torch import nn\n'), ((2933, 2960), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channels'], {}), '(in_channels)\n', (2947, 2960), False, 'from torch import nn\n'), ((3120, 3153), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (3131, 3153), False, 'import torch\n'), ((3616, 3630), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (3628, 3630), False, 'from torch import nn\n'), ((4333, 4347), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (4345, 4347), False, 'from torch import nn\n'), ((5056, 5126), 'torch.nn.LSTM', 'nn.LSTM', (['self.in_channels', 'self.hidden_size', 'self.num_layers'], {}), '(self.in_channels, self.hidden_size, self.num_layers, **kwargs)\n', (5063, 5126), False, 'from torch import nn\n'), ((5147, 5217), 'torch.nn.LSTM', 'nn.LSTM', (['self.in_channels', 'self.hidden_size', 'self.num_layers'], {}), '(self.in_channels, self.hidden_size, self.num_layers, **kwargs)\n', (5154, 5217), False, 'from torch import nn\n'), ((5845, 5878), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', 
(5856, 5878), False, 'import torch\n'), ((5893, 5924), 'torch.stack', 'torch.stack', (['[hn_re, hn_im]', '(-1)'], {}), '([hn_re, hn_im], -1)\n', (5904, 5924), False, 'import torch\n'), ((5939, 5970), 'torch.stack', 'torch.stack', (['[cn_re, cn_im]', '(-1)'], {}), '([cn_re, cn_im], -1)\n', (5950, 5970), False, 'import torch\n'), ((6267, 6307), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['self.kernel_size'], {}), '(self.kernel_size, **kwargs)\n', (6279, 6307), False, 'from torch import nn\n'), ((6327, 6367), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['self.kernel_size'], {}), '(self.kernel_size, **kwargs)\n', (6339, 6367), False, 'from torch import nn\n'), ((6533, 6566), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (6544, 6566), False, 'import torch\n'), ((6856, 6896), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', (['self.kernel_size'], {}), '(self.kernel_size, **kwargs)\n', (6868, 6896), False, 'from torch import nn\n'), ((6916, 6956), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', (['self.kernel_size'], {}), '(self.kernel_size, **kwargs)\n', (6928, 6956), False, 'from torch import nn\n'), ((7122, 7155), 'torch.stack', 'torch.stack', (['[out_re, out_im]', '(-1)'], {}), '([out_re, out_im], -1)\n', (7133, 7155), False, 'import torch\n'), ((4638, 4655), 'torch.abs', 'torch.abs', (['conved'], {}), '(conved)\n', (4647, 4655), False, 'import torch\n'), ((4585, 4602), 'torch.abs', 'torch.abs', (['conved'], {}), '(conved)\n', (4594, 4602), False, 'import torch\n')] |
from simplelinkedin import LinkedIn
def run_script(settings):
with LinkedIn(
username=settings.get("LINKEDIN_USER"),
password=settings.get("LINKEDIN_PASSWORD"),
browser=settings.get("LINKEDIN_BROWSER"),
driver_path=settings.get("LINKEDIN_BROWSER_DRIVER"),
headless=bool(settings.get("LINKEDIN_BROWSER_HEADLESS")),
) as ln:
# all the steps manually
ln.login()
# ln.remove_sent_invitations(older_than_days=14)
ln.send_invitations(
max_invitation=max(ln.WEEKLY_MAX_INVITATION - ln.invitations_sent_last_week, 0),
min_mutual=10,
max_mutual=450,
preferred_users=["Quant"],
not_preferred_users=["Sportsman"],
view_profile=True,
)
ln.accept_invitations()
# OR
# run smart follow-unfollow method (without setting cron jobs) which essentially does the same thing as
# all the above steps
ln.smart_follow_unfollow(
users_preferred=settings.get("LINKEDIN_PREFERRED_USER") or [],
users_not_preferred=settings.get("LINKEDIN_NOT_PREFERRED_USER") or [],
)
# setting and un-setting cron
# set cron
ln.set_smart_cron(settings)
# remove existing cron jobs
ln.remove_cron_jobs(settings=settings)
if __name__ == "__main__":
import os
sett = {
"LINKEDIN_USER": os.getenv("LINKEDIN_USER"),
"LINKEDIN_PASSWORD": os.getenv("LINKEDIN_PASSWORD"),
"LINKEDIN_BROWSER": "Chrome",
"LINKEDIN_BROWSER_DRIVER": "/Users/dayhatt/workspace/drivers/chromedriver",
"LINKEDIN_BROWSER_HEADLESS": 0,
"LINKEDIN_BROWSER_CRON": 0,
"LINKEDIN_CRON_USER": "dayhatt",
"LINKEDIN_PREFERRED_USER": "./data/user_preferred.txt",
"LINKEDIN_NOT_PREFERRED_USER": "./data/user_not_preferred.txt",
}
run_script(settings=sett)
| [
"os.getenv"
] | [((1438, 1464), 'os.getenv', 'os.getenv', (['"""LINKEDIN_USER"""'], {}), "('LINKEDIN_USER')\n", (1447, 1464), False, 'import os\n'), ((1495, 1525), 'os.getenv', 'os.getenv', (['"""LINKEDIN_PASSWORD"""'], {}), "('LINKEDIN_PASSWORD')\n", (1504, 1525), False, 'import os\n')] |
from django.forms import ModelForm
from .models import bc_sector, bc_company, bc_company_code
from administrators.models import bc_admin_type_investiment
class SectorForm(ModelForm):
class Meta:
model = bc_sector
fields = ['name','description','is_active']
class CompanyForm(ModelForm):
class Meta:
model = bc_company
fields = ['name','identification','is_new_market','bc_sector']
def __init__(self, *args, **kwargs):
super(CompanyForm, self).__init__(*args, **kwargs)
self.fields['bc_sector'].label = 'Sector'
self.fields['bc_sector'].queryset = bc_sector.objects.filter(is_active=True)
class CompanyCodeForm(ModelForm):
class Meta:
model = bc_company_code
fields = ['name','bc_admin_type_investiment','bc_company']
def __init__(self, *args, **kwargs):
super(CompanyCodeForm, self).__init__(*args, **kwargs)
self.fields['bc_admin_type_investiment'].label = 'Type of Investiment'
self.fields['bc_admin_type_investiment'].queryset = bc_admin_type_investiment.objects.filter(is_active=True)
self.fields['bc_company'].label = 'Company'
self.fields['bc_company'].queryset = bc_company.objects.filter(is_active=True) | [
"administrators.models.bc_admin_type_investiment.objects.filter"
] | [((1055, 1111), 'administrators.models.bc_admin_type_investiment.objects.filter', 'bc_admin_type_investiment.objects.filter', ([], {'is_active': '(True)'}), '(is_active=True)\n', (1095, 1111), False, 'from administrators.models import bc_admin_type_investiment\n')] |
import pymongo
from bson.json_util import loads, dumps
from bson import json_util
import csv
import sys
import uuid
import os
import itertools
from faker import Faker
from collections import defaultdict
import json
import datetime
from deepmerge import Merger
import random
import re
############################################################################################################
# This is a utility that takes in the model name. The model file name has a suffix that indicates
# how many records should be added to the list before returning the results.
# The objective is to send back the largest list possible without killing the mLocust
# worker CPU/RAM
############################################################################################################
stripProp = lambda str: re.sub(r'\s+', '', (str[0].upper() + str[1:].strip('()')))
fake = Faker()
# This serializer isn't needed anymore as long as we use faker.datetime.datetime instead of datetime.date
# I'll keep this serializer code in here in case someone in the future needs to use it for another data type that isn't native...
def ser(o):
"""Customize serialization of types that are not JSON native"""
if isinstance(o, datetime.date):
return str(o)
def procpath(path, counts, generator):
"""Recursively walk a path, generating a partial tree with just this path's random contents"""
stripped = stripProp(path[0])
if len(path) == 1:
# Base case. Generate a random value by running the Python expression in the text file
return { stripped: eval(generator) }
elif path[0].endswith('()'):
# Lists are slightly more complex. We generate a list of the length specified in the
# counts map. Note that what we pass recursively is _the exact same path_, but we strip
# off the ()s, which will cause us to hit the `else` block below on recursion.
return {
stripped: [ procpath([ path[0].strip('()') ] + path[1:], counts, generator)[stripped] for X in range(0, counts[stripped]) ]
}
else:
# Return an object, of the specified type, populated recursively.
return {
# stripped: {
stripped: procpath(path[1:], counts, generator)
# }
}
def zipmerge(the_merger, path, base, nxt):
"""Strategy for deepmerge that will zip merge two lists. Assumes lists of equal length."""
return [ the_merger.merge(base[i], nxt[i]) for i in range(0, len(base)) ]
def ID(key):
id_map[key] += 1
return key + str(id_map[key]+starting_id_minus_1)
# A deep merger using our custom list merge strategy.
merger = Merger([
(dict, "merge"),
(list, zipmerge)
], [ "override" ], [ "override" ])
# This field is used for an incremental field, e.g. ID. We can't really control this using mLocust so we'll always default to 0.
# Not every loader file may require this.
starting_id_minus_1 = 0
id_map = defaultdict(int)
def bulkFetch(model):
# from the model name, derive the template file name along with bulk insert count
# example model would be members.csv.100
arr = model.split('.')
template = "models/" + model
bulkCount = int(arr[2])
# instantiate a new list
l = []
for J in range(0, bulkCount): # iterate through the bulk insert count
# A dictionary that will provide consistent, random list lengths
counts = defaultdict(lambda: random.randint(1, 5))
data = {}
with open(template) as csvfile:
propreader = csv.reader(itertools.islice(csvfile, 1, None))
for row in propreader:
path = row[0].split('.')
# print(path)
partial = procpath(path, counts, row[3])
# print(partial);
# Merge partial trees.
data = merger.merge(data, partial)
# print(data);
# print(data);
# To JSON!
# Old debugging statements that used the custom deserializer. Non-issue if you use native bson/json data types
# print("%s\t%s\t%s"%(str(id), str(idempotencyKey), json.dumps(obj, default=ser)))
# print(json.dumps(obj, default=ser))
# Add the object to our list
l.append(data)
return l
| [
"itertools.islice",
"deepmerge.Merger",
"faker.Faker",
"collections.defaultdict",
"random.randint"
] | [((873, 880), 'faker.Faker', 'Faker', ([], {}), '()\n', (878, 880), False, 'from faker import Faker\n'), ((2648, 2719), 'deepmerge.Merger', 'Merger', (["[(dict, 'merge'), (list, zipmerge)]", "['override']", "['override']"], {}), "([(dict, 'merge'), (list, zipmerge)], ['override'], ['override'])\n", (2654, 2719), False, 'from deepmerge import Merger\n'), ((2941, 2957), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2952, 2957), False, 'from collections import defaultdict\n'), ((3427, 3447), 'random.randint', 'random.randint', (['(1)', '(5)'], {}), '(1, 5)\n', (3441, 3447), False, 'import random\n'), ((3543, 3577), 'itertools.islice', 'itertools.islice', (['csvfile', '(1)', 'None'], {}), '(csvfile, 1, None)\n', (3559, 3577), False, 'import itertools\n')] |
# Copyright (c) 2019-2020, RTE (https://www.rte-france.com)
# See AUTHORS.txt
# This Source Code Form is subject to the terms of the Apache License, version 2.0.
# If a copy of the Apache License, version 2.0 was not distributed with this file, you can obtain one at http://www.apache.org/licenses/LICENSE-2.0.
# SPDX-License-Identifier: Apache-2.0
# This file is part of hadar-simulator, a python adequacy library for everyone.
import sys
import click
from typing import List
import nbformat
import os
from nbconvert import RSTExporter
from nbconvert.preprocessors import ExecutePreprocessor
# Shared, module-level converter and executor reused for every notebook:
# RST output, 10-minute per-cell timeout, widget state kept for export.
exporter = RSTExporter()
ep = ExecutePreprocessor(timeout=600, kernel_name="python3", store_widget_state=True)
def open_nb(name: str, src: str) -> nbformat:
    """Load the notebook stored at ``<src>/<name>/<name>.ipynb``.

    :param name: notebook (directory and file) name
    :param src: root directory containing the notebook folders
    :return: the parsed notebook object (nbformat v4)
    """
    print("Reading...", end=" ")
    notebook_path = f"{src}/{name}/{name}.ipynb"
    notebook = nbformat.read(notebook_path, as_version=4)
    print("OK", end=" ")
    return notebook
def execute(nb: nbformat, name: str, src: str) -> nbformat:
    """Run every cell of *nb* inside its own notebook directory.

    Uses the module-level ``ep`` preprocessor, which stores widget state
    so it survives the later export step.

    :param nb: notebook object to execute
    :param name: notebook name (used to build the working directory)
    :param src: notebook source directory
    :return: the same notebook object, with outputs filled in
    """
    print("Executing...", end=" ")
    working_dir = "%s/%s/" % (src, name)
    ep.preprocess(nb, {"metadata": {"path": working_dir}})
    print("OK", end=" ")
    return nb
def copy_image(name: str, export: str, src: str):
    """Move every ``.png`` sitting next to the notebook into its export folder.

    :param name: notebook name (sub-directory under both roots)
    :param export: export root directory
    :param src: source root directory
    :return: None
    """
    source_dir = "%s/%s" % (src, name)
    target_dir = "%s/%s" % (export, name)
    for entry in os.listdir(source_dir):
        # Only image files are moved; everything else stays in place.
        if entry.split(".")[-1] in ["png"]:
            os.rename("%s/%s" % (source_dir, entry), "%s/%s" % (target_dir, entry))
def to_export(nb: nbformat, name: str, export: str):
    """Convert an executed notebook to reST and write it under ``export/<name>/``.

    :param nb: notebook with computed output state
    :param name: notebook name (also used for the output file name)
    :param export: export root directory
    :return: None
    """
    print("Exporting...", end=" ")
    rst_body, _ = exporter.from_notebook_node(nb)
    out_dir = "%s/%s" % (export, name)
    # Create the per-notebook output folder on first use.
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    with open("%s/%s.rst" % (out_dir, name), "w") as rst_file:
        rst_file.write(rst_body)
    print("OK", end=" ")
def list_notebook(src: str) -> List[str]:
    """Return the names of sub-directories of *src* that contain a notebook.

    A directory ``d`` qualifies when the file ``<src>/<d>/<d>.ipynb`` exists.

    :param src: root directory to scan
    :return: list of notebook names
    """
    names = []
    for entry in os.listdir(src):
        if os.path.isfile("{src}/{name}/{name}.ipynb".format(name=entry, src=src)):
            names.append(entry)
    return names
@click.command("Check and export notebooks")
@click.option("--src", nargs=1, help="Notebook directory")
@click.option("--check", nargs=1, help="check notebook according to result file given")
@click.option("--export", nargs=1, help="export notebooks to directory given")
def main(src: str, check: str, export: str):
    """CLI entry point: execute every notebook found under --src,
    optionally exporting each one to reST (with its images)."""
    for name in list_notebook(src):
        print("{:30}".format(name), ":", end="")
        nb = open_nb(name, src)
        nb = execute(nb, name, src)
        if check:
            pass  # TODO: result-file check is not implemented yet
        if export:
            to_export(nb, name, export)
            copy_image(name, export, src)
        print("")
if __name__ == "__main__":
    main()
| [
"os.path.exists",
"os.listdir",
"os.makedirs",
"click.option",
"os.rename",
"nbconvert.preprocessors.ExecutePreprocessor",
"click.command",
"nbconvert.RSTExporter"
] | [((613, 626), 'nbconvert.RSTExporter', 'RSTExporter', ([], {}), '()\n', (624, 626), False, 'from nbconvert import RSTExporter\n'), ((632, 717), 'nbconvert.preprocessors.ExecutePreprocessor', 'ExecutePreprocessor', ([], {'timeout': '(600)', 'kernel_name': '"""python3"""', 'store_widget_state': '(True)'}), "(timeout=600, kernel_name='python3', store_widget_state=True\n )\n", (651, 717), False, 'from nbconvert.preprocessors import ExecutePreprocessor\n'), ((2873, 2916), 'click.command', 'click.command', (['"""Check and export notebooks"""'], {}), "('Check and export notebooks')\n", (2886, 2916), False, 'import click\n'), ((2918, 2975), 'click.option', 'click.option', (['"""--src"""'], {'nargs': '(1)', 'help': '"""Notebook directory"""'}), "('--src', nargs=1, help='Notebook directory')\n", (2930, 2975), False, 'import click\n'), ((2977, 3068), 'click.option', 'click.option', (['"""--check"""'], {'nargs': '(1)', 'help': '"""check notebook according to result file given"""'}), "('--check', nargs=1, help=\n 'check notebook according to result file given')\n", (2989, 3068), False, 'import click\n'), ((3065, 3142), 'click.option', 'click.option', (['"""--export"""'], {'nargs': '(1)', 'help': '"""export notebooks to directory given"""'}), "('--export', nargs=1, help='export notebooks to directory given')\n", (3077, 3142), False, 'import click\n'), ((2724, 2739), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (2734, 2739), False, 'import os\n'), ((2024, 2078), 'os.rename', 'os.rename', (["('%s/%s' % (src, img))", "('%s/%s' % (dest, img))"], {}), "('%s/%s' % (src, img), '%s/%s' % (dest, img))\n", (2033, 2078), False, 'import os\n'), ((2447, 2467), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2461, 2467), False, 'import os\n'), ((2477, 2494), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2488, 2494), False, 'import os\n'), ((1945, 1960), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (1955, 1960), False, 'import os\n')] |
import datetime
class Timerange:
    """A half-open time interval: [start, start + length)."""

    def __init__(self, start, length):
        # `start` is a datetime, `length` a timedelta.
        self.start = start
        self.length = length

    def range(self):
        """Return the (start, end) endpoints of the interval."""
        return (self.start, self.start + self.length)

    def __contains__(self, day):
        # Inside the interval when the offset from `start` is non-negative
        # and strictly smaller than the duration (end is exclusive).
        offset = day - self.start
        return datetime.timedelta(seconds=0) <= offset < self.length

    def overlaps(self, other):
        """True when the two intervals share at least one instant."""
        # Normalise so `self` starts first, then check whether the later
        # interval begins before the earlier one ends.
        if other.start < self.start:
            return other.overlaps(self)
        assert(self.start <= other.start)
        return other.start < self.start + self.length

    def __repr__(self):
        return f'Timerange({self.start!r}, {self.length!r})'

    def __str__(self):
        return ' - '.join(t.strftime('%H:%M') for t in self.range())

    def __eq__(self, other):
        return (self.start, self.length) == (other.start, other.length)

    def __ne__(self, other):
        return not (self == other)
def between(start, end):
    """Build a Timerange from two (hour, minute) pairs on the epoch day.

    :param start: (hour, minute) pair for the beginning of the range
    :param end: (hour, minute) pair for the end; must be strictly later
    :return: a Timerange spanning the two instants
    :raises ValueError: if a pair is malformed or the range is not positive
    """
    # Validate explicitly instead of with ``assert``: asserts are stripped
    # when Python runs with -O, which would silently skip these checks.
    if len(start) != 2 or len(end) != 2:
        raise ValueError('start and end must be (hour, minute) pairs')
    start_hour, start_minute = start
    end_hour, end_minute = end
    start_obj = datetime.datetime(1970, 1, 1, start_hour, start_minute)
    end_obj = datetime.datetime(1970, 1, 1, end_hour, end_minute)
    if end_obj - start_obj <= datetime.timedelta(seconds = 0):
        raise ValueError('end must be strictly after start')
    return Timerange(start_obj, end_obj - start_obj)
| [
"datetime.datetime",
"datetime.timedelta"
] | [((998, 1053), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)', 'start_hour', 'start_minute'], {}), '(1970, 1, 1, start_hour, start_minute)\n', (1015, 1053), False, 'import datetime\n'), ((1065, 1116), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)', 'end_hour', 'end_minute'], {}), '(1970, 1, 1, end_hour, end_minute)\n', (1082, 1116), False, 'import datetime\n'), ((1147, 1176), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(0)'}), '(seconds=0)\n', (1165, 1176), False, 'import datetime\n'), ((260, 289), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(0)'}), '(seconds=0)\n', (278, 289), False, 'import datetime\n')] |
#! /usr/bin/env python3
import logging, sys, os
from . import main
# Name the logger after the file when run as a script, after the module
# path when imported.
logger = logging.getLogger(__name__ if not __name__ == '__main__' else os.path.basename(__file__))
if __name__ == '__main__':
    # Default root-logger configuration; delegate the exit code to main().
    logging.basicConfig()
    sys.exit(main())
| [
"logging.basicConfig",
"os.path.basename"
] | [((200, 221), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (219, 221), False, 'import logging, sys, os\n'), ((140, 166), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (156, 166), False, 'import logging, sys, os\n')] |
# Insertion examples
print('插入'*15)
x = [1,2,3]
print(x)
x = x+ [4]
x.append(5)
print(x)
x.insert(3,'w')
x.extend(['a','b'])
print(x*3)
# Deletion examples
print("删除"*15)
y = ["a","b","c","d",'e','f']
del y[2]
print(y)
y.pop(0)
print(y)
y.remove('f')
print(y)
# Element access and counting
print("列表元素访问与计数"*5)
x =[1,2,3,3,4,5]
print(x.count(3),x.index(2))
# List sorting
print("列表排序"*10)
x = [1,2,4,5,6,34,22,55,22,11,24,56,78]
import random as r
r.shuffle(x)
print(x)
x.reverse()
print("reverse",x)
x.sort(reverse = True)
print('sort ',x)
# The built-in sorted()/reversed() return new objects and leave the
# original list unmodified (results are discarded here on purpose).
sorted(x)
reversed(x)
# Zipping two lists together
print("打包"*10)
a = [1,2,3]
b = [4,5,6]
print(list(zip(a,b)))
# Enumeration
print("枚举"*10)
for item in enumerate('abcdef'):
    print(item)
# Three ways of iterating over a list
print("遍历列表的三种方式"*10)
a = ['a','b','c','d','e','f']
for i in a:
    print(i)
for i in range(len(a)):
    print(i,a[i])
for i,ele in enumerate(a):
    print(i,ele)
| [
"random.shuffle"
] | [((394, 406), 'random.shuffle', 'r.shuffle', (['x'], {}), '(x)\n', (403, 406), True, 'import random as r\n')] |
import aiohttp
class SmarwiControl:
    """Control class for a comma-separated list of SMARWI hosts."""

    def __init__(self, hosts):
        """Initialize from a comma-separated host string (whitespace is stripped)."""
        self.hosts = [x.strip() for x in hosts.split(',')]
        # Human-readable title: the first DNS label of every host.
        self.title = ', '.join([x.split('.')[0] for x in self.hosts])

    async def authenticate(self) -> bool:
        """Test if we can authenticate with the host.

        Returns True when every host answers its status endpoint.
        """
        try:
            for host in self.hosts:
                ctl = SmarwiControlItem(host)
                await ctl.get_status()
        except Exception:
            # Fixed: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; only real errors mean failure.
            return False
        return True

    def list(self) -> list:
        """Return one SmarwiControlItem per configured host."""
        return [SmarwiControlItem(host) for host in self.hosts]
class SmarwiControlItem:
    """HTTP client for a single SMARWI device."""

    def __init__(self, host):
        self.host = host
        # Short display name: the first DNS label of the host.
        self.name = host.split('.')[0]

    async def __request(self, path):
        # Issue one GET against the device; non-200 responses are errors.
        async with aiohttp.ClientSession() as session:
            async with session.get(f"http://{self.host}/{path}") as resp:
                if resp.status != 200:
                    raise ValueError(f"Request failed with {resp.status}/{resp.reason}")
                return await resp.text()

    async def open(self):
        """Open the window fully."""
        await self.__request("cmd/open/100")

    async def set_position(self, pos:int):
        """Open to *pos* percent; positions of 1 or below close instead."""
        if pos <= 1:
            await self.close()
        else:
            await self.__request("cmd/open/{}".format(pos))

    async def close(self):
        """Close the window."""
        await self.__request("cmd/close")

    async def get_status(self):
        """Fetch the status endpoint and parse it into a key/value dict."""
        raw = await self.__request("statusn")
        pairs = (line.split(':', maxsplit=1) for line in raw.split('\n'))
        return {pair[0]: pair[1] for pair in pairs}
| [
"aiohttp.ClientSession"
] | [((920, 943), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (941, 943), False, 'import aiohttp\n')] |
from PyQt5 import QtWidgets, QtCore, QtPrintSupport
from PyQt5.QtCore import QDate, QTime, Qt, QTimer, QRectF
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtPrintSupport import QPrinter
from database import MyCursor
from PyQt5 import QtGui
from datetime import datetime, timedelta
from annulation import Annulation, RemEntry
from ops import Ui_zd
#GLOBALS
# Tracks which confirmation dialog is pending: 0 none, 1 remark, 2 annulation.
SWITCH = 0


class AddOps(QWidget, Ui_zd):
    """Frameless operations window: entry, listing and printing of
    debit/credit operations backed by the MySQL `Ops` table."""

    def __init__(self, parent=None):
        super(AddOps, self).__init__(parent)
        self.setWindowModality(Qt.ApplicationModal)
        self.setupUi(self)
        self.setWindowTitle('Opérations')
        self.m = MyCursor()
        # Frameless, translucent window — dragging is re-implemented below.
        flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
        self.setWindowFlags(flags)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
        self.setWindowIcon(QIcon('MJDB_ICON.ico'))
        # Resolve every widget created by the Designer form (Ui_zd).
        self.acc_list = self.findChild(QComboBox, 'acc_combo')
        self.sacc_list = self.findChild(QComboBox, 'acc_combo2')
        self.debit = self.findChild(QLineEdit, 'debit')
        self.credit = self.findChild(QLineEdit, 'credit')
        self.insert = self.findChild(QPushButton, 'new_op')
        self.con = self.findChild(QRadioButton, 'con')
        self.non_con = self.findChild(QRadioButton, 'non_con')
        self.w_debit = self.findChild(QLabel, 'w_debit')
        self.w_credit = self.findChild(QLabel, 'w_credit')
        self.date = self.findChild(QLabel, 'date')
        self.time = self.findChild(QLabel, 'time')
        self.ops_table = self.findChild(QTableWidget, 'ops_table')
        self.user = self.findChild(QLabel, 'user')
        self.wrong = self.findChild(QLabel, 'wrong')
        self.g_ballance = self.findChild(QLineEdit, 'b_sc')
        self.g_ballance_ = self.findChild(QLineEdit, 'b_g')
        self.general = self.findChild(QLineEdit, 'general_b')
        self.title = self.findChild(QFrame, 'title_bar')
        self.colse_btn = self.findChild(QPushButton, 'close_btn')
        self.minimize = self.findChild(QPushButton, 'mini')
        self.date_label = self.findChild(QLabel, 'today')
        self.annulation = self.findChild(QCheckBox, 'annulation')
        # Wire up window controls and actions.
        self.insert.clicked.connect(self.add_ops)
        self.colse_btn.clicked.connect(lambda: self.close())
        self.minimize.clicked.connect(lambda: self.showMinimized())
        self.remarque_btn.clicked.connect(self.handlePrint)
        self.ops_table.setSortingEnabled(False)
        now = QDate.currentDate()
        self.date.setText(now.toString(Qt.ISODate))
        # Clock label refreshed every second by displaytime().
        timer = QTimer(self)
        timer.timeout.connect(self.displaytime)
        timer.start(1000)
        self.m = MyCursor()
        self.m.mycursor.execute('SELECT name FROM Accounts')
        for x in self.m.mycursor.fetchall():
            self.acc_list.addItems(x)
        self.acc_list.currentIndexChanged.connect(self.repeat)
        self.sacc_list.currentIndexChanged.connect(self.repeat1)
        self.con.setChecked(True)
        self.general_()

        def moveWindow(event):
            # Drag support for the frameless window: move by the delta
            # since the press position recorded in mousePressEvent().
            if event.buttons() == Qt.LeftButton:
                self.move(self.pos() + event.globalPos() - self.dragPos)
                self.dragPos = event.globalPos()
                event.accept()
        self.title_bar.mouseMoveEvent = moveWindow
        self.debit.textChanged.connect(self.debitChanged)
        self.credit.textChanged.connect(self.creditChanged)
        # Clicking an amount field selects its content for quick overwrite.
        self.debit.mousePressEvent = lambda _: self.debit.selectAll()
        self.credit.mousePressEvent = lambda _: self.credit.selectAll()
        # CELL CLICKED
        self.ops_table.cellClicked.connect(self.cellOpClicked)
def handlePrint(self):
printer = QPrinter(QPrinter.HighResolution)
dialog = QtPrintSupport.QPrintDialog(printer, self)
if dialog.exec_() == QtPrintSupport.QPrintDialog.Accepted:
self.handlePaintRequest()
    def handlePaintRequest(self):
        """Paint the last selected table row as a receipt onto an A5
        landscape page, over the 'recus-01.jpg' background image."""
        rows = self.ops_table.selectionModel().selectedRows()
        # Keep only the last selected row index (loop body intentionally empty).
        for self.index in sorted(rows):
            pass
        if len(rows) > 0:
            # Grab the 7 cells of that row (twiN names come from the form).
            for row in range(self.index.row(), self.index.row() + 1):
                self.twi0 = self.ops_table.item(row, 0)
                self.twi8 = self.ops_table.item(row, 1)
                self.twi16 = self.ops_table.item(row, 2)
                self.twi32 = self.ops_table.item(row, 3)
                self.twi64 = self.ops_table.item(row, 4)
                self.twi128 = self.ops_table.item(row, 5)
                self.twi256 = self.ops_table.item(row, 6)
            datetime2 = datetime.now().date().strftime('%d/%m/%Y')
            datetime1 = datetime.now().strftime('%H:%M:%S')
            printer = QPrinter()
            printer.setOrientation(getattr(QPrinter, "Landscape"))
            printer.setPaperSize(QPrinter.A5)
            painter = QtGui.QPainter()
            # Start painter
            painter.begin(printer)
            # Grab a widget you want to print
            printer.setFullPage(True)
            # Draw grabbed pixmap
            painter.setRenderHint(QPainter.SmoothPixmapTransform)
            painter.drawImage(0, 0, QImage('recus-01.jpg'))
            painter.setPen(QColor('black'))
            font = QFont('Mongolian Baiti')
            font1 = QFont('Mongolian Baiti')
            font.setPointSize(15)
            font1.setPointSize(12)
            # Fixed pixel coordinates match the receipt template layout.
            painter.setFont(QFont(font1))
            painter.drawText(170, 50, datetime2)
            painter.drawText(170, 77, datetime1)
            painter.drawText(140, 102, self.user.text())
            painter.drawText(15, 320, self.acc_combo.currentText())
            painter.drawText(15, 375, self.twi0.text())
            painter.drawText(500, 330, self.twi8.text())
            painter.drawText(560, 372, self.twi32.text())
            painter.drawText(455, 417, self.twi64.text())
            painter.drawText(850, 417, self.twi16.text())
            painter.setFont(QFont(font))
            painter.drawText(405, 525, self.twi128.text())
            painter.drawText(705, 525, self.twi256.text())
            painter.end()
    def cellOpClicked(self):
        # Clicking any operation row jumps back to the first stacked page.
        self.stackedWidget.setCurrentIndex(0)
def general_(self):
self.m = MyCursor()
self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE type IN ('C', 'C / Annulation')")
result4 = self.m.mycursor.fetchone()[0]
self.o = MyCursor()
self.o.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE type IN ('C', 'C / Annulation')")
result5 = self.o.mycursor.fetchone()[0]
if result4 is None and result5 is None:
pass
else:
re = result4 + result5
formatted_re = "{:,.2f}".format(re)
if re < 0:
self.general.setStyleSheet("""QLineEdit{border-radius:10px;
color: rgb(255, 0, 0);}""")
self.general.setText(formatted_re + ' DH')
elif re > 0:
self.general.setStyleSheet("""QLineEdit{border-radius:10px;
color: rgb(0, 170, 0);}""")
self.general.setText(formatted_re + ' DH')
elif re == 0:
self.general.setStyleSheet("""QLineEdit{border-radius:10px;
color: rgb(0, 0, 0);}""")
self.general.setText(formatted_re + ' DH')
def debitChanged(self):
if self.debit.text() == '':
self.debit.setText('0')
self.debit.selectAll()
def creditChanged(self):
if self.credit.text() == '':
self.credit.setText('0')
self.credit.selectAll()
    def mousePressEvent(self, event):
        # Remember the press position so moveWindow() (see __init__) can
        # drag the frameless window relative to it.
        self.dragPos = event.globalPos()
def displaytime(self):
time = QTime.currentTime()
self.time.setText(time.toString(Qt.DefaultLocaleLongDate))
    def style_table(self):
        """Format the debit (col 5) and credit (col 6) cells as ',.2f DH'
        strings and colour them: red for negative credit, green for
        positive debit, black when either amount is zero.

        NOTE(review): the branches are exclusive — when credit < 0 the
        debit cell is left unformatted (and vice versa); confirm intended.
        """
        for x in range(self.ops_table.rowCount()):
            g = float(self.ops_table.item(x, 6).text())
            y = float(self.ops_table.item(x, 5).text())
            if g < 0:
                formatted_float_debit = "{:,.2f}".format(g)
                self.ops_table.setItem(x, 6, QtWidgets.QTableWidgetItem(str(formatted_float_debit + ' DH')))
                self.ops_table.item(x, 6).setForeground(QtGui.QColor(255, 0, 0))
            elif y > 0:
                formatted_float_debit1 = "{:,.2f}".format(y)
                self.ops_table.setItem(x, 5, QtWidgets.QTableWidgetItem(str(formatted_float_debit1 + ' DH')))
                self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 170, 0))
            elif g == 0 or y == 0:
                formatted_float_debit = "{:,.2f}".format(g)
                formatted_float_debit1 = "{:,.2f}".format(y)
                self.ops_table.setItem(x, 6, QtWidgets.QTableWidgetItem(str(formatted_float_debit + ' DH')))
                self.ops_table.item(x, 6).setForeground(QtGui.QColor(0, 0, 0))
                self.ops_table.setItem(x, 5, QtWidgets.QTableWidgetItem(str(formatted_float_debit1 + ' DH')))
                self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 0, 0))
    def fill_table(self):
        """Reload the operations table for the selected account/sub-account
        over the last 24 hours, then refresh the sub-account balance label.

        NOTE(review): the 'Tout' and placeholder branches run the exact
        same query; only the final branch filters on the sub-account.
        """
        if self.sacc_list.currentText() == 'Tout':
            # Window start: the displayed date minus one day.
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
                                                WHERE acc = %s and created between %s and %s""",
                                    (self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        elif self.sacc_list.currentText() == 'Selectionnez un sous-compte...':
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
                                                WHERE acc = %s and created between %s and %s""",
                                    (self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        else:
            # A concrete sub-account is selected: filter on opID as well.
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
                                                WHERE opID = %s and acc=%s and created between %s and %s""",
                                    (self.sacc_list.currentText(), self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        # Recompute the sub-account balance shown in g_ballance.
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(),
                                date - timedelta(1), datetime.now().date(),))
        result = self.m.mycursor.fetchone()[0]
        self.o = MyCursor()
        self.o.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(),
                                date - timedelta(1), datetime.now().date(),))
        result1 = self.o.mycursor.fetchone()[0]
        if result is None and result1 is None:
            l_sc = 0
            self.g_ballance.setStyleSheet("""border-radius:10px;
            color: rgb(0, 0, 0);""")
            formatted_float_debit = "{:,.2f}".format(l_sc)
            self.g_ballance.setText(str(formatted_float_debit) + ' DH')
        else:
            # NOTE(review): when l_sc == 0 neither branch runs, leaving a
            # stale value in the label — confirm intended.
            l_sc = result + result1
            if l_sc < 0:
                self.g_ballance.setStyleSheet("""border-radius:10px;
                color: rgb(255, 0, 0);""")
                formatted_float_debit = "{:,.2f}".format(l_sc)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
            elif l_sc > 0:
                self.g_ballance.setStyleSheet("""border-radius:10px;
                color: rgb(0, 170, 0);""")
                formatted_float_debit = "{:,.2f}".format(l_sc)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
    def repeat1(self):
        # Sub-account selection changed: redraw the operations table.
        self.fill_table()
    def soldeSCompte(self):
        """Compute and display the selected sub-account balance over the
        last day; returns the balance (self.fr12).

        NOTE(review): when both SUMs are NULL, ``self.fr12`` is never set
        and the final ``return`` raises AttributeError on first call.
        NOTE(review): the 'rbg(0, 0, 0)' stylesheet contains a typo
        ('rbg' instead of 'rgb'); left untouched here.
        """
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(), datetime.now().date() - timedelta(1), datetime.now().date(),))
        result = self.m.mycursor.fetchone()[0]
        self.m.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(), datetime.now().date() - timedelta(1), datetime.now().date(),))
        result1 = self.m.mycursor.fetchone()[0]
        if result is None and result1 is None:
            self.g_ballance.setStyleSheet("color: rbg(0, 0, 0);")
            fr1 = 0
            formatted_float_debit = "{:,.2f}".format(fr1)
            self.g_ballance.setText(str(formatted_float_debit) + ' DH')
        else:
            self.fr12 = result + result1
            if self.fr12 < 0:
                self.g_ballance.setStyleSheet("color: rgb(255, 0, 0);")
                formatted_float_debit = "{:,.2f}".format(self.fr12)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
            elif self.fr12 > 0:
                self.g_ballance.setStyleSheet("color: rgb(0, 170, 0);")
                formatted_float_debit = "{:,.2f}".format(self.fr12)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
        return self.fr12
    def soldeCompte(self):
        """Compute the account's total balance (debit + credit sums),
        persist it into ``Accounts.balance`` and return it.

        NOTE(review): when both SUMs are NULL nothing assigns ``self.fr``,
        so ``return self.fr`` raises AttributeError on first call.
        """
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE acc=%s", (self.acc_list.currentText(),))
        result2 = self.m.mycursor.fetchone()[0]
        self.m.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE acc=%s", (self.acc_list.currentText(),))
        result3 = self.m.mycursor.fetchone()[0]
        if result2 is None and result3 is None:
            pass
        else:
            self.fr = result2 + result3
            # Persist the recomputed balance on the account row.
            self.m.mycursor.execute("UPDATE Accounts SET balance=%s WHERE name=%s", (self.fr, self.acc_list.currentText(),))
            self.m.db.commit()
        return self.fr
    def keyPressEvent(self, event):
        """Enter/Return submits the current operation.

        NOTE(review): the first branch matches every Return press, so the
        ``elif ... SWITCH == 2`` branch is unreachable; ``r``/``switch_rem``
        are created and discarded — confirm intended.
        """
        r = RemEntry()
        switch_rem = r.switch
        if event.key() == Qt.Key_Return:
            self.add_ops()
        elif event.key() == Qt.Key_Return and SWITCH == 2:
            self.anul()
    def addfunc(self, sacc, user, debit1, credit1, anul, motif):
        """Insert one operation row into `Ops` with running balances, then
        refresh balances and the table.

        :param sacc: sub-account id
        :param user: user name shown in the window
        :param debit1/credit1: NOTE(review) — both parameters are
            immediately overwritten from the debit/credit widgets below.
        :param anul: type suffix ('' or ' / Annulation')
        :param motif: free-text reason attached to the operation
        """
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        debit1 = float(self.debit.text())
        credit1 = (float(self.credit.text()) * (-1))
        # Running balances: seed them from 0 when no prior operation exists.
        if self.soldeCompte() is None and self.soldeSCompte() is None:
            m = MyCursor()
            m.mycursor.execute(
                "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                (sacc, user, date, self.type + anul, debit1, credit1, debit1+credit1, debit1+credit1,
                 str(self.acc_list.currentText()), motif,))
            m.db.commit()
        else:
            if self.soldeSCompte() is None:
                m = MyCursor()
                m.mycursor.execute(
                    "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    (sacc, user, date, self.type + anul, debit1, credit1, self.soldeCompte()+debit1+credit1, debit1+credit1,
                     str(self.acc_list.currentText()), motif,))
                m.db.commit()
            else:
                m = MyCursor()
                m.mycursor.execute(
                    "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    (sacc, user, date, self.type + anul, debit1, credit1, self.soldeCompte()+debit1+credit1, self.soldeSCompte()+debit1+credit1,
                     str(self.acc_list.currentText()), motif,))
                m.db.commit()
        self.soldeSCompte()
        self.soldeCompte()
        # Reload all rows for this sub-account into the table.
        m = MyCursor()
        m.mycursor.execute('SELECT opID, reference, userID, created, type ,debit, credit FROM ops WHERE opID = %s',
                           (self.sacc_list.currentText(),))
        f = m.mycursor.fetchall()
        self.ops_table.setRowCount(0)
        for column_number, row_data in enumerate(f):
            self.ops_table.insertRow(column_number)
            for row_number, data in enumerate(row_data):
                self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
        for x in range(self.ops_table.rowCount()):
            self.ops_table.item(x, 6).setForeground(QtGui.QColor(255, 0, 0))
            self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 170, 0))
        self.account_fill()
        self.fill_table()
        self.debit.selectAll()
    def anul(self):
        """Commit an annulation: requires a non-empty motif in the
        Annulation dialog (``self.a``), otherwise flags it with '*'."""
        debit1 = float(self.debit.text())
        credit1 = (float(self.credit.text()) * (-1))
        sacc = self.sacc_list.currentText()
        user = self.user.text()
        motif = self.a.motif.toPlainText()
        if motif == '':
            self.a.wrong.setText('*')
        else:
            # ' / Annulation' is appended to the operation type.
            self.addfunc(sacc, user, debit1, credit1, ' / Annulation', motif)
            self.general_()
            self.a.close()
            self.fill_ops()
def remarque_op(self):
debit1 = float(self.debit.text())
credit1 = (float(self.credit.text()) * (-1))
sacc = self.sacc_list.currentText()
user = self.user.text()
motif = self.rem.motif.toPlainText()
self.m = MyCursor()
self.m.mycursor.execute("SELECT Obl FROM Accounts WHERE name=%s", (self.acc_list.currentText(),))
r = ''.join(map(str, self.m.mycursor.fetchone()))
if r == 'None':
self.addfunc(sacc, user, debit1, credit1, '', motif)
self.general_()
self.rem.close()
self.fill_ops()
elif r == '0':
self.addfunc(sacc, user, debit1, credit1, '', motif)
self.general_()
self.rem.close()
self.fill_ops()
else:
if motif == '':
self.rem.wrong.setText("*")
else:
self.addfunc(sacc, user, debit1, credit1, '', motif)
self.general_()
self.rem.close()
self.fill_ops()
    def fill_ops(self):
        """Refresh the table and reset both amount fields for the next entry."""
        self.fill_table()
        self.debit.selectAll()
        self.debit.clear()
        self.credit.clear()
        self.debit.setFocus()
    def add_ops(self):
        """Validate the entry fields and open the confirmation dialog:
        Annulation (SWITCH=2) when the checkbox is ticked, otherwise the
        remark dialog (SWITCH=1). Exactly one of debit/credit must be
        non-zero and both must be numeric (``islower()`` is False for
        digit-only text, so ``test_char`` rejects alphabetic input)."""
        global SWITCH
        try:
            self.debit.setFocus()
            self.debit.selectAll()
            test = self.debit.text().lower()
            test_char = test.islower()
            test2 = self.credit.text().lower()
            test_char2 = test2.islower()
            # Operation type comes from whichever radio button is checked.
            if self.con.isChecked():
                self.type = self.con.text()
            elif self.non_con.isChecked():
                self.type = self.non_con.text()
            if self.sacc_list.currentText() != 'Tout':
                if self.debit.text() != '0' and self.credit.text() == '0' and self.sacc_list.currentText() != 'Selectionnez un sous-compte...' and test_char is False and test_char2 is False:
                    if self.annulation.isChecked():
                        self.a = Annulation()
                        self.a.show()
                        self.a.val.clicked.connect(self.anul)
                        SWITCH = 2
                    else:
                        self.rem = RemEntry()
                        self.rem.show()
                        self.rem.val.clicked.connect(self.remarque_op)
                        SWITCH = 1
                elif self.debit.text() == '0' and self.credit.text() != '0' and test_char is False and test_char2 is False:
                    if self.annulation.isChecked():
                        self.a = Annulation()
                        self.a.show()
                        self.a.val.clicked.connect(self.anul)
                        SWITCH = 2
                    else:
                        self.rem = RemEntry()
                        self.rem.show()
                        self.rem.val.clicked.connect(self.remarque_op)
                        SWITCH = 1
                else:
                    pass
        except Exception:
            # Any unexpected failure is reported via a modal error box.
            msg = QMessageBox()
            msg.setWindowTitle('Erreur')
            msg.setText("Erreur Inconnu! CODE D'ERREUR (0001)")
            msg.setIcon(QMessageBox.Critical)
            msg.exec_()
    def account_fill(self):
        """Show the stored balance of the selected account in
        ``g_ballance_`` (red/green/black by sign); clears both balance
        fields when the placeholder entry is selected."""
        if self.acc_list.currentText() == 'Selectionnez un compte...':
            self.g_ballance.clear()
            self.g_ballance_.clear()
        else:
            self.m.mycursor.execute('SELECT balance FROM Accounts WHERE name=%s',
                                    (self.acc_list.currentText(),))
            l_ = self.m.mycursor.fetchone()[0]
            if l_ is None:
                pass
            else:
                if l_ < 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(255, 0, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')
                elif l_ > 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(0, 170, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')
                elif l_ == 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(0, 0, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')
    def repeat(self):
        """Account selection changed: repopulate the sub-account combo,
        refresh the account balance and reload today's operations."""
        self.sacc_list.clear()
        self.m.mycursor.execute('SELECT name FROM SAccounts WHERE accountID=%s', (self.acc_list.currentText(),))
        for x in self.m.mycursor.fetchall():
            self.sacc_list.addItems(x)
        self.sacc_list.insertItem(0, 'Tout')
        self.account_fill()
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
                                            WHERE acc = %s and created between %s and %s""",
                                (self.acc_list.currentText(), date, datetime.now()))
        f = self.m.mycursor.fetchall()
        self.ops_table.setRowCount(0)
        for column_number, row_data in enumerate(f):
            self.ops_table.insertRow(column_number)
            for row_number, data in enumerate(row_data):
                self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
        self.style_table()
        self.fill_table()
if __name__ == '__main__':
    # Standalone launcher: create the Qt application, show the window
    # and hand control to the event loop.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = AddOps()
    window.show()
    sys.exit(app.exec())
| [
"PyQt5.QtPrintSupport.QPrinter",
"PyQt5.QtGui.QPainter",
"PyQt5.QtPrintSupport.QPrintDialog",
"PyQt5.QtCore.QDate.currentDate",
"PyQt5.QtCore.QTime.currentTime",
"datetime.datetime.strptime",
"PyQt5.QtCore.QTimer",
"PyQt5.QtGui.QColor",
"annulation.Annulation",
"datetime.datetime.now",
"database... | [((25708, 25740), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (25730, 25740), False, 'from PyQt5 import QtWidgets, QtCore, QtPrintSupport\n'), ((645, 655), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (653, 655), False, 'from database import MyCursor\n'), ((672, 724), 'PyQt5.QtCore.Qt.WindowFlags', 'QtCore.Qt.WindowFlags', (['QtCore.Qt.FramelessWindowHint'], {}), '(QtCore.Qt.FramelessWindowHint)\n', (693, 724), False, 'from PyQt5 import QtWidgets, QtCore, QtPrintSupport\n'), ((2491, 2510), 'PyQt5.QtCore.QDate.currentDate', 'QDate.currentDate', ([], {}), '()\n', (2508, 2510), False, 'from PyQt5.QtCore import QDate, QTime, Qt, QTimer, QRectF\n'), ((2579, 2591), 'PyQt5.QtCore.QTimer', 'QTimer', (['self'], {}), '(self)\n', (2585, 2591), False, 'from PyQt5.QtCore import QDate, QTime, Qt, QTimer, QRectF\n'), ((2684, 2694), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (2692, 2694), False, 'from database import MyCursor\n'), ((3708, 3741), 'PyQt5.QtPrintSupport.QPrinter', 'QPrinter', (['QPrinter.HighResolution'], {}), '(QPrinter.HighResolution)\n', (3716, 3741), False, 'from PyQt5.QtPrintSupport import QPrinter\n'), ((3759, 3801), 'PyQt5.QtPrintSupport.QPrintDialog', 'QtPrintSupport.QPrintDialog', (['printer', 'self'], {}), '(printer, self)\n', (3786, 3801), False, 'from PyQt5 import QtWidgets, QtCore, QtPrintSupport\n'), ((4719, 4729), 'PyQt5.QtPrintSupport.QPrinter', 'QPrinter', ([], {}), '()\n', (4727, 4729), False, 'from PyQt5.QtPrintSupport import QPrinter\n'), ((4853, 4869), 'PyQt5.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (4867, 4869), False, 'from PyQt5 import QtGui\n'), ((6147, 6157), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (6155, 6157), False, 'from database import MyCursor\n'), ((6324, 6334), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (6332, 6334), False, 'from database import MyCursor\n'), ((7816, 7835), 'PyQt5.QtCore.QTime.currentTime', 
'QTime.currentTime', ([], {}), '()\n', (7833, 7835), False, 'from PyQt5.QtCore import QDate, QTime, Qt, QTimer, QRectF\n'), ((12153, 12194), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (12170, 12194), False, 'from datetime import datetime, timedelta\n'), ((12213, 12223), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (12221, 12223), False, 'from database import MyCursor\n'), ((12521, 12531), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (12529, 12531), False, 'from database import MyCursor\n'), ((13964, 14005), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (13981, 14005), False, 'from datetime import datetime, timedelta\n'), ((14025, 14035), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (14033, 14035), False, 'from database import MyCursor\n'), ((16192, 16202), 'annulation.RemEntry', 'RemEntry', ([], {}), '()\n', (16200, 16202), False, 'from annulation import Annulation, RemEntry\n'), ((16607, 16648), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (16624, 16648), False, 'from datetime import datetime, timedelta\n'), ((18405, 18415), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (18413, 18415), False, 'from database import MyCursor\n'), ((20035, 20045), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (20043, 20045), False, 'from database import MyCursor\n'), ((24950, 24991), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (24967, 24991), False, 'from datetime import datetime, timedelta\n'), ((9393, 9434), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (9410, 9434), False, 'from datetime import datetime, timedelta\n'), ((16865, 16875), 'database.MyCursor', 'MyCursor', ([], {}), 
'()\n', (16873, 16875), False, 'from database import MyCursor\n'), ((4664, 4678), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4676, 4678), False, 'from datetime import datetime, timedelta\n'), ((10352, 10393), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (10369, 10393), False, 'from datetime import datetime, timedelta\n'), ((11258, 11299), 'datetime.datetime.strptime', 'datetime.strptime', (['r', '"""%d/%m/%Y %H-%M-%S"""'], {}), "(r, '%d/%m/%Y %H-%M-%S')\n", (11275, 11299), False, 'from datetime import datetime, timedelta\n'), ((12042, 12056), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (12054, 12056), False, 'from datetime import datetime, timedelta\n'), ((13853, 13867), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (13865, 13867), False, 'from datetime import datetime, timedelta\n'), ((16488, 16502), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16500, 16502), False, 'from datetime import datetime, timedelta\n'), ((17352, 17362), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (17360, 17362), False, 'from database import MyCursor\n'), ((17845, 17855), 'database.MyCursor', 'MyCursor', ([], {}), '()\n', (17853, 17855), False, 'from database import MyCursor\n'), ((19074, 19097), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (19086, 19097), False, 'from PyQt5 import QtGui\n'), ((19155, 19178), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(170)', '(0)'], {}), '(0, 170, 0)\n', (19167, 19178), False, 'from PyQt5 import QtGui\n'), ((24839, 24853), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (24851, 24853), False, 'from datetime import datetime, timedelta\n'), ((25239, 25253), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (25251, 25253), False, 'from datetime import datetime, timedelta\n'), ((8341, 8364), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], 
{}), '(255, 0, 0)\n', (8353, 8364), False, 'from PyQt5 import QtGui\n'), ((9274, 9288), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9286, 9288), False, 'from datetime import datetime, timedelta\n'), ((9717, 9731), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9729, 9731), False, 'from datetime import datetime, timedelta\n'), ((12417, 12429), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (12426, 12429), False, 'from datetime import datetime, timedelta\n'), ((12726, 12738), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (12735, 12738), False, 'from datetime import datetime, timedelta\n'), ((14237, 14249), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (14246, 14249), False, 'from datetime import datetime, timedelta\n'), ((14526, 14538), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (14535, 14538), False, 'from datetime import datetime, timedelta\n'), ((4601, 4615), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4613, 4615), False, 'from datetime import datetime, timedelta\n'), ((8617, 8640), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(170)', '(0)'], {}), '(0, 170, 0)\n', (8629, 8640), False, 'from PyQt5 import QtGui\n'), ((9703, 9715), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (9712, 9715), False, 'from datetime import datetime, timedelta\n'), ((10233, 10247), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10245, 10247), False, 'from datetime import datetime, timedelta\n'), ((10688, 10702), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10700, 10702), False, 'from datetime import datetime, timedelta\n'), ((11139, 11153), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11151, 11153), False, 'from datetime import datetime, timedelta\n'), ((11609, 11623), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11621, 11623), False, 'from datetime import datetime, timedelta\n'), ((12431, 12445), 
'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (12443, 12445), False, 'from datetime import datetime, timedelta\n'), ((12740, 12754), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (12752, 12754), False, 'from datetime import datetime, timedelta\n'), ((14251, 14265), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14263, 14265), False, 'from datetime import datetime, timedelta\n'), ((14540, 14554), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14552, 14554), False, 'from datetime import datetime, timedelta\n'), ((21896, 21908), 'annulation.Annulation', 'Annulation', ([], {}), '()\n', (21906, 21908), False, 'from annulation import Annulation, RemEntry\n'), ((22105, 22115), 'annulation.RemEntry', 'RemEntry', ([], {}), '()\n', (22113, 22115), False, 'from annulation import Annulation, RemEntry\n'), ((8963, 8984), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (8975, 8984), False, 'from PyQt5 import QtGui\n'), ((9152, 9173), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (9164, 9173), False, 'from PyQt5 import QtGui\n'), ((10674, 10686), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (10683, 10686), False, 'from datetime import datetime, timedelta\n'), ((11595, 11607), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (11604, 11607), False, 'from datetime import datetime, timedelta\n'), ((14213, 14227), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14225, 14227), False, 'from datetime import datetime, timedelta\n'), ((14502, 14516), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14514, 14516), False, 'from datetime import datetime, timedelta\n'), ((22473, 22485), 'annulation.Annulation', 'Annulation', ([], {}), '()\n', (22483, 22485), False, 'from annulation import Annulation, RemEntry\n'), ((22683, 22693), 'annulation.RemEntry', 'RemEntry', ([], {}), '()\n', (22691, 22693), False, 'from annulation 
import Annulation, RemEntry\n')] |
#!/usr/bin/env python
# Copyright 2015 Open Connectome Project (http://openconnecto.me)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# dti2tens.py
# Created by <NAME> on 2015-01-09.
# Email: <EMAIL>
# Copyright (c) 2015. All rights reserved.
# Load necessary packages
from argparse import ArgumentParser
from os import system
from os.path import basename, splitext
def make_tens(dti, grad, bval, mask, scheme, dti_bfloat, tensors): #, fa, md, eigs):
    """Estimate per-voxel diffusion tensors from a DTI volume via Camino.

    Shells out to three external Camino/FSL tools in sequence:

    1. ``fsl2scheme``  - combines the b-vector and b-value files into a
       Camino scheme file (written to *scheme* via shell redirection).
    2. ``image2voxel`` - converts the 4D DTI nifti into Camino's
       big-endian Bfloat voxel ordering (written to *dti_bfloat*).
    3. ``dtfit``       - fits one tensor per voxel inside the brain mask
       (written to *tensors* in Bdouble format).

    **Positional Arguments**

            dti:        corrected DTI volume (.nii)
            grad:       field direction vectors for each volume (.grad)
            bval:       b-values corresponding to the b-vectors (.b)
            mask:       binary brain-tissue mask (.nii)
            scheme:     output path for the generated scheme file
            dti_bfloat: output path for the Bfloat conversion
            tensors:    output path for the fitted tensors (.Bdouble)

    Camino's dtfit documentation:
    http://cmic.cs.ucl.ac.uk/camino/index.php?n=Man.Dtfit
    """
    scheme_cmd = 'fsl2scheme -bvecfile '+grad+' -bvalfile '+bval+' -bscale 1 > '+scheme
    voxel_cmd = 'image2voxel -4dimage '+dti+' -outputfile '+dti_bfloat
    fit_cmd = 'dtfit '+dti_bfloat+' '+scheme+' -bgmask '+mask+' -outputfile '+tensors
    # Create the scheme file from the gradient table.
    system(scheme_cmd)
    # Re-order the nifti volume into Camino's Bfloat layout.
    system(voxel_cmd)
    # Fit one diffusion tensor per masked voxel.
    system(fit_cmd)
def main():
    """CLI entry point: parse the seven positional paths and run make_tens."""
    parser = ArgumentParser(description="")
    # (name, help) pairs registered in CLI order.
    arg_specs = (
        ("dti", "The DTI image, not skull stripped (.nii)"),
        ("grad", "The gradient directions corresponding to the DTI image (bvec)"),
        ("bval", "The bvalue file corresponding to the DTI image (bval)"),
        ("mask", "The brain mask of the DTI image (.nii, .nii.gz)"),
        ("scheme", "The scheme file (.scheme)"),
        ("dti_bfloat", "The Bfloat format equivalent of the DTI image (.Bfloat)"),
        ("tensors", "The produced tensors in Bdouble format (.Bdouble)"),
    )
    for arg_name, arg_help in arg_specs:
        parser.add_argument(arg_name, action="store", help=arg_help)
    result = parser.parse_args()
    make_tens(result.dti, result.grad, result.bval, result.mask,
              result.scheme, result.dti_bfloat, result.tensors)
# Script entry point: run the CLI when executed directly.
if __name__ == '__main__':
    main()
| [
"os.system",
"argparse.ArgumentParser"
] | [((1978, 2070), 'os.system', 'system', (["('fsl2scheme -bvecfile ' + grad + ' -bvalfile ' + bval + ' -bscale 1 > ' +\n scheme)"], {}), "('fsl2scheme -bvecfile ' + grad + ' -bvalfile ' + bval +\n ' -bscale 1 > ' + scheme)\n", (1984, 2070), False, 'from os import system\n'), ((2119, 2187), 'os.system', 'system', (["('image2voxel -4dimage ' + dti + ' -outputfile ' + dti_bfloat)"], {}), "('image2voxel -4dimage ' + dti + ' -outputfile ' + dti_bfloat)\n", (2125, 2187), False, 'from os import system\n'), ((2215, 2312), 'os.system', 'system', (["('dtfit ' + dti_bfloat + ' ' + scheme + ' -bgmask ' + mask +\n ' -outputfile ' + tensors)"], {}), "('dtfit ' + dti_bfloat + ' ' + scheme + ' -bgmask ' + mask +\n ' -outputfile ' + tensors)\n", (2221, 2312), False, 'from os import system\n'), ((2699, 2729), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (2713, 2729), False, 'from argparse import ArgumentParser\n')] |
"""LTL specification encoder"""
import tensorflow as tf
from ...data.encoder import PAD_TOKEN
from ..ltl_encoder import LTLTreeEncoder
class LTLSpecTreeEncoder(LTLTreeEncoder):
    """Tree encoder that accepts a full LTL specification object."""

    def encode(self, spec):
        """Encode the specification by delegating its formula string to the base encoder."""
        formula = spec.formula_str
        return super().encode(formula)
class LTLSpecGuaranteeEncoder(LTLTreeEncoder):
    """Encodes every guarantee of a specification into a fixed-size batch.

    The batch holds ``num_guarantees`` slots; each slot is a token-id
    sequence of length ``pad`` together with its tree positional encoding.
    Slots not used by the specification are filled with PAD tokens.
    """

    def __init__(self, guarantee_pad, num_guarantees, **kwargs):
        self.num_guarantees = num_guarantees
        self.guarantee_ids = []
        self.guarantee_padded_tpe = []
        super().__init__(pad=guarantee_pad, **kwargs)

    @property
    def tensor_spec(self):
        """Pair of TensorSpecs: (token ids, tree positional encodings)."""
        ids_spec = tf.TensorSpec(shape=(self.num_guarantees, self.pad),
                                dtype=self.tf_dtype)
        tpe_spec = tf.TensorSpec(shape=(self.num_guarantees, self.pad,
                                     self.tpe_pad),
                              dtype=self.tf_dtype)
        return (ids_spec, tpe_spec)

    @property
    def tensor(self):
        """Constant tensor pair built from the last successful encode()."""
        ids_tensor = tf.constant(self.guarantee_ids, dtype=self.tf_dtype)
        tpe_tensor = tf.constant(self.guarantee_padded_tpe, dtype=self.tf_dtype)
        return (ids_tensor, tpe_tensor)

    def encode(self, spec):
        """Encode all guarantees of *spec*; returns False on any failure."""
        num_real = len(spec.guarantees)
        if num_real > self.num_guarantees:
            self.error = 'Num guarantees'
            return False
        self.guarantee_ids = []
        self.guarantee_padded_tpe = []
        for guarantee in spec.guarantees:
            if not super().encode(guarantee):
                return False
            self.guarantee_ids.append(self.ids)
            self.guarantee_padded_tpe.append(self.padded_tpe)
        # Fill the remaining slots with all-PAD sequences and zero encodings.
        for _ in range(self.num_guarantees - num_real):
            if self.vocabulary:
                self.guarantee_ids.append(
                    self.vocabulary.tokens_to_ids([PAD_TOKEN] * self.pad))
                self.guarantee_padded_tpe.append([[0] * self.tpe_pad] *
                                                 self.pad)
        return True
class LTLSpecPropertyEncoder(LTLTreeEncoder):
    """Encodes assumptions and guarantees of a spec into one padded batch.

    Assumptions are encoded with the start marker enabled, guarantees
    without it; unused slots (up to ``num_properties``) are PAD-filled.
    """

    def __init__(self, property_pad, num_properties, **kwargs):
        self.num_properties = num_properties
        self.property_ids = []
        self.property_padded_tpe = []
        super().__init__(start=True, pad=property_pad, **kwargs)

    @property
    def tensor_spec(self):
        """Pair of TensorSpecs: (token ids, tree positional encodings)."""
        ids_spec = tf.TensorSpec(shape=(self.num_properties, self.pad),
                                dtype=self.tf_dtype)
        tpe_spec = tf.TensorSpec(shape=(self.num_properties, self.pad,
                                     self.tpe_pad),
                              dtype=self.tf_dtype)
        return (ids_spec, tpe_spec)

    @property
    def tensor(self):
        """Constant tensor pair built from the last successful encode()."""
        ids_tensor = tf.constant(self.property_ids, dtype=self.tf_dtype)
        tpe_tensor = tf.constant(self.property_padded_tpe, dtype=self.tf_dtype)
        return (ids_tensor, tpe_tensor)

    def encode(self, spec):
        """Encode assumptions then guarantees; returns False on any failure."""
        num_real = len(spec.assumptions + spec.guarantees)
        if num_real > self.num_properties:
            self.error = 'Num properties'
            return False
        self.property_ids = []
        self.property_padded_tpe = []
        # Assumptions carry the start marker; guarantees do not.
        self.encode_start = True
        for assumption in spec.assumptions:
            if not super().encode(assumption):
                return False
            self.property_ids.append(self.ids)
            self.property_padded_tpe.append(self.padded_tpe)
        self.encode_start = False
        for guarantee in spec.guarantees:
            if not super().encode(guarantee):
                return False
            self.property_ids.append(self.ids)
            self.property_padded_tpe.append(self.padded_tpe)
        # PAD-fill the remaining property slots.
        for _ in range(self.num_properties - num_real):
            if self.vocabulary:
                self.property_ids.append(
                    self.vocabulary.tokens_to_ids([PAD_TOKEN] * self.pad))
                self.property_padded_tpe.append([[0] * self.tpe_pad] * self.pad)
        return True
| [
"tensorflow.constant",
"tensorflow.TensorSpec"
] | [((610, 683), 'tensorflow.TensorSpec', 'tf.TensorSpec', ([], {'shape': '(self.num_guarantees, self.pad)', 'dtype': 'self.tf_dtype'}), '(shape=(self.num_guarantees, self.pad), dtype=self.tf_dtype)\n', (623, 683), True, 'import tensorflow as tf\n'), ((742, 834), 'tensorflow.TensorSpec', 'tf.TensorSpec', ([], {'shape': '(self.num_guarantees, self.pad, self.tpe_pad)', 'dtype': 'self.tf_dtype'}), '(shape=(self.num_guarantees, self.pad, self.tpe_pad), dtype=\n self.tf_dtype)\n', (755, 834), True, 'import tensorflow as tf\n'), ((1009, 1061), 'tensorflow.constant', 'tf.constant', (['self.guarantee_ids'], {'dtype': 'self.tf_dtype'}), '(self.guarantee_ids, dtype=self.tf_dtype)\n', (1020, 1061), True, 'import tensorflow as tf\n'), ((1083, 1142), 'tensorflow.constant', 'tf.constant', (['self.guarantee_padded_tpe'], {'dtype': 'self.tf_dtype'}), '(self.guarantee_padded_tpe, dtype=self.tf_dtype)\n', (1094, 1142), True, 'import tensorflow as tf\n'), ((2364, 2437), 'tensorflow.TensorSpec', 'tf.TensorSpec', ([], {'shape': '(self.num_properties, self.pad)', 'dtype': 'self.tf_dtype'}), '(shape=(self.num_properties, self.pad), dtype=self.tf_dtype)\n', (2377, 2437), True, 'import tensorflow as tf\n'), ((2495, 2587), 'tensorflow.TensorSpec', 'tf.TensorSpec', ([], {'shape': '(self.num_properties, self.pad, self.tpe_pad)', 'dtype': 'self.tf_dtype'}), '(shape=(self.num_properties, self.pad, self.tpe_pad), dtype=\n self.tf_dtype)\n', (2508, 2587), True, 'import tensorflow as tf\n'), ((2760, 2811), 'tensorflow.constant', 'tf.constant', (['self.property_ids'], {'dtype': 'self.tf_dtype'}), '(self.property_ids, dtype=self.tf_dtype)\n', (2771, 2811), True, 'import tensorflow as tf\n'), ((2833, 2891), 'tensorflow.constant', 'tf.constant', (['self.property_padded_tpe'], {'dtype': 'self.tf_dtype'}), '(self.property_padded_tpe, dtype=self.tf_dtype)\n', (2844, 2891), True, 'import tensorflow as tf\n')] |
from imutils.video import WebcamVideoStream
from FaceRecognition import *
from face_recognition import compare_faces
import cv2
# Run the heavy detection/recognition pipeline only on every
# SKIP_FRAME_RATIO-th frame to keep the display loop responsive.
SKIP_FRAME_RATIO = 2
# Load the reference gallery and pre-compute its face encodings.
# NOTE(review): load_faces/encodeDataset come from the local
# FaceRecognition module (star-imported above) — behavior inferred
# from names only; confirm against that module.
images, greys, names = load_faces("knownfaces")
knownfaces = encodeDataset(images, greys)
# Numeric ids aligned by index with `names`; presumably database keys
# consumed by insertName() — TODO confirm.
ID = [1,2,3,4,5,6,7,8,9]
# Results of the most recently *processed* frame; reused while frames
# are being skipped.
face_locations = []
face_names = []
process_frame_count = 1
# Threaded webcam reader (frames are grabbed on a background thread).
vs = WebcamVideoStream().start()
while True:
    frame = vs.read()
    if(process_frame_count % SKIP_FRAME_RATIO == 0):
        temp_frame, grey_frame = preprocess(frame)
        face_locations = detectfaces(grey_frame)
        face_names = []
        id_iden = []
        for face in face_locations:
            #facealigned, rect = alignface(temp_frame,grey_frame,face)
            facelandmark = predictface(grey_frame,face)
            faceencoded = encodeface(temp_frame, facelandmark)
            # Boolean per gallery entry: True where the encoding matches.
            matchfaces = compare_faces(knownfaces, faceencoded)
            name = "Unknown"
            id = "id"
            # If several gallery entries match, the last one wins.
            for i,match in enumerate(matchfaces):
                if(match == True):
                    name = names[i]
                    id = ID[i]
            face_names.append(name)
            id_iden.append(id)
    process_frame_count += 1
    # Annotate the frame with the latest known detections (these may come
    # from the previous processed frame when this one was skipped).
    for i,location in enumerate(face_locations):
        frame = draw_name(frame,face_names[i],location)
        # NOTE(review): records the sighting; the first two arguments are
        # passed as None here — confirm insertName's signature.
        insertName(None,None,id_iden[i])
    cv2.imshow('Video', frame)
    # Quit when the user presses 'q'.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
vs.stop()
cv2.destroyAllWindows() | [
"imutils.video.WebcamVideoStream",
"cv2.imshow",
"cv2.destroyAllWindows",
"face_recognition.compare_faces",
"cv2.waitKey"
] | [((1481, 1504), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1502, 1504), False, 'import cv2\n'), ((1382, 1408), 'cv2.imshow', 'cv2.imshow', (['"""Video"""', 'frame'], {}), "('Video', frame)\n", (1392, 1408), False, 'import cv2\n'), ((344, 363), 'imutils.video.WebcamVideoStream', 'WebcamVideoStream', ([], {}), '()\n', (361, 363), False, 'from imutils.video import WebcamVideoStream\n'), ((875, 913), 'face_recognition.compare_faces', 'compare_faces', (['knownfaces', 'faceencoded'], {}), '(knownfaces, faceencoded)\n', (888, 913), False, 'from face_recognition import compare_faces\n'), ((1417, 1431), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1428, 1431), False, 'import cv2\n')] |
import os
import json
import pytest
from cycsm import isd
import cycsm.csm as csm
import usgscam as cam
data_path = os.path.dirname(__file__)
class TestGenericLs:
    # NOTE(review): this test currently asserts nothing — it only calls
    # imageToGround without checking the result (see the TODO below), so it
    # passes as long as the call does not raise.

    @pytest.mark.parametrize('image, ground',[
        ((2500, 9216, 0), (-73589.5516508502, 562548.342040933, 2372508.44060771))
    ])
    def test_image_to_ground(self, ctx_model, image, ground):
        # `ctx_model` is a pytest fixture provided elsewhere (e.g. conftest.py)
        # — presumably a CTX camera model; verify against the fixture definition.
        gx, gy, gz = ground
        x, y, z = ctx_model.imageToGround(*image)
        #TODO: Get this test up and running.
        #print(x, y, z)
        #assert False
        #assert x == pytest.approx(gx, rel=1)
        #assert y == pytest.approx(gy, rel=1)
        #assert z == pytest.approx(gz, rel=1)

    #@pytest.mark.parametrize('image, ground',[
    #    ((512, 512, 0), (-73589.5516508502, 562548.342040933, 2372508.44060771)),
    #    ((100, 100, 0), (-48020.2164819883, 539322.805489926, 2378549.41724731))
    #])
    #def test_ground_to_image(self, model, image, ground):
    #    y, x = model.groundToImage(*ground)
    #    ix, iy, _ = image
    #
    #    assert x == pytest.approx(ix)
    #    assert y == pytest.approx(iy)
| [
"os.path.dirname",
"pytest.mark.parametrize"
] | [((118, 143), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (133, 143), False, 'import os\n'), ((172, 295), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""image, ground"""', '[((2500, 9216, 0), (-73589.5516508502, 562548.342040933, 2372508.44060771))]'], {}), "('image, ground', [((2500, 9216, 0), (-\n 73589.5516508502, 562548.342040933, 2372508.44060771))])\n", (195, 295), False, 'import pytest\n')] |
import pytest
import json
import os
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
from glacierbackup.command import _construct_argparse_parser
from glacierbackup.jobs import BackupJob
from glacierbackup.database import GBDatabase, GBDatabaseError
def _create_test_backup_files_and_dirs(parent_dir):
    """Create a small dummy directory tree plus a listing file for tests.

    Builds ``parent_dir/__test_backup_structure`` containing two
    sub-directories ('dir1', 'dir2'), each holding files 'a', '1' and '0'
    filled with 100 000 repetitions of their own name.  The '/'-joined
    paths of all six files are written (one per line) to
    ``__test_backup_file_list.txt``, whose location is returned.
    """
    subdir_names = ('dir1', 'dir2')
    file_names = 'a10'
    root_test_dir = os.path.join(parent_dir, '__test_backup_structure')
    os.mkdir(root_test_dir)
    subdirs = []
    for sub in subdir_names:
        subdir = os.path.join(root_test_dir, sub)
        os.mkdir(subdir)
        subdirs.append(subdir)
    for fname in file_names:
        for subdir in subdirs:
            with open(os.path.join(subdir, fname), 'w') as handle:
                handle.write(fname * 10**5)
    file_list_loc = os.path.join(root_test_dir, '__test_backup_file_list.txt')
    # Paths are joined with '/' on purpose — they mirror the original
    # listing-file format consumed by the tests.
    entries = [root_test_dir + '/' + sub + '/' + fname
               for sub in subdir_names for fname in file_names]
    with open(file_list_loc, 'w') as handle:
        handle.writelines(entry + '\n' for entry in entries)
    return file_list_loc
class TemplateSetupTeardown(object):
    """Class-scoped setup/teardown shared by the glacierbackup test classes.

    Creates a sandbox under ``~/.glacierbackup/__TESTS`` (config JSON, TMP
    directory, dummy backup tree) and removes the whole sandbox afterwards.

    NOTE(review): subclasses must define a ``keys`` class attribute (a dict
    with 'aws_access_key_id' / 'aws_secret_access_key') — ``setup_class``
    reads ``cls.keys`` below.
    """

    @classmethod
    def setup_class(cls):
        # Startup config
        # All test artefacts live under this directory; it is removed again
        # in teardown_class.
        cls.database_dir = os.path.join(os.path.expanduser('~'), '.glacierbackup/__TESTS')
        cls.database_path = os.path.join(cls.database_dir, 'GB_database.sqlite')
        tmp_dir = os.path.join(cls.database_dir, 'TMP')
        try:
            os.makedirs(tmp_dir)
        except FileExistsError:
            pass  # a previous run may have left the directory behind
        # Base configuration used to initialize test databases; also written
        # to disk below so CLI-style tests can point 'init' at it.
        cls._cnf = {
            "set_id" : 0,
            "compression_algorithm" : "lzma",
            "temporary_dir": tmp_dir,
            "database_dir": cls.database_dir,
            "public_key": "None",
            "region_name": "eu-west-1",
            "vault_name": "TestVault1",
            "aws_access_key_id": cls.keys["aws_access_key_id"],
            "aws_secret_access_key": cls.keys["aws_secret_access_key"]
            }
        cls.sample_conf_file = os.path.join(cls.database_dir, '_init_conf.json')
        with open(cls.sample_conf_file, 'w') as f:
            json.dump(cls._cnf, f, indent=2)
        # Dummy directory tree + listing file used as backup input.
        cls.list_of_files_path = _create_test_backup_files_and_dirs(cls.database_dir)

    @classmethod
    def teardown_class(cls):
        # Remove the whole sandbox; local import keeps shutil out of the
        # module namespace.
        import shutil
        shutil.rmtree(cls.database_dir)
class TestGBDatabase(TemplateSetupTeardown):
    """ Tests directly glacierbackup.database.GBDatabase """

    # Runs at import time: requires __keys.json in the current working
    # directory (real AWS credentials consumed by setup_class).
    with open("__keys.json", 'r') as f:
        keys = json.load(f)

    def teardown_method(self, method):
        # Start every test from a fresh, absent database file.
        if os.path.isfile(self.database_path):
            os.remove(self.database_path)

    def test_operation_protection_on_disconnected_database(self):
        # Operations on a database that was never connected must raise.
        DB = GBDatabase(self.database_path)
        with pytest.raises(GBDatabaseError):
            DB.change('Select * from Jobs')

    def test_database_initialization(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        # Reading config before connect() must raise ...
        with pytest.raises(GBDatabaseError):
            CONFIG = DB.read_config_from_db(set_id=0)
        DB.connect()
        # ... and after connect() must round-trip every configured value.
        CONFIG = DB.read_config_from_db(set_id=0)
        for k in self._cnf.keys():
            assert CONFIG[k] == self._cnf[k]

    def test_multiple_close_calls(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        # Should rise no errors
        DB.close()
        DB.connect()
        DB.close()
        DB.close()
        DB.close()
        del DB

    def test_writing_to_Files(self):
        # A row written inside the context manager must be readable after
        # reopening the database.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            v = ('abs', 1, 0, 1)
            DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)
        with GBDatabase(self.database_path) as DB:
            row = DB.fetch_row('SELECT * FROM Files WHERE abs_path=?', (v[0],))
        for i, val in enumerate(row):
            assert v[i] == val

    def test_writing_two_identical_abs_path_to_Files(self):
        # abs_path is expected to be unique in Files — presumably enforced
        # by the schema; a duplicate insert must violate it.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            v = ('abs', 1, 0, 1)
            DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)

    def test_writing_to_Backups(self):
        # Distinct rows written via change_many must all be readable back.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 2, 'ccc', 2),
                  ('abs', 3, 'ddd', 3),]
            DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)
        with GBDatabase(self.database_path) as DB:
            rows = DB.fetch_all('SELECT * FROM Backups')
        for written, read in zip(vs, rows):
            for v1, v2 in zip(written, read):
                assert v1 == v2

    def test_writing_duplicates_to_Backups(self):
        # Fully identical rows must be rejected by the Backups constraints.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 1, 'abc', 1),
                  ('abs', 1, 'abc', 1),]
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)

    def test_writing_nonsense_to_Backups(self):
        # Rows that conflict on part of the key (same path/date or
        # path/hash) must also be rejected — depends on the schema's
        # uniqueness constraints (defined in glacierbackup.database).
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 1, 'cba', 1),
                  ('abs', 3, 'abc', 1),]
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)
@pytest.mark.incremental
class TestsFunctional(TemplateSetupTeardown):
    """Possible test scenarios often depend on the database state,
    which is created and altered by executing actions/test cases
    earlier. They also simultaneously depend on the files (i.e.
    backed_files -> archive -> encrypted_archive). This could be mitigated
    by preparing entire state from scratch, independently from glacierbackup
    functionality, by writing custom straight-to-the-state setups or state
    injection. However it would require substantial effort at the very
    early stage of development when things are mostly in flux, so a fixed
    sequence of rather functional than strictly unit tests is used for now.
    It still has the benefit of automated execution and can better pin down
    a failure location.
    Note that the test order below roughly mimics how glacierbackup would
    be used from CLI. """

    # Runs at import time: requires __keys.json (AWS credentials) in the
    # current working directory; consumed by TemplateSetupTeardown.
    with open("__keys.json", 'r') as f:
        keys = json.load(f)

    def test_initialization_from_cmd_args(self):
        # 'init --genkeys' must create the database, store the config and
        # generate an RSA key pair.
        parser = _construct_argparse_parser()
        args = parser.parse_args(['init', '--genkeys', self.sample_conf_file])
        args.func(args)
        DB = GBDatabase(os.path.join(self.database_dir, 'GB_database.sqlite'))
        DB.connect()
        CONFIG = DB.read_config_from_db()
        assert CONFIG['vault_name'] == self._cnf['vault_name']
        assert CONFIG['database_dir'] == self._cnf['database_dir']
        assert CONFIG['temporary_dir'] == self._cnf['temporary_dir']
        assert len(CONFIG['public_key']) > 100 # TODO: Should do better...
        assert os.path.isfile(os.path.join(self.database_dir, 'GB_RSA_private.pem'))

    def test_registering_files_by_file_list(self):
        # 'register --filelist' must add all six dummy files to Files.
        database_path = os.path.join(self.database_dir, 'GB_database.sqlite')
        parser = _construct_argparse_parser()
        args = parser.parse_args(['register', '--database', database_path,
                                  '--filelist', self.list_of_files_path])
        args.func(args)
        import sqlite3
        conn = sqlite3.connect(database_path)
        c = conn.cursor()
        c.execute('SELECT * FROM FILES')
        files_in_db = c.fetchall()
        conn.close()
        assert len(files_in_db) == 6

    def test_backup_job_initialization(self):
        # Stored on the *class* so later (incremental) tests can reuse the
        # same database connection and job object.
        database_path = os.path.join(self.database_dir, 'GB_database.sqlite')
        self.__class__.DB = GBDatabase(database_path)
        self.__class__.DB.connect()
        self.__class__.BJ = BackupJob(self.__class__.DB, 'asdf')

    def test_backup_job_checkout_files(self):
        self.__class__.BJ.checkout_files()

    def test_backup_job_archive_packing(self):
        self.__class__.BJ.archive_files()
        assert os.path.isfile(self.__class__.BJ.archive)

    def test_backup_job_archive_contents(self):
        # Every registered path must be present inside the tar archive
        # (tar member names lack the leading '/').
        import tarfile
        tarf = tarfile.open(self.__class__.BJ.archive, 'r')
        with open(self.list_of_files_path, 'r') as f:
            paths1 = [s.strip() for s in f.readlines()]
        paths2 = ['/'+t for t in tarf.getnames()]
        for p in paths1:
            assert p in paths2

    def test_backup_job_encrypt_archive(self):
        self.__class__.BJ.encrypt_archive()
        assert os.path.isfile(self.__class__.BJ.encrypted_archive)

    def test_archive_decryption(self):
        # TODO: not implemented yet.
        pass

    def test_backup_job_upload_into_glacier(self):
        # Disabled: would perform a real AWS Glacier upload.
        #~ self.__class__.BJ.upload_into_glacier()
        pass
| [
"logging.basicConfig",
"tarfile.open",
"glacierbackup.command._construct_argparse_parser",
"glacierbackup.database.GBDatabase",
"sqlite3.connect",
"os.makedirs",
"json.dump",
"os.path.join",
"os.path.isfile",
"os.remove",
"pytest.raises",
"os.mkdir",
"shutil.rmtree",
"json.load",
"os.pat... | [((51, 159), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}), "(level=logging.DEBUG, format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (70, 159), False, 'import logging\n'), ((396, 447), 'os.path.join', 'os.path.join', (['parent_dir', '"""__test_backup_structure"""'], {}), "(parent_dir, '__test_backup_structure')\n", (408, 447), False, 'import os\n'), ((452, 475), 'os.mkdir', 'os.mkdir', (['root_test_dir'], {}), '(root_test_dir)\n', (460, 475), False, 'import os\n'), ((492, 527), 'os.path.join', 'os.path.join', (['root_test_dir', '"""dir1"""'], {}), "(root_test_dir, 'dir1')\n", (504, 527), False, 'import os\n'), ((532, 546), 'os.mkdir', 'os.mkdir', (['dir1'], {}), '(dir1)\n', (540, 546), False, 'import os\n'), ((558, 593), 'os.path.join', 'os.path.join', (['root_test_dir', '"""dir2"""'], {}), "(root_test_dir, 'dir2')\n", (570, 593), False, 'import os\n'), ((598, 612), 'os.mkdir', 'os.mkdir', (['dir2'], {}), '(dir2)\n', (606, 612), False, 'import os\n'), ((790, 848), 'os.path.join', 'os.path.join', (['root_test_dir', '"""__test_backup_file_list.txt"""'], {}), "(root_test_dir, '__test_backup_file_list.txt')\n", (802, 848), False, 'import os\n'), ((1334, 1386), 'os.path.join', 'os.path.join', (['cls.database_dir', '"""GB_database.sqlite"""'], {}), "(cls.database_dir, 'GB_database.sqlite')\n", (1346, 1386), False, 'import os\n'), ((1405, 1442), 'os.path.join', 'os.path.join', (['cls.database_dir', '"""TMP"""'], {}), "(cls.database_dir, 'TMP')\n", (1417, 1442), False, 'import os\n'), ((2068, 2117), 'os.path.join', 'os.path.join', (['cls.database_dir', '"""_init_conf.json"""'], {}), "(cls.database_dir, '_init_conf.json')\n", (2080, 2117), False, 'import os\n'), ((2394, 2425), 'shutil.rmtree', 'shutil.rmtree', (['cls.database_dir'], {}), '(cls.database_dir)\n', (2407, 2425), False, 'import shutil\n'), ((2589, 2601), 'json.load', 
'json.load', (['f'], {}), '(f)\n', (2598, 2601), False, 'import json\n'), ((2657, 2691), 'os.path.isfile', 'os.path.isfile', (['self.database_path'], {}), '(self.database_path)\n', (2671, 2691), False, 'import os\n'), ((2819, 2849), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (2829, 2849), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((3005, 3035), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (3015, 3035), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((3391, 3421), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (3401, 3421), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((3654, 3684), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (3664, 3684), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((4181, 4211), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (4191, 4211), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((4679, 4709), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (4689, 4709), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((5308, 5338), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (5318, 5338), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((5767, 5797), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (5777, 5797), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((7144, 7156), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7153, 7156), False, 
'import json\n'), ((7232, 7260), 'glacierbackup.command._construct_argparse_parser', '_construct_argparse_parser', ([], {}), '()\n', (7258, 7260), False, 'from glacierbackup.command import _construct_argparse_parser\n'), ((7972, 8025), 'os.path.join', 'os.path.join', (['self.database_dir', '"""GB_database.sqlite"""'], {}), "(self.database_dir, 'GB_database.sqlite')\n", (7984, 8025), False, 'import os\n'), ((8043, 8071), 'glacierbackup.command._construct_argparse_parser', '_construct_argparse_parser', ([], {}), '()\n', (8069, 8071), False, 'from glacierbackup.command import _construct_argparse_parser\n'), ((8292, 8322), 'sqlite3.connect', 'sqlite3.connect', (['database_path'], {}), '(database_path)\n', (8307, 8322), False, 'import sqlite3\n'), ((8571, 8624), 'os.path.join', 'os.path.join', (['self.database_dir', '"""GB_database.sqlite"""'], {}), "(self.database_dir, 'GB_database.sqlite')\n", (8583, 8624), False, 'import os\n'), ((8653, 8678), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['database_path'], {}), '(database_path)\n', (8663, 8678), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((8743, 8779), 'glacierbackup.jobs.BackupJob', 'BackupJob', (['self.__class__.DB', '"""asdf"""'], {}), "(self.__class__.DB, 'asdf')\n", (8752, 8779), False, 'from glacierbackup.jobs import BackupJob\n'), ((8999, 9040), 'os.path.isfile', 'os.path.isfile', (['self.__class__.BJ.archive'], {}), '(self.__class__.BJ.archive)\n', (9013, 9040), False, 'import os\n'), ((9136, 9180), 'tarfile.open', 'tarfile.open', (['self.__class__.BJ.archive', '"""r"""'], {}), "(self.__class__.BJ.archive, 'r')\n", (9148, 9180), False, 'import tarfile\n'), ((9524, 9575), 'os.path.isfile', 'os.path.isfile', (['self.__class__.BJ.encrypted_archive'], {}), '(self.__class__.BJ.encrypted_archive)\n', (9538, 9575), False, 'import os\n'), ((1255, 1278), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (1273, 1278), False, 'import os\n'), ((1468, 1488), 
'os.makedirs', 'os.makedirs', (['tmp_dir'], {}), '(tmp_dir)\n', (1479, 1488), False, 'import os\n'), ((2181, 2213), 'json.dump', 'json.dump', (['cls._cnf', 'f'], {'indent': '(2)'}), '(cls._cnf, f, indent=2)\n', (2190, 2213), False, 'import json\n'), ((2705, 2734), 'os.remove', 'os.remove', (['self.database_path'], {}), '(self.database_path)\n', (2714, 2734), False, 'import os\n'), ((2863, 2893), 'pytest.raises', 'pytest.raises', (['GBDatabaseError'], {}), '(GBDatabaseError)\n', (2876, 2893), False, 'import pytest\n'), ((3082, 3112), 'pytest.raises', 'pytest.raises', (['GBDatabaseError'], {}), '(GBDatabaseError)\n', (3095, 3112), False, 'import pytest\n'), ((3912, 3942), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (3922, 3942), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((5015, 5045), 'glacierbackup.database.GBDatabase', 'GBDatabase', (['self.database_path'], {}), '(self.database_path)\n', (5025, 5045), False, 'from glacierbackup.database import GBDatabase, GBDatabaseError\n'), ((7397, 7450), 'os.path.join', 'os.path.join', (['self.database_dir', '"""GB_database.sqlite"""'], {}), "(self.database_dir, 'GB_database.sqlite')\n", (7409, 7450), False, 'import os\n'), ((7833, 7886), 'os.path.join', 'os.path.join', (['self.database_dir', '"""GB_RSA_private.pem"""'], {}), "(self.database_dir, 'GB_RSA_private.pem')\n", (7845, 7886), False, 'import os\n'), ((4457, 4494), 'pytest.raises', 'pytest.raises', (['sqlite3.IntegrityError'], {}), '(sqlite3.IntegrityError)\n', (4470, 4494), False, 'import pytest\n'), ((5554, 5591), 'pytest.raises', 'pytest.raises', (['sqlite3.IntegrityError'], {}), '(sqlite3.IntegrityError)\n', (5567, 5591), False, 'import pytest\n'), ((6013, 6050), 'pytest.raises', 'pytest.raises', (['sqlite3.IntegrityError'], {}), '(sqlite3.IntegrityError)\n', (6026, 6050), False, 'import pytest\n'), ((692, 716), 'os.path.join', 'os.path.join', (['adir', 'char'], {}), 
'(adir, char)\n', (704, 716), False, 'import os\n')] |
import pandas
import folium

# Load the volcano records (one row per volcano) from the bundled data file.
data = pandas.read_csv("Volcanoes.txt")
lat = list(data['LAT'])
lon = list(data['LON'])
elev = list(data['ELEV'])
name = list(data["NAME"])

# Popup template: a Google-search link for the volcano name plus its height.
# %%22 renders as a literal %22 (a quoted search phrase) after %-formatting.
html = """
Volcano name:<br>
<a href="https://www.google.com/search?q=%%22%s%%22" target="_blank">%s</a><br>
Height: %s m
"""
def color_producer(elevation):
    """Map a volcano elevation (in meters) to a marker color.

    Below 1000 m -> 'green', 1000-2999 m -> 'orange', 3000 m and up -> 'red'.
    """
    if elevation >= 3000:
        return 'red'
    if elevation >= 1000:
        return 'orange'
    return 'green'
# Build the interactive map: a layer of color-coded volcano markers and a
# population-shaded world overlay, both toggleable through a layer control.
# Fixed: the map variable used to be called `map`, shadowing the builtin,
# and the loop variable `name` shadowed the `name` list it iterated over.
volcano_map = folium.Map(location=[38.58, -99.09], zoom_start=6,
                        tiles="Stamen Terrain")

fgv = folium.FeatureGroup(name='Volcanoes')
for lt, ln, el, nm in zip(lat, lon, elev, name):
    iframe = folium.IFrame(html=html % (nm, nm, el), width=200, height=100)
    fgv.add_child(folium.CircleMarker(location=[lt, ln], radius=6,
                                      popup=folium.Popup(iframe),
                                      fill_color=color_producer(el),
                                      color='grey', fill_opacity=0.9))

fgp = folium.FeatureGroup(name='Population')
# Shade each country by its 2005 population: <10M green, 10-20M yellow, else red.
fgp.add_child(folium.GeoJson(data=open('world.json', 'r', encoding='utf-8-sig').read(),
    style_function=lambda x: {'fillColor': 'green' if x['properties']['POP2005'] < 10000000
    else 'yellow' if 10000000 <= x['properties']['POP2005'] < 20000000 else 'red'}))

volcano_map.add_child(fgv)
volcano_map.add_child(fgp)
volcano_map.add_child(folium.LayerControl())
volcano_map.save("Map1.html")
| [
"folium.IFrame",
"pandas.read_csv",
"folium.LayerControl",
"folium.Map",
"folium.FeatureGroup",
"folium.Popup"
] | [((39, 71), 'pandas.read_csv', 'pandas.read_csv', (['"""Volcanoes.txt"""'], {}), "('Volcanoes.txt')\n", (54, 71), False, 'import pandas\n'), ((494, 568), 'folium.Map', 'folium.Map', ([], {'location': '[38.58, -99.09]', 'zoom_start': '(6)', 'tiles': '"""Stamen Terrain"""'}), "(location=[38.58, -99.09], zoom_start=6, tiles='Stamen Terrain')\n", (504, 568), False, 'import folium\n'), ((575, 612), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': '"""Volcanoes"""'}), "(name='Volcanoes')\n", (594, 612), False, 'import folium\n'), ((925, 963), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': '"""Population"""'}), "(name='Population')\n", (944, 963), False, 'import folium\n'), ((681, 747), 'folium.IFrame', 'folium.IFrame', ([], {'html': '(html % (name, name, el))', 'width': '(200)', 'height': '(100)'}), '(html=html % (name, name, el), width=200, height=100)\n', (694, 747), False, 'import folium\n'), ((1282, 1303), 'folium.LayerControl', 'folium.LayerControl', ([], {}), '()\n', (1301, 1303), False, 'import folium\n'), ((822, 842), 'folium.Popup', 'folium.Popup', (['iframe'], {}), '(iframe)\n', (834, 842), False, 'import folium\n')] |
from components.competition import Competition
from components.pizzeria import Pizzeria
class TestObjects:
    """Equality semantics of Pizzeria and Competition, plus the
    reverse-index construction performed by Pizzeria."""

    def test_two_identical_pizzeria_objects(self):
        first = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
        second = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
        assert first == second

    def test_two_different_pizzeria_objects(self):
        first = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
        second = Pizzeria({'p1': [1, 2, 3, 0], 'p2': [4, 5, 6]}, 7)
        assert first != second

    def test_pizzeria_against_other_object(self):
        pizzeria = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
        # Comparing against an unrelated type must report inequality.
        assert pizzeria != 'mock'

    def test_two_identical_competition_objects(self):
        first = Competition(3, 4, 5)
        second = Competition(3, 4, 5)
        assert first == second

    def test_two_different_competition_objects(self):
        first = Competition(5, 4, 3)
        second = Competition(3, 4, 5)
        assert first != second

    def test_competition_against_other_object(self):
        competition = Competition(5, 4, 3)
        assert competition != 'mock'

    def test_pizzeria_reverse_index_properly_constructed(self):
        # pizza id -> list of ingredient ids
        pizzas = {0: [0, 1, 2],
                  1: [3, 4, 5],
                  2: [6, 3, 1],
                  3: [4, 3, 5],
                  4: [6, 5]}
        # ingredient id -> sorted list of pizzas containing it
        expected_reverse_index = {
            0: [0],
            1: [0, 2],
            2: [0],
            3: [1, 2, 3],
            4: [1, 3],
            5: [1, 3, 4],
            6: [2, 4]
        }
        pizzeria = Pizzeria(pizzas, 7)
        assert pizzeria.ingredients_reverse_index == expected_reverse_index
| [
"components.pizzeria.Pizzeria",
"components.competition.Competition"
] | [((173, 220), 'components.pizzeria.Pizzeria', 'Pizzeria', (["{'p1': [1, 2, 3], 'p2': [4, 5, 6]}", '(6)'], {}), "({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)\n", (181, 220), False, 'from components.pizzeria import Pizzeria\n'), ((234, 281), 'components.pizzeria.Pizzeria', 'Pizzeria', (["{'p1': [1, 2, 3], 'p2': [4, 5, 6]}", '(6)'], {}), "({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)\n", (242, 281), False, 'from components.pizzeria import Pizzeria\n'), ((371, 418), 'components.pizzeria.Pizzeria', 'Pizzeria', (["{'p1': [1, 2, 3], 'p2': [4, 5, 6]}", '(6)'], {}), "({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)\n", (379, 418), False, 'from components.pizzeria import Pizzeria\n'), ((432, 482), 'components.pizzeria.Pizzeria', 'Pizzeria', (["{'p1': [1, 2, 3, 0], 'p2': [4, 5, 6]}", '(7)'], {}), "({'p1': [1, 2, 3, 0], 'p2': [4, 5, 6]}, 7)\n", (440, 482), False, 'from components.pizzeria import Pizzeria\n'), ((571, 618), 'components.pizzeria.Pizzeria', 'Pizzeria', (["{'p1': [1, 2, 3], 'p2': [4, 5, 6]}", '(6)'], {}), "({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)\n", (579, 618), False, 'from components.pizzeria import Pizzeria\n'), ((731, 751), 'components.competition.Competition', 'Competition', (['(3)', '(4)', '(5)'], {}), '(3, 4, 5)\n', (742, 751), False, 'from components.competition import Competition\n'), ((765, 785), 'components.competition.Competition', 'Competition', (['(3)', '(4)', '(5)'], {}), '(3, 4, 5)\n', (776, 785), False, 'from components.competition import Competition\n'), ((878, 898), 'components.competition.Competition', 'Competition', (['(5)', '(4)', '(3)'], {}), '(5, 4, 3)\n', (889, 898), False, 'from components.competition import Competition\n'), ((912, 932), 'components.competition.Competition', 'Competition', (['(3)', '(4)', '(5)'], {}), '(3, 4, 5)\n', (923, 932), False, 'from components.competition import Competition\n'), ((1024, 1044), 'components.competition.Competition', 'Competition', (['(5)', '(4)', '(3)'], {}), '(5, 4, 3)\n', (1035, 1044), False, 'from 
components.competition import Competition\n'), ((1535, 1554), 'components.pizzeria.Pizzeria', 'Pizzeria', (['pizzas', '(7)'], {}), '(pizzas, 7)\n', (1543, 1554), False, 'from components.pizzeria import Pizzeria\n')] |
#!/usr/bin/env python3
import pvml
import numpy as np
import matplotlib.pyplot as plt
import argparse
from itertools import zip_longest
# Maps each --normalization command-line choice to the pvml routine that
# implements it.  "none" is an identity mimicking the pvml call signature:
# it accepts one or two data matrices and returns them unchanged.
_NORMALIZATION = {
    "none": lambda *X: (X[0] if len(X) == 1 else X),
    "meanvar": pvml.meanvar_normalization,
    "minmax": pvml.minmax_normalization,
    "maxabs": pvml.maxabs_normalization,
    "l2": pvml.l2_normalization,
    "l1": pvml.l1_normalization,
    "whitening": pvml.whitening,
    "pca": pvml.pca
}
def parse_args():
    """Parse the command-line options of the classification demo.

    Returns the argparse.Namespace with training hyper-parameters, data
    selection options and the model/kernel configuration.  The statement
    order below is the order the options appear in the --help output.
    """
    parser = argparse.ArgumentParser("Classification demo")
    a = parser.add_argument
    a("-r", "--lr", type=float, default=0.01,
      help="learning rate (%(default)g)")
    a("-l", "--lambda", type=float, dest="lambda_", default=0,
      help="regularization coefficient (%(default)g)")
    a("-s", "--steps", type=int, default=10000,
      help="maximum number of training iterations (%(default)d)")
    a("-p", "--plot-every", type=int, default=100,
      help="frequency of plotting training data (%(default)d)")
    a("-t", "--test", help="test set")
    a("-f", "--features", help="Comma-separated feature columns")
    a("-n", "--normalization", choices=_NORMALIZATION.keys(),
      default="none", help="Feature normalization")
    a("-c", "--class", type=int, default=-1, dest="class_",
      help="Class column")
    a("--seed", type=int, default=171956,
      help="Random seed")
    a("--confusion-matrix", "-C", action="store_true",
      help="Show the confusion matrix.")
    a("--dump", action="store_true",
      help="Save the decision boundary and other data")
    a("--nodraw", action="store_true",
      help="Skip drawing the plots")
    a("-m", "--model", choices=_MODELS.keys(), default="logreg",
      help="Classification model")
    a("-k", "--kernel", choices=["rbf", "polynomial"], default="rbf",
      help="Kernel function")
    a("--kernel-param", type=float, default=2,
      help="Parameter of the kernel")
    a("--knn-k", type=int, default=0, help="KNN neighbors (default auto)")
    a("--classtree-minsize", type=int, default=1,
      help="Classification tree minimum node size (%(default)d)")
    a("--classtree-diversity", default="gini",
      choices=["gini", "entropy", "error"],
      help="Classification tree diversity function (%(default)s)")
    a("--classtree-cv", type=int, default=5,
      help="Cross-validation folds used for pruning (%(default)d)")
    a("--mlp-hidden", default="",
      help="Comma-separated list of number of hidden neurons")
    a("--mlp-momentum", type=float, default=0.99,
      help="Momentum term (%(default)g)")
    a("--mlp-batch", type=int,
      help="Batch size (default: use all training data)")
    a("train", help="training set")
    return parser.parse_args()
class DemoModel:
    """Base class for all demo classifiers.

    Subclasses implement train_step/inference/loss; this class drives the
    training loop and all the matplotlib visualization.  `binary` marks
    two-class models (labels are binarized by main); `iterative` is False
    for models trained in a single call (e.g. GDA, trees, KNN).
    """

    def __init__(self, args, binary, iterative=True):
        self.lr = args.lr
        self.lambda_ = args.lambda_
        self.binary = binary
        self.iterative = iterative
        self.plot_every = args.plot_every
        self.draw = not args.nodraw
        self.confusion_matrix = args.confusion_matrix
        self.dump = args.dump

    def train(self, X, Y, Xtest, Ytest, steps):
        """Train the model, periodically evaluating and re-plotting.

        Runs train_step in chunks of plot_every iterations, tracking
        accuracy and loss on the training (and optional test) set.
        Stops early for non-iterative models or when the accuracy figure
        is closed.  With --dump the learning curves go to dump.txt.
        """
        st = self.plot_every
        iterations = []
        train_acc = []
        test_acc = []
        train_loss = []
        test_loss = []
        print("Step Train", "" if Xtest is None else "Test")
        for step in range(st, steps + st, st):
            self.train_step(X, Y, st)
            iterations.append(step)
            Z, P = self.inference(X)
            train_acc.append(100 * (Z == Y).mean())
            train_loss.append(self.loss(Y, P))
            if Xtest is not None:
                Ztest, Ptest = self.inference(Xtest)
                test_acc.append(100 * (Ztest == Ytest).mean())
                test_loss.append(self.loss(Ytest, Ptest))
            self.plot_curves(0, "Accuracy (%)", iterations, train_acc,
                             test_acc)
            self.plot_curves(1, "Loss", iterations, train_loss, test_loss)
            self.plot_confusion(4, "Confusion matrix (train)", Z, Y)
            # Decision regions can only be drawn for 2D feature spaces.
            if X.shape[1] == 2:
                self.plot_data(2, "Training set", X, Y)
                if Xtest is not None:
                    self.plot_data(3, "Test set", Xtest, Ytest)
            if Xtest is None:
                print("{} {:.2f}%".format(step, train_acc[-1]))
            else:
                self.plot_confusion(5, "Confusion matrix (test)", Ztest, Ytest)
                print("{} {:.2f}% {:.2f}%".format(step, train_acc[-1],
                                                  test_acc[-1]))
            plt.pause(0.0001)
            # Stop after one chunk for non-iterative models, or when the
            # user closed figure 0 (the accuracy plot).
            if not self.iterative or (self.draw and not plt.fignum_exists(0)):
                break
        if self.dump:
            with open("dump.txt", "wt") as f:
                # Columns: step, train acc, test acc, train loss, test loss;
                # missing test values are left blank.
                for t in zip_longest(iterations, train_acc, test_acc,
                                   train_loss, test_loss):
                    row = (x if x is not None else "" for x in t)
                    print("{} {} {} {} {}".format(*row), file=f)

    def plot_curves(self, fignum, title, iters, train, test):
        """Plot the training (and test) curves against the iteration count."""
        train = [x for x in train if x is not None]
        test = [x for x in test if x is not None]
        if not self.draw or (not train and not test):
            return
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        plt.xlabel("Iterations")
        if train:
            plt.plot(iters, train)
        if test:
            plt.plot(iters, test)
        plt.legend(["train", "test"])

    def plot_data(self, fignum, title, X, Y, resolution=200):
        """Scatter-plot 2D data and overlay the decision boundary.

        The boundary is traced by evaluating the model on a
        resolution x resolution grid covering the current axes.
        """
        if not self.draw:
            return
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.coolwarm)
        xmin, xmax = plt.gca().get_xlim()
        ymin, ymax = plt.gca().get_ylim()
        ax = np.linspace(xmin, xmax, resolution)
        ay = np.linspace(ymin, ymax, resolution)
        gx, gy = np.meshgrid(ax, ay)
        data = np.vstack((gx.reshape(-1), gy.reshape(-1))).T
        v = self.inference(data)[1]
        if v.ndim == 1:
            # Binary model with scalar scores: boundary at 0.5.
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, [0.5], cmap=plt.cm.coolwarm)
            self.dump_contour(gx, gy, v - 0.5, title)
        elif v.shape[1] == 2:
            # Two-column scores: boundary where the two scores are equal.
            v = v[:, 0] - v[:, 1]
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, [0.0], cmap=plt.cm.coolwarm)
            self.dump_contour(gx, gy, v, title)
        else:
            # Multiclass: contour the argmax at the midpoints between labels.
            values = np.arange(v.shape[1] - 1) + 0.5
            v = v.argmax(1)
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, values, cmap=plt.cm.coolwarm)

    def dump_contour(self, gx, gy, v, title):
        """Write the evaluated decision grid to a text file (if --dump).

        Rows are separated by blank lines (gnuplot-style grid format).
        """
        if self.dump:
            with open(f"contour-{title}.txt".replace(" ", "_"), "w") as f:
                for i in range(v.shape[0]):
                    for j in range(v.shape[1]):
                        print(gx[i, j], gy[i, j], v[i, j], file=f)
                    print(file=f)

    def plot_confusion(self, fignum, title, predictions, labels):
        """Draw a row-normalized (percent) confusion matrix as an image."""
        if not self.draw or not self.confusion_matrix:
            return
        klasses = max(predictions.max(), labels.max()) + 1
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        cmat = np.bincount(klasses * labels + predictions,
                            minlength=klasses ** 2)
        cmat = cmat.reshape(klasses, klasses)
        cmat = 100 * cmat / np.maximum(1, cmat.sum(1, keepdims=True))
        im = plt.imshow(cmat, vmin=0, vmax=100, cmap="OrRd")
        plt.gca().set_xticks(np.arange(klasses))
        plt.gca().set_yticks(np.arange(klasses))
        colors = ("black", "white")
        for i in range(klasses):
            for j in range(klasses):
                val = cmat[i, j]
                # White text on dark cells, black on light ones.
                color = (colors[0] if val < 50 else colors[1])
                im.axes.text(j, i, "%.1f" % val, color=color,
                             horizontalalignment="center",
                             verticalalignment="center")

    def train_step(self, X, Y, steps):
        # Overridden by subclasses: run `steps` training iterations.
        pass

    def inference(self, X):
        # Overridden by subclasses: return (labels, scores).
        pass

    def loss(self, Y, P):
        # Overridden by subclasses: loss of scores P w.r.t. labels Y.
        pass
# Registry of model name -> DemoModel subclass, filled by the decorator.
_MODELS = {}


def _register_model(name):
    """Class decorator registering a model under `name` in _MODELS."""
    def decorator(cls):
        _MODELS[name] = cls
        return cls
    return decorator
@_register_model("logreg")
class LogisticRegressionModel(DemoModel):
    """Binary logistic regression trained by gradient descent."""

    def __init__(self, args):
        super().__init__(args, True)
        self.w = None
        self.b = 0

    def train_step(self, X, Y, steps):
        # Resume from the current parameters so training is incremental.
        self.w, self.b = pvml.logreg_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.w, init_b=self.b)

    def inference(self, X):
        probs = pvml.logreg_inference(X, self.w, self.b)
        labels = (probs > 0.5).astype(int)
        return labels, probs

    def loss(self, Y, P):
        return pvml.binary_cross_entropy(Y, P)
@_register_model("logreg_l1")
class LogisticRegressionL1Model(LogisticRegressionModel):
    """Logistic regression with L1 (sparsity-inducing) regularization."""

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.logreg_l1_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.w, init_b=self.b)
@_register_model("ksvm")
class KernelSVMModel(DemoModel):
    """Binary kernel SVM; keeps the training set for kernel evaluations."""

    def __init__(self, args):
        super().__init__(args, True)
        self.alpha = None
        self.b = 0
        self.Xtrain = None
        self.kfun = args.kernel
        self.kparam = args.kernel_param

    def train_step(self, X, Y, steps):
        self.Xtrain = X
        self.alpha, self.b = pvml.ksvm_train(
            X, Y, self.kfun, self.kparam, lr=self.lr,
            lambda_=self.lambda_, steps=steps,
            init_alpha=self.alpha, init_b=self.b)

    def inference(self, X):
        labels, logits = pvml.ksvm_inference(X, self.Xtrain, self.alpha,
                                             self.b, self.kfun, self.kparam)
        # Shift so the decision threshold sits at 0.5 like the other
        # binary models (the plotting code contours at 0.5).
        return labels, logits + 0.5

    def loss(self, Y, P):
        return pvml.hinge_loss(Y, P - 0.5)
@_register_model("svm")
class LinearSVMModel(DemoModel):
    """Binary linear SVM trained by (sub)gradient descent."""

    def __init__(self, args):
        super().__init__(args, True)
        self.w = None
        self.b = 0

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.svm_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.w, init_b=self.b)

    def inference(self, X):
        labels, logits = pvml.svm_inference(X, self.w, self.b)
        # Shift so the decision threshold sits at 0.5 for the plots.
        return labels, logits + 0.5

    def loss(self, Y, P):
        return pvml.hinge_loss(Y, P - 0.5)
@_register_model("multinomial")
class MultinomialLogisticRegressionModel(DemoModel):
    """Multiclass (softmax) logistic regression."""

    def __init__(self, args):
        super().__init__(args, False)
        self.w = None
        self.b = None

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.multinomial_logreg_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.w, init_b=self.b)

    def inference(self, X):
        probs = pvml.multinomial_logreg_inference(X, self.w, self.b)
        return probs.argmax(1), probs

    def loss(self, Y, P):
        return pvml.cross_entropy(Y, P)
@_register_model("ovo_svm")
class OvoSVMModel(DemoModel):
    """Multiclass linear SVM with one-vs-one voting."""

    def __init__(self, args):
        super().__init__(args, False)
        self.W = None
        self.b = None

    def train_step(self, X, Y, steps):
        self.W, self.b = pvml.one_vs_one_svm_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.W, init_b=self.b)

    def inference(self, X):
        return pvml.one_vs_one_svm_inference(X, self.W, self.b)
@_register_model("ovr_svm")
class OvrSVMModel(DemoModel):
    """Multiclass linear SVM with one-vs-rest decomposition."""

    def __init__(self, args):
        super().__init__(args, False)
        self.W = None
        self.b = None

    def train_step(self, X, Y, steps):
        self.W, self.b = pvml.one_vs_rest_svm_train(
            X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,
            init_w=self.W, init_b=self.b)

    def inference(self, X):
        return pvml.one_vs_rest_svm_inference(X, self.W, self.b)
@_register_model("ovo_ksvm")
class OvoKSVMModel(DemoModel):
    """Multiclass kernel SVM with one-vs-one voting."""

    def __init__(self, args):
        super().__init__(args, False)
        self.Xtrain = None
        self.alpha = None
        self.b = None
        self.kfun = args.kernel
        self.kparam = args.kernel_param

    def train_step(self, X, Y, steps):
        # Keep a copy of the training data for kernel evaluations.
        self.Xtrain = X.copy()
        self.alpha, self.b = pvml.one_vs_one_ksvm_train(
            X, Y, self.kfun, self.kparam, lr=self.lr,
            lambda_=self.lambda_, steps=steps,
            init_alpha=self.alpha, init_b=self.b)

    def inference(self, X):
        return pvml.one_vs_one_ksvm_inference(X, self.Xtrain, self.alpha,
                                        self.b, self.kfun, self.kparam)
@_register_model("ovr_ksvm")
class OvrKSVMModel(DemoModel):
    """Multiclass kernel SVM with one-vs-rest decomposition."""

    def __init__(self, args):
        super().__init__(args, False)
        self.Xtrain = None
        self.alpha = None
        self.b = None
        self.kfun = args.kernel
        self.kparam = args.kernel_param

    def train_step(self, X, Y, steps):
        # Keep a copy of the training data for kernel evaluations.
        self.Xtrain = X.copy()
        self.alpha, self.b = pvml.one_vs_rest_ksvm_train(
            X, Y, self.kfun, self.kparam, lr=self.lr,
            lambda_=self.lambda_, steps=steps,
            init_alpha=self.alpha, init_b=self.b)

    def inference(self, X):
        return pvml.one_vs_rest_ksvm_inference(X, self.Xtrain, self.alpha,
                                         self.b, self.kfun, self.kparam)
@_register_model("hgda")
class HeteroscedasticGDA(DemoModel):
    """Gaussian discriminant analysis with per-class covariance matrices."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None
        # Fixed: this attribute used to be (mis)named `icovs`, while
        # train_step and inference use `invcovs`.
        self.invcovs = None
        self.priors = None

    def train_step(self, X, Y, steps):
        self.means, self.invcovs, self.priors = pvml.hgda_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.hgda_inference(X, self.means, self.invcovs,
                                   self.priors)
@_register_model("ogda")
class OmoscedasticGDA(DemoModel):
    """GDA with a shared covariance matrix (linear decision boundaries)."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.w = None
        self.b = None

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.ogda_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.ogda_inference(X, self.w, self.b)
@_register_model("mindist")
class MinimumDistanceClassifier(DemoModel):
    """Assigns each sample to the class with the closest mean."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None

    def train_step(self, X, Y, steps):
        self.means = pvml.mindist_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.mindist_inference(X, self.means)
@_register_model("categorical_nb")
class CategoricalNaiveBayes(DemoModel):
    """Naive Bayes for categorical (discrete-valued) features."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.probs = None
        self.priors = None

    def train_step(self, X, Y, steps):
        self.probs, self.priors = pvml.categorical_naive_bayes_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).  (Removed a dead `labels, scores = ret`
        # unpacking whose results were never used.)
        return pvml.categorical_naive_bayes_inference(X, self.probs,
                                                self.priors)
@_register_model("multinomial_nb")
class MultinomialNaiveBayes(DemoModel):
    """Naive Bayes for count-valued features (multinomial model)."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.w = None
        self.b = None

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.multinomial_naive_bayes_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).  (Removed a dead `labels, scores = ret`
        # unpacking whose results were never used.)
        return pvml.multinomial_naive_bayes_inference(X, self.w, self.b)
@_register_model("gaussian_nb")
class GaussianNaiveBayes(DemoModel):
    """Naive Bayes with per-class, per-feature Gaussian likelihoods."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None
        self.vars = None
        self.priors = None

    def train_step(self, X, Y, steps):
        self.means, self.vars, self.priors = \
            pvml.gaussian_naive_bayes_train(X, Y)

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.gaussian_naive_bayes_inference(X, self.means,
                                             self.vars, self.priors)
@_register_model("classtree")
class ClassificationTree(DemoModel):
    """Decision tree with cross-validated pruning (non-iterative)."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.tree = pvml.ClassificationTree()
        self.minsize = args.classtree_minsize
        self.cv = args.classtree_cv
        self.diversity = args.classtree_diversity

    def train_step(self, X, Y, steps):
        self.tree.train(X, Y, minsize=self.minsize,
                        diversity=self.diversity, pruning_cv=self.cv)

    def inference(self, X):
        return self.tree.inference(X)
@_register_model("perceptron")
class Perceptron(DemoModel):
    """Classic binary perceptron."""

    def __init__(self, args):
        super().__init__(args, True)
        self.w = None
        self.b = 0

    def train_step(self, X, Y, steps):
        self.w, self.b = pvml.perceptron_train(X, Y, steps, init_w=self.w,
                                               init_b=self.b)

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.perceptron_inference(X, self.w, self.b)
@_register_model("knn")
class KNN(DemoModel):
    """K-nearest-neighbors classifier (memorizes the training data)."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.X = None
        self.Y = None
        self.k = args.knn_k

    def train_step(self, X, Y, steps):
        self.X = X.copy()
        self.Y = Y.copy()
        if self.k < 1:
            # k < 1 requests automatic selection of the neighbor count.
            print("Select K... ", end="", flush=True)
            self.k, acc = pvml.knn_select_k(X, Y)
            print("{} ({:.3f}%)".format(self.k, acc * 100))

    def inference(self, X):
        # Returns (labels, scores).
        return pvml.knn_inference(X, self.X, self.Y, self.k)
@_register_model("kmeans")
class KMeans(DemoModel):
    """K-means clustering used as a classifier (one centroid per class)."""

    def __init__(self, args):
        super().__init__(args, False)
        self.k = 2
        self.centroids = None

    def train_step(self, X, Y, steps):
        # One cluster per class label.
        new_k = Y.max() + 1
        if new_k > self.k:
            # If the classes change centroids are reset
            self.centroids = None
            self.k = new_k
        self.centroids = pvml.kmeans_train(X, self.k, steps=steps,
                                           init_centroids=self.centroids)
        self._sort_centroids(X, Y)

    def inference(self, X):
        # Returns (labels, scores).
        ret = pvml.kmeans_inference(X, self.centroids)
        return ret

    def _sort_centroids(self, X, Y):
        # K-means labels do not correspond to training labels. A
        # categorical classifier is used to reorder the centroids to
        # minimize the error.
        P, _ = pvml.kmeans_inference(X, self.centroids)
        probs, priors = pvml.categorical_naive_bayes_train(P[:, None], Y)
        YK = np.arange(self.k)[:, None]
        Q, _ = pvml.categorical_naive_bayes_inference(YK, probs, priors)
        ii = np.argsort(Q)
        self.centroids = self.centroids[ii, :]
@_register_model("mlp")
class MultiLayerPerceptron(DemoModel):
    """Fully-connected neural network backed by pvml.MLP."""

    def __init__(self, args):
        super().__init__(args, False)
        self.net = None
        self.hidden = [int(x) for x in args.mlp_hidden.split(",") if x.strip()]
        self.momentum = args.mlp_momentum
        self.batch = args.mlp_batch

    def train_step(self, X, Y, steps):
        if self.net is None:
            # Build the network lazily, once the data dimensions are known.
            layout = [X.shape[1]] + self.hidden + [Y.max() + 1]
            self.net = pvml.MLP(layout)
        self.net.train(X, Y, lr=self.lr, lambda_=self.lambda_,
                       momentum=self.momentum, steps=steps,
                       batch=self.batch)

    def inference(self, X):
        # Returns (labels, scores).
        return self.net.inference(X)

    def loss(self, Y, P):
        return self.net.loss(Y, P)
def select_features(X, Y, features, class_):
    """Re-select the feature columns and the label column.

    Columns are indexed into the matrix obtained by appending Y as the
    last column of X.  `features` is a comma-separated string of column
    indices (None keeps the default selection); `class_` is the index of
    the column holding the labels (-1 is the appended Y).  With both
    defaults the data is returned untouched.
    """
    if features is None and class_ == -1:
        return X, Y
    if features is None:
        cols = np.arange(X.shape[1] - 1)
    else:
        cols = np.array([int(c) for c in features.split(",")])
    data = np.concatenate((X, Y[:, None]), 1)
    return data[:, cols], data[:, class_]
def normalization(X, Xtest, fun):
    """Apply the normalization scheme named `fun` to the data.

    Returns the normalized training data and test data (None when no
    test set was given).
    """
    normalize = _NORMALIZATION[fun]
    if Xtest is None:
        return normalize(X), None
    return normalize(X, Xtest)
def main():
    """Load the data, train the selected model and show the plots."""
    args = parse_args()
    np.random.seed(args.seed)
    X, Y = pvml.load_dataset(args.train)
    print("Training set loaded: {} samples, {} features, {} classes".format(
        X.shape[0], X.shape[1], Y.max() + 1))
    X, Y = select_features(X, Y, args.features, args.class_)
    if args.test:
        Xtest, Ytest = pvml.load_dataset(args.test)
        print("Test set loaded: {} samples, {} features, {} classes".format(
            Xtest.shape[0], Xtest.shape[1], Ytest.max() + 1))
        Xtest, Ytest = select_features(Xtest, Ytest, args.features,
                                        args.class_)
    else:
        Xtest, Ytest = None, None
    X, Xtest = normalization(X, Xtest, args.normalization)
    model = _MODELS[args.model](args)
    if model.binary:
        # Binary models expect 0/1 labels: collapse all positive classes.
        Y = (Y > 0).astype(int)
        if Ytest is not None:
            Ytest = (Ytest > 0).astype(int)
    # Interactive mode lets the plots refresh while training runs.
    plt.ion()
    model.train(X, Y, Xtest, Ytest, args.steps)
    plt.ioff()
    print("TRAINING COMPLETED")
    plt.show()


if __name__ == "__main__":
    main()
| [
"pvml.logreg_l1_train",
"pvml.kmeans_train",
"pvml.svm_inference",
"pvml.ksvm_train",
"numpy.argsort",
"pvml.ClassificationTree",
"pvml.binary_cross_entropy",
"pvml.ogda_inference",
"pvml.svm_train",
"pvml.perceptron_inference",
"pvml.hgda_train",
"numpy.arange",
"matplotlib.pyplot.imshow",
... | [((490, 536), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Classification demo"""'], {}), "('Classification demo')\n", (513, 536), False, 'import argparse\n'), ((21042, 21076), 'numpy.concatenate', 'np.concatenate', (['(X, Y[:, None])', '(1)'], {}), '((X, Y[:, None]), 1)\n', (21056, 21076), True, 'import numpy as np\n'), ((21342, 21367), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (21356, 21367), True, 'import numpy as np\n'), ((21379, 21408), 'pvml.load_dataset', 'pvml.load_dataset', (['args.train'], {}), '(args.train)\n', (21396, 21408), False, 'import pvml\n'), ((22198, 22207), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (22205, 22207), True, 'import matplotlib.pyplot as plt\n'), ((22260, 22270), 'matplotlib.pyplot.ioff', 'plt.ioff', ([], {}), '()\n', (22268, 22270), True, 'import matplotlib.pyplot as plt\n'), ((22307, 22317), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (22315, 22317), True, 'import matplotlib.pyplot as plt\n'), ((5291, 5309), 'matplotlib.pyplot.figure', 'plt.figure', (['fignum'], {}), '(fignum)\n', (5301, 5309), True, 'import matplotlib.pyplot as plt\n'), ((5318, 5327), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5325, 5327), True, 'import matplotlib.pyplot as plt\n'), ((5336, 5352), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (5345, 5352), True, 'import matplotlib.pyplot as plt\n'), ((5361, 5385), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iterations"""'], {}), "('Iterations')\n", (5371, 5385), True, 'import matplotlib.pyplot as plt\n'), ((5648, 5666), 'matplotlib.pyplot.figure', 'plt.figure', (['fignum'], {}), '(fignum)\n', (5658, 5666), True, 'import matplotlib.pyplot as plt\n'), ((5675, 5684), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5682, 5684), True, 'import matplotlib.pyplot as plt\n'), ((5693, 5709), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (5702, 5709), True, 'import 
matplotlib.pyplot as plt\n'), ((5718, 5774), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, 0]', 'X[:, 1]'], {'c': 'Y', 'cmap': 'plt.cm.coolwarm'}), '(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.coolwarm)\n', (5729, 5774), True, 'import matplotlib.pyplot as plt\n'), ((5872, 5907), 'numpy.linspace', 'np.linspace', (['xmin', 'xmax', 'resolution'], {}), '(xmin, xmax, resolution)\n', (5883, 5907), True, 'import numpy as np\n'), ((5921, 5956), 'numpy.linspace', 'np.linspace', (['ymin', 'ymax', 'resolution'], {}), '(ymin, ymax, resolution)\n', (5932, 5956), True, 'import numpy as np\n'), ((5974, 5993), 'numpy.meshgrid', 'np.meshgrid', (['ax', 'ay'], {}), '(ax, ay)\n', (5985, 5993), True, 'import numpy as np\n'), ((7222, 7240), 'matplotlib.pyplot.figure', 'plt.figure', (['fignum'], {}), '(fignum)\n', (7232, 7240), True, 'import matplotlib.pyplot as plt\n'), ((7249, 7258), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (7256, 7258), True, 'import matplotlib.pyplot as plt\n'), ((7267, 7283), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (7276, 7283), True, 'import matplotlib.pyplot as plt\n'), ((7299, 7366), 'numpy.bincount', 'np.bincount', (['(klasses * labels + predictions)'], {'minlength': '(klasses ** 2)'}), '(klasses * labels + predictions, minlength=klasses ** 2)\n', (7310, 7366), True, 'import numpy as np\n'), ((7523, 7570), 'matplotlib.pyplot.imshow', 'plt.imshow', (['cmat'], {'vmin': '(0)', 'vmax': '(100)', 'cmap': '"""OrRd"""'}), "(cmat, vmin=0, vmax=100, cmap='OrRd')\n", (7533, 7570), True, 'import matplotlib.pyplot as plt\n'), ((8537, 8641), 'pvml.logreg_train', 'pvml.logreg_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_w': 'self.w', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,\n init_w=self.w, init_b=self.b)\n', (8554, 8641), False, 'import pvml\n'), ((8772, 8812), 'pvml.logreg_inference', 'pvml.logreg_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, 
self.b)\n', (8793, 8812), False, 'import pvml\n'), ((8895, 8926), 'pvml.binary_cross_entropy', 'pvml.binary_cross_entropy', (['Y', 'P'], {}), '(Y, P)\n', (8920, 8926), False, 'import pvml\n'), ((9070, 9177), 'pvml.logreg_l1_train', 'pvml.logreg_l1_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_w': 'self.w', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps,\n init_w=self.w, init_b=self.b)\n', (9090, 9177), False, 'import pvml\n'), ((9622, 9757), 'pvml.ksvm_train', 'pvml.ksvm_train', (['X', 'Y', 'self.kfun', 'self.kparam'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_alpha': 'self.alpha', 'init_b': 'self.b'}), '(X, Y, self.kfun, self.kparam, lr=self.lr, lambda_=self.\n lambda_, steps=steps, init_alpha=self.alpha, init_b=self.b)\n', (9637, 9757), False, 'import pvml\n'), ((9919, 9998), 'pvml.ksvm_inference', 'pvml.ksvm_inference', (['X', 'self.Xtrain', 'self.alpha', 'self.b', 'self.kfun', 'self.kparam'], {}), '(X, self.Xtrain, self.alpha, self.b, self.kfun, self.kparam)\n', (9938, 9998), False, 'import pvml\n'), ((10140, 10167), 'pvml.hinge_loss', 'pvml.hinge_loss', (['Y', '(P - 0.5)'], {}), '(Y, P - 0.5)\n', (10155, 10167), False, 'import pvml\n'), ((10389, 10491), 'pvml.svm_train', 'pvml.svm_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_w': 'self.w', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps=steps, init_w=\n self.w, init_b=self.b)\n', (10403, 10491), False, 'import pvml\n'), ((10628, 10665), 'pvml.svm_inference', 'pvml.svm_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, self.b)\n', (10646, 10665), False, 'import pvml\n'), ((10744, 10771), 'pvml.hinge_loss', 'pvml.hinge_loss', (['Y', '(P - 0.5)'], {}), '(Y, P - 0.5)\n', (10759, 10771), False, 'import pvml\n'), ((11036, 11153), 'pvml.multinomial_logreg_train', 'pvml.multinomial_logreg_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 
'self.lambda_', 'steps': 'steps', 'init_w': 'self.w', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps\n =steps, init_w=self.w, init_b=self.b)\n', (11065, 11153), False, 'import pvml\n'), ((11239, 11291), 'pvml.multinomial_logreg_inference', 'pvml.multinomial_logreg_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, self.b)\n', (11272, 11291), False, 'import pvml\n'), ((11304, 11319), 'numpy.argmax', 'np.argmax', (['P', '(1)'], {}), '(P, 1)\n', (11313, 11319), True, 'import numpy as np\n'), ((11382, 11406), 'pvml.cross_entropy', 'pvml.cross_entropy', (['Y', 'P'], {}), '(Y, P)\n', (11400, 11406), False, 'import pvml\n'), ((11633, 11746), 'pvml.one_vs_one_svm_train', 'pvml.one_vs_one_svm_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_w': 'self.W', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps=\n steps, init_w=self.W, init_b=self.b)\n', (11658, 11746), False, 'import pvml\n'), ((11895, 11943), 'pvml.one_vs_one_svm_inference', 'pvml.one_vs_one_svm_inference', (['X', 'self.W', 'self.b'], {}), '(X, self.W, self.b)\n', (11924, 11943), False, 'import pvml\n'), ((12170, 12284), 'pvml.one_vs_rest_svm_train', 'pvml.one_vs_rest_svm_train', (['X', 'Y'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_w': 'self.W', 'init_b': 'self.b'}), '(X, Y, lr=self.lr, lambda_=self.lambda_, steps=\n steps, init_w=self.W, init_b=self.b)\n', (12196, 12284), False, 'import pvml\n'), ((12435, 12484), 'pvml.one_vs_rest_svm_inference', 'pvml.one_vs_rest_svm_inference', (['X', 'self.W', 'self.b'], {}), '(X, self.W, self.b)\n', (12465, 12484), False, 'import pvml\n'), ((12847, 12992), 'pvml.one_vs_one_ksvm_train', 'pvml.one_vs_one_ksvm_train', (['X', 'Y', 'self.kfun', 'self.kparam'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_alpha': 'self.alpha', 'init_b': 'self.b'}), '(X, Y, self.kfun, self.kparam, lr=self.lr,\n lambda_=self.lambda_, steps=steps, 
init_alpha=self.alpha, init_b=self.b)\n', (12873, 12992), False, 'import pvml\n'), ((13189, 13284), 'pvml.one_vs_one_ksvm_inference', 'pvml.one_vs_one_ksvm_inference', (['X', 'self.Xtrain', 'self.alpha', 'self.b', 'self.kfun', 'self.kparam'], {}), '(X, self.Xtrain, self.alpha, self.b, self.\n kfun, self.kparam)\n', (13219, 13284), False, 'import pvml\n'), ((13688, 13834), 'pvml.one_vs_rest_ksvm_train', 'pvml.one_vs_rest_ksvm_train', (['X', 'Y', 'self.kfun', 'self.kparam'], {'lr': 'self.lr', 'lambda_': 'self.lambda_', 'steps': 'steps', 'init_alpha': 'self.alpha', 'init_b': 'self.b'}), '(X, Y, self.kfun, self.kparam, lr=self.lr,\n lambda_=self.lambda_, steps=steps, init_alpha=self.alpha, init_b=self.b)\n', (13715, 13834), False, 'import pvml\n'), ((14034, 14130), 'pvml.one_vs_rest_ksvm_inference', 'pvml.one_vs_rest_ksvm_inference', (['X', 'self.Xtrain', 'self.alpha', 'self.b', 'self.kfun', 'self.kparam'], {}), '(X, self.Xtrain, self.alpha, self.b, self.\n kfun, self.kparam)\n', (14065, 14130), False, 'import pvml\n'), ((14445, 14466), 'pvml.hgda_train', 'pvml.hgda_train', (['X', 'Y'], {}), '(X, Y)\n', (14460, 14466), False, 'import pvml\n'), ((14562, 14623), 'pvml.hgda_inference', 'pvml.hgda_inference', (['X', 'self.means', 'self.invcovs', 'self.priors'], {}), '(X, self.means, self.invcovs, self.priors)\n', (14581, 14623), False, 'import pvml\n'), ((14962, 14983), 'pvml.ogda_train', 'pvml.ogda_train', (['X', 'Y'], {}), '(X, Y)\n', (14977, 14983), False, 'import pvml\n'), ((15038, 15076), 'pvml.ogda_inference', 'pvml.ogda_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, self.b)\n', (15057, 15076), False, 'import pvml\n'), ((15343, 15367), 'pvml.mindist_train', 'pvml.mindist_train', (['X', 'Y'], {}), '(X, Y)\n', (15361, 15367), False, 'import pvml\n'), ((15422, 15459), 'pvml.mindist_inference', 'pvml.mindist_inference', (['X', 'self.means'], {}), '(X, self.means)\n', (15444, 15459), False, 'import pvml\n'), ((15749, 15789), 
'pvml.categorical_naive_bayes_train', 'pvml.categorical_naive_bayes_train', (['X', 'Y'], {}), '(X, Y)\n', (15783, 15789), False, 'import pvml\n'), ((15871, 15937), 'pvml.categorical_naive_bayes_inference', 'pvml.categorical_naive_bayes_inference', (['X', 'self.probs', 'self.priors'], {}), '(X, self.probs, self.priors)\n', (15909, 15937), False, 'import pvml\n'), ((16289, 16329), 'pvml.multinomial_naive_bayes_train', 'pvml.multinomial_naive_bayes_train', (['X', 'Y'], {}), '(X, Y)\n', (16323, 16329), False, 'import pvml\n'), ((16402, 16459), 'pvml.multinomial_naive_bayes_inference', 'pvml.multinomial_naive_bayes_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, self.b)\n', (16440, 16459), False, 'import pvml\n'), ((16839, 16876), 'pvml.gaussian_naive_bayes_train', 'pvml.gaussian_naive_bayes_train', (['X', 'Y'], {}), '(X, Y)\n', (16870, 16876), False, 'import pvml\n'), ((16969, 17043), 'pvml.gaussian_naive_bayes_inference', 'pvml.gaussian_naive_bayes_inference', (['X', 'self.means', 'self.vars', 'self.priors'], {}), '(X, self.means, self.vars, self.priors)\n', (17004, 17043), False, 'import pvml\n'), ((17327, 17352), 'pvml.ClassificationTree', 'pvml.ClassificationTree', ([], {}), '()\n', (17350, 17352), False, 'import pvml\n'), ((17956, 18020), 'pvml.perceptron_train', 'pvml.perceptron_train', (['X', 'Y', 'steps'], {'init_w': 'self.w', 'init_b': 'self.b'}), '(X, Y, steps, init_w=self.w, init_b=self.b)\n', (17977, 18020), False, 'import pvml\n'), ((18129, 18173), 'pvml.perceptron_inference', 'pvml.perceptron_inference', (['X', 'self.w', 'self.b'], {}), '(X, self.w, self.b)\n', (18154, 18173), False, 'import pvml\n'), ((18710, 18755), 'pvml.knn_inference', 'pvml.knn_inference', (['X', 'self.X', 'self.Y', 'self.k'], {}), '(X, self.X, self.Y, self.k)\n', (18728, 18755), False, 'import pvml\n'), ((19183, 19255), 'pvml.kmeans_train', 'pvml.kmeans_train', (['X', 'self.k'], {'steps': 'steps', 'init_centroids': 'self.centroids'}), '(X, self.k, steps=steps, 
init_centroids=self.centroids)\n', (19200, 19255), False, 'import pvml\n'), ((19377, 19417), 'pvml.kmeans_inference', 'pvml.kmeans_inference', (['X', 'self.centroids'], {}), '(X, self.centroids)\n', (19398, 19417), False, 'import pvml\n'), ((19655, 19695), 'pvml.kmeans_inference', 'pvml.kmeans_inference', (['X', 'self.centroids'], {}), '(X, self.centroids)\n', (19676, 19695), False, 'import pvml\n'), ((19720, 19769), 'pvml.categorical_naive_bayes_train', 'pvml.categorical_naive_bayes_train', (['P[:, None]', 'Y'], {}), '(P[:, None], Y)\n', (19754, 19769), False, 'import pvml\n'), ((19825, 19882), 'pvml.categorical_naive_bayes_inference', 'pvml.categorical_naive_bayes_inference', (['YK', 'probs', 'priors'], {}), '(YK, probs, priors)\n', (19863, 19882), False, 'import pvml\n'), ((19896, 19909), 'numpy.argsort', 'np.argsort', (['Q'], {}), '(Q)\n', (19906, 19909), True, 'import numpy as np\n'), ((20930, 20955), 'numpy.arange', 'np.arange', (['(X.shape[1] - 1)'], {}), '(X.shape[1] - 1)\n', (20939, 20955), True, 'import numpy as np\n'), ((21636, 21664), 'pvml.load_dataset', 'pvml.load_dataset', (['args.test'], {}), '(args.test)\n', (21653, 21664), False, 'import pvml\n'), ((4596, 4613), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.0001)'], {}), '(0.0001)\n', (4605, 4613), True, 'import matplotlib.pyplot as plt\n'), ((5416, 5438), 'matplotlib.pyplot.plot', 'plt.plot', (['iters', 'train'], {}), '(iters, train)\n', (5424, 5438), True, 'import matplotlib.pyplot as plt\n'), ((5468, 5489), 'matplotlib.pyplot.plot', 'plt.plot', (['iters', 'test'], {}), '(iters, test)\n', (5476, 5489), True, 'import matplotlib.pyplot as plt\n'), ((5502, 5531), 'matplotlib.pyplot.legend', 'plt.legend', (["['train', 'test']"], {}), "(['train', 'test'])\n", (5512, 5531), True, 'import matplotlib.pyplot as plt\n'), ((6163, 6214), 'matplotlib.pyplot.contour', 'plt.contour', (['gx', 'gy', 'v', '[0.5]'], {'cmap': 'plt.cm.coolwarm'}), '(gx, gy, v, [0.5], cmap=plt.cm.coolwarm)\n', (6174, 6214), True, 
'import matplotlib.pyplot as plt\n'), ((7600, 7618), 'numpy.arange', 'np.arange', (['klasses'], {}), '(klasses)\n', (7609, 7618), True, 'import numpy as np\n'), ((7649, 7667), 'numpy.arange', 'np.arange', (['klasses'], {}), '(klasses)\n', (7658, 7667), True, 'import numpy as np\n'), ((18583, 18606), 'pvml.knn_select_k', 'pvml.knn_select_k', (['X', 'Y'], {}), '(X, Y)\n', (18600, 18606), False, 'import pvml\n'), ((19783, 19800), 'numpy.arange', 'np.arange', (['self.k'], {}), '(self.k)\n', (19792, 19800), True, 'import numpy as np\n'), ((20428, 20444), 'pvml.MLP', 'pvml.MLP', (['counts'], {}), '(counts)\n', (20436, 20444), False, 'import pvml\n'), ((4808, 4875), 'itertools.zip_longest', 'zip_longest', (['iterations', 'train_acc', 'test_acc', 'train_loss', 'test_loss'], {}), '(iterations, train_acc, test_acc, train_loss, test_loss)\n', (4819, 4875), False, 'from itertools import zip_longest\n'), ((5796, 5805), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5803, 5805), True, 'import matplotlib.pyplot as plt\n'), ((5838, 5847), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5845, 5847), True, 'import matplotlib.pyplot as plt\n'), ((6381, 6432), 'matplotlib.pyplot.contour', 'plt.contour', (['gx', 'gy', 'v', '[0.0]'], {'cmap': 'plt.cm.coolwarm'}), '(gx, gy, v, [0.0], cmap=plt.cm.coolwarm)\n', (6392, 6432), True, 'import matplotlib.pyplot as plt\n'), ((6624, 6676), 'matplotlib.pyplot.contour', 'plt.contour', (['gx', 'gy', 'v', 'values'], {'cmap': 'plt.cm.coolwarm'}), '(gx, gy, v, values, cmap=plt.cm.coolwarm)\n', (6635, 6676), True, 'import matplotlib.pyplot as plt\n'), ((7579, 7588), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (7586, 7588), True, 'import matplotlib.pyplot as plt\n'), ((7628, 7637), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (7635, 7637), True, 'import matplotlib.pyplot as plt\n'), ((6516, 6541), 'numpy.arange', 'np.arange', (['(v.shape[1] - 1)'], {}), '(v.shape[1] - 1)\n', (6525, 6541), True, 'import numpy as np\n'), 
((4670, 4690), 'matplotlib.pyplot.fignum_exists', 'plt.fignum_exists', (['(0)'], {}), '(0)\n', (4687, 4690), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 11 13:46:58 2019
@author: bdgecyt
"""
import cv2
import math
from time import time
import numpy as np
import wrapper
from operator import itemgetter
# Shared mutable state between the cv2 mouse callback and the main script.
boxes = []   # [x, y] endpoints collected by on_mouse(); consecutive pairs form line segments
xCount = 0   # unused counter (kept for compatibility)
yCount = 0   # unused counter (kept for compatibility)
iter = 0     # number of completed press/release click pairs (note: shadows builtin iter)
img = 0      # current frame; replaced with a numpy image in the __main__ block
def on_mouse(event, x, y, flags, params):
    """OpenCV mouse callback: record click-drag endpoints into the global
    ``boxes`` list.

    A left-button press appends the start point and the matching release
    appends the end point, so consecutive entries of ``boxes`` form line
    segments.  The global ``iter`` counts completed press/release pairs.

    Args:
        event: OpenCV mouse event code.
        x, y: cursor position in image coordinates.
        flags, params: unused, required by the cv2 callback signature.
    """
    # Fixed: removed an unused `t = time()` local and an unused `global img`
    # declaration (img is never referenced here).
    global iter
    if event == cv2.EVENT_LBUTTONDOWN:
        print('Start Mouse Position: '+str(x)+', '+str(y))
        boxes.append([x, y])
    elif event == cv2.EVENT_LBUTTONUP:
        print('End Mouse Position: '+str(x)+', '+str(y))
        boxes.append([x, y])
        # One full press/release pair completed.
        iter += 1
def split(start, end, segments):
    """Return ``segments + 1`` evenly spaced points from ``start`` to ``end``.

    Args:
        start: [x, y] start point.
        end: [x, y] end point.
        segments: number of equal sub-segments to divide the line into.

    Returns:
        List of [x, y] points including both original endpoints
        (length ``segments + 1``).
    """
    x_delta = (end[0] - start[0]) / float(segments)
    y_delta = (end[1] - start[1]) / float(segments)
    # Interior points in one pass (comprehension instead of append loop);
    # the endpoints are the original objects, untouched.
    points = [[start[0] + i * x_delta, start[1] + i * y_delta]
              for i in range(1, segments)]
    return [start] + points + [end]
def line_intersection(line1, line2):
    """Intersect two infinite lines, each given as a pair of (x, y) points.

    Returns:
        (x, y) coordinates of the intersection point.

    Raises:
        Exception: if the lines are parallel (zero determinant).
    """
    def cross(p, q):
        # 2-D cross product: determinant of the 2x2 matrix [p; q].
        return p[0] * q[1] - p[1] * q[0]

    dx = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
    dy = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
    denom = cross(dx, dy)
    if denom == 0:
        raise Exception('lines do not intersect')
    d = (cross(*line1), cross(*line2))
    return cross(d, dx) / denom, cross(d, dy) / denom
def norm(point1, point2):
    """Euclidean distance between two 2-D points.

    Args:
        point1, point2: indexable (x, y) pairs.

    Returns:
        Non-negative float distance.
    """
    # math.hypot is the stdlib primitive for sqrt(dx*dx + dy*dy); it is more
    # robust against intermediate overflow than squaring by hand, and this
    # also removes a local variable that shadowed the function's own name.
    return math.hypot(point1[0] - point2[0], point1[1] - point2[1])
def orderptinline(pts, vp):
    """Rank collinear points by their distance from the vanishing point.

    Args:
        pts: sequence of (x, y) points.
        vp: vanishing point the distances are measured against.

    Returns:
        A string of the original indices of ``pts`` ordered from the
        farthest point to the nearest one, e.g. "0123".
    """
    distances = [norm(p, vp) for p in pts]
    # np.argsort yields nearest-first order; reverse it for farthest-first.
    farthest_first = np.argsort(distances)[::-1]
    return ''.join(str(idx) for idx in farthest_first)
def getborderpt(line1, line2):
    """Border point of two lines: thin alias for :func:`line_intersection`."""
    border_point = line_intersection(line1, line2)
    return border_point
def findAnglebetVP(line, vp):
    """Angle in degrees at ``line[1]`` between the segment and the vanishing point.

    The angle is measured at vertex ``line[1]`` between the ray back towards
    ``line[0]`` and the ray towards ``vp``.

    Args:
        line: pair of (x, y) points; the angle vertex is line[1].
        vp: (x, y) vanishing point.

    Returns:
        Angle in degrees as a numpy float.
    """
    vertex = np.array(line[1])
    to_start = np.array(line[0]) - vertex
    to_vp = np.array(vp) - vertex
    cos_theta = np.dot(to_start, to_vp) / (
        np.linalg.norm(to_start) * np.linalg.norm(to_vp))
    return np.degrees(np.arccos(cos_theta))
def estimatelength(order,a1,a2,r1,r2, Vanish, response):
    """Estimate the real-world length of segment a1-a2 from the reference
    segment r1-r2 of known length, using ratios of image distances to the
    shared vanishing point (single-view metrology along one line).

    Args:
        order: "0123"-style string giving the farthest-to-nearest ordering of
            the four collinear points (a1=0, a2=1, r1=2, r2=3) with respect to
            the vanishing point, as produced by orderptinline().
        a1, a2: image endpoints of the segment being measured.
        r1, r2: image endpoints of the reference segment.
        Vanish: vanishing point the four points converge towards.
        response: known real-world length of r1-r2.

    Returns:
        Estimated real-world length of a1-a2, or the sentinel 99999 when the
        point ordering is not one of the handled configurations.
    """
    if order == "0123":
#        print("order is:" + order)
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
#        print(reflength)
        ref2length = (norm(a2, r2)/norm(a2, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
#        print(ref2length)
        finallength = reflength-ref2length
    elif order == "0213":
#        print("order is:" + order)
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        ref2length = response/(norm(r1, r2)/norm(r1, Vanish))/(norm(a2,r2)/norm(a2,Vanish))
        finallength = reflength - ref2length
    # NOTE(review): the next condition duplicates the "0213" branch above, so
    # this block is unreachable; the intended order string was probably a
    # different permutation (e.g. "0231") -- TODO confirm against the
    # orderings orderptinline() can actually produce before changing it.
    elif order == "0213":
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        ref2length = response/((norm(r1, a2)/norm(r1, Vanish))/(norm(r2,a2)/norm(r2,Vanish))-1)
        finallength = reflength + ref2length
    elif order == "2031":
        reflength = response/(norm(r1, r2)/norm(r1, Vanish))/(norm(a1,r2)/norm(a1,Vanish))
        ref2length = reflength/((norm(a1, a2)/norm(a1, Vanish))/(norm(r2,a2)/norm(r2,Vanish))-1)
        finallength = reflength + ref2length
    elif order == "2301":
        reflength = response/((norm(r1, a1)/norm(r1, Vanish))/(norm(r2,a1)/norm(r2,Vanish))-1)
        ref2length = (reflength +response)/((norm(r1, a2)/norm(r1, Vanish))/(norm(a1,a2)/norm(a2,Vanish))-1)
        finallength = ref2length
    else:
        # Unhandled configuration: return a large sentinel so callers
        # comparing against an expected length will never pick it.
        finallength = 99999
    return finallength
def calibrateframe(img, findref = False):
    """Detect the three vanishing points of ``img`` and, optionally, build
    the reference and measurement line segments from previously clicked points.

    The external detector (wrapper.dealAImage) returns the vanishing points;
    they are sorted by image y-coordinate and taken as the x-, y- and
    z-direction vanishing points in that order.

    Args:
        img: image (numpy array) to calibrate on.
        findref: when True, the global ``boxes`` list is expected to already
            hold four clicked points (two segments): the reference line of
            known length followed by the line to measure -- TODO confirm the
            click order against the on_mouse workflow.  The user is prompted
            for both real-world lengths via input().

    Returns:
        (response, response2, estimateline, referenceline, refV, ortV,
         zVanish, xVanish, yVanish) when ``findref`` is True, otherwise
        (zVanish, xVanish, yVanish).
    """
    vps = wrapper.dealAImage(img,"data/result/",True,True,True)
    # Keep only the (x, y) coordinates of each detected vanishing point.
    vps = [[i[0], i[1]] for i in vps]
    print(vps)
    count = 0
    # (An interactive click-collection loop used to live here; clicks are
    # now gathered beforehand through the on_mouse callback.)
    print(vps)
    # Sort by y-coordinate: topmost vanishing point first.
    vps = sorted(vps, key=itemgetter(1))
    print(vps)
    print(boxes)
    xVanish = vps[0]
    print ("x vanishing pt:" + str(xVanish))
    yVanish = vps[1]
    print ("y vanishing pt:" + str(yVanish))
    zVanish = vps[2]
    print ("z vanishing pt:" + str(zVanish))
    if findref == True:
        referenceline = [boxes[0], boxes[1]]
        # Choose as reference vanishing point whichever of x/y VP makes the
        # smaller angle with the clicked reference line.
        referenceline.sort(key = lambda x: norm(x, xVanish), reverse = False)
        ang1 = findAnglebetVP(referenceline, xVanish)
        print("angles between reference line and xVanish:" + str(ang1))
        referenceline.sort(key = lambda x: norm(x, yVanish), reverse = False)
        ang2 = findAnglebetVP(referenceline, yVanish)
        print("angles between reference line and yVanish:" + str(ang2))
        # NOTE(review): if ang1 == ang2 neither branch runs, leaving refV and
        # ortV unbound, and the sort below raises UnboundLocalError.
        if ang1> ang2:
            print("ref vp is Y vanishing point" )
            refV= yVanish
            ortV= xVanish
        if ang2> ang1:
            print("ref vp is X vanishing point" )
            refV= xVanish
            ortV= yVanish
        # Order both segments farthest-from-refV first.
        referenceline.sort(key = lambda x: norm(x, refV), reverse = True)
        estimateline = [boxes[2], boxes[3]]
        estimateline.sort(key = lambda x: norm(x, refV), reverse = True)
        response = float(input("Please enter length of reference object: "))
        response2 = float(input("Please enter length of measured object: "))
        return response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish
    else:
        return zVanish, xVanish, yVanish
def drawfallarea(img, refV, ortV, zVanish, correctpt, correct2pt):
    """Draw the red "fall area" edges anchored at the measured segment.

    A line is dropped from a fixed anchor point towards the z vanishing
    point down to the bottom image border; its intersections with lines
    through the measured endpoints and the two horizontal vanishing points
    give the remaining corners, and the three connecting edges are drawn.

    NOTE(review): this reads the module-level global ``img_shape`` (set in
    the __main__ block) instead of ``img.shape`` -- consider deriving the
    frame size from ``img`` directly.

    Args:
        img: image to draw on (modified in place).
        refV, ortV: reference and orthogonal vanishing points.
        zVanish: vertical (z) vanishing point.
        correctpt, correct2pt: endpoints of the best-matching segment
            selected by processframe().
    """
    # Anchor at a fixed relative position inside the frame.
    nextpt= [int(0.78*img_shape[1]),
             int(0.615*img_shape[0])]
    droptoVP3 = [nextpt, zVanish]
    print("vp3")
    print(droptoVP3)
    # Intersect the anchor's drop line with the bottom border of the image.
    bordervp3= line_intersection(droptoVP3, [(0, img_shape[0]),(img_shape[1], img_shape[0])])
    dropline3 = [nextpt, bordervp3]
    ptB = line_intersection(dropline3, [correctpt, ortV])
    cv2.line(img,(int(correctpt[0]), int(correctpt[1])), (int(ptB[0]), int(ptB[1])),color=(0,0,255),thickness=2)
    backline1 = [correct2pt, ortV]
    backline2 = [ptB, refV]
    ptC= line_intersection(backline1, backline2)
    cv2.line(img,(int(correct2pt[0]), int(correct2pt[1])), (int(ptC[0]), int(ptC[1])),color=(0,0,255),thickness=2)
    cv2.line(img,(int(ptB[0]), int(ptB[1])), (int(ptC[0]), int(ptC[1])),color=(0,0,255),thickness=2)
def processframe(img, response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish, img_shape):
    """Find and draw the segment along the drop lines whose estimated
    real-world length best matches ``response2``.

    Both endpoints of ``estimateline`` are projected towards the z vanishing
    point down to the bottom image border ("drop lines").  The first drop
    line is sampled at 50 points; for each sample, the matching point on the
    second drop line and the two reference-line intersections are computed,
    a length estimate is formed against the reference segment of known
    length ``response``, and the sample whose estimate is closest to
    ``response2`` is selected.  Markers, the winning segment and the fall
    area are drawn onto ``img`` in place.

    Args:
        img: image to annotate (modified in place).
        response: known real-world length of the reference segment.
        response2: expected real-world length of the measured object.
        estimateline, referenceline: endpoint pairs from calibrateframe().
        refV, ortV: reference / orthogonal vanishing points.
        zVanish, xVanish, yVanish: the three vanishing points.
        img_shape: numpy ``img.shape`` of the frame (height first).
    """
    # Lines from each measured endpoint towards the z vanishing point.
    droptoVP1= [estimateline[0], zVanish]
    droptoVP2= [estimateline[1], zVanish]
    print("vp1")
    print(droptoVP1)
    # Clip both drop lines at the bottom image border (y = height).
    bordervp1= line_intersection(droptoVP1, [(0, img_shape[0]),(img_shape[1], img_shape[0])])
    bordervp2= line_intersection(droptoVP2, [(0, img_shape[0]),(img_shape[1], img_shape[0])])
    dropline1 = [estimateline[0], bordervp1]
    dropline2 = [estimateline[1], bordervp2]
    # Lines through the reference endpoints and the orthogonal VP.
    refline1 = [referenceline[0],ortV]
    refline2 = [referenceline[1],ortV]
    print("breaking drop line to segments")
    dropline1seg = split(dropline1[0], dropline1[1], 50)
    finallengths = []
    dropline2pts = []
    for pt in dropline1seg:
        # Yellow dot: candidate sample on the first drop line.
        cv2.circle(img,(int(pt[0]), int(pt[1])), 3, (0,255,255), -1)
        # Project the sample through refV onto the second drop line and the
        # two reference lines.
        intersectDropline2= line_intersection([pt, refV], dropline2)
        dropline2pts += [intersectDropline2]
        intersectRefline1= line_intersection([pt, refV], refline1)
        intersectRefline2= line_intersection([pt, refV], refline2)
        cv2.circle(img,(int(intersectDropline2[0]), int(intersectDropline2[1])), 3, (255,0,0), -1)
        cv2.circle(img,(int(intersectRefline1[0]), int(intersectRefline1[1])), 3, (0,255,0), -1)
        cv2.circle(img,(int(intersectRefline2[0]), int(intersectRefline2[1])), 3, (0,0,255), -1)
        ordered = orderptinline([pt, intersectDropline2,intersectRefline1, intersectRefline2] , refV)
        # (Inline cross-ratio computation replaced by estimatelength() above.)
        finallength = estimatelength(ordered, pt, intersectDropline2,intersectRefline1, intersectRefline2, refV, response)
        finallengths += [finallength]
    # Pick the sample whose estimated length is closest to the expected one.
    measurements = [abs(response2- i)for i in finallengths]
    correctpt = dropline1seg[np.argmin(measurements)]
    correct2pt = dropline2pts[np.argmin(measurements)]
    cv2.line(img,(int(estimateline[0][0]), int(estimateline[0][1])), (int(estimateline[1][0]), int(estimateline[1][1])),color=(0,255,255),thickness=2)
    cv2.line(img,(int(correctpt[0]), int(correctpt[1])), (int(correct2pt[0]), int(correct2pt[1])),color=(0,0,255),thickness=2)
    drawfallarea(img, refV, ortV, zVanish, correctpt, correct2pt)
    print("nearest measurement:" +str( finallengths[np.argmin(measurements)] ) )
    # NOTE(review): each vanishing point is a list, so these checks are True
    # for any non-empty point; they only guard against a missing/empty VP.
    if zVanish:
        cv2.line(img,(int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(zVanish[0]), int(zVanish[1])),color=(0,255,255),thickness=2)
    if xVanish:
        cv2.line(img,(int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(xVanish[0]), int(xVanish[1])),color=(0,255,255),thickness=2)
    if yVanish:
        cv2.line(img,(int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(yVanish[0]), int(yVanish[1])),color=(0,255,255),thickness=2)
if __name__ == "__main__":
    # Load the demo frame once to establish the image dimensions, run the
    # interactive calibration, then redraw annotations in a loop.
    img = cv2.imread('data\\18.jpg')
#    img = cv2.resize(img, None, fx = 0.3,fy = 0.3)
    # Module-level img_shape is also read by drawfallarea().
    img_shape = img.shape
    # Calibration: detects vanishing points and prompts for the reference and
    # measured real-world lengths.
    response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish = calibrateframe(img, findref = True)
    while(True):
        print(img.shape)
        # Reload a clean frame each iteration so previous drawings are erased.
        img = cv2.imread('data\\18.jpg')
#        img = cv2.resize(img, None, fx = 0.3,fy = 0.3)
        processframe(img, response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish, img_shape)
        cv2.imshow('points image', img)
        # Exit on the Escape key (code 27).
        if cv2.waitKey(1) == 27:
            cv2.destroyAllWindows()
            break
# print(img.shape) | [
"numpy.arccos",
"math.sqrt",
"wrapper.dealAImage",
"operator.itemgetter",
"cv2.imshow",
"numpy.argsort",
"numpy.array",
"numpy.dot",
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.linalg.norm",
"numpy.argmin",
"numpy.degrees",
"time.time",
"cv2.imread"
] | [((323, 329), 'time.time', 'time', ([], {}), '()\n', (327, 329), False, 'from time import time\n'), ((1635, 1675), 'math.sqrt', 'math.sqrt', (['(xdiff * xdiff + ydiff * ydiff)'], {}), '(xdiff * xdiff + ydiff * ydiff)\n', (1644, 1675), False, 'import math\n'), ((2078, 2095), 'numpy.array', 'np.array', (['line[0]'], {}), '(line[0])\n', (2086, 2095), True, 'import numpy as np\n'), ((2104, 2121), 'numpy.array', 'np.array', (['line[1]'], {}), '(line[1])\n', (2112, 2121), True, 'import numpy as np\n'), ((2130, 2142), 'numpy.array', 'np.array', (['vp'], {}), '(vp)\n', (2138, 2142), True, 'import numpy as np\n'), ((2268, 2291), 'numpy.arccos', 'np.arccos', (['cosine_angle'], {}), '(cosine_angle)\n', (2277, 2291), True, 'import numpy as np\n'), ((2308, 2325), 'numpy.degrees', 'np.degrees', (['angle'], {}), '(angle)\n', (2318, 2325), True, 'import numpy as np\n'), ((3910, 3967), 'wrapper.dealAImage', 'wrapper.dealAImage', (['img', '"""data/result/"""', '(True)', '(True)', '(True)'], {}), "(img, 'data/result/', True, True, True)\n", (3928, 3967), False, 'import wrapper\n'), ((11040, 11066), 'cv2.imread', 'cv2.imread', (['"""data\\\\18.jpg"""'], {}), "('data\\\\18.jpg')\n", (11050, 11066), False, 'import cv2\n'), ((1837, 1856), 'numpy.argsort', 'np.argsort', (['lengths'], {}), '(lengths)\n', (1847, 1856), True, 'import numpy as np\n'), ((2197, 2211), 'numpy.dot', 'np.dot', (['ba', 'bc'], {}), '(ba, bc)\n', (2203, 2211), True, 'import numpy as np\n'), ((9958, 9981), 'numpy.argmin', 'np.argmin', (['measurements'], {}), '(measurements)\n', (9967, 9981), True, 'import numpy as np\n'), ((10013, 10036), 'numpy.argmin', 'np.argmin', (['measurements'], {}), '(measurements)\n', (10022, 10036), True, 'import numpy as np\n'), ((11557, 11583), 'cv2.imread', 'cv2.imread', (['"""data\\\\18.jpg"""'], {}), "('data\\\\18.jpg')\n", (11567, 11583), False, 'import cv2\n'), ((11774, 11805), 'cv2.imshow', 'cv2.imshow', (['"""points image"""', 'img'], {}), "('points image', img)\n", (11784, 
11805), False, 'import cv2\n'), ((2215, 2233), 'numpy.linalg.norm', 'np.linalg.norm', (['ba'], {}), '(ba)\n', (2229, 2233), True, 'import numpy as np\n'), ((2236, 2254), 'numpy.linalg.norm', 'np.linalg.norm', (['bc'], {}), '(bc)\n', (2250, 2254), True, 'import numpy as np\n'), ((4779, 4792), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (4789, 4792), False, 'from operator import itemgetter\n'), ((11939, 11953), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (11950, 11953), False, 'import cv2\n'), ((11973, 11996), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (11994, 11996), False, 'import cv2\n'), ((10485, 10508), 'numpy.argmin', 'np.argmin', (['measurements'], {}), '(measurements)\n', (10494, 10508), True, 'import numpy as np\n')] |
"""
Orthogonal Weight Normalization: Solution to Optimization over Multiple Dependent Stiefel Manifolds in Deep Neural Networks
AAAI 2018
Authors: <NAME>
"""
import torch.nn
import torch.nn.functional as F
from torch.nn import Parameter
from torch.autograd import Variable
from typing import List
from torch.autograd.function import once_differentiable
__all__ = ['OWN_Conv2d']
# norm funcitons--------------------------------
class IdentityModule(torch.nn.Module):
    """No-op normalization module: forwards its input unchanged.

    Accepts (and ignores) arbitrary constructor arguments so it can stand in
    for a real weight-normalization module.
    """

    def __init__(self, *args, **kwargs):
        super(IdentityModule, self).__init__()

    def forward(self, input: torch.Tensor):
        """Return ``input`` as-is."""
        return input
class OWNNorm(torch.nn.Module):
    """Orthogonal Weight Normalization (OWN).

    Centers each group of flattened weight rows and multiplies it by the
    inverse square root of its covariance, so the rows within a group come
    out (approximately) orthonormal.
    """

    def __init__(self, norm_groups=1, *args, **kwargs):
        super(OWNNorm, self).__init__()
        self.norm_groups = norm_groups

    def matrix_power3(self, Input):
        """Return the batched cube Input @ Input @ Input."""
        squared = torch.bmm(Input, Input)
        return torch.bmm(squared, Input)

    def forward(self, weight: torch.Tensor):
        """Whiten ``weight`` group-wise; output has the same shape."""
        assert weight.shape[0] % self.norm_groups == 0
        rows_per_group = weight.shape[0] // self.norm_groups
        flat = weight.view(self.norm_groups, rows_per_group, -1)
        centred = flat - flat.mean(dim=-1, keepdim=True)
        # Per-group covariance of the centred rows.
        cov = torch.matmul(centred, centred.transpose(1, 2))
        whitener = torch.randn(cov.shape).to(cov)
        for g in range(self.norm_groups):
            U, eig, _ = cov[g].svd()
            inv_sqrt = eig.rsqrt().diag()
            # cov^(-1/2) = U diag(eig^-1/2) U^T
            whitener[g] = U.mm(inv_sqrt).mm(U.t())
        whitened = whitener.matmul(centred)
        return whitened.view_as(weight)

    def extra_repr(self):
        parts = ['OWN:']
        if self.norm_groups > 1:
            parts.append('groups={}'.format(self.norm_groups))
        return ', '.join(parts)
class OWN_Conv2d(torch.nn.Conv2d):
    """2-D convolution whose weight is orthogonalized by OWN before every
    forward pass and then rescaled per output channel by ``WNScale``.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True,
                 norm_groups=1, NScale=1.414, adjustScale=False):
        super(OWN_Conv2d, self).__init__(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias)
        print('OWN_conv:----norm_groups:', norm_groups, '---NScale:', NScale, '---adjust:', adjustScale)
        self.weight_normalization = OWNNorm(norm_groups=norm_groups)
        self.scale_ = torch.ones(out_channels, 1, 1, 1).fill_(NScale)
        # Learnable per-channel scale when adjustScale is set; otherwise a
        # fixed buffer registered on the module.
        if adjustScale:
            self.WNScale = Parameter(self.scale_)
        else:
            self.register_buffer('WNScale', self.scale_)

    def forward(self, input_f: torch.Tensor) -> torch.Tensor:
        """Convolve ``input_f`` with the orthogonalized, rescaled weight."""
        normalized = self.weight_normalization(self.weight)
        scaled = normalized * self.WNScale
        return F.conv2d(input_f, scaled, self.bias, self.stride, self.padding, self.dilation, self.groups)
if __name__ == '__main__':
    # Smoke test: whiten a random 4x(4*3*3) weight in two groups.  The
    # printed Gram matrix z_ @ z_^T should come out block-wise close to the
    # identity, and backward() confirms gradients flow through the
    # SVD-based whitening.
    oni_ = OWNNorm(norm_groups=2)
    print(oni_)
    w_ = torch.randn(4, 4, 3, 3)
    w_.requires_grad_()
    y_ = oni_(w_)
    z_ = y_.view(w_.size(0), -1)
    print(z_.matmul(z_.t()))
    y_.sum().backward()
    print('w grad', w_.grad.size())
| [
"torch.nn.functional.conv2d",
"torch.nn.Parameter"
] | [((2626, 2724), 'torch.nn.functional.conv2d', 'F.conv2d', (['input_f', 'weight_q', 'self.bias', 'self.stride', 'self.padding', 'self.dilation', 'self.groups'], {}), '(input_f, weight_q, self.bias, self.stride, self.padding, self.\n dilation, self.groups)\n', (2634, 2724), True, 'import torch.nn.functional as F\n'), ((2284, 2306), 'torch.nn.Parameter', 'Parameter', (['self.scale_'], {}), '(self.scale_)\n', (2293, 2306), False, 'from torch.nn import Parameter\n')] |
import pandas as pd
from visions import visions_string, visions_datetime
from visions.core.model import TypeRelation
from visions.core.model.relations import InferenceRelation
from visions.utils.coercion import test_utils
def to_datetime_year_week(series):
    """Parse a Series of 'YYYY/UU' (year/week-number) strings into datetimes.

    pandas cannot parse a year/week pair without a weekday, so a trailing
    '0' (Sunday for the ``%w`` directive) is appended as a dummy day before
    parsing with the '%Y/%U%w' format.

    Args:
        series: Series of strings formatted as 'YYYY/UU'.

    Returns:
        A datetime64 Series pointing at the Sunday opening each week.

    Examples:
        >>> to_datetime_year_week(pd.Series(['2018/47'])).iloc[0].date()
        datetime.date(2018, 11, 25)
    """
    with_dummy_day = series + "0"
    return pd.to_datetime(with_dummy_day, format="%Y/%U%w")
def to_datetime_year_month_day(series):
    """Parse a Series of 'YYYYMMDD' strings into datetimes.

    Args:
        series: Series of strings formatted as 'YYYYMMDD'
            (year, month, day with no separators).

    Returns:
        A datetime64 Series.

    Examples:
        >>> to_datetime_year_month_day(pd.Series(['20181203'])).iloc[0].day
        3
    """
    date_format = "%Y%m%d"
    return pd.to_datetime(series, format=date_format)
def get_string_datetime_type_relation(func):
    """Build a string -> datetime InferenceRelation around ``func``.

    ``func`` is used twice: wrapped in a coercion test to decide whether a
    string series *can* be converted, and as the transformer that actually
    performs the conversion.
    """
    coercion_check = test_utils.coercion_test(func)
    return InferenceRelation(
        relationship=coercion_check,
        transformer=func,
        related_type=visions_string,
        type=visions_datetime,
    )
def string_to_datetime_year_week():
    """Relation inferring datetimes from 'YYYY/UU' (year/week) strings."""
    relation = get_string_datetime_type_relation(to_datetime_year_week)
    return relation
def string_to_datetime_year_month_day():
    """Relation inferring datetimes from 'YYYYMMDD' strings."""
    relation = get_string_datetime_type_relation(to_datetime_year_month_day)
    return relation
| [
"visions.utils.coercion.test_utils.coercion_test",
"pandas.to_datetime"
] | [((767, 813), 'pandas.to_datetime', 'pd.to_datetime', (["(series + '0')"], {'format': '"""%Y/%U%w"""'}), "(series + '0', format='%Y/%U%w')\n", (781, 813), True, 'import pandas as pd\n'), ((1289, 1328), 'pandas.to_datetime', 'pd.to_datetime', (['series'], {'format': '"""%Y%m%d"""'}), "(series, format='%Y%m%d')\n", (1303, 1328), True, 'import pandas as pd\n'), ((1427, 1457), 'visions.utils.coercion.test_utils.coercion_test', 'test_utils.coercion_test', (['func'], {}), '(func)\n', (1451, 1457), False, 'from visions.utils.coercion import test_utils\n')] |
# Copyright 2019 Graphcore Ltd.
import time
from datetime import datetime
import os
from absl import app, flags
import subprocess
import re
import statistics
"""
This program launches subprocesses to handle data loading and resnext101 inference.
It can also be used to perform inference on other ONNX CNNs that take ImageNet sized input images.
To adapt, download a different ONNX model from the Python package `pretrainedmodels` via get_model.py,
or save your own model to models/<model_name>/<model_name>_<batch_size>.onnx
Then, run with the flag --model_name <model_name> --batch_size <batch_size>
"""
def launch_resnext_subprocess(i, f):
    """Start one resnext101.py inference subprocess.

    Subprocess ``i`` is pointed at dataset subdirectory ``<data_dir>i`` and
    inherits every absl flag of this launcher; its stdout/stderr go to ``f``.

    Args:
        i: index of the subprocess (selects its dataset subdirectory).
        f: open file object that receives the subprocess output.

    Returns:
        The started subprocess.Popen handle.
    """
    data_sub_dir = FLAGS.data_dir + f"{i}"
    # Forward every current flag value so children run with the same config.
    flag_args = FLAGS.flags_into_string().split('\n')
    cmd = ["python3", "resnext101.py", "--data_sub_dir", data_sub_dir] + flag_args
    print(f"\n\nRunning subprocess {i}: \t ")
    print(" ".join(cmd))
    return subprocess.Popen(cmd, stdout=f, stderr=f)
FLAGS = flags.FLAGS
# Per-process workload configuration.
flags.DEFINE_integer("batch_size", 6, "Batch size (per device)")
flags.DEFINE_integer(
    "num_ipus", 8, "Number of IPUs to be used. One IPU runs one compute process.")
# Data loading: one dataset subdirectory per subprocess.
flags.DEFINE_string("data_dir", "datasets/",
                    "Parent directory containing subdirectory dataset(s). Number of subdirs should equal num_ipus")
flags.DEFINE_integer("num_workers", 12, "Number of threads per dataloader")
flags.DEFINE_integer("batches_per_step", 1500,
                     "Number of batches to fetch on the host ready for streaming onto the device, reducing host IO")
# Debugging / profiling.
flags.DEFINE_boolean(
    "profile", False, "Saves a GCProfile memory report. Use for debugging")
# Model selection: locates models/<model_name>/<model_name>_<batch_size>.onnx.
flags.DEFINE_string("model_name", "resnext101_32x4d",
                    "model name. Used to locate ONNX protobuf in models/")
# Synthetic-data mode: inputs are generated on the IPU instead of loaded.
flags.DEFINE_bool("synthetic", False, "Use synthetic data created on the IPU for inference")
flags.DEFINE_integer(
    "iterations", 1, "Number of iterations to run if using synthetic data. Each iteration uses one `batches_per_step` x `batch_size` x `H` x `W` x `C` sized input tensor.")
def main(argv):
    """Launch one inference subprocess per IPU, wait for all of them, then
    scrape the per-process logs and print aggregate throughput/latency.

    Args:
        argv: positional command-line arguments (unused; configuration is
            read from the global absl FLAGS).
    """
    FLAGS = flags.FLAGS
    log_str = f"""
    Number of subprocesses created: {FLAGS.num_ipus}
    Per subprocess:
    \t Batch size: {FLAGS.batch_size}
    \t Number of batches prepared by the host at a time: {FLAGS.batches_per_step}
     """
    print(log_str)
    procs = []
    log_files = []
    # One log file per subprocess, grouped under a per-run timestamp dir.
    timestamp = datetime.now().strftime("%H-%M-%S")
    if not os.path.exists("logs"):
        os.mkdir("logs")
    os.mkdir(f"logs/{timestamp}")
    for i in range(FLAGS.num_ipus):
        f = open(f"logs/{timestamp}/log_{i}", "w")
        p = launch_resnext_subprocess(i, f)
        # sleep to prevent race conditions on acquiring IPUs
        time.sleep(1)
        log_files.append(f)
        procs.append(p)
    # Block until every subprocess exits.
    exit_codes = [p.wait() for p in procs]
    print(f"All processes finished with exit codes: {exit_codes}")
    for f in log_files:
        f.close()
    # Scrape throughput and latency figures back out of each subprocess log.
    regex_throughput = re.compile("Compute .* sec .* (.*) images/sec.")
    regex_latency = re.compile("Total (.*).* sec. Preprocessing")
    throughputs = []
    latencies = []
    for i in range(FLAGS.num_ipus):
        sub_throughputs = []
        sub_latencies = []
        with open(f"logs/{timestamp}/log_{i}") as f:
            for line in f:
                match = regex_throughput.search(line)
                match_lat = regex_latency.search(line)
                if match:
                    res = match.group(1)
                    sub_throughputs.append(float(res))
                if match_lat:
                    res = match_lat.group(1)
                    sub_latencies.append(float(res))
        throughputs.append(sub_throughputs)
        latencies.append(sub_latencies)
    # Per step: sum throughput across processes, average the latencies.
    sums_throughputs = [sum(l) for l in zip(*throughputs)]
    mean_latencies = [statistics.mean(l) for l in zip(*latencies)]
    stats = zip(mean_latencies, sums_throughputs)
    # Skip the first two (warm-up) entries when enough samples exist.
    start = 2 if len(sums_throughputs) >= 4 else 0
    for (duration, through) in list(stats)[start:]:
        report_string = "Total {:<8.3} sec.".format(duration)
        # NOTE(review): the preprocessing/compute split below is hard-coded
        # to 95% purely for display ("just for the output"), not measured.
        report_string += " Preprocessing {:<8.3} sec ({:4.3}%).".format(
            duration, 95.)  # just for the output
        report_string += " Compute {:<8.3} sec ({:4.3}%).".format(
            duration, 95.)
        report_string += " {:5f} images/sec.".format(int(through))
        print(report_string)
if __name__ == '__main__':
    # app.run parses the absl flags from argv before dispatching to main().
    app.run(main)
| [
"statistics.mean",
"os.path.exists",
"absl.flags.DEFINE_bool",
"re.compile",
"absl.flags.DEFINE_integer",
"subprocess.Popen",
"absl.flags.DEFINE_boolean",
"absl.app.run",
"time.sleep",
"datetime.datetime.now",
"os.mkdir",
"absl.flags.DEFINE_string"
] | [((1149, 1213), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""batch_size"""', '(6)', '"""Batch size (per device)"""'], {}), "('batch_size', 6, 'Batch size (per device)')\n", (1169, 1213), False, 'from absl import app, flags\n'), ((1214, 1317), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_ipus"""', '(8)', '"""Number of IPUs to be used. One IPU runs one compute process."""'], {}), "('num_ipus', 8,\n 'Number of IPUs to be used. One IPU runs one compute process.')\n", (1234, 1317), False, 'from absl import app, flags\n'), ((1319, 1468), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""data_dir"""', '"""datasets/"""', '"""Parent directory containing subdirectory dataset(s). Number of subdirs should equal num_ipus"""'], {}), "('data_dir', 'datasets/',\n 'Parent directory containing subdirectory dataset(s). Number of subdirs should equal num_ipus'\n )\n", (1338, 1468), False, 'from absl import app, flags\n'), ((1480, 1555), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_workers"""', '(12)', '"""Number of threads per dataloader"""'], {}), "('num_workers', 12, 'Number of threads per dataloader')\n", (1500, 1555), False, 'from absl import app, flags\n'), ((1556, 1707), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""batches_per_step"""', '(1500)', '"""Number of batches to fetch on the host ready for streaming onto the device, reducing host IO"""'], {}), "('batches_per_step', 1500,\n 'Number of batches to fetch on the host ready for streaming onto the device, reducing host IO'\n )\n", (1576, 1707), False, 'from absl import app, flags\n'), ((1720, 1816), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""profile"""', '(False)', '"""Saves a GCProfile memory report. Use for debugging"""'], {}), "('profile', False,\n 'Saves a GCProfile memory report. 
Use for debugging')\n", (1740, 1816), False, 'from absl import app, flags\n'), ((1818, 1930), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model_name"""', '"""resnext101_32x4d"""', '"""model name. Used to locate ONNX protobuf in models/"""'], {}), "('model_name', 'resnext101_32x4d',\n 'model name. Used to locate ONNX protobuf in models/')\n", (1837, 1930), False, 'from absl import app, flags\n'), ((1947, 2043), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""synthetic"""', '(False)', '"""Use synthetic data created on the IPU for inference"""'], {}), "('synthetic', False,\n 'Use synthetic data created on the IPU for inference')\n", (1964, 2043), False, 'from absl import app, flags\n'), ((2040, 2238), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""iterations"""', '(1)', '"""Number of iterations to run if using synthetic data. Each iteration uses one `batches_per_step` x `batch_size` x `H` x `W` x `C` sized input tensor."""'], {}), "('iterations', 1,\n 'Number of iterations to run if using synthetic data. Each iteration uses one `batches_per_step` x `batch_size` x `H` x `W` x `C` sized input tensor.'\n )\n", (2060, 2238), False, 'from absl import app, flags\n'), ((1021, 1130), 'subprocess.Popen', 'subprocess.Popen', (["(['python3', 'resnext101.py', '--data_sub_dir', data_sub_dir] + args)"], {'stdout': 'f', 'stderr': 'f'}), "(['python3', 'resnext101.py', '--data_sub_dir',\n data_sub_dir] + args, stdout=f, stderr=f)\n", (1037, 1130), False, 'import subprocess\n'), ((2702, 2731), 'os.mkdir', 'os.mkdir', (['f"""logs/{timestamp}"""'], {}), "(f'logs/{timestamp}')\n", (2710, 2731), False, 'import os\n'), ((3191, 3239), 're.compile', 're.compile', (['"""Compute .* sec .* (.*) images/sec."""'], {}), "('Compute .* sec .* (.*) images/sec.')\n", (3201, 3239), False, 'import re\n'), ((3260, 3307), 're.compile', 're.compile', (['"""Total (.*).* sec. Preprocessing"""'], {}), "('Total (.*).* sec. 
Preprocessing')\n", (3270, 3307), False, 'import re\n'), ((4656, 4669), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (4663, 4669), False, 'from absl import app, flags\n'), ((2649, 2671), 'os.path.exists', 'os.path.exists', (['"""logs"""'], {}), "('logs')\n", (2663, 2671), False, 'import os\n'), ((2681, 2697), 'os.mkdir', 'os.mkdir', (['"""logs"""'], {}), "('logs')\n", (2689, 2697), False, 'import os\n'), ((2933, 2946), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2943, 2946), False, 'import time\n'), ((4044, 4062), 'statistics.mean', 'statistics.mean', (['l'], {}), '(l)\n', (4059, 4062), False, 'import statistics\n'), ((2602, 2616), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2614, 2616), False, 'from datetime import datetime\n')] |
import pytest
import pandas as pd
from src.preprocess import check
class Test_check_column_names:
    def test_check_column_names(self):
        """check_column_names returns None when the records match the config."""
        frame = pd.DataFrame({'a': [1]})
        cfg = pd.DataFrame({'column': ['a'], 'dataset': ['ACAPS']})
        assert check.check_column_names(frame, cfg, log=False) is None
| [
"pandas.DataFrame",
"src.preprocess.check.check_column_names"
] | [((158, 182), 'pandas.DataFrame', 'pd.DataFrame', (["{'a': [1]}"], {}), "({'a': [1]})\n", (170, 182), True, 'import pandas as pd\n'), ((200, 253), 'pandas.DataFrame', 'pd.DataFrame', (["{'column': ['a'], 'dataset': ['ACAPS']}"], {}), "({'column': ['a'], 'dataset': ['ACAPS']})\n", (212, 253), True, 'import pandas as pd\n'), ((269, 321), 'src.preprocess.check.check_column_names', 'check.check_column_names', (['records', 'config'], {'log': '(False)'}), '(records, config, log=False)\n', (293, 321), False, 'from src.preprocess import check\n')] |
from unittest import mock
from django.test import TestCase
from parameterized import param, parameterized
from rest_framework import validators
from apps.order.constants import (
EXTENSION_ERROR_MESSAGE,
MAX_IMAGE_SIZE,
MAX_IMAGE_SIZE_ERROR_MESSAGE,
)
from apps.order.validators import FileValidator
class FileValidatorTestCase(TestCase):
    """Unit tests for ``FileValidator.image_validator``."""

    def setUp(self) -> None:
        super().setUp()
        self.validator = FileValidator

    def test__image_validator__success(self):
        """A small file with a valid extension is returned unchanged."""
        upload = mock.MagicMock()
        upload.name = "test.jpg"
        upload.size = 1
        validated = self.validator.image_validator(file=upload)
        self.assertEqual(validated.name, upload.name)
        self.assertEqual(validated.size, upload.size)

    @parameterized.expand(
        [
            param(
                EXTENSION_ERROR_MESSAGE,
                name="test.pdf",
                size=1,
                error=validators.ValidationError,
            ),
            param(
                MAX_IMAGE_SIZE_ERROR_MESSAGE,
                name="test.jpg",
                size=MAX_IMAGE_SIZE + 1,
                error=validators.ValidationError,
            ),
        ]
    )
    def test__image_validator__raise_exceptions(self, _, name, size, error):
        """A wrong extension or an oversized file raises ValidationError."""
        upload = mock.MagicMock()
        upload.name = name
        upload.size = size
        with self.assertRaises(error):
            self.validator.image_validator(file=upload)
| [
"unittest.mock.MagicMock",
"parameterized.param"
] | [((510, 526), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (524, 526), False, 'from unittest import mock\n'), ((1285, 1301), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1299, 1301), False, 'from unittest import mock\n'), ((803, 897), 'parameterized.param', 'param', (['EXTENSION_ERROR_MESSAGE'], {'name': '"""test.pdf"""', 'size': '(1)', 'error': 'validators.ValidationError'}), "(EXTENSION_ERROR_MESSAGE, name='test.pdf', size=1, error=validators.\n ValidationError)\n", (808, 897), False, 'from parameterized import param, parameterized\n'), ((985, 1101), 'parameterized.param', 'param', (['MAX_IMAGE_SIZE_ERROR_MESSAGE'], {'name': '"""test.jpg"""', 'size': '(MAX_IMAGE_SIZE + 1)', 'error': 'validators.ValidationError'}), "(MAX_IMAGE_SIZE_ERROR_MESSAGE, name='test.jpg', size=MAX_IMAGE_SIZE + \n 1, error=validators.ValidationError)\n", (990, 1101), False, 'from parameterized import param, parameterized\n')] |
# -*- coding: utf-8 -*-
"""Command line interface."""
import logging
import click
from pathrev.pipeline import (
do_gsea, do_preranked,
)
logger = logging.getLogger(__name__)
@click.group(help='pathrev')
def main():
    """Run pathrev."""
    # Root command group: configure logging once for every subcommand.
    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s - %(message)s")
# Reusable click options shared by the subcommands below.
# All input-file options require an existing regular file.
matrix_option = click.option(
    '-m', '--matrix',
    help="path to matrix",
    type=click.Path(file_okay=True, dir_okay=False, exists=True),
    required=True
)
rnk_option = click.option(
    '-r', '--rnk',
    help="path to rank file",
    type=click.Path(file_okay=True, dir_okay=False, exists=True),
    required=True
)
phenotype_option = click.option(
    '-c', '--cls',
    help="path to cls file",
    type=click.Path(file_okay=True, dir_okay=False, exists=True),
    required=True
)
gene_set_option = click.option(
    '-g', '--gmt',
    help="path to gmt file",
    type=click.Path(file_okay=True, dir_okay=False, exists=True),
    required=True
)
out_dir_option = click.option(
    '-o', '--out_dir',
    help="path to output directory",
    # Unlike the inputs above, the output directory must NOT exist yet.
    type=click.Path(file_okay=False, dir_okay=True, exists=False),
    required=True
)
@main.command()
@matrix_option
@phenotype_option
@gene_set_option
@out_dir_option
def gsea(matrix, cls, gmt, out_dir):
    """Run normal GSEA with a matrix file."""
    # NOTE: the docstring above doubles as the CLI help text shown by click.
    click.echo("Running GSEA on {} with {}, {} and outputting to {}".format(matrix, cls, gmt, out_dir))
    do_gsea(matrix, cls, gmt, out_dir)
    click.echo('Done with GSEA analysis')
@main.command()
@rnk_option
@gene_set_option
@out_dir_option
def prerank(rnk, gmt, out_dir):
    """Run prerank GSEA with a sorted rank file."""
    # NOTE: the docstring above doubles as the CLI help text shown by click.
    click.echo("Running GSEA-PreRanked on {} with {} and outputting to {}".format(rnk, gmt, out_dir))
    do_preranked(rnk, gmt, out_dir)
    click.echo('Done with prerank analysis')
if __name__ == '__main__':
    # CLI entry point when the module is executed directly.
    main()
| [
"logging.getLogger",
"logging.basicConfig",
"click.group",
"click.echo",
"click.Path",
"pathrev.pipeline.do_preranked",
"pathrev.pipeline.do_gsea"
] | [((154, 181), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (171, 181), False, 'import logging\n'), ((185, 212), 'click.group', 'click.group', ([], {'help': '"""pathrev"""'}), "(help='pathrev')\n", (196, 212), False, 'import click\n'), ((252, 339), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(levelname)s - %(name)s - %(message)s"""'}), "(format=\n '%(asctime)s - %(levelname)s - %(name)s - %(message)s')\n", (271, 339), False, 'import logging\n'), ((1468, 1502), 'pathrev.pipeline.do_gsea', 'do_gsea', (['matrix', 'cls', 'gmt', 'out_dir'], {}), '(matrix, cls, gmt, out_dir)\n', (1475, 1502), False, 'from pathrev.pipeline import do_gsea, do_preranked\n'), ((1507, 1544), 'click.echo', 'click.echo', (['"""Done with GSEA analysis"""'], {}), "('Done with GSEA analysis')\n", (1517, 1544), False, 'import click\n'), ((1798, 1829), 'pathrev.pipeline.do_preranked', 'do_preranked', (['rnk', 'gmt', 'out_dir'], {}), '(rnk, gmt, out_dir)\n', (1810, 1829), False, 'from pathrev.pipeline import do_gsea, do_preranked\n'), ((1834, 1874), 'click.echo', 'click.echo', (['"""Done with prerank analysis"""'], {}), "('Done with prerank analysis')\n", (1844, 1874), False, 'import click\n'), ((424, 479), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'dir_okay': '(False)', 'exists': '(True)'}), '(file_okay=True, dir_okay=False, exists=True)\n', (434, 479), False, 'import click\n'), ((590, 645), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'dir_okay': '(False)', 'exists': '(True)'}), '(file_okay=True, dir_okay=False, exists=True)\n', (600, 645), False, 'import click\n'), ((761, 816), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'dir_okay': '(False)', 'exists': '(True)'}), '(file_okay=True, dir_okay=False, exists=True)\n', (771, 816), False, 'import click\n'), ((931, 986), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'dir_okay': '(False)', 'exists': '(True)'}), '(file_okay=True, 
dir_okay=False, exists=True)\n', (941, 986), False, 'import click\n'), ((1112, 1168), 'click.Path', 'click.Path', ([], {'file_okay': '(False)', 'dir_okay': '(True)', 'exists': '(False)'}), '(file_okay=False, dir_okay=True, exists=False)\n', (1122, 1168), False, 'import click\n')] |
import logging
from parsl.monitoring.handler import DatabaseHandler
from parsl.monitoring.handler import RemoteHandler
from parsl.utils import RepresentationMixin
class NullHandler(logging.Handler):
    """No-op default handler: a library should not configure application
    logging, so records are silently discarded."""

    def emit(self, record):
        """Drop *record* without writing it anywhere."""
        return None
class MonitoringStore(RepresentationMixin):
    """Base configuration describing where monitoring data lives and how
    the logging server is reached."""

    def __init__(self,
                 host=None,
                 port=None,
                 logging_server_host='localhost',
                 logging_server_port=9595):
        """
        Parameters
        ----------
        host : str
            Hostname for running the visualization interface.
        port : int
            Port for the visualization interface.
        logging_server_host : str
            Hostname for the logging server.
        logging_server_port : int
            Port for the logging server.
        """
        self.logging_server_host = logging_server_host
        self.logging_server_port = logging_server_port
        self.host = host
        self.port = port
class Database(MonitoringStore, RepresentationMixin):
    """Monitoring store backed by a database."""

    def __init__(self,
                 connection_string=None, **kwargs):
        """
        Parameters
        ----------
        connection_string : str, optional
            Database connection string describing how to connect. When left
            as None, DFK init will use a sqlite3 database inside the rundir.
        """
        self.connection_string = connection_string
        super().__init__(**kwargs)
class VisualizationServer(RepresentationMixin):
    """Location of the monitoring visualization web interface."""

    def __init__(self,
                 host='http://localhost',
                 port=8899):
        """
        Parameters
        ----------
        host : str
            Hostname serving the visualization interface.
        port : int
            Port of the visualization interface.
        """
        self.port = port
        self.host = host
class Monitoring(RepresentationMixin):
    """ This is a config class for monitoring. """
    def __init__(self,
                 store=None,
                 visualization_server=None,
                 monitoring_interval=15,
                 workflow_name=None,
                 version='1.0.0'):
        """ Initializes a monitoring configuration class.
        Parameters
        ----------
        monitoring_interval : float, optional
            The amount of time in seconds to sleep in between resource monitoring logs per task.
        workflow_name : str, optional
            Name to record as the workflow base name, defaults to the name of the parsl script file if left as None.
        version : str, optional
            Optional workflow identification to distinguish between workflows with the same name, not used internally only for display to user.
        Example
        -------
        .. code-block:: python
            import parsl
            from parsl.config import Config
            from parsl.executors.threads import ThreadPoolExecutor
            from parsl.monitoring.db_logger import MonitoringConfig
            config = Config(
                executors=[ThreadPoolExecutor()],
                monitoring_config=MonitoringConfig(
                    MonitoringStore=DatabaseStore(
                        connection_string='sqlite///monitoring.db'
                    )
                    VisualizationInterface=VisualizationInterface(
                        host='http:localhost'
                        port='9999'
                    )
                )
            )
            parsl.load(config)
        """
        # NOTE(review): the example above uses class names
        # (MonitoringConfig/DatabaseStore/VisualizationInterface) that do not
        # match this module's classes — verify before copying it.
        self.store = store
        self.visualization_server = visualization_server
        self.version = version
        self.monitoring_interval = monitoring_interval
        self.workflow_name = workflow_name
        # for now just set this to none but can be used to present the dashboard location to user
        self.dashboard_link = None
def get_parsl_logger(
        logger_name='parsl_monitor_logger',
        is_logging_server=False,
        monitoring_config=None,
        **kwargs):
    """Return a logger wired for parsl monitoring.

    Parameters
    ----------
    logger_name : str, optional
        Name of the logger to use. Prevents adding repeat handlers or
        incorrect handlers.
    is_logging_server : Bool, optional
        Used internally to determine which handler to return when using
        local db logging.
    monitoring_config : MonitoringConfig, optional
        Pass in a logger class object to use for generating loggers.

    Returns
    -------
    logging.logger object

    Raises
    ------
    ValueError
        If *monitoring_config* is provided but has no store configured.
    """
    logger = logging.getLogger(logger_name)
    if monitoring_config is None:
        # Library default: swallow records until the app configures logging.
        logger.addHandler(NullHandler())
        return logger
    if monitoring_config.store is None:
        raise ValueError('No MonitoringStore defined')
    if is_logging_server:
        # The server logs records it receives straight into the store.
        handler = DatabaseHandler(monitoring_config.store.connection_string)
    else:
        # Workers forward their records to the logging server.
        handler = RemoteHandler(monitoring_config.store.logging_server_host,
                               monitoring_config.store.logging_server_port)
    # Common setup, previously duplicated in both branches.
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
    return logger
| [
"logging.getLogger",
"parsl.monitoring.handler.DatabaseHandler",
"parsl.monitoring.handler.RemoteHandler"
] | [((4797, 4827), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (4814, 4827), False, 'import logging\n'), ((5166, 5224), 'parsl.monitoring.handler.DatabaseHandler', 'DatabaseHandler', (['monitoring_config.store.connection_string'], {}), '(monitoring_config.store.connection_string)\n', (5181, 5224), False, 'from parsl.monitoring.handler import DatabaseHandler\n'), ((5321, 5351), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (5338, 5351), False, 'import logging\n'), ((5519, 5626), 'parsl.monitoring.handler.RemoteHandler', 'RemoteHandler', (['monitoring_config.store.logging_server_host', 'monitoring_config.store.logging_server_port'], {}), '(monitoring_config.store.logging_server_host,\n monitoring_config.store.logging_server_port)\n', (5532, 5626), False, 'from parsl.monitoring.handler import RemoteHandler\n'), ((5719, 5749), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (5736, 5749), False, 'import logging\n')] |
from flask import Flask,request,render_template
import numpy as np
from Reccomending_functions import item_item_cf,user_user_cf,rank_matrix_factorize
from Database_connector import fetch_from_database
import random
#ML Packages
# Shared module state: ids of the movies most recently shown on the home
# page. NOTE(review): module-level mutable state is not request-safe —
# verify the app is served single-threaded/single-user.
asd = []
app = Flask(__name__)
@app.route('/')
def index():
    """Home page: show 12 randomly chosen movies (ids 1..300) to rate."""
    global asd
    # random.sample draws 12 distinct ids in one step, replacing the
    # build-list / shuffle / slice idiom of the original.
    movies_list = random.sample(range(1, 301), 12)
    asd = movies_list  # remember which movies were shown, for /recommendations
    display_list = fetch_from_database(movies_list)
    return render_template("Display_movies.html", display_list=display_list)
@app.route('/recommendations', methods=['POST', 'GET'])
def recommend():
    """Collect the user's ratings and render recommendations.

    Reads one rating per displayed movie from the POSTed form, builds a
    1x301 rating vector and dispatches to the collaborative-filtering
    method chosen in the form ('uucf', 'iicf' or 'rf').
    """
    if request.method != 'POST':
        return "Bye-Bye"
    movies_list = asd
    user_ratings = np.zeros((1, 301))
    for i in range(len(movies_list)):
        user_ratings[0][movies_list[i]] = request.form['movie' + str(i + 1)]
    # Hoist the repeated form lookup.
    method = request.form['recco_method']
    if method == "uucf":
        recommended_movies_list = user_user_cf(user_ratings, movies_list)
    elif method == "iicf":
        recommended_movies_list = item_item_cf(user_ratings, movies_list)
    elif method == "rf":
        recommended_movies_list = rank_matrix_factorize(user_ratings, movies_list)
    # Debug print of the rating vector removed; cast the (numpy) ids to
    # plain ints for the database lookup.
    movie_ids = [int(i) for i in list(recommended_movies_list)]
    movie_details = fetch_from_database(movie_ids)
    return render_template("Display Recommendations.html", movie_details=movie_details)
if __name__ == '__main__':
    # Listen on all interfaces; debug mode is for development only.
    app.run(debug=True, host='0.0.0.0')
| [
"flask.render_template",
"Reccomending_functions.item_item_cf",
"random.shuffle",
"flask.Flask",
"Database_connector.fetch_from_database",
"Reccomending_functions.user_user_cf",
"numpy.zeros",
"Reccomending_functions.rank_matrix_factorize"
] | [((243, 258), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (248, 258), False, 'from flask import Flask, request, render_template\n'), ((349, 374), 'random.shuffle', 'random.shuffle', (['randindex'], {}), '(randindex)\n', (363, 374), False, 'import random\n'), ((450, 482), 'Database_connector.fetch_from_database', 'fetch_from_database', (['movies_list'], {}), '(movies_list)\n', (469, 482), False, 'from Database_connector import fetch_from_database\n'), ((494, 559), 'flask.render_template', 'render_template', (['"""Display_movies.html"""'], {'display_list': 'display_list'}), "('Display_movies.html', display_list=display_list)\n", (509, 559), False, 'from flask import Flask, request, render_template\n'), ((735, 753), 'numpy.zeros', 'np.zeros', (['(1, 301)'], {}), '((1, 301))\n', (743, 753), True, 'import numpy as np\n'), ((1417, 1442), 'Database_connector.fetch_from_database', 'fetch_from_database', (['sasa'], {}), '(sasa)\n', (1436, 1442), False, 'from Database_connector import fetch_from_database\n'), ((1454, 1530), 'flask.render_template', 'render_template', (['"""Display Recommendations.html"""'], {'movie_details': 'movie_details'}), "('Display Recommendations.html', movie_details=movie_details)\n", (1469, 1530), False, 'from flask import Flask, request, render_template\n'), ((942, 981), 'Reccomending_functions.user_user_cf', 'user_user_cf', (['user_ratings', 'movies_list'], {}), '(user_ratings, movies_list)\n', (954, 981), False, 'from Reccomending_functions import item_item_cf, user_user_cf, rank_matrix_factorize\n'), ((1063, 1102), 'Reccomending_functions.item_item_cf', 'item_item_cf', (['user_ratings', 'movies_list'], {}), '(user_ratings, movies_list)\n', (1075, 1102), False, 'from Reccomending_functions import item_item_cf, user_user_cf, rank_matrix_factorize\n'), ((1183, 1231), 'Reccomending_functions.rank_matrix_factorize', 'rank_matrix_factorize', (['user_ratings', 'movies_list'], {}), '(user_ratings, movies_list)\n', (1204, 1231), 
False, 'from Reccomending_functions import item_item_cf, user_user_cf, rank_matrix_factorize\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base import NameProvider
from .base import PROVIDERS
from vor.human import Gender
from py_utilities.decorators import run_once
import os
import random
class Census1990Provider(NameProvider):
    """Name provider backed by the 1990 US census name-frequency files."""

    # Registry keys under which the parsed name lists are cached.
    KEYS = {
        'provider': 'Census1990',
        'm_first': 'male:first_name',
        'f_first': 'female:first_name',
        'last': 'all:last_name'
    }

    def __init__(self):
        super(Census1990Provider, self).__init__()
        self.load_provider_data()

    @run_once
    def load_provider_data(self):
        """Parse the census files once and cache upper-cased names in PROVIDERS."""
        for (data_source_path, key) in self._get_data_source_keys():
            names = []
            with open(data_source_path) as data_file:
                for line in data_file:
                    # Each census line has four whitespace-separated fields;
                    # only the first (the name) is kept.
                    name, _, _, _ = line.split()
                    names.append(name.upper())
            PROVIDERS[self._get_prefixed_key(key)] = names

    def get_first_name(self, gender):
        """Return a random, title-cased first name for *gender*."""
        key = self._get_first_name_key_from_gender(gender)
        return random.choice(PROVIDERS[key]).title()

    def get_last_name(self):
        """Return a random, title-cased last name."""
        key = self._get_last_name_key()
        return random.choice(PROVIDERS[key]).title()

    def _get_prefixed_key(self, key):
        # Namespaces cache keys per provider, e.g. "Census1990:all:last_name".
        return "{0}:{1}".format(self.KEYS['provider'], key)

    def _get_first_name_key_from_gender(self, gender):
        if gender == Gender.Male:
            return self._get_prefixed_key(self.KEYS['m_first'])
        elif gender == Gender.Female:
            return self._get_prefixed_key(self.KEYS['f_first'])
        else:
            raise ValueError("Unsupported value in gender enum")

    def _get_last_name_key(self):
        return self._get_prefixed_key(self.KEYS['last'])

    def _get_data_source_keys(self):
        """Return (absolute data-file path, registry key) pairs."""
        cwd = os.path.dirname(os.path.realpath(__file__))
        source_keys = [
            ('../../data/1990_census/dist.all.last', self.KEYS['last']),
            ('../../data/1990_census/dist.male.first', self.KEYS['m_first']),
            ('../../data/1990_census/dist.female.first', self.KEYS['f_first'])
        ]
        # PEP 8 (E731): join inline rather than binding a lambda to a name.
        return [(os.path.join(cwd, source), keys) for source, keys in source_keys]
# vim: filetype=python
| [
"os.path.realpath",
"random.choice",
"os.path.join"
] | [((1781, 1807), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1797, 1807), False, 'import os\n'), ((1839, 1859), 'os.path.join', 'os.path.join', (['cwd', 'x'], {}), '(cwd, x)\n', (1851, 1859), False, 'import os\n'), ((1026, 1055), 'random.choice', 'random.choice', (['PROVIDERS[key]'], {}), '(PROVIDERS[key])\n', (1039, 1055), False, 'import random\n'), ((1149, 1178), 'random.choice', 'random.choice', (['PROVIDERS[key]'], {}), '(PROVIDERS[key])\n', (1162, 1178), False, 'import random\n')] |
from django.conf.urls import url
from . import views
urlpatterns = [
    # Anchor with ^ for consistency with the pattern below; Django applies
    # these regexes with search semantics, so an unanchored pattern would
    # also match 'addCourse/' appearing later in the path.
    url(r'^addCourse/', views.add_course, name='addCourse'),
    url(r'^dropCourse/', views.drop_course, name='dropCourse'),
]
| [
"django.conf.urls.url"
] | [((75, 128), 'django.conf.urls.url', 'url', (['"""addCourse/"""', 'views.add_course'], {'name': '"""addCourse"""'}), "('addCourse/', views.add_course, name='addCourse')\n", (78, 128), False, 'from django.conf.urls import url\n'), ((135, 192), 'django.conf.urls.url', 'url', (['"""^dropCourse/"""', 'views.drop_course'], {'name': '"""dropCourse"""'}), "('^dropCourse/', views.drop_course, name='dropCourse')\n", (138, 192), False, 'from django.conf.urls import url\n')] |
import pickle
from collections import OrderedDict
from itertools import product
import orjson
import pytest
import pytest_asyncio
import xmltodict
from aiohttp.client_reqrep import ClientResponse
from pydantic import BaseModel
from yarl import URL
from aiotapioca.adapters import TapiocaAdapter, generate_wrapper_from_adapter
from aiotapioca.aiotapioca import TapiocaClient, TapiocaClientExecutor
from aiotapioca.exceptions import ClientError, ServerError
from aiotapioca.serializers import SimpleSerializer
from .callbacks import callback_201, callback_401
from .clients import (
ClassParserClient,
CustomModel,
CustomModelDT,
DictParserClient,
FailTokenRefreshClient,
FuncParserClient,
NoneSemaphoreClient,
PydanticDefaultClientAdapter,
PydanticForcedClient,
RetryRequestClient,
RootModel,
RootModelDT,
SimpleClient,
StaticMethodParserClient,
TokenRefreshByDefaultClient,
TokenRefreshClient,
XMLClient,
)
@pytest_asyncio.fixture
async def retry_request_client():
    # Client whose adapter retries failed requests.
    async with RetryRequestClient() as c:
        yield c
@pytest_asyncio.fixture
async def xml_client():
    # Client whose adapter speaks XML.
    async with XMLClient() as c:
        yield c
@pytest_asyncio.fixture
async def token_refresh_by_default_client():
    # Client configured to refresh its auth token by default.
    async with TokenRefreshByDefaultClient(token="token") as c:
        yield c
@pytest.fixture
def refresh_token_possible_false_values():
    # Values that must NOT be treated as "refresh the token": anything
    # other than the literal boolean True.
    yield False, None, 1, 0, "511", -22, 41, [], tuple(), {}, set(), [41], {
        "key": "value"
    }
def check_response(response, data, status=200, refresh_data=None):
    """Assert the common invariants of a Tapioca response wrapper.

    Checks wrapper/executor types, payload ``data``, ``refresh_data``,
    the underlying aiohttp response object and its HTTP ``status``.
    """
    executor = response()
    # Identity comparison for the exact-type check (ruff E721); subclasses
    # are intentionally not accepted here.
    assert type(response) is TapiocaClient
    assert type(executor) is TapiocaClientExecutor
    assert executor.data == data
    assert executor.refresh_data == refresh_data
    assert isinstance(executor.response, ClientResponse)
    assert executor.status == status
async def check_pages_responses(
    response, total_pages=1, max_pages=None, max_items=None
):
    """Check a paginated response wrapper and iterate its pages.

    Verifies the wrapper at several access depths, then walks
    ``response().pages(...)`` and asserts exactly *total_pages*
    items were yielded.
    """
    result_response = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }
    for resp, data in result_response.items():
        check_response(resp, data)
    iterations_count = 0
    async for item in response().pages(max_pages=max_pages, max_items=max_items):
        result_page = {item: {"key": "value"}, item.key: "value"}
        for resp, data in result_page.items():
            check_response(resp, data)
        iterations_count += 1
    assert iterations_count == total_pages
"""
test TapiocaClient
"""
def test_adapter_class_default_attributes():
    # Guards the documented defaults on the base adapter class.
    assert isinstance(TapiocaAdapter.refresh_token, bool)
    assert isinstance(TapiocaAdapter.semaphore, int)
    assert isinstance(TapiocaAdapter.serializer_class, object)
    assert TapiocaAdapter.refresh_token is False
    assert TapiocaAdapter.semaphore == 10
    assert TapiocaAdapter.serializer_class == SimpleSerializer
def test_fill_url_template(client):
    # URL templates are filled from keyword arguments on resource access.
    expected_url = "https://api.example.org/user/123/"
    resource = client.user(id="123")
    assert resource.data == expected_url
def test_fill_another_root_url_template(client):
    # A resource may declare its own api root for URL building.
    expected_url = "https://api.another.com/another-root/"
    resource = client.another_root()
    assert resource.data == expected_url
def test_calling_len_on_tapioca_list(client):
    # len() is forwarded to the wrapped list data.
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    assert len(wrap_client) == 3
def test_iterated_client_items_should_be_tapioca_instances(client):
    # Iterating a wrapped list yields wrapped items, not raw values.
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    for item in wrap_client:
        assert isinstance(item, TapiocaClient)
def test_iterated_client_items_should_contain_list_items(client):
    # ...and calling each wrapped item exposes the underlying value.
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    for i, item in enumerate(wrap_client):
        assert item().data == i
async def test_in_operator(mocked, client):
    # `in` tests membership among the response payload's top-level keys.
    mocked.get(
        client.test().data,
        body='{"data": 1, "other": 2}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    assert "data" in response
    assert "other" in response
    assert "wat" not in response
async def test_transform_camelCase_in_snake_case(mocked, client):
    # camelCase response keys are reachable via snake_case attributes.
    next_url = "http://api.example.org/next_batch"
    response_data = {
        "data": {
            "key_snake": "value",
            "camelCase": "data in camel case",
            "NormalCamelCase": "data in camel case",
        },
        "paging": {"next": "%s" % next_url},
    }
    mocked.add(
        client.test().data,
        body=orjson.dumps(response_data),
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    assert response.data.key_snake().data == "value"
    assert response.data.camel_case().data == "data in camel case"
    assert response.data.normal_camel_case().data == "data in camel case"
async def test_should_be_able_to_access_by_index(mocked, client):
    # List responses support integer indexing on the wrapper.
    mocked.get(
        client.test().data,
        body='["a", "b", "c"]',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    assert response[0]().data == "a"
    assert response[1]().data == "b"
    assert response[2]().data == "c"
async def test_accessing_index_out_of_bounds_should_raise_index_error(mocked, client):
    # Indexing past the end raises like a plain list.
    mocked.get(
        client.test().data,
        body='["a", "b", "c"]',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    with pytest.raises(IndexError):
        response[3]
async def test_accessing_empty_list_should_raise_index_error(mocked, client):
    # Any index into an empty list response raises IndexError.
    mocked.get(
        client.test().data, body="[]", status=200, content_type="application/json"
    )
    response = await client.test().get()
    with pytest.raises(IndexError):
        response[3]
def test_fill_url_from_default_params():
    # default_url_params supplied at client creation fill URL templates.
    client = SimpleClient(default_url_params={"id": 123})
    assert client.user().data == "https://api.example.org/user/123/"
async def test_is_pickleable(mocked):
    """A client survives a pickle round-trip and can still paginate."""
    pickle_client = pickle.loads(pickle.dumps(SimpleClient()))
    # ensure requests keep working after pickle:
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        pickle_client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    async with pickle_client:
        response = await pickle_client.test().get()
        iterations_count = 0
        async for item in response().pages():
            assert "value" in item.key().data
            iterations_count += 1
        # Two mocked pages -> two iterations.
        assert iterations_count == 2
"""
test TapiocaExecutor
"""
def test_resource_executor_data_should_be_composed_url(client):
    # Accessing a resource composes its URL from the api root + path.
    expected_url = "https://api.example.org/test/"
    resource = client.test()
    assert resource.data == expected_url
def test_docs(client):
    # A resource's __doc__ is assembled from its resource-definition dict.
    assert "\n".join(client.resource.__doc__.split("\n")[1:]) == (
        "Resource: " + client.resource._resource["resource"] + "\n"
        "Docs: " + client.resource._resource["docs"] + "\n"
        "Foo: " + client.resource._resource["foo"] + "\n"
        "Spam: " + client.resource._resource["spam"]
    )
def test_access_data_attributres_through_executor(client):
    # dict methods (items) are proxied through the executor and re-wrapped.
    wrap_client = client._wrap_in_tapioca({"test": "value"})
    items = wrap_client().items()
    assert isinstance(items, TapiocaClient)
    data = dict(items().data)
    assert data == {"test": "value"}
def test_is_possible_to_reverse_a_list_through_executor(client):
    # In-place list methods (reverse) act on the wrapped data.
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    wrap_client().reverse()
    assert wrap_client().data == [2, 1, 0]
def test_cannot__getittem__(client):
    # The executor itself is not indexable (only the wrapper is).
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    with pytest.raises(Exception):
        wrap_client()[0]
def test_cannot_iterate(client):
    # The executor itself is not iterable (only the wrapper is).
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    with pytest.raises(Exception):
        for item in wrap_client():
            pass
def test_dir_call_returns_executor_methods(client):
    # dir() on the executor exposes its public API surface.
    wrap_client = client._wrap_in_tapioca([0, 1, 2])
    e_dir = dir(wrap_client())
    assert "data" in e_dir
    assert "response" in e_dir
    assert "get" in e_dir
    assert "post" in e_dir
    assert "post_batch" in e_dir
    assert "pages" in e_dir
    assert "open_docs" in e_dir
    assert "open_in_browser" in e_dir
async def test_response_executor_object_has_a_response(mocked, client):
    # After a request the executor exposes the raw aiohttp response via
    # both the public property and the private attribute.
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    executor = response()
    assert executor.response is not None
    assert executor._response is not None
    assert executor.response.status == 200
    assert executor._response.status == 200
def test_raises_error_if_executor_does_not_have_a_response_object(client):
    # Reading .response before any request was made must raise.
    with pytest.raises(Exception):
        client().response
async def test_response_executor_has_a_status_code(mocked, client):
    # The HTTP status code is exposed on the executor.
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    assert response().status == 200
"""
test TapiocaExecutor requests
"""
def test_when_executor_has_no_response(client):
    """Accessing ``.response`` before any request was made must raise."""
    # pytest's ExceptionInfo exposes the raised exception as ``.value``;
    # ``.exception`` is the unittest API and does not exist here, so the
    # original assertions died with AttributeError instead of testing.
    with pytest.raises(Exception) as context:
        client.test().response
    assert "has no response" in str(context.value)
async def test_access_response_field(mocked, client):
    # Payload fields are reachable as attributes on the wrapper.
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    response_data = response.data()
    assert response_data.data == {"key": "value"}
async def test_carries_request_kwargs_over_calls(mocked, client):
    # The original request kwargs stay attached to nested wrappers.
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    request_kwargs = response.data.key()._request_kwargs
    assert "url" in request_kwargs
    assert "data" in request_kwargs
    assert "headers" in request_kwargs
async def test_thrown_tapioca_exception_with_client_error_data(mocked, client):
    # 4xx responses raise ClientError carrying the response payload.
    mocked.get(
        client.test().data,
        body='{"error": "bad request test"}',
        status=400,
        content_type="application/json",
    )
    with pytest.raises(ClientError) as client_exception:
        await client.test().get()
    assert "bad request test" in client_exception.value.args
async def test_thrown_tapioca_exception_with_server_error_data(mocked, client):
    # 5xx responses raise ServerError carrying the response payload.
    mocked.get(
        client.test().data,
        body='{"error": "server error test"}',
        status=500,
        content_type="application/json",
    )
    with pytest.raises(ServerError) as server_exception:
        await client.test().get()
    assert "server error test" in server_exception.value.args
async def test_retry_request(mocked, retry_request_client):
    """The retry adapter keeps retrying 400s until success, but a 403
    (not in its retry policy) surfaces as ClientError immediately."""
    # Ten consecutive 400s followed by a 200: the request still succeeds.
    for _ in range(10):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=400,
            content_type="application/json",
        )
    mocked.get(
        retry_request_client.test().data,
        body='{"data": "success!"}',
        status=200,
        content_type="application/json",
    )
    response = await retry_request_client.test().get()
    assert response.data().data == "success!"
    # A shorter failure streak also recovers.
    for _ in range(3):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=400,
            content_type="application/json",
        )
    mocked.get(
        retry_request_client.test().data,
        body='{"data": "success!"}',
        status=200,
        content_type="application/json",
    )
    response = await retry_request_client.test().get()
    assert response.data().data == "success!"
    # 403s are not retried: the error propagates.
    for _ in range(3):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=403,
            content_type="application/json",
        )
    with pytest.raises(ClientError):
        await retry_request_client.test().get()
async def test_requests(mocked, client):
    """Every HTTP verb works with and without an explicit per-call semaphore."""
    semaphores = (3, None)
    types_request = ("get", "post", "put", "patch", "delete")
    for semaphore, type_request in product(semaphores, types_request):
        executor = client.test()
        # GET is mocked as 200; mutating verbs as 201.
        status = 200 if type_request == "get" else 201
        mocked_method = getattr(mocked, type_request)
        executor_method = getattr(executor, type_request)
        mocked_method(
            executor.data,
            body='{"data": {"key": "value"}}',
            status=status,
            content_type="application/json",
        )
        kwargs = {}
        if semaphore:
            kwargs.update({"semaphore": semaphore})
        response = await executor_method(**kwargs)
        result_response = {
            response: {"data": {"key": "value"}},
            response.data: {"key": "value"},
            response.data.key: "value",
        }
        for response, data in result_response.items():
            check_response(response, data, status)
async def test_batch_requests(mocked, client):
    """Batch verbs issue one request per payload row and wrap each response."""
    payload = [{"data": {"key": "value"}}] * 3
    for semaphore in (3, None):
        for verb in ("post", "put", "patch", "delete"):
            executor = client.test()
            # One mocked response per row of the outgoing batch.
            for row in payload:
                getattr(mocked, verb)(
                    executor.data,
                    body=orjson.dumps(row),
                    status=201,
                    content_type="application/json",
                )
            call_kwargs = {"data": payload}
            if semaphore:
                call_kwargs["semaphore"] = semaphore
            responses = await getattr(executor, verb + "_batch")(**call_kwargs)
            assert len(responses) == len(payload)
            for n, response in enumerate(responses):
                expectations = {
                    response: payload[n],
                    response.data: payload[n]["data"],
                    response.data.key: payload[n]["data"]["key"],
                }
                for wrapped, expected in expectations.items():
                    check_response(wrapped, expected, 201)
async def test_as_api_params_requests(mocked):
    """The client-level semaphore setting propagates into response api_params."""
    for semaphore in (4, None, False):
        for verb in ("get", "post", "put", "patch", "delete"):
            async with SimpleClient(semaphore=semaphore) as simple_client:
                executor = simple_client.test()
                expected_status = 200 if verb == "get" else 201
                getattr(mocked, verb)(
                    executor.data,
                    body='{"data": {"key": "value"}}',
                    status=expected_status,
                    content_type="application/json",
                )
                response = await getattr(executor, verb)()
                expectations = {
                    response: {"data": {"key": "value"}},
                    response.data: {"key": "value"},
                    response.data.key: "value",
                }
                for response, expected in expectations.items():
                    check_response(response, expected, expected_status)
                # ``response`` is now the innermost wrapper from the loop
                # above; api_params are shared by every wrapper level.
                assert response()._api_params.get("semaphore") == semaphore
async def test_as_api_params_batch_requests(mocked):
    """Batch requests also carry the client-level semaphore in api_params."""
    payload = [{"data": {"key": "value"}}] * 3
    for semaphore in (4, None, False):
        for verb in ("post", "put", "patch", "delete"):
            async with SimpleClient(semaphore=semaphore) as simple_client:
                executor = simple_client.test()
                for row in payload:
                    getattr(mocked, verb)(
                        executor.data,
                        body=orjson.dumps(row),
                        status=201,
                        content_type="application/json",
                    )
                call_kwargs = {"data": payload}
                if semaphore:
                    call_kwargs["semaphore"] = semaphore
                responses = await getattr(executor, verb + "_batch")(**call_kwargs)
                for n, response in enumerate(responses):
                    expectations = {
                        response: payload[n],
                        response.data: payload[n]["data"],
                        response.data.key: payload[n]["data"]["key"],
                    }
                    for wrapped, expected in expectations.items():
                        check_response(wrapped, expected, 201)
                    # ``wrapped`` is the innermost wrapper from the loop
                    # above; api_params are shared across wrapper levels.
                    assert wrapped()._api_params.get("semaphore") == semaphore
                assert len(responses) == len(payload)
async def test_failed_semaphore(mocked):
    """A client configured with an invalid (None) semaphore fails at request time."""
    async with NoneSemaphoreClient() as broken_client:
        mocked.get(
            broken_client.test().data,
            body='{"data": {"key": "value"}}',
            status=200,
            content_type="application/json",
        )
        with pytest.raises(TypeError):
            await broken_client.test().get()
"""
test iterator features
"""
async def test_simple_pages_iterator(mocked, client):
    """Iterating pages follows the ``paging.next`` link until it is empty."""
    next_url = "http://api.example.org/next_batch"
    page_with_next = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data,
        body=page_with_next,
        status=200,
        content_type="application/json",
    )
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    await check_pages_responses(response, total_pages=2)
async def test_simple_pages_with_max_pages_iterator(mocked, client):
    """``max_pages`` stops iteration after the requested number of pages."""
    next_url = "http://api.example.org/next_batch"
    one_item = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    three_items = (
        '{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}],'
        ' "paging": {"next": "%s"}}' % next_url
    )
    three_items_last = (
        '{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}],'
        ' "paging": {"next": ""}}'
    )
    mocked.get(
        client.test().data, body=one_item, status=200, content_type="application/json"
    )
    # Two intermediate three-item pages, then a terminating one.
    for _ in range(2):
        mocked.get(
            next_url, body=three_items, status=200, content_type="application/json"
        )
    mocked.get(
        next_url, body=three_items_last, status=200, content_type="application/json"
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=7, max_pages=3)
async def test_simple_pages_with_max_items_iterator(mocked, client):
    """``max_items`` stops iteration after the requested number of items."""
    next_url = "http://api.example.org/next_batch"
    one_item = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    three_items = (
        '{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}],'
        ' "paging": {"next": "%s"}}' % next_url
    )
    three_items_last = (
        '{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}],'
        ' "paging": {"next": ""}}'
    )
    mocked.get(
        client.test().data, body=one_item, status=200, content_type="application/json"
    )
    # Two intermediate three-item pages, then a terminating one.
    for _ in range(2):
        mocked.get(
            next_url, body=three_items, status=200, content_type="application/json"
        )
    mocked.get(
        next_url, body=three_items_last, status=200, content_type="application/json"
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=3, max_items=3)
async def test_simple_pages_with_max_pages_and_max_items_iterator(mocked, client):
    """When both limits are set, whichever is reached first wins."""
    next_url = "http://api.example.org/next_batch"
    one_item = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    three_items_last = (
        '{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}],'
        ' "paging": {"next": ""}}'
    )
    mocked.get(
        client.test().data, body=one_item, status=200, content_type="application/json"
    )
    mocked.get(
        next_url, body=three_items_last, status=200, content_type="application/json"
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=3, max_pages=2, max_items=3)
async def test_simple_pages_max_pages_zero_iterator(mocked, client):
    """With ``max_pages=0`` the pages iterator yields nothing at all."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    # Consistency fix: use ``mocked.get`` like every other pagination test;
    # the previous ``mocked.add`` only worked because it defaults to GET.
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_pages=0)
async def test_simple_pages_max_items_zero_iterator(mocked, client):
    """With ``max_items=0`` the pages iterator yields nothing at all."""
    next_url = "http://api.example.org/next_batch"
    one_item = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data, body=one_item, status=200, content_type="application/json"
    )
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_items=0)
async def test_simple_pages_max_pages_ans_max_items_zero_iterator(mocked, client):
    """Both limits set to zero also yield nothing."""
    # NOTE(review): "ans" in the function name is a typo for "and"; kept so
    # that test selection by name keeps working.
    next_url = "http://api.example.org/next_batch"
    one_item = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data, body=one_item, status=200, content_type="application/json"
    )
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_pages=0, max_items=0)
async def test_pages_iterator_with_client_error(mocked, client):
    """A 4xx status on a later page raises ClientError mid-iteration."""
    next_url = "http://api.example.org/next_batch"
    page_with_next = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data,
        body=page_with_next,
        status=200,
        content_type="application/json",
    )
    mocked.get(next_url, body=page_with_next, status=200, content_type="application/json")
    # Third page fails with a client error (408).
    mocked.get(next_url, body=page_with_next, status=408, content_type="application/json")
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    expectations = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }
    for wrapped, expected in expectations.items():
        check_response(wrapped, expected)
    pages_seen = 0
    with pytest.raises(ClientError):
        async for page in response().pages():
            for wrapped, expected in {page: {"key": "value"}, page.key: "value"}.items():
                check_response(wrapped, expected)
            pages_seen += 1
    # Only the two pages before the failure were yielded.
    assert pages_seen == 2
async def test_pages_iterator_with_server_error(mocked, client):
    """A 5xx status on a later page raises ServerError mid-iteration."""
    next_url = "http://api.example.org/next_batch"
    page_with_next = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data,
        body=page_with_next,
        status=200,
        content_type="application/json",
    )
    mocked.get(next_url, body=page_with_next, status=200, content_type="application/json")
    # Third page fails with a server error (504).
    mocked.get(next_url, body=page_with_next, status=504, content_type="application/json")
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    expectations = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }
    for wrapped, expected in expectations.items():
        check_response(wrapped, expected)
    pages_seen = 0
    with pytest.raises(ServerError):
        async for page in response().pages():
            for wrapped, expected in {page: {"key": "value"}, page.key: "value"}.items():
                check_response(wrapped, expected)
            pages_seen += 1
    # Only the two pages before the failure were yielded.
    assert pages_seen == 2
async def test_pages_iterator_with_error_on_single_page(mocked, client):
    """A 204 page with empty items is still yielded and iteration continues."""
    next_url = "http://api.example.org/next_batch"
    page_with_next = '{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url
    empty_page = '{"data": [{}], "paging": {"next": "%s"}}' % next_url
    last_page = '{"data": [{"key": "value"}], "paging": {"next": ""}}'
    mocked.get(
        client.test().data,
        body=page_with_next,
        status=200,
        content_type="application/json",
    )
    mocked.get(next_url, body=page_with_next, status=200, content_type="application/json")
    # Third page: 204 with an empty item — not an error for the iterator.
    mocked.get(next_url, body=empty_page, status=204, content_type="application/json")
    mocked.get(next_url, body=last_page, status=200, content_type="application/json")
    response = await client.test().get()
    expectations = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }
    for wrapped, expected in expectations.items():
        check_response(wrapped, expected)
    pages_seen = 0
    async for page in response().pages():
        if pages_seen == 2:
            # The third yielded item comes from the empty 204 page.
            expected_status = 204
            page_expectations = {page: dict()}
        else:
            expected_status = 200
            page_expectations = {page: {"key": "value"}, page.key: "value"}
        for wrapped, expected in page_expectations.items():
            check_response(wrapped, expected, expected_status)
        pages_seen += 1
    assert pages_seen == 4
"""
test XML requests
"""
async def test_xml_post_string(mocked, xml_client):
    """A raw XML string payload is sent on the wire as UTF-8 bytes, unchanged."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )
    payload = '<tag1 attr1="val1"><tag2>text1</tag2><tag3>text2</tag3></tag1>'
    await xml_client.test().post(data=payload)
    sent = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs["data"]
    assert sent == payload.encode("utf-8")
async def test_xml_post_dict(mocked, xml_client):
    """A dict payload is serialized to XML via ``xmltodict.unparse``."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )
    inner = OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")])
    payload = OrderedDict([("tag1", inner)])
    await xml_client.test().post(data=payload)
    sent = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs["data"]
    assert sent == xmltodict.unparse(payload).encode("utf-8")
async def test_xml_post_dict_passes_unparse_param(mocked, xml_client):
    """``xmltodict_unparse__``-prefixed kwargs are forwarded to ``unparse``."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )
    inner = OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")])
    payload = OrderedDict([("tag1", inner)])
    # ``full_document=False`` suppresses the XML declaration header.
    await xml_client.test().post(data=payload, xmltodict_unparse__full_document=False)
    sent = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs["data"]
    assert sent == xmltodict.unparse(payload, full_document=False).encode("utf-8")
async def test_xml_returns_text_if_response_not_xml(mocked, xml_client):
    """Non-XML responses are exposed as plain text under ``data["text"]``."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="any content",
    )
    inner = OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")])
    payload = OrderedDict([("tag1", inner)])
    response = await xml_client.test().post(data=payload)
    assert response().data["text"] == "Any response"
async def test_xml_post_dict_returns_dict_if_response_xml(mocked, xml_client):
    """XML responses are parsed back into a dict via ``xmltodict.parse``."""
    xml_body = '<tag1 attr1="val1">text1</tag1>'
    mocked.post(
        xml_client.test().data,
        body=xml_body,
        status=200,
        content_type="application/xml",
    )
    inner = OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")])
    payload = OrderedDict([("tag1", inner)])
    response = await xml_client.test().post(data=payload)
    assert response().data == xmltodict.parse(xml_body)
"""
test token refreshing
"""
async def test_not_token_refresh_client_propagates_client_error(mocked, client):
    """Without token refreshing enabled, a 401 propagates as ClientError."""
    mocked.post(
        client.test().data,
        callback=callback_401,
        content_type="application/json",
    )
    with pytest.raises(ClientError):
        await client.test().post()
async def test_disable_token_refreshing(mocked, refresh_token_possible_false_values):
    """Refreshing stays off by default and for every falsy refresh_token value,
    whether set at client construction or per request."""
    async with TokenRefreshClient(token="token") as refreshing_client:
        mocked.post(
            refreshing_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        with pytest.raises(ClientError):
            await refreshing_client.test().post()
    for refresh_token in refresh_token_possible_false_values:
        # Falsy value supplied at client construction time.
        async with TokenRefreshClient(
            token="token", refresh_token=refresh_token
        ) as refreshing_client:
            mocked.post(
                refreshing_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await refreshing_client.test().post()
        # Falsy value supplied per request.
        async with TokenRefreshClient(token="token") as refreshing_client:
            mocked.post(
                refreshing_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await refreshing_client.test().post(refresh_token=refresh_token)
async def test_token_expired_automatically_refresh_authentication(mocked):
    """A 401 triggers refresh_authentication once, then the request is retried;
    the refresh must not loop when the retry also gets a 401."""

    async def run_scenario(client, **post_kwargs):
        # One 401 (expired token) followed by a 201 after refreshing.
        mocked.post(
            client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            client.test().data,
            callback=callback_201,
            content_type="application/json",
        )
        response = await client.test().post(**post_kwargs)
        # refresh_authentication must be able to update api_params.
        assert response._api_params["token"] == "new_token"
        # Two consecutive 401s: the refresh flag must not be cyclic.
        mocked.post(
            client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        with pytest.raises(ClientError):
            await client.test().post(**post_kwargs)

    # Refresh requested per request.
    async with TokenRefreshClient(token="token") as token_refresh_client:
        await run_scenario(token_refresh_client, refresh_token=True)
    # Refresh enabled at client construction time.
    async with TokenRefreshClient(
        token="token", refresh_token=True
    ) as token_refresh_client:
        await run_scenario(token_refresh_client)
async def test_token_expired_automatically_refresh_authentication_by_default(
    mocked, token_refresh_by_default_client
):
    """A client that refreshes by default recovers from a single 401 but does
    not loop on repeated 401s."""
    endpoint = token_refresh_by_default_client.test().data
    # One 401 (expired token) followed by a 201 after refreshing.
    for callback in (callback_401, callback_201):
        mocked.post(endpoint, callback=callback, content_type="application/json")
    response = await token_refresh_by_default_client.test().post()
    # refresh_authentication must be able to update api_params.
    assert response._api_params["token"] == "new_token"
    # Two consecutive 401s: the refresh flag must not be cyclic.
    for callback in (callback_401, callback_401):
        mocked.post(endpoint, callback=callback, content_type="application/json")
    with pytest.raises(ClientError):
        await token_refresh_by_default_client.test().post()
async def test_raises_error_if_refresh_authentication_method_returns_false_value(
    mocked, refresh_token_possible_false_values
):
    """If refresh_authentication returns a falsy value, the 401 is re-raised."""

    def mock_unauthorized(client):
        # Queue a single 401 response for the client's test endpoint.
        mocked.post(
            client.test().data,
            callback=callback_401,
            content_type="application/json",
        )

    async with FailTokenRefreshClient(token="token") as fail_client:
        mock_unauthorized(fail_client)
        with pytest.raises(ClientError):
            await fail_client.test().post()
    for refresh_token in (True, *refresh_token_possible_false_values):
        # Flag supplied at client construction time.
        async with FailTokenRefreshClient(
            token="token", refresh_token=refresh_token
        ) as fail_client:
            mock_unauthorized(fail_client)
            with pytest.raises(ClientError):
                await fail_client.test().post()
        # Flag supplied per request.
        async with FailTokenRefreshClient(token="token") as fail_client:
            mock_unauthorized(fail_client)
            with pytest.raises(ClientError):
                await fail_client.test().post(refresh_token=refresh_token)
"""
Test PydanticAdapterMixin.
"""
async def test_pydantic_model_not_found(mocked):
    """A resource with no registered model raises ValueError when models are forced."""
    async with PydanticForcedClient() as client:
        resource = client.test_not_found()
        mocked.get(resource.data, body="{}", status=200, content_type="application/json")
        with pytest.raises(ValueError):
            await client.test_not_found().get()
async def test_bad_pydantic_model(mocked):
    """A resource configured with an invalid pydantic model raises ValueError."""
    async with PydanticForcedClient() as client:
        resource = client.test_bad_pydantic_model()
        mocked.get(resource.data, body="{}", status=200, content_type="application/json")
        with pytest.raises(ValueError):
            await client.test_bad_pydantic_model().get()
async def test_bad_dataclass_model(mocked):
    """A resource configured with an invalid dataclass model raises TypeError."""
    async with PydanticForcedClient() as client:
        resource = client.test_bad_dataclass_model()
        mocked.get(resource.data, body="{}", status=200, content_type="application/json")
        with pytest.raises(TypeError):
            await client.test_bad_dataclass_model().get()
async def test_pydantic_mixin_response_to_native(mocked):
    """Exercise every combination of the pydantic adapter flags
    (validate_data_received / validate_data_sending / extract_root /
    convert_to_dict) and check the native type of the response data:
    a plain dict/list when converting or not validating, a pydantic
    ``BaseModel`` otherwise, with ``__root__`` models optionally unwrapped.
    """
    response_body_root = (
        '[{"key1": "value1", "key2": 123}, {"key1": "value2", "key2": 321}]'
    )
    response_body = '{"data": %s}' % response_body_root
    validate_data_received_list = [True, False]
    validate_data_sending_list = [True, False]
    extract_root_list = [True, False]
    convert_to_dict_list = [True, False]
    for validate_received, validate_sending, extract, convert in product(
        validate_data_received_list,
        validate_data_sending_list,
        extract_root_list,
        convert_to_dict_list,
    ):
        # Build a client whose adapter uses this flag combination.
        class PidanticClientAdapter(PydanticDefaultClientAdapter):
            validate_data_received = validate_received
            validate_data_sending = validate_sending
            extract_root = extract
            convert_to_dict = convert
        PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
        async with PydanticClient() as client:
            # Plain pydantic model resource.
            mocked.get(
                client.test().data,
                body=response_body,
                status=200,
                content_type="application/json",
            )
            response = await client.test().get()
            if convert or not validate_received:
                assert isinstance(response().data, dict)
                assert response().data == orjson.loads(response_body)
            else:
                assert isinstance(response().data, BaseModel)
                assert response().data.dict() == orjson.loads(response_body)
            # ``__root__`` pydantic model resource.
            mocked.get(
                client.test_root().data,
                body=response_body_root,
                status=200,
                content_type="application/json",
            )
            response = await client.test_root().get()
            data = response().data
            if extract:
                assert isinstance(data, list)
            else:
                if not validate_received:
                    assert isinstance(data, list)
                elif convert:
                    assert isinstance(data, dict)
                    data = data["__root__"]
                else:
                    assert isinstance(data, BaseModel)
                    data = data.__root__
            for response_data, expected_data in zip(
                data, orjson.loads(response_body_root)
            ):
                if convert or not validate_received:
                    assert isinstance(response_data, dict)
                    assert response_data == expected_data
                else:
                    assert isinstance(response_data, BaseModel)
                    assert response_data.dict() == expected_data
            # Dataclass-based model resource behaves like the plain model.
            mocked.get(
                client.test_dataclass().data,
                body=response_body,
                status=200,
                content_type="application/json",
            )
            response = await client.test_dataclass().get()
            if convert or not validate_received:
                assert isinstance(response().data, dict)
                assert response().data == orjson.loads(response_body)
            else:
                assert isinstance(response().data, BaseModel)
                assert response().data.dict() == orjson.loads(response_body)
            # Dataclass ``__root__`` model resource.
            mocked.get(
                client.test_dataclass_root().data,
                body=response_body_root,
                status=200,
                content_type="application/json",
            )
            response = await client.test_dataclass_root().get()
            data = response().data
            if extract:
                assert isinstance(data, list)
            else:
                if not validate_received:
                    assert isinstance(data, list)
                elif convert:
                    assert isinstance(data, dict)
                    data = data["__root__"]
                else:
                    assert isinstance(data, BaseModel)
                    data = data.__root__
            for response_data, expected_data in zip(
                data, orjson.loads(response_body_root)
            ):
                if convert or not validate_received:
                    assert isinstance(response_data, dict)
                    assert response_data == expected_data
                else:
                    assert isinstance(response_data, BaseModel)
                    assert response_data.dict() == expected_data
async def test_pydantic_mixin_format_data_to_request(mocked):
    """Exercise sending data through the pydantic adapter for every flag
    combination: raw dicts/lists are validated when ``validate_data_sending``
    is on, otherwise pre-built model instances are serialized as-is;
    ``__root__`` models are sent row-by-row via batch posts.
    """
    response_body_root = (
        '[{"key1": "value1", "key2": 123}, {"key1": "value2", "key2": 321}]'
    )
    response_body = '{"data": %s}' % response_body_root
    validate_data_received_list = [True, False]
    validate_data_sending_list = [True, False]
    extract_root_list = [True, False]
    convert_to_dict_list = [True, False]
    for validate_received, validate_sending, extract, convert in product(
        validate_data_received_list,
        validate_data_sending_list,
        extract_root_list,
        convert_to_dict_list,
    ):
        # Adapter configured with this combination of flags.
        class PidanticClientAdapter(PydanticDefaultClientAdapter):
            validate_data_received = validate_received
            validate_data_sending = validate_sending
            extract_root = extract
            convert_to_dict = convert
        PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
        async with PydanticClient() as client:
            mocked.post(
                client.test().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
            # Plain model: send a raw dict (validated) or a model instance.
            if validate_sending:
                data = orjson.loads(response_body)
                response = await client.test().post(data=data)
                assert response().data == {"id": 100500}
            else:
                data = CustomModel.parse_raw(response_body)
                response = await client.test().post(data=data)
                assert response().data == {"id": 100500}
            # ``__root__`` model: send a list of rows as a batch.
            if validate_sending:
                data = orjson.loads(response_body_root)
                for _ in range(len(data)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data)
                assert len(responses) == len(data)
                for response in responses:
                    assert response().data == {"id": 100500}
            else:
                data = RootModel.parse_raw(response_body_root)
                for _ in range(len(data.__root__)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data.__root__)
                assert len(responses) == len(data.__root__)
                for response in responses:
                    assert response().data == {"id": 100500}
            mocked.post(
                client.test().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
            # Dataclass model: same expectations as the plain model.
            if validate_sending:
                data = orjson.loads(response_body)
                response = await client.test_dataclass().post(data=data)
                assert response().data == {"id": 100500}
            else:
                data = CustomModelDT.__pydantic_model__.parse_raw(response_body)
                response = await client.test_dataclass().post(data=data)
                assert response().data == {"id": 100500}
            # Dataclass ``__root__`` model batch.
            if validate_sending:
                data = orjson.loads(response_body_root)
                for _ in range(len(data)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data)
                assert len(responses) == len(data)
                for response in responses:
                    assert response().data == {"id": 100500}
            else:
                data = RootModelDT.__pydantic_model__.parse_raw(response_body_root)
                for _ in range(len(data.__root__)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data.__root__)
                assert len(responses) == len(data.__root__)
                for response in responses:
                    assert response().data == {"id": 100500}
    # Forced-model adapter with validation off both ways: raw dicts are
    # still accepted for batch sending.
    class PidanticClientAdapter(PydanticDefaultClientAdapter):
        forced_to_have_model = True
        validate_data_sending = False
        validate_data_received = False
    PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
    async with PydanticClient() as client:
        data = orjson.loads(response_body_root)
        for _ in range(len(data)):
            mocked.post(
                client.test_root().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
        responses = await client.test_root().post_batch(data=data)
        assert len(responses) == len(data)
        for response in responses:
            assert response().data == {"id": 100500}
class TestParsers:
    """Response parsers declared as plain functions, static methods,
    classes, and dicts of parsers."""

    @pytest_asyncio.fixture
    async def func_parser_client(self):
        async with FuncParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def static_method_parser_client(self):
        async with StaticMethodParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def class_parser_client(self):
        async with ClassParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def dict_parser_client(self):
        async with DictParserClient() as client:
            yield client

    @staticmethod
    def _mock_letters(mocked, client):
        # Every parser test shares the same three-element payload.
        mocked.get(
            client.test().data,
            body='["a", "b", "c"]',
            status=200,
            content_type="application/json",
        )

    async def test_parsers_not_found(self, mocked, func_parser_client):
        self._mock_letters(mocked, func_parser_client)
        response = await func_parser_client.test().get()
        with pytest.raises(AttributeError):
            response().blablabla()

    async def test_func_parser(self, mocked, func_parser_client):
        self._mock_letters(mocked, func_parser_client)
        response = await func_parser_client.test().get()
        assert response().foo_parser() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert response().foo_parser(index) == letter
        with pytest.raises(IndexError):
            response().foo_parser(3)

    async def test_static_method_parser(self, mocked, static_method_parser_client):
        self._mock_letters(mocked, static_method_parser_client)
        response = await static_method_parser_client.test().get()
        assert response().foo() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert response().foo(index) == letter
        with pytest.raises(IndexError):
            response().foo(3)

    async def test_class_parser(self, mocked, class_parser_client):
        self._mock_letters(mocked, class_parser_client)
        response = await class_parser_client.test().get()
        parser = response().foo_parser()
        assert parser.bar() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert parser.bar(index) == letter
        with pytest.raises(IndexError):
            parser.bar(3)

    async def test_dict_parser(self, mocked, dict_parser_client):
        self._mock_letters(mocked, dict_parser_client)
        response = await dict_parser_client.test().get()
        for parser_call in (response().func_parser, response().static_method_parser):
            assert parser_call() == ["a", "b", "c"]
            assert parser_call(1) == "b"
        assert response().class_parser().bar() == ["a", "b", "c"]
        assert response().class_parser().bar(1) == "b"
| [
"collections.OrderedDict",
"xmltodict.parse",
"itertools.product",
"pytest.raises",
"xmltodict.unparse",
"orjson.loads",
"aiotapioca.adapters.generate_wrapper_from_adapter",
"orjson.dumps"
] | [((13338, 13372), 'itertools.product', 'product', (['semaphores', 'types_request'], {}), '(semaphores, types_request)\n', (13345, 13372), False, 'from itertools import product\n'), ((14487, 14521), 'itertools.product', 'product', (['semaphores', 'types_request'], {}), '(semaphores, types_request)\n', (14494, 14521), False, 'from itertools import product\n'), ((15681, 15715), 'itertools.product', 'product', (['semaphores', 'types_request'], {}), '(semaphores, types_request)\n', (15688, 15715), False, 'from itertools import product\n'), ((17006, 17040), 'itertools.product', 'product', (['semaphores', 'types_request'], {}), '(semaphores, types_request)\n', (17013, 17040), False, 'from itertools import product\n'), ((39773, 39882), 'itertools.product', 'product', (['validate_data_received_list', 'validate_data_sending_list', 'extract_root_list', 'convert_to_dict_list'], {}), '(validate_data_received_list, validate_data_sending_list,\n extract_root_list, convert_to_dict_list)\n', (39780, 39882), False, 'from itertools import product\n'), ((44251, 44360), 'itertools.product', 'product', (['validate_data_received_list', 'validate_data_sending_list', 'extract_root_list', 'convert_to_dict_list'], {}), '(validate_data_received_list, validate_data_sending_list,\n extract_root_list, convert_to_dict_list)\n', (44258, 44360), False, 'from itertools import product\n'), ((48686, 48738), 'aiotapioca.adapters.generate_wrapper_from_adapter', 'generate_wrapper_from_adapter', (['PidanticClientAdapter'], {}), '(PidanticClientAdapter)\n', (48715, 48738), False, 'from aiotapioca.adapters import TapiocaAdapter, generate_wrapper_from_adapter\n'), ((5768, 5793), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (5781, 5793), False, 'import pytest\n'), ((6052, 6077), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (6065, 6077), False, 'import pytest\n'), ((8239, 8263), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', 
(8252, 8263), False, 'import pytest\n'), ((8387, 8411), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (8400, 8411), False, 'import pytest\n'), ((9663, 9687), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (9676, 9687), False, 'import pytest\n'), ((10117, 10141), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (10130, 10141), False, 'import pytest\n'), ((11297, 11323), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (11310, 11323), False, 'import pytest\n'), ((11689, 11715), 'pytest.raises', 'pytest.raises', (['ServerError'], {}), '(ServerError)\n', (11702, 11715), False, 'import pytest\n'), ((13094, 13120), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (13107, 13120), False, 'import pytest\n'), ((25297, 25323), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (25310, 25323), False, 'import pytest\n'), ((26964, 26990), 'pytest.raises', 'pytest.raises', (['ServerError'], {}), '(ServerError)\n', (26977, 26990), False, 'import pytest\n'), ((31902, 31927), 'xmltodict.parse', 'xmltodict.parse', (['xml_body'], {}), '(xml_body)\n', (31917, 31927), False, 'import xmltodict\n'), ((32219, 32245), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (32232, 32245), False, 'import pytest\n'), ((36836, 36862), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (36849, 36862), False, 'import pytest\n'), ((40194, 40246), 'aiotapioca.adapters.generate_wrapper_from_adapter', 'generate_wrapper_from_adapter', (['PidanticClientAdapter'], {}), '(PidanticClientAdapter)\n', (40223, 40246), False, 'from aiotapioca.adapters import TapiocaAdapter, generate_wrapper_from_adapter\n'), ((44672, 44724), 'aiotapioca.adapters.generate_wrapper_from_adapter', 'generate_wrapper_from_adapter', (['PidanticClientAdapter'], {}), '(PidanticClientAdapter)\n', (44701, 44724), False, 'from 
aiotapioca.adapters import TapiocaAdapter, generate_wrapper_from_adapter\n'), ((48798, 48830), 'orjson.loads', 'orjson.loads', (['response_body_root'], {}), '(response_body_root)\n', (48810, 48830), False, 'import orjson\n'), ((4786, 4813), 'orjson.dumps', 'orjson.dumps', (['response_data'], {}), '(response_data)\n', (4798, 4813), False, 'import orjson\n'), ((18579, 18603), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (18592, 18603), False, 'import pytest\n'), ((32633, 32659), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (32646, 32659), False, 'import pytest\n'), ((34664, 34690), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (34677, 34690), False, 'import pytest\n'), ((35772, 35798), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (35785, 35798), False, 'import pytest\n'), ((37291, 37317), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (37304, 37317), False, 'import pytest\n'), ((38501, 38526), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (38514, 38526), False, 'import pytest\n'), ((38856, 38881), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (38869, 38881), False, 'import pytest\n'), ((39222, 39246), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (39235, 39246), False, 'import pytest\n'), ((50194, 50223), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (50207, 50223), False, 'import pytest\n'), ((50776, 50801), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (50789, 50801), False, 'import pytest\n'), ((51364, 51389), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (51377, 51389), False, 'import pytest\n'), ((51938, 51963), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (51951, 51963), False, 'import pytest\n'), ((29824, 29895), 
'collections.OrderedDict', 'OrderedDict', (["[('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')]"], {}), "([('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')])\n", (29835, 29895), False, 'from collections import OrderedDict\n'), ((30107, 30130), 'xmltodict.unparse', 'xmltodict.unparse', (['data'], {}), '(data)\n', (30124, 30130), False, 'import xmltodict\n'), ((30454, 30525), 'collections.OrderedDict', 'OrderedDict', (["[('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')]"], {}), "([('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')])\n", (30465, 30525), False, 'from collections import OrderedDict\n'), ((30777, 30821), 'xmltodict.unparse', 'xmltodict.unparse', (['data'], {'full_document': '(False)'}), '(data, full_document=False)\n', (30794, 30821), False, 'import xmltodict\n'), ((31142, 31213), 'collections.OrderedDict', 'OrderedDict', (["[('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')]"], {}), "([('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')])\n", (31153, 31213), False, 'from collections import OrderedDict\n'), ((31712, 31783), 'collections.OrderedDict', 'OrderedDict', (["[('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')]"], {}), "([('@attr1', 'val1'), ('tag2', 'text1'), ('tag3', 'text2')])\n", (31723, 31783), False, 'from collections import OrderedDict\n'), ((33110, 33136), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (33123, 33136), False, 'import pytest\n'), ((33478, 33504), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (33491, 33504), False, 'import pytest\n'), ((37747, 37773), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (37760, 37773), False, 'import pytest\n'), ((38084, 38110), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (38097, 38110), False, 'import pytest\n'), ((41648, 41680), 'orjson.loads', 'orjson.loads', (['response_body_root'], {}), '(response_body_root)\n', (41660, 
41680), False, 'import orjson\n'), ((43411, 43443), 'orjson.loads', 'orjson.loads', (['response_body_root'], {}), '(response_body_root)\n', (43423, 43443), False, 'import orjson\n'), ((45021, 45048), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (45033, 45048), False, 'import orjson\n'), ((45424, 45456), 'orjson.loads', 'orjson.loads', (['response_body_root'], {}), '(response_body_root)\n', (45436, 45456), False, 'import orjson\n'), ((46847, 46874), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (46859, 46874), False, 'import orjson\n'), ((47291, 47323), 'orjson.loads', 'orjson.loads', (['response_body_root'], {}), '(response_body_root)\n', (47303, 47323), False, 'import orjson\n'), ((14799, 14821), 'orjson.dumps', 'orjson.dumps', (['data_row'], {}), '(data_row)\n', (14811, 14821), False, 'import orjson\n'), ((40679, 40706), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (40691, 40706), False, 'import orjson\n'), ((40836, 40863), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (40848, 40863), False, 'import orjson\n'), ((42422, 42449), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (42434, 42449), False, 'import orjson\n'), ((42579, 42606), 'orjson.loads', 'orjson.loads', (['response_body'], {}), '(response_body)\n', (42591, 42606), False, 'import orjson\n'), ((17425, 17447), 'orjson.dumps', 'orjson.dumps', (['data_row'], {}), '(data_row)\n', (17437, 17447), False, 'import orjson\n')] |
# -*- coding: utf-8 -*-
import json
import scrapy
class SandalSpider(scrapy.Spider):
    """Spider scraping women's sandals from skoringen.dk.

    Crawls the paginated listing and yields one item per product with
    its description, material and sole information.
    """

    name = "sandal"
    api_url = "https://www.skoringen.dk/sandaler-dame/?page={}"
    start_urls = [api_url.format(1)]

    def parse(self, response):
        """Parse one listing page: follow product links, then paginate."""
        # Stop on a missing page (only reached if 404 responses are allowed
        # through to the callback, e.g. via handle_httpstatus_list).
        if response.status == 404:
            return

        for sandal in response.css("div.item__price"):
            detail_url = sandal.css("a::attr(href)").extract_first()
            if detail_url:  # skip tiles without a product link
                yield scrapy.Request(
                    url=response.urljoin(detail_url),
                    callback=self.parse_details,
                )

        # Pagination: data-pageindex holds the current (zero-based) page index.
        page_index = response.css(
            "div.col::attr(data-pageindex)").extract_first()
        if page_index is None:
            # BUG FIX: the original called int(None) here and crashed with
            # TypeError when the pagination marker was absent.
            return
        yield scrapy.Request(url=self.api_url.format(int(page_index) + 1))

    def parse_details(self, response):
        """Extract description, material and sole from a product page."""
        yield {
            "beskrivelse": response.css(
                "div.item-description__content::text"
            ).extract_first(),
            "materiale": response.xpath(
                '//div[contains(.//text(), "Materiale")]//div[contains(@class, "item-pim__value")]//text()'
            ).extract_first(),
            "sål": response.xpath(
                '//div[contains(.//text(), "Sål")]//div[contains(@class, "item-pim__value")]//text()'
            ).extract_first(),
        }
| [
"scrapy.Request"
] | [((517, 576), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'detail_url', 'callback': 'self.parse_details'}), '(url=detail_url, callback=self.parse_details)\n', (531, 576), False, 'import scrapy\n')] |
import csv
import json

# Column names for events.csv (the file has no header row).
FIELDNAMES = (
    "id", "Nombre", "datasketch", "pacifista", "ojoalapaz", "indepaz", "ONU",
    "Defensoria", "Unidad de Victimas", "Somos Defensores",
    "Cinep. Noche y Niebla", "CODHES",
    "Colectivo de Abogados José Alvear Restrepo Cajar", "Género", "Fecha",
    "Municipio", "Departamento", "Tipo de líder", "Cargo", "Móvil", "Otros",
    "Fuentes",
)

# Open the CSV.  newline="" is the mode the csv module documents; the old
# "rU" universal-newlines flag was removed in Python 3.11.  `with` also
# guarantees the handle is closed (the original leaked both files).
with open("events.csv", "r", newline="") as csv_file:
    reader = csv.DictReader(csv_file, fieldnames=FIELDNAMES)
    # Parse the CSV into a list of row dicts.
    rows = [row for row in reader]
print("JSON parsed!")

# Save the JSON.
with open("events.json", "w") as json_file:
    json.dump(rows, json_file)
print("JSON saved!")
| [
"json.dumps",
"csv.DictReader"
] | [((168, 538), 'csv.DictReader', 'csv.DictReader', (['f'], {'fieldnames': "('id', 'Nombre', 'datasketch', 'pacifista', 'ojoalapaz', 'indepaz', 'ONU',\n 'Defensoria', 'Unidad de Victimas', 'Somos Defensores',\n 'Cinep. Noche y Niebla', 'CODHES',\n 'Colectivo de Abogados José Alvear Restrepo Cajar', 'Género', 'Fecha',\n 'Municipio', 'Departamento', 'Tipo de líder', 'Cargo', 'Móvil', 'Otros',\n 'Fuentes')"}), "(f, fieldnames=('id', 'Nombre', 'datasketch', 'pacifista',\n 'ojoalapaz', 'indepaz', 'ONU', 'Defensoria', 'Unidad de Victimas',\n 'Somos Defensores', 'Cinep. Noche y Niebla', 'CODHES',\n 'Colectivo de Abogados José Alvear Restrepo Cajar', 'Género', 'Fecha',\n 'Municipio', 'Departamento', 'Tipo de líder', 'Cargo', 'Móvil', 'Otros',\n 'Fuentes'))\n", (182, 538), False, 'import csv\n'), ((537, 572), 'json.dumps', 'json.dumps', (['[row for row in reader]'], {}), '([row for row in reader])\n', (547, 572), False, 'import json\n')] |
import speedtest
def perform_test():
    """Run a full speed test (best server, upload, download) and return the results."""
    tester = speedtest.Speedtest()

    # Pick the server with the lowest latency before measuring.
    server = tester.get_best_server()
    print('Best server: ')
    print(server['name'])

    print('Perform upload app:')
    upload_bps = tester.upload()
    print('Done:' + str(upload_bps / 1024 / 1024) + ' MBit/s')

    print('Perform download app:')
    download_bps = tester.download()
    print('Done:' + str(download_bps / 1024 / 1024) + ' MBit/s')

    print(tester.results)
    return tester.results
| [
"speedtest.Speedtest"
] | [((47, 68), 'speedtest.Speedtest', 'speedtest.Speedtest', ([], {}), '()\n', (66, 68), False, 'import speedtest\n')] |
import os
import shutil

import pandas as pd

os.chdir("../Downloads/DeepWeeds_Images_256")

# Create the split roots; exist_ok replaces the old bare try/except-pass.
os.makedirs("train", exist_ok=True)
os.makedirs("val", exist_ok=True)

train = pd.read_csv("../train_set_labels.csv")
val = pd.read_csv("../test_set_labels.csv")
print(train)


def _copy_split(frame, split):
    """Copy every labelled image into <split>/<species>/<filename>."""
    for _, row in frame.iterrows():
        target_dir = os.path.join(split, str(row.Species))
        os.makedirs(target_dir, exist_ok=True)
        shutil.copyfile(row.Label, os.path.join(target_dir, row.Label))


_copy_split(train, "train")
_copy_split(val, "val")
"os.chdir",
"shutil.copyfile",
"os.mkdir",
"pandas.read_csv"
] | [((47, 92), 'os.chdir', 'os.chdir', (['"""../Downloads/DeepWeeds_Images_256"""'], {}), "('../Downloads/DeepWeeds_Images_256')\n", (55, 92), False, 'import os\n'), ((165, 203), 'pandas.read_csv', 'pd.read_csv', (['"""../train_set_labels.csv"""'], {}), "('../train_set_labels.csv')\n", (176, 203), True, 'import pandas as pd\n'), ((210, 247), 'pandas.read_csv', 'pd.read_csv', (['"""../test_set_labels.csv"""'], {}), "('../test_set_labels.csv')\n", (221, 247), True, 'import pandas as pd\n'), ((102, 119), 'os.mkdir', 'os.mkdir', (['"""train"""'], {}), "('train')\n", (110, 119), False, 'import os\n'), ((124, 139), 'os.mkdir', 'os.mkdir', (['"""val"""'], {}), "('val')\n", (132, 139), False, 'import os\n'), ((371, 433), 'shutil.copyfile', 'shutil.copyfile', (['i.Label', "('train/' + i.Species + '/' + i.Label)"], {}), "(i.Label, 'train/' + i.Species + '/' + i.Label)\n", (386, 433), False, 'import shutil\n'), ((534, 594), 'shutil.copyfile', 'shutil.copyfile', (['i.Label', "('val/' + i.Species + '/' + i.Label)"], {}), "(i.Label, 'val/' + i.Species + '/' + i.Label)\n", (549, 594), False, 'import shutil\n')] |
import aiohttp
from collections import namedtuple
from aiohttp_socks import ProxyConnector
from .config import TIMEOUT, PROXY, USER_AGENT
from . import utils as utl
class HttpClient(object):
    """Performs HTTP requests. An `aiohttp` wrapper, essentially.

    Optionally routes traffic through a proxy via
    `aiohttp_socks.ProxyConnector`.
    """

    def __init__(self, timeout=TIMEOUT, proxy=PROXY):
        """Create the underlying aiohttp session, optionally proxied."""
        if proxy:
            connector = ProxyConnector.from_url(proxy)
            self.session = aiohttp.ClientSession(connector=connector)
        else:
            self.session = aiohttp.ClientSession()
        self.headers = {
            'User-Agent': USER_AGENT,
            'Accept-Language': 'en-GB,en;q=0.5',
        }
        self.timeout = timeout
        # Lightweight container for (HTTP status, response body) pairs;
        # http == 0 signals a transport-level failure.
        self.response = namedtuple('response', ['http', 'html'])

    async def close(self):
        """Closes the underlying aiohttp session."""
        await self.session.close()

    async def get(self, page):
        '''Submits a HTTP GET request.'''
        page = self._quote(page)
        try:
            req = await self.session.get(page, headers=self.headers, timeout=self.timeout)
            text = await req.text()
            self.headers['Referer'] = page
        # BUG FIX: the original caught `aiohttp.client_exception.ClientError`;
        # that module does not exist (it is `client_exceptions`, plural), so
        # the except clause itself raised AttributeError.  The stable public
        # name for the base exception is `aiohttp.ClientError`.
        except aiohttp.ClientError as e:
            return self.response(http=0, html=e.__doc__)
        return self.response(http=req.status, html=text)

    async def post(self, page, data):
        '''Submits a HTTP POST request.'''
        page = self._quote(page)
        try:
            req = await self.session.post(page, data=data, headers=self.headers, timeout=self.timeout)
            text = await req.text()
            self.headers['Referer'] = page
        # Same fix as in get(): use the public aiohttp.ClientError name.
        except aiohttp.ClientError as e:
            return self.response(http=0, html=e.__doc__)
        return self.response(http=req.status, html=text)

    def _quote(self, url):
        '''URL-encodes URLs that are not already percent-encoded.'''
        if utl.decode_bytes(utl.unquote_url(url)) == utl.decode_bytes(url):
            url = utl.quote_url(url)
        return url

    def _set_proxy(self, proxy):
        '''Returns HTTP or SOCKS proxies dictionary.'''
        if proxy:
            if not utl.is_url(proxy):
                raise ValueError('Invalid proxy format!')
            proxy = {'http':proxy, 'https':proxy}
        return proxy
| [
"aiohttp_socks.ProxyConnector.from_url",
"aiohttp.ClientSession",
"collections.namedtuple"
] | [((726, 766), 'collections.namedtuple', 'namedtuple', (['"""response"""', "['http', 'html']"], {}), "('response', ['http', 'html'])\n", (736, 766), False, 'from collections import namedtuple\n'), ((370, 400), 'aiohttp_socks.ProxyConnector.from_url', 'ProxyConnector.from_url', (['proxy'], {}), '(proxy)\n', (393, 400), False, 'from aiohttp_socks import ProxyConnector\n'), ((429, 471), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'connector': 'connector'}), '(connector=connector)\n', (450, 471), False, 'import aiohttp\n'), ((515, 538), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (536, 538), False, 'import aiohttp\n')] |
import os
import time
from flask import Flask, render_template, request
from aws_requests_auth.aws_auth import AWSRequestsAuth
import requests
import uuid
import base64
import shutil
from config import Config
app = Flask(__name__)
config = Config()
@app.route("/", methods=["GET", "POST"])
def index():
    """Render the home page; on upload, send the file for processing and show the result."""
    if "uploadFile" in request.files:
        # Reset the temp dir; ignore_errors replaces the old bare try/except
        # that silently swallowed every exception.
        shutil.rmtree("static/temp", ignore_errors=True)
        os.makedirs("static/temp", exist_ok=True)

        uploaded_file = request.files.get("uploadFile", None)
        uploaded_file = uploaded_file.read()
        file_name = f"{uuid.uuid4().hex}.png"

        # Ask the API for a pre-signed upload URL, then POST the file to it.
        endpoint = f"{config.ENDPOINT}/upload?filename={file_name}"
        response = requests.get(endpoint, auth=sign())
        response = response.json()
        files = {"file": (file_name, uploaded_file)}
        requests.post(response["url"], data=response["fields"], files=files)

        # Poll for the processed image and persist it for the template.
        full_filename = download_processed_file(file_name)
        with open(f"static/temp/{file_name}", "wb") as f:
            f.write(full_filename)

        processed_image = os.path.join("static/temp", file_name)
        # The original upload is embedded in the page as base64.
        uploaded_file = base64.b64encode(uploaded_file).decode("utf-8")
    else:
        processed_image = None
        uploaded_file = None
    return render_template(
        "home.html", processed_image=processed_image, uploaded_file=uploaded_file
    )
def sign():
    """Build SigV4 request authentication for the API Gateway endpoint."""
    return AWSRequestsAuth(
        aws_access_key=config.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=config.AWS_SECRET_ACCESS_KEY,
        aws_host=config.HOST,
        aws_region="us-east-1",
        aws_service="execute-api",
    )
def download_processed_file(file_name):
    """Poll the API once a second until the processed file is ready, then return its bytes."""
    # The endpoint does not change between polls, so build it once.
    endpoint = f"{config.ENDPOINT}/download?filename={file_name}"
    while True:
        status = requests.get(endpoint, auth=sign())
        if status.status_code == 200:
            # The API answers with a pre-signed download URL in the body.
            return requests.get(status.text).content
        time.sleep(1)
| [
"flask.render_template",
"requests.post",
"os.makedirs",
"flask.Flask",
"config.Config",
"base64.b64encode",
"os.path.join",
"time.sleep",
"requests.get",
"uuid.uuid4",
"aws_requests_auth.aws_auth.AWSRequestsAuth",
"shutil.rmtree",
"flask.request.files.get"
] | [((216, 231), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (221, 231), False, 'from flask import Flask, render_template, request\n'), ((242, 250), 'config.Config', 'Config', ([], {}), '()\n', (248, 250), False, 'from config import Config\n'), ((1335, 1430), 'flask.render_template', 'render_template', (['"""home.html"""'], {'processed_image': 'processed_image', 'uploaded_file': 'uploaded_file'}), "('home.html', processed_image=processed_image, uploaded_file\n =uploaded_file)\n", (1350, 1430), False, 'from flask import Flask, render_template, request\n'), ((1465, 1655), 'aws_requests_auth.aws_auth.AWSRequestsAuth', 'AWSRequestsAuth', ([], {'aws_access_key': 'config.AWS_ACCESS_KEY_ID', 'aws_secret_access_key': 'config.AWS_SECRET_ACCESS_KEY', 'aws_host': 'config.HOST', 'aws_region': '"""us-east-1"""', 'aws_service': '"""execute-api"""'}), "(aws_access_key=config.AWS_ACCESS_KEY_ID,\n aws_secret_access_key=config.AWS_SECRET_ACCESS_KEY, aws_host=config.\n HOST, aws_region='us-east-1', aws_service='execute-api')\n", (1480, 1655), False, 'from aws_requests_auth.aws_auth import AWSRequestsAuth\n'), ((440, 481), 'os.makedirs', 'os.makedirs', (['"""static/temp"""'], {'exist_ok': '(True)'}), "('static/temp', exist_ok=True)\n", (451, 481), False, 'import os\n'), ((506, 543), 'flask.request.files.get', 'request.files.get', (['"""uploadFile"""', 'None'], {}), "('uploadFile', None)\n", (523, 543), False, 'from flask import Flask, render_template, request\n'), ((872, 940), 'requests.post', 'requests.post', (["response['url']"], {'data': "response['fields']", 'files': 'files'}), "(response['url'], data=response['fields'], files=files)\n", (885, 940), False, 'import requests\n'), ((1142, 1180), 'os.path.join', 'os.path.join', (['"""static/temp"""', 'file_name'], {}), "('static/temp', file_name)\n", (1154, 1180), False, 'import os\n'), ((2028, 2041), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2038, 2041), False, 'import time\n'), ((370, 398), 
'shutil.rmtree', 'shutil.rmtree', (['"""static/temp"""'], {}), "('static/temp')\n", (383, 398), False, 'import shutil\n'), ((1956, 1983), 'requests.get', 'requests.get', (['response.text'], {}), '(response.text)\n', (1968, 1983), False, 'import requests\n'), ((1205, 1236), 'base64.b64encode', 'base64.b64encode', (['uploaded_file'], {}), '(uploaded_file)\n', (1221, 1236), False, 'import base64\n'), ((612, 624), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (622, 624), False, 'import uuid\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 <NAME>
"""Module containing all tests for the main window of easyp2p."""
from datetime import date, timedelta
import os
import sys
import unittest.mock
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit
import easyp2p.platforms
from easyp2p.ui.main_window import MainWindow
QT_APP = QApplication(sys.argv)
class MainWindowTests(unittest.TestCase):
    """Test the main window of easyp2p."""

    # All public platform class names exported by easyp2p.platforms;
    # used as the expected value whenever "all platforms" is meant.
    PLATFORMS = {pl for pl in dir(easyp2p.platforms) if pl[0].isupper()}

    def setUp(self) -> None:
        """Create the GUI."""
        # A fresh MainWindow per test so widget state cannot leak between tests.
        self.form = MainWindow(QT_APP)

    def set_date_combo_boxes(
            self, start_month: int, start_year: int, end_month: int,
            end_year: int) -> None:
        """
        Helper method to set the indices of the date combo boxes

        Args:
            start_month: Index of start month combo box entry.
            start_year: Index of start year combo box entry.
            end_month: Index of end month combo box entry.
            end_year: Index of end year combo box entry.

        """
        self.form.combo_box_start_month.setCurrentIndex(start_month)
        self.form.combo_box_start_year.setCurrentIndex(start_year)
        self.form.combo_box_end_month.setCurrentIndex(end_month)
        self.form.combo_box_end_year.setCurrentIndex(end_year)
        # Trigger the slot manually; setCurrentIndex alone does not emit it.
        self.form.on_combo_box_start_year_activated()

    def test_defaults(self) -> None:
        """Test GUI in default state."""
        # All check boxes are unchecked in default state
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertFalse(check_box.isChecked())
        # Check if date_range is correct: default is the whole previous month.
        end_last_month = date.today().replace(day=1) - timedelta(days=1)
        date_range = (end_last_month.replace(day=1), end_last_month)
        self.assertEqual(date_range, self.form.date_range)
        # Check if date combo boxes are correct
        self.assertEqual(
            QLocale().monthName(date_range[0].month, 1),
            self.form.combo_box_start_month.currentText())
        self.assertEqual(
            str(date_range[0].year),
            self.form.combo_box_start_year.currentText())
        self.assertEqual(
            QLocale().monthName(date_range[1].month, 1),
            self.form.combo_box_end_month.currentText())
        self.assertEqual(
            str(date_range[1].year),
            self.form.combo_box_end_year.currentText())
        # Check if output file name is set correctly
        self.assertEqual(
            self.form.line_edit_output_file.text(), os.path.join(
                self.form.settings.directory,
                f'P2P_Results_{date_range[0].strftime("%d%m%Y")}-'
                f'{date_range[1].strftime("%d%m%Y")}.xlsx'))

    def test_select_all_platforms(self) -> None:
        """Test the Select All Platforms checkbox."""
        # Toggle the 'Select all platforms' checkbox
        self.form.check_box_select_all.setChecked(True)
        # Test that all platform check boxes are checked
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertTrue(check_box.isChecked())

    def test_get_platforms_no_platform_checked_true(self) -> None:
        """Test get_platforms if no platform is selected and checked==True."""
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, set())

    def test_get_platforms_all_platforms_checked_true(self) -> None:
        """
        Test get_platforms if all platforms are selected and checked==True.
        """
        self.form.check_box_select_all.setChecked(True)
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, self.PLATFORMS)

    def test_get_platforms_three_platforms_selected_checked_true(self) -> None:
        """
        Test get_platforms if three platforms are selected and checked==True.
        """
        self.form.check_box_bondora.setChecked(True)
        self.form.check_box_mintos.setChecked(True)
        self.form.check_box_twino.setChecked(True)
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, {'Bondora', 'Mintos', 'Twino'})

    def test_get_platforms_three_platforms_selected_checked_false(self) -> None:
        """
        Test get_platforms if three platforms are selected and checked==False.
        """
        # With checked==False the selection is irrelevant: all platforms
        # must be returned.
        self.form.check_box_bondora.setChecked(True)
        self.form.check_box_mintos.setChecked(True)
        self.form.check_box_twino.setChecked(True)
        platforms = self.form.get_platforms(False)
        self.assertEqual(platforms, self.PLATFORMS)

    def test_get_platforms_checked_false(self) -> None:
        """Test get_platforms if checked==False."""
        platforms = self.form.get_platforms(False)
        self.assertEqual(platforms, self.PLATFORMS)

    def test_select_all_platforms_twice(self) -> None:
        """Test the Select All Platforms checkbox."""
        # Toggle the 'Select all platforms' checkbox
        self.form.check_box_select_all.setChecked(True)
        # Untoggle the 'Select all platforms' checkbox again
        self.form.check_box_select_all.setChecked(False)
        # Test that all platform check boxes are unchecked again
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertFalse(check_box.isChecked())

    def test_output_file_on_date_change(self) -> None:
        """Test output file name after a date change."""
        old_output_file = self.form.line_edit_output_file.text()
        # Change start and end date
        self.set_date_combo_boxes(4, 0, 10, 5)
        new_output_file = self.form.line_edit_output_file.text()
        self.assertNotEqual(new_output_file, old_output_file)
        self.assertEqual(
            os.path.join(
                self.form.settings.directory,
                'P2P_Results_01052010-30112015.xlsx'),
            new_output_file)

    def test_output_file_on_date_change_after_user_change(self) -> None:
        """Test output file after date change if user already changed file."""
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        # Simulate that the user edited the file name manually.
        self.form.output_file_changed = True
        # Change start and end date
        self.set_date_combo_boxes(4, 0, 10, 5)
        # Check that the output file name was not changed
        self.assertEqual(self.form.line_edit_output_file.text(), 'Test.xlsx')

    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    @unittest.mock.patch('easyp2p.ui.main_window.QMessageBox.warning')
    def test_no_platform_selected(self, mock_warning, mock_dialog) -> None:
        """Test clicking start without any selected platform."""
        self.form.push_button_start.click()
        # Check that QMessageBox was opened and ProgressWindow was not
        mock_warning.assert_called_once_with(
            self.form,
            'No P2P platform selected!',
            'Please choose at least one P2P platform!')
        self.assertFalse(mock_dialog.called)

    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    @unittest.mock.patch('easyp2p.ui.main_window.QMessageBox.warning')
    def test_end_date_before_start_date(
            self, mock_warning, mock_dialog) -> None:
        """Test clicking start with end date set before start date."""
        self.set_date_combo_boxes(5, 6, 11, 5)
        self.form.push_button_start.click()
        # Check that QMessageBox was opened and ProgressWindow was not
        mock_warning.assert_called_once_with(
            self.form,
            'Start date is after end date!',
            'Start date must be before end date!')
        self.assertFalse(mock_dialog.called, 'ProgressWindow was opened!')

    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    def test_push_start_button_with_bondora_selected(self, mock_dialog) -> None:
        """Test pushing start button after selecting Bondora."""
        self.form.check_box_bondora.setChecked(True)
        # Index (8, 8, 1, 9) maps to the range 09/2018 - 02/2019.
        self.set_date_combo_boxes(8, 8, 1, 9)
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        self.form.push_button_start.click()
        # Check that ProgressWindow opened
        mock_dialog.assert_called_once_with(self.form.settings)
        # Check that all settings are correct
        self.assertEqual(self.form.settings.platforms, {'Bondora'})
        self.assertEqual(
            self.form.settings.date_range,
            (date(2018, 9, 1), date(2019, 2, 28)))
        self.assertEqual(self.form.settings.output_file, 'Test.xlsx')

    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    def test_push_start_button_with_increasing_number_of_platforms_selected(
            self, mock_dialog) -> None:
        """
        Test push start button with increasing number of selected platforms.
        """
        self.set_date_combo_boxes(8, 8, 1, 9)
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        selected_platforms = set()
        # Check one additional platform per iteration and verify the
        # accumulated selection is forwarded to the ProgressWindow each time.
        for platform in self.PLATFORMS:
            check_box = getattr(self.form, 'check_box_' + platform.lower())
            check_box.setChecked(True)
            selected_platforms.add(platform)
            self.form.push_button_start.click()
            # Check that ProgressWindow opened
            mock_dialog.assert_called_once_with(self.form.settings)
            mock_dialog.reset_mock()
            # Check that all settings are correct
            self.assertEqual(self.form.settings.platforms, selected_platforms)
            self.assertEqual(
                self.form.settings.date_range,
                (date(2018, 9, 1), date(2019, 2, 28)))
            self.assertEqual(self.form.settings.output_file, 'Test.xlsx')

    @unittest.mock.patch('easyp2p.ui.main_window.SettingsWindow')
    def test_push_tool_button_settings(self, mock_dialog) -> None:
        """Test pushing settings button."""
        self.form.tool_button_settings.click()
        # Check that SettingsWindow opened
        mock_dialog.assert_called_once_with(
            self.form.get_platforms(False), self.form.settings)

    def test_change_language_to_german(self) -> None:
        """Test changing the language to German."""
        self.form.action_german.trigger()
        all_months = {
            self.form.combo_box_start_month.itemText(i) for i in
            range(self.form.combo_box_start_month.count())}
        # Expected month names come straight from the German locale.
        all_months_expected = {
            QLocale('de_de').monthName(i, 1) for i in range(1, 13)}
        self.assertEqual('Startdatum', self.form.groupBox_start_date.title())
        self.assertEqual(all_months_expected, all_months)

    def test_change_language_to_german_to_english(self) -> None:
        """Test changing the language to German and then back to English."""
        self.form.action_german.trigger()
        self.form.action_english.trigger()
        all_months = {
            self.form.combo_box_start_month.itemText(i) for i in
            range(self.form.combo_box_start_month.count())}
        all_months_expected = {
            QLocale('en_US').monthName(i, 1) for i in range(1, 13)}
        self.assertEqual(self.form.groupBox_start_date.title(), 'Start date')
        self.assertEqual(all_months, all_months_expected)

    def test_change_language_to_german_after_date_update(self) -> None:
        """
        Test changing the language to German if the dates have been changed.
        """
        # Index (4, 7, 11, 8) maps to the range 05/2017 - 12/2018.
        self.set_date_combo_boxes(4, 7, 11, 8)
        self.form.action_german.trigger()
        self.assertEqual(
            QLocale('de_de').monthName(5, 1),
            self.form.combo_box_start_month.currentText())
        self.assertEqual(
            '2017', self.form.combo_box_start_year.currentText())
        self.assertEqual(
            QLocale('de_de').monthName(12, 1),
            self.form.combo_box_end_month.currentText())
        self.assertEqual(
            '2018', self.form.combo_box_end_year.currentText())
if __name__ == "__main__":
unittest.main()
| [
"PyQt5.QtCore.QLocale",
"PyQt5.QtWidgets.QLineEdit.setText",
"os.path.join",
"datetime.date.today",
"datetime.date",
"PyQt5.QtWidgets.QApplication",
"datetime.timedelta",
"easyp2p.ui.main_window.MainWindow"
] | [((384, 406), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (396, 406), False, 'from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit\n'), ((649, 667), 'easyp2p.ui.main_window.MainWindow', 'MainWindow', (['QT_APP'], {}), '(QT_APP)\n', (659, 667), False, 'from easyp2p.ui.main_window import MainWindow\n'), ((6222, 6285), 'PyQt5.QtWidgets.QLineEdit.setText', 'QLineEdit.setText', (['self.form.line_edit_output_file', '"""Test.xlsx"""'], {}), "(self.form.line_edit_output_file, 'Test.xlsx')\n", (6239, 6285), False, 'from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit\n'), ((8185, 8248), 'PyQt5.QtWidgets.QLineEdit.setText', 'QLineEdit.setText', (['self.form.line_edit_output_file', '"""Test.xlsx"""'], {}), "(self.form.line_edit_output_file, 'Test.xlsx')\n", (8202, 8248), False, 'from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit\n'), ((9045, 9108), 'PyQt5.QtWidgets.QLineEdit.setText', 'QLineEdit.setText', (['self.form.line_edit_output_file', '"""Test.xlsx"""'], {}), "(self.form.line_edit_output_file, 'Test.xlsx')\n", (9062, 9108), False, 'from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit\n'), ((1833, 1850), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1842, 1850), False, 'from datetime import date, timedelta\n'), ((5917, 6002), 'os.path.join', 'os.path.join', (['self.form.settings.directory', '"""P2P_Results_01052010-30112015.xlsx"""'], {}), "(self.form.settings.directory, 'P2P_Results_01052010-30112015.xlsx'\n )\n", (5929, 6002), False, 'import os\n'), ((8598, 8614), 'datetime.date', 'date', (['(2018)', '(9)', '(1)'], {}), '(2018, 9, 1)\n', (8602, 8614), False, 'from datetime import date, timedelta\n'), ((8616, 8633), 'datetime.date', 'date', (['(2019)', '(2)', '(28)'], {}), '(2019, 2, 28)\n', (8620, 8633), False, 'from datetime import date, timedelta\n'), ((1803, 1815), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1813, 1815), False, 'from datetime 
import date, timedelta\n'), ((2066, 2075), 'PyQt5.QtCore.QLocale', 'QLocale', ([], {}), '()\n', (2073, 2075), False, 'from PyQt5.QtCore import QLocale\n'), ((2329, 2338), 'PyQt5.QtCore.QLocale', 'QLocale', ([], {}), '()\n', (2336, 2338), False, 'from PyQt5.QtCore import QLocale\n'), ((9770, 9786), 'datetime.date', 'date', (['(2018)', '(9)', '(1)'], {}), '(2018, 9, 1)\n', (9774, 9786), False, 'from datetime import date, timedelta\n'), ((9788, 9805), 'datetime.date', 'date', (['(2019)', '(2)', '(28)'], {}), '(2019, 2, 28)\n', (9792, 9805), False, 'from datetime import date, timedelta\n'), ((10601, 10617), 'PyQt5.QtCore.QLocale', 'QLocale', (['"""de_de"""'], {}), "('de_de')\n", (10608, 10617), False, 'from PyQt5.QtCore import QLocale\n'), ((11213, 11229), 'PyQt5.QtCore.QLocale', 'QLocale', (['"""en_US"""'], {}), "('en_US')\n", (11220, 11229), False, 'from PyQt5.QtCore import QLocale\n'), ((11706, 11722), 'PyQt5.QtCore.QLocale', 'QLocale', (['"""de_de"""'], {}), "('de_de')\n", (11713, 11722), False, 'from PyQt5.QtCore import QLocale\n'), ((11929, 11945), 'PyQt5.QtCore.QLocale', 'QLocale', (['"""de_de"""'], {}), "('de_de')\n", (11936, 11945), False, 'from PyQt5.QtCore import QLocale\n')] |
import json
try:
import cPickle as pickle
except:
import pickle
def save_json(data, file_path):
    """Serialize *data* as compact JSON and write it to *file_path*."""
    with open(file_path, "w") as out:
        out.write(json.dumps(data))
def save_json_pretty(data, file_path):
    """save formatted json, use this one for some json config files"""
    # indent + sorted keys keeps config diffs small and deterministic
    with open(file_path, "w") as out:
        json.dump(data, out, indent=4, sort_keys=True)
def load_json(file_path):
    """Parse and return the JSON document stored at *file_path*."""
    with open(file_path) as src:
        return json.loads(src.read())
def save_pickle(data, data_path, highest=False):
    """Pickle *data* to *data_path*.

    Args:
        data: Any picklable object.
        data_path: Destination file path.
        highest: If True use binary protocol 2, otherwise ASCII protocol 0.
    """
    protocol = 2 if highest else 0
    # BUG FIX: the file must be opened in *binary* mode.  pickle.dump
    # writes bytes, so the original text-mode "w" handle raised
    # TypeError on Python 3.
    with open(data_path, "wb") as f:
        pickle.dump(data, f, protocol=protocol)
def load_pickle(pickle_file):
    """Load and return the object stored in *pickle_file*.

    Retries with latin1 decoding for pickles written by Python 2, which
    otherwise raise UnicodeDecodeError under Python 3.
    """
    try:
        with open(pickle_file, 'rb') as f:
            pickle_data = pickle.load(f)
    except UnicodeDecodeError as e:
        # Python 2 pickles containing 8-bit str data need latin1 decoding.
        with open(pickle_file, 'rb') as f:
            pickle_data = pickle.load(f, encoding='latin1')
    except Exception as e:
        # Anything else is a real failure: report which file broke and re-raise.
        print('Unable to load data ', pickle_file, ':', e)
        raise
    return pickle_data
def flat_list_of_lists(l):
    """flatten a list of lists [[1,2], [3,4]] to [1,2,3,4]"""
    flattened = []
    for sublist in l:
        flattened.extend(sublist)
    return flattened
def merge_dicts(list_dicts):
    """Merge dicts left-to-right; later dicts override earlier keys."""
    merged_dict = dict(list_dicts[0])
    for extra in list_dicts[1:]:
        merged_dict.update(extra)
    return merged_dict
| [
"pickle.dump",
"json.dumps",
"pickle.load",
"json.load",
"json.dump"
] | [((150, 168), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (159, 168), False, 'import json\n'), ((456, 468), 'json.load', 'json.load', (['f'], {}), '(f)\n', (465, 468), False, 'import json\n'), ((599, 638), 'pickle.dump', 'pickle.dump', (['data', 'f'], {'protocol': 'protocol'}), '(data, f, protocol=protocol)\n', (610, 638), False, 'import pickle\n'), ((333, 375), 'json.dumps', 'json.dumps', (['data'], {'indent': '(4)', 'sort_keys': '(True)'}), '(data, indent=4, sort_keys=True)\n', (343, 375), False, 'import json\n'), ((749, 763), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (760, 763), False, 'import pickle\n'), ((869, 902), 'pickle.load', 'pickle.load', (['f'], {'encoding': '"""latin1"""'}), "(f, encoding='latin1')\n", (880, 902), False, 'import pickle\n')] |
import sqlalchemy as sa
import numpy as np
import datetime as dt
from faker import Faker
from jinja2 import Environment, PackageLoader
from database.models.core import (
Base,
Products,
Customers,
TransactionDetails,
Transactions,
)
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# Static product catalogue used to seed the products table; each entry maps
# directly onto the Products model's keyword arguments (name, price).
PRODUCT_LIST = [
    {"name": "hat", "price": 10.99},
    {"name": "cap", "price": 6.99},
    {"name": "shirt", "price": 50.99},
    {"name": "sweater", "price": 69.99},
    {"name": "shorts", "price": 49.99},
    {"name": "jeans", "price": 39.99},
    {"name": "neakers", "price": 32.99},
    {"name": "boots", "price": 199.99},
    {"name": "coats", "price": 249.99},
    {"name": "accessories", "price": 149.99},
]
class DBConn:
    """Shared connection settings plus SQLAlchemy engine/session factories.

    Supports exactly two backends: MySQL (port 3307) and Postgres
    (port 5438), both assumed to be reachable on *host*.
    """

    def __init__(self, **kwargs):
        """Initialize connection settings, falling back to local defaults."""
        self.host = kwargs.get("host", "host.docker.internal")
        self.username = kwargs.get("username", "henry")
        self.password = kwargs.get("password", "<PASSWORD>")
        self.database = kwargs.get("database", "henry")
        self.schema = kwargs.get("schema", "henry")
        self.log = logger

    def _get_conn_str(self, database_type):
        """Return the SQLAlchemy connection string for *database_type*.

        Raises:
            ValueError: if *database_type* is not a supported backend.
                (Previously an unknown type surfaced as a confusing
                NameError because ``dbapi``/``port`` were never bound.)
        """
        if database_type == "postgres":
            dbapi = "postgresql"
            port = 5438
        elif database_type == "mysql":
            dbapi = "mysql+pymysql"
            port = 3307
        else:
            raise ValueError(f"Unsupported database type: {database_type!r}")
        return f"{dbapi}://{self.username}:{self.password}@{self.host}:{port}"  # noqa: E501

    def get_conn(self, database_type):
        """Create a SQLAlchemy engine (echoing SQL) for *database_type*."""
        conn_str = self._get_conn_str(database_type)
        connection = sa.create_engine(conn_str, echo=True)
        return connection

    @property
    def _database_types(self):
        """The backends every setup/teardown step iterates over."""
        return ["mysql", "postgres"]

    def get_session(self, database_type):
        """Open a new ORM session bound to *database_type*."""
        conn = self.get_conn(database_type)
        Session = sa.orm.sessionmaker(bind=conn)
        return Session()
class DataGenerator:
    """Produce fake customer/transaction records for seeding the demo DB."""

    def __init__(self):
        self.fake = Faker()

    def _get_dates(self):
        """Yield each date from 2021-01-01 up to (excluding) today as a
        ``YYYY-MM-DD`` string."""
        start_date = dt.date(2021, 1, 1)  # first simulated day
        end_date = dt.datetime.now().date()  # today
        diff = (end_date - start_date).days  # number of days to emit
        for i in range(0, diff):
            date = start_date + dt.timedelta(days=i)
            date = date.strftime("%Y-%m-%d")
            yield date

    @property
    def _name(self):
        return self.fake.name()

    @property
    def _address(self):
        return self.fake.address()

    @property
    def _phone(self):
        return self.fake.phone_number()

    def _get_email(self, name):
        """Build ``first.last@<random domain>.com`` (lowercased) from *name*."""
        first_name = name.split()[0]
        last_name = name.split()[-1]
        index = np.random.randint(0, 3)
        domains = ["gmail", "yahoo", "outlook"]
        email = f"{first_name}.{last_name}@{domains[index]}.com"
        return email.lower()

    @property
    def _product_id(self):
        """Random product id in ``1..len(PRODUCT_LIST)`` (inclusive)."""
        product_ids = list(
            range(1, len(PRODUCT_LIST) + 1)
        )
        index = np.random.randint(0, len(product_ids))
        return product_ids[index]

    @property
    def _quantity(self):
        """Random order quantity between 1 and 9 (inclusive)."""
        return np.random.randint(1, 10)

    def get_data(self):
        """Yield one nested record per fake transaction.

        For every day since 2021-01-01 a random number (1-14) of
        customer/transaction/transaction-detail records is generated.
        """
        for date in self._get_dates():
            for _ in range(np.random.randint(1, 15)):
                name = self._name
                yield {
                    "customers": {
                        "name": name,
                        "address": self._address,
                        "phone": self._phone,
                        "email": self._get_email(name),
                    },
                    "transactions": {
                        "transaction_date": date,
                    },
                    "transaction_details": {
                        "product_id": self._product_id,
                        # Consistency fix: reuse the _quantity property
                        # instead of an inlined duplicate of its randint.
                        "quantity": self._quantity,
                    },
                }
class DBSetup(DBConn):
    """Create, seed and tear down the demo schema on every backend."""
    def _create_tables(self):
        # Ensure the target schema/database exists on each backend, then
        # create all ORM-declared tables.
        for database_type in self._database_types:
            conn = self.get_conn(database_type)
            if database_type == "postgres":
                if not conn.dialect.has_schema(conn, self.schema):
                    conn.execute(sa.schema.CreateSchema(self.schema))
            if database_type == "mysql":
                conn.execute(f"CREATE DATABASE IF NOT EXISTS {self.schema}")
            Base.metadata.create_all(conn)
    def reset(self):
        # Drop every ORM table, then the schema itself (CASCADE needed on
        # Postgres to remove dependent objects).
        for database_type in self._database_types:
            conn = self.get_conn(database_type)
            Base.metadata.drop_all(conn)
            sql = f"DROP SCHEMA IF EXISTS {self.schema}"
            if database_type == "postgres":
                conn.execute(f"{sql} CASCADE")
            else:
                conn.execute(sql)
    def load_transaction(self, data, session):
        # Insert one nested record (as produced by DataGenerator.get_data)
        # in a single commit, letting the ORM wire up the foreign keys.
        customers = data.get("customers")
        transactions = data.get("transactions")
        transaction_details = data.get("transaction_details")
        row = Customers(  # nested: customer -> transaction -> details
            **customers,
            transactions=[
                Transactions(
                    **transactions,
                    transaction_details=[
                        TransactionDetails(**transaction_details)
                    ],
                )
            ],
        )
        session.add(row)
        session.commit()
    def _seed_transactions(self):
        # NOTE(review): transactions are only seeded into MySQL, unlike
        # products which go to both backends — confirm this is intended.
        my_fake_data = DataGenerator()
        session = self.get_session("mysql")
        for line in my_fake_data.get_data():
            self.load_transaction(line, session)
    @property
    def _product_list(self):
        return PRODUCT_LIST
    def _seed_products(self):
        for database_type in self._database_types:  # both backends
            session = self.get_session(database_type)
            for row in self._product_list:
                product = Products(**row)  # row maps onto model kwargs
                session.add(product)
                session.commit()
    def run(self):
        # Full rebuild: wipe, recreate tables, then seed.
        self.reset()
        self._create_tables()
        self._seed_products()
        self._seed_transactions()
class ApplicationDataBase(DBConn):
    """Read-side helper: renders SQL templates and runs them against MySQL."""

    db_type = "mysql"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.jinja_env = Environment(loader=PackageLoader("database", "templates"))

    def _get_template(self, filename, **kwargs):
        """Render the Jinja template *filename* with the given context."""
        return self.jinja_env.get_template(filename).render(**kwargs)

    def get_data(self, date, table_name):
        """Render ``<table_name>.sql`` for *date* and execute it."""
        sql = self._get_template(f"{table_name}.sql", date=date)
        return self.run_query(sql)

    def run_query(self, sql):
        """Execute *sql* on MySQL and return all rows as plain dicts."""
        result = self.get_conn("mysql").execute(sql)
        return [dict(row) for row in result.fetchall()]
if __name__ == "__main__":
    # Smoke-test the read side against a locally running MySQL instance:
    # render and run one templated query per table, printing the rows.
    kwargs = {"host": "localhost"}
    app = ApplicationDataBase(**kwargs)
    data1 = app.get_data("2021-08-03", "customers")
    data2 = app.get_data("2021-05-01", "transactions")
    data3 = app.get_data("2021-07-21", "transaction_details")
    print(data1)
    print("******")
    print(data2)
    print("******")
    print(data3)
| [
"logging.basicConfig",
"logging.getLogger",
"sqlalchemy.orm.sessionmaker",
"database.models.core.Base.metadata.drop_all",
"database.models.core.Base.metadata.create_all",
"database.models.core.TransactionDetails",
"sqlalchemy.schema.CreateSchema",
"sqlalchemy.create_engine",
"database.models.core.Pr... | [((271, 292), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (290, 292), False, 'import logging\n'), ((302, 329), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (319, 329), False, 'import logging\n'), ((1815, 1852), 'sqlalchemy.create_engine', 'sa.create_engine', (['conn_str'], {'echo': '(True)'}), '(conn_str, echo=True)\n', (1831, 1852), True, 'import sqlalchemy as sa\n'), ((2067, 2097), 'sqlalchemy.orm.sessionmaker', 'sa.orm.sessionmaker', ([], {'bind': 'conn'}), '(bind=conn)\n', (2086, 2097), True, 'import sqlalchemy as sa\n'), ((2190, 2197), 'faker.Faker', 'Faker', ([], {}), '()\n', (2195, 2197), False, 'from faker import Faker\n'), ((2246, 2265), 'datetime.date', 'dt.date', (['(2021)', '(1)', '(1)'], {}), '(2021, 1, 1)\n', (2253, 2265), True, 'import datetime as dt\n'), ((2976, 2999), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (2993, 2999), True, 'import numpy as np\n'), ((3527, 3551), 'numpy.random.randint', 'np.random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (3544, 3551), True, 'import numpy as np\n'), ((4790, 4820), 'database.models.core.Base.metadata.create_all', 'Base.metadata.create_all', (['conn'], {}), '(conn)\n', (4814, 4820), False, 'from database.models.core import Base, Products, Customers, TransactionDetails, Transactions\n'), ((4954, 4982), 'database.models.core.Base.metadata.drop_all', 'Base.metadata.drop_all', (['conn'], {}), '(conn)\n', (4976, 4982), False, 'from database.models.core import Base, Products, Customers, TransactionDetails, Transactions\n'), ((2307, 2324), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (2322, 2324), True, 'import datetime as dt\n'), ((2485, 2505), 'datetime.timedelta', 'dt.timedelta', ([], {'days': 'i'}), '(days=i)\n', (2497, 2505), True, 'import datetime as dt\n'), ((3645, 3669), 'numpy.random.randint', 'np.random.randint', (['(1)', '(15)'], {}), '(1, 15)\n', (3662, 3669), True, 
'import numpy as np\n'), ((6295, 6310), 'database.models.core.Products', 'Products', ([], {}), '(**row)\n', (6303, 6310), False, 'from database.models.core import Base, Products, Customers, TransactionDetails, Transactions\n'), ((6751, 6789), 'jinja2.PackageLoader', 'PackageLoader', (['"""database"""', '"""templates"""'], {}), "('database', 'templates')\n", (6764, 6789), False, 'from jinja2 import Environment, PackageLoader\n'), ((4229, 4253), 'numpy.random.randint', 'np.random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (4246, 4253), True, 'import numpy as np\n'), ((4622, 4657), 'sqlalchemy.schema.CreateSchema', 'sa.schema.CreateSchema', (['self.schema'], {}), '(self.schema)\n', (4644, 4657), True, 'import sqlalchemy as sa\n'), ((5644, 5685), 'database.models.core.TransactionDetails', 'TransactionDetails', ([], {}), '(**transaction_details)\n', (5662, 5685), False, 'from database.models.core import Base, Products, Customers, TransactionDetails, Transactions\n')] |
from river import stats, utils
from . import base
class SSB(base.ClusteringMetric):
    """Sum-of-Squares Between Clusters (SSB).
    The Sum-of-Squares Between Clusters is the weighted mean of the squares of distances
    between cluster centers to the mean value of the whole dataset.
    Examples
    --------
    >>> from river import cluster
    >>> from river import stream
    >>> from river import metrics
    >>> X = [
    ...     [1, 2],
    ...     [1, 4],
    ...     [1, 0],
    ...     [4, 2],
    ...     [4, 4],
    ...     [4, 0],
    ...     [-2, 2],
    ...     [-2, 4],
    ...     [-2, 0]
    ... ]
    >>> k_means = cluster.KMeans(n_clusters=3, halflife=0.4, sigma=3, seed=0)
    >>> metric = metrics.cluster.SSB()
    >>> for x, _ in stream.iter_array(X):
    ...     k_means = k_means.learn_one(x)
    ...     y_pred = k_means.predict_one(x)
    ...     metric = metric.update(x, y_pred, k_means.centers)
    >>> metric
    SSB: 8.109389
    References
    ----------
    [^1]: <NAME>, <NAME>, and <NAME>, "Sum-of-squares based cluster validity index
          and significance analysis," in Adaptive and Natural Computing Algorithms,
          <NAME>, <NAME>, and <NAME>, Eds.
          Berlin, Germany: Springer, 2009, pp. 313–322.
    """
    def __init__(self):
        super().__init__()
        # Running per-feature mean of every observed point (dataset centroid).
        self._center_all_points = {}
        # Total number of points seen so far.
        self._n_points = 0
        # Per-cluster point counts, keyed by predicted cluster label.
        self._n_points_by_clusters = {}
        # Per-center squared distance to the dataset centroid.
        self._squared_distances = {}
        self._initialized = False
    def update(self, x, y_pred, centers, sample_weight=1.0):
        # Lazily create one running Mean per feature on the first sample.
        if not self._initialized:
            self._center_all_points = {i: stats.Mean() for i in x}
            self._initialized = True
        for i in self._center_all_points:
            self._center_all_points[i].update(x[i], w=sample_weight)
        center_all_points = {
            i: self._center_all_points[i].get() for i in self._center_all_points
        }
        self._n_points += 1
        try:
            self._n_points_by_clusters[y_pred] += 1
        except KeyError:
            self._n_points_by_clusters[y_pred] = 1
        # Recompute every center's distance to the (shifted) global centroid.
        # NOTE(review): assumes river's minkowski_distance with p=2 returns
        # the *squared* Euclidean distance (no root) — confirm.
        for i in centers:
            self._squared_distances[i] = utils.math.minkowski_distance(
                centers[i], center_all_points, 2
            )
        return self
    def revert(self, x, y_pred, centers, sample_weight=1.0):
        # Mirror of update(): negative-weight mean update and decremented
        # counters, then refreshed center-to-centroid distances.
        for i in self._center_all_points:
            self._center_all_points[i].update(x[i], w=-sample_weight)
        center_all_points = {
            i: self._center_all_points[i].get() for i in self._center_all_points
        }
        self._n_points -= 1
        self._n_points_by_clusters[y_pred] -= 1
        for i in centers:
            self._squared_distances[i] = utils.math.minkowski_distance(
                centers[i], center_all_points, 2
            )
        return self
    def get(self):
        # SSB = sum over clusters of (n_k / N) * ||c_k - centroid||^2.
        ssb = 0
        for i in self._n_points_by_clusters:
            try:
                ssb += (
                    1
                    / self._n_points
                    * self._n_points_by_clusters[i]
                    * self._squared_distances[i]
                )
            except ZeroDivisionError:
                # No points seen yet: contribute nothing.
                ssb += 0
        return ssb
    @property
    def bigger_is_better(self):
        return True
| [
"river.utils.math.minkowski_distance",
"river.stats.Mean"
] | [((2178, 2241), 'river.utils.math.minkowski_distance', 'utils.math.minkowski_distance', (['centers[i]', 'center_all_points', '(2)'], {}), '(centers[i], center_all_points, 2)\n', (2207, 2241), False, 'from river import stats, utils\n'), ((2735, 2798), 'river.utils.math.minkowski_distance', 'utils.math.minkowski_distance', (['centers[i]', 'center_all_points', '(2)'], {}), '(centers[i], center_all_points, 2)\n', (2764, 2798), False, 'from river import stats, utils\n'), ((1644, 1656), 'river.stats.Mean', 'stats.Mean', ([], {}), '()\n', (1654, 1656), False, 'from river import stats, utils\n')] |
#!/usr/bin/env python3
# Monte Carlo Simulation: Dice Game
# Based on Investopedia: Creating a Monte Carlo Simulation
# investopedia.com/articles/investing/093015/create-monte-carlo-simulation-using-excel.asp\
# Here's how the dice game rolls:
# The player throws three dice that have 6 sides 3 times.
# If the total of the 3 throws is 7 or 11, the player wins.
# If the total of the 3 throws is: 3, 4, 5, 16, 17 or 18, the player loses.
# If the total is any other outcome, the player plays again and re-rolls the dice.
# When the player throws the dice again, the game continues in the same way, except
# that the player wins when the total is equal to the sum determined in the first round.
# 5,000 results are needed to prepare the Monte Carlo simulation.
import random
class dice:
    """A single six-sided die."""
    def roll(self):
        """Return a uniformly random face value from 1 to 6."""
        return random.randint(1, 6)
# The three dice shared by every simulated game.
dice1 = dice()
dice2 = dice()
dice3 = dice()
# First-roll outcomes: 7/11 win immediately, the extremes lose immediately;
# any other total becomes the "point" that later rolls must match (it is
# temporarily appended to ``win`` and popped again in main()).
win = [7,11]
lose = [3,4,5,16,17,18]
# Aggregate tallies across all simulation runs.
wins = 0
losses = 0
noresult = 0
def main():
    """Run the Monte Carlo simulation: 5000 games, each capped at 50 rolls."""
    global wins, losses, noresult
    simulationRuns = 5000
    singleSimRolls = 50
    rollTimes = 1  # NOTE(review): unused; goRoll/threeRoll get literals below
    for i in range(simulationRuns):
        print(f'Simulation {i+1}:')
        if goRoll(singleSimRolls, 1) == 'reroll':
            threeRoll(singleSimRolls, 2)
            # goRoll appended the game's point to ``win``; remove it so the
            # next game starts with the original win set again.
            win.pop(-1)
    print(f'No of wins = {wins}')
    print(f'Probability of win = {(wins / simulationRuns * 100):.1f}%')
    print(f'No of losses = {losses}')
    print(f'Probability of loss = {(losses / simulationRuns * 100):.1f}%')
    print(f'No of incomplete = {noresult}')
    print(f'Probability of incomplete = {(noresult / simulationRuns * 100):.1f}%')
def goRoll(singleSimRolls, rollTimes):
    """Play the opening roll of one game.

    Returns 'win'/'lose' for an immediate outcome (updating the global
    tallies), or 'reroll' after recording the rolled total as the game's
    point (appended to the global ``win`` list; main() pops it later).
    """
    global wins, losses, noresult
    rollSum = dice1.roll() + dice2.roll() + dice3.roll()
    print(f'Roll {rollTimes}: {rollSum}')
    # Classify once instead of calling rollCheck() again in every branch.
    outcome = rollCheck(rollSum)
    if outcome == 'reroll':
        # The total becomes the "point": matching it later wins the game.
        win.append(rollSum)
        return 'reroll'
    if outcome == 'win':
        wins += 1
        print(f'Won by rolling {rollSum} after {rollTimes} rolls')
        return 'win'
    if outcome == 'lose':
        losses += 1
        print(f'Lost by rolling {rollSum} after {rollTimes} rolls')
        return 'lose'
def threeRoll(singleSimRolls, rollTimes):
    """Keep rolling (recursively) until the point is matched (win), a losing
    total appears, or the per-game roll budget is exhausted ('noresult')."""
    global wins, losses, noresult
    if rollTimes <= singleSimRolls:
        rollSum = dice1.roll() + dice2.roll() + dice3.roll()
        print(f'Roll {rollTimes}: {rollSum}')
        # Classify once instead of calling rollCheck() per branch.
        outcome = rollCheck(rollSum)
        if outcome == 'win':
            print(f'Won by rolling {rollSum} after {rollTimes} rolls')
            wins += 1
            return 'win'
        if outcome == 'lose':
            print(f'Lost by rolling {rollSum} after {rollTimes} rolls')
            losses += 1
            return 'lose'
        # 'reroll': recurse with the roll counter advanced.
        threeRoll(singleSimRolls, rollTimes + 1)
    else:
        # Budget exhausted without resolving the point.
        print(f'Did not win or lose after {rollTimes} rolls')
        noresult += 1
        return 'noresult'
def rollCheck(rollSum = 0):
    """Classify a roll total against the global win/lose sets."""
    global win, lose
    if rollSum in win:
        return 'win'
    return 'lose' if rollSum in lose else 'reroll'
if __name__ == '__main__': main() | [
"random.randint"
] | [((842, 862), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (856, 862), False, 'import random\n')] |
import numpy as np
import torch
import argparse
from pina.pinn import PINN
from pina.ppinn import ParametricPINN as pPINN
from pina.label_tensor import LabelTensor
from torch.nn import ReLU, Tanh, Softplus
from pina.adaptive_functions.adaptive_softplus import AdaptiveSoftplus
from problems.parametric_elliptic_optimal_control_alpha_variable import ParametricEllipticOptimalControl
from pina.multi_deep_feed_forward import MultiDeepFeedForward
from pina.deep_feed_forward import DeepFeedForward
alpha = 1
class myFeature(torch.nn.Module):
    """
    Extra feature: (1 - x1^2) * (1 - x2^2), which vanishes on the boundary
    of the [-1, 1]^2 domain.
    """
    def __init__(self):
        super().__init__()

    def forward(self, x):
        first, second = x[:, 0], x[:, 1]
        return (1 - first**2) * (1 - second**2)
class CustomMultiDFF(MultiDeepFeedForward):
    """Multi-network model that appends a derived output ``p``.

    ``p`` is computed as ``u_param * x[:, 3]`` where column 3 is the
    ``alpha`` input (see the 'uu' network's input_variables).
    NOTE(review): presumably p is the adjoint via the optimality
    condition p = alpha * u — confirm with the problem definition.
    """
    def __init__(self, dff_dict):
        super().__init__(dff_dict)
    def forward(self, x):
        out = self.uu(x)
        # x[:, 3] is the 'alpha' parameter column.
        p = LabelTensor((out['u_param'] * x[:, 3]).reshape(-1, 1), ['p'])
        a = LabelTensor.hstack([out, p])
        return a
return a
# Single 'uu' sub-network mapping (x1, x2, mu, alpha) -> (u_param, y),
# with the boundary-vanishing extra feature; the adjoint 'p' is derived in
# CustomMultiDFF.forward rather than learned by its own network (the
# commented-out entries below are the abandoned alternative).
model = CustomMultiDFF(
    {
        'uu': {
            'input_variables': ['x1', 'x2', 'mu', 'alpha'],
            'output_variables': ['u_param', 'y'],
            'layers': [40, 40, 20],
            'func': Softplus,
            'extra_features': [myFeature()],
        },
        # 'u_param': {
        #     'input_variables': ['u', 'mu'],
        #     'output_variables': ['u_param'],
        #     'layers': [],
        #     'func': None
        # },
        # 'p': {
        #     'input_variables': ['u'],
        #     'output_variables': ['p'],
        #     'layers': [10],
        #     'func': None
        # },
    }
)
opc = ParametricEllipticOptimalControl(alpha)
if __name__ == "__main__":
    # -s trains and saves the PINN state; -l loads a previously saved state
    # and runs the post-processing/plotting below.
    parser = argparse.ArgumentParser(description="Run PINA")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-s", "-save", action="store_true")
    group.add_argument("-l", "-load", action="store_true")
    args = parser.parse_args()
    # model = DeepFeedForward(
    #     layers=[40, 40, 20],
    #     output_variables=['u_param', 'y', 'p'],
    #     input_variables=opc.input_variables+['mu', 'alpha'],
    #     func=Softplus,
    #     extra_features=[myFeature()]
    # )
    pinn = pPINN(
        opc,
        model,
        lr=0.002,
        error_norm='mse',
        regularizer=1e-8,
        lr_accelerate=None)
    if args.s:
        # Sample collocation points on the domain and the four boundaries,
        # then train for 10000 epochs (logging every 20).
        pinn.span_pts(30, 'grid', ['D1'])
        pinn.span_pts(50, 'grid', ['gamma1', 'gamma2', 'gamma3', 'gamma4'])
        pinn.train(10000, 20)
        # with open('ocp_wrong_history.txt', 'w') as file_:
        #     for i, losses in enumerate(pinn.history):
        #         file_.write('{} {}\n'.format(i, sum(losses).item()))
        pinn.save_state('pina.ocp')
    else:
        # NOTE(review): the first load is immediately overwritten by the
        # second — only 'pina.ocp' takes effect. Confirm this is intended.
        pinn.load_state('working.pina.ocp')
        pinn.load_state('pina.ocp')
    import matplotlib
    matplotlib.use('GTK3Agg')
    import matplotlib.pyplot as plt
# res = 64
# param = torch.tensor([[3., 1]])
# pts_container = []
# for mn, mx in [[-1, 1], [-1, 1]]:
# pts_container.append(np.linspace(mn, mx, res))
# grids_container = np.meshgrid(*pts_container)
# unrolled_pts = torch.tensor([t.flatten() for t in grids_container]).T
# unrolled_pts = torch.cat([unrolled_pts, param.double().repeat(unrolled_pts.shape[0], 1).reshape(-1, 2)], axis=1)
# unrolled_pts = LabelTensor(unrolled_pts, ['x1', 'x2', 'mu', 'alpha'])
# Z_pred = pinn.model(unrolled_pts.tensor)
# print(Z_pred.tensor.shape)
# plt.subplot(2, 3, 1)
# plt.pcolor(Z_pred['y'].reshape(res, res).detach())
# plt.colorbar()
# plt.subplot(2, 3, 2)
# plt.pcolor(Z_pred['u_param'].reshape(res, res).detach())
# plt.colorbar()
# plt.subplot(2, 3, 3)
# plt.pcolor(Z_pred['p'].reshape(res, res).detach())
# plt.colorbar()
# with open('ocp_mu3_a1_plot.txt', 'w') as f_:
# f_.write('x y u p ys\n')
# for (x, y), tru, pre, e in zip(unrolled_pts[:, :2],
# Z_pred['u_param'].reshape(-1, 1),
# Z_pred['p'].reshape(-1, 1),
# Z_pred['y'].reshape(-1, 1),
# ):
# f_.write('{} {} {} {} {}\n'.format(x.item(), y.item(), tru.item(), pre.item(), e.item()))
# param = torch.tensor([[3.0, 0.01]])
# unrolled_pts = torch.tensor([t.flatten() for t in grids_container]).T
# unrolled_pts = torch.cat([unrolled_pts, param.double().repeat(unrolled_pts.shape[0], 1).reshape(-1, 2)], axis=1)
# unrolled_pts = LabelTensor(unrolled_pts, ['x1', 'x2', 'mu', 'alpha'])
# Z_pred = pinn.model(unrolled_pts.tensor)
# plt.subplot(2, 3, 4)
# plt.pcolor(Z_pred['y'].reshape(res, res).detach())
# plt.colorbar()
# plt.subplot(2, 3, 5)
# plt.pcolor(Z_pred['u_param'].reshape(res, res).detach())
# plt.colorbar()
# plt.subplot(2, 3, 6)
# plt.pcolor(Z_pred['p'].reshape(res, res).detach())
# plt.colorbar()
# plt.show()
# with open('ocp_mu3_a0.01_plot.txt', 'w') as f_:
# f_.write('x y u p ys\n')
# for (x, y), tru, pre, e in zip(unrolled_pts[:, :2],
# Z_pred['u_param'].reshape(-1, 1),
# Z_pred['p'].reshape(-1, 1),
# Z_pred['y'].reshape(-1, 1),
# ):
# f_.write('{} {} {} {} {}\n'.format(x.item(), y.item(), tru.item(), pre.item(), e.item()))
    # Sweep mu in [0.5, 3] at the domain center (0, 0) for three alpha
    # values, recording the predicted state y and control u.
    y = {}
    u = {}
    for alpha in [0.01, 0.1, 1]:
        y[alpha] = []
        u[alpha] = []
        for p in np.linspace(0.5, 3, 32):
            a = pinn.model(LabelTensor(torch.tensor([[0, 0, p, alpha]]).double(), ['x1', 'x2', 'mu', 'alpha']).tensor)
            y[alpha].append(a['y'].detach().numpy()[0])
            u[alpha].append(a['u_param'].detach().numpy()[0])
    plt.plot(np.linspace(0.5, 3, 32), u[1], label='u')
    plt.plot(np.linspace(0.5, 3, 32), u[0.01], label='u')
    plt.plot(np.linspace(0.5, 3, 32), u[0.1], label='u')
    plt.plot([1, 2, 3], [0.28, 0.56, 0.85], 'o', label='Truth values')
    plt.legend()
    plt.show()
    print(y[1])
    print(y[0.1])
    print(y[0.01])
    with open('elliptic_param_y.txt', 'w') as f_:
        f_.write('mu 1 01 001\n')
        for mu, y1, y01, y001 in zip(np.linspace(0.5, 3, 32), y[1], y[0.1], y[0.01]):
            f_.write('{} {} {} {}\n'.format(mu, y1, y01, y001))
    with open('elliptic_param_u.txt', 'w') as f_:
        f_.write('mu 1 01 001\n')
        for mu, y1, y01, y001 in zip(np.linspace(0.5, 3, 32), u[1], u[0.1], u[0.01]):
            f_.write('{} {} {} {}\n'.format(mu, y1, y01, y001))
    # NOTE(review): ``y`` here is a dict, not a sequence — this plot call
    # looks wrong (probably meant y[1] etc.); confirm and fix.
    plt.plot(np.linspace(0.5, 3, 32), y, label='y')
    plt.plot([1, 2, 3], [0.062, 0.12, 0.19], 'o', label='Truth values')
    plt.legend()
    plt.show()
| [
"argparse.ArgumentParser",
"matplotlib.use",
"matplotlib.pyplot.plot",
"problems.parametric_elliptic_optimal_control_alpha_variable.ParametricEllipticOptimalControl",
"torch.tensor",
"numpy.linspace",
"pina.label_tensor.LabelTensor.hstack",
"pina.ppinn.ParametricPINN",
"matplotlib.pyplot.legend",
... | [((1735, 1774), 'problems.parametric_elliptic_optimal_control_alpha_variable.ParametricEllipticOptimalControl', 'ParametricEllipticOptimalControl', (['alpha'], {}), '(alpha)\n', (1767, 1774), False, 'from problems.parametric_elliptic_optimal_control_alpha_variable import ParametricEllipticOptimalControl\n'), ((1817, 1864), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run PINA"""'}), "(description='Run PINA')\n", (1840, 1864), False, 'import argparse\n'), ((2338, 2426), 'pina.ppinn.ParametricPINN', 'pPINN', (['opc', 'model'], {'lr': '(0.002)', 'error_norm': '"""mse"""', 'regularizer': '(1e-08)', 'lr_accelerate': 'None'}), "(opc, model, lr=0.002, error_norm='mse', regularizer=1e-08,\n lr_accelerate=None)\n", (2343, 2426), True, 'from pina.ppinn import ParametricPINN as pPINN\n'), ((974, 1002), 'pina.label_tensor.LabelTensor.hstack', 'LabelTensor.hstack', (['[out, p]'], {}), '([out, p])\n', (992, 1002), False, 'from pina.label_tensor import LabelTensor\n'), ((2986, 3011), 'matplotlib.use', 'matplotlib.use', (['"""GTK3Agg"""'], {}), "('GTK3Agg')\n", (3000, 3011), False, 'import matplotlib\n'), ((6492, 6558), 'matplotlib.pyplot.plot', 'plt.plot', (['[1, 2, 3]', '[0.28, 0.56, 0.85]', '"""o"""'], {'label': '"""Truth values"""'}), "([1, 2, 3], [0.28, 0.56, 0.85], 'o', label='Truth values')\n", (6500, 6558), True, 'import matplotlib.pyplot as plt\n'), ((6567, 6579), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6577, 6579), True, 'import matplotlib.pyplot as plt\n'), ((6588, 6598), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6596, 6598), True, 'import matplotlib.pyplot as plt\n'), ((7231, 7298), 'matplotlib.pyplot.plot', 'plt.plot', (['[1, 2, 3]', '[0.062, 0.12, 0.19]', '"""o"""'], {'label': '"""Truth values"""'}), "([1, 2, 3], [0.062, 0.12, 0.19], 'o', label='Truth values')\n", (7239, 7298), True, 'import matplotlib.pyplot as plt\n'), ((7307, 7319), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), 
'()\n', (7317, 7319), True, 'import matplotlib.pyplot as plt\n'), ((7328, 7338), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7336, 7338), True, 'import matplotlib.pyplot as plt\n'), ((6025, 6048), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (6036, 6048), True, 'import numpy as np\n'), ((6319, 6342), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (6330, 6342), True, 'import numpy as np\n'), ((6378, 6401), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (6389, 6401), True, 'import numpy as np\n'), ((6440, 6463), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (6451, 6463), True, 'import numpy as np\n'), ((7184, 7207), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (7195, 7207), True, 'import numpy as np\n'), ((6797, 6820), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (6808, 6820), True, 'import numpy as np\n'), ((7048, 7071), 'numpy.linspace', 'np.linspace', (['(0.5)', '(3)', '(32)'], {}), '(0.5, 3, 32)\n', (7059, 7071), True, 'import numpy as np\n'), ((6093, 6125), 'torch.tensor', 'torch.tensor', (['[[0, 0, p, alpha]]'], {}), '([[0, 0, p, alpha]])\n', (6105, 6125), False, 'import torch\n')] |
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fake gcloud utils for testing without cloud access."""
from makani.lib.python.batch_sim import gcloud_util
class FakeFilesystem(object):
  """In-memory stand-in for a filesystem.

  Files live in a plain dict mapping file name to contents, exposed through
  file-like Save()/Load() helpers.

  The class attributes LOCAL and CLOUD hold references to particular
  FakeFilesystem instances used by the other fakes in this module. Both
  start as None and are meant to be swapped in via mock.patch, e.g.:

    with mock.patch('makani.batch_sim.gcloud_fakes.FakeFilesystem.LOCAL',
                    FakeFilesystem()) as local_fs:
      <Do something with local files>
    with mock.patch('makani.batch_sim.gcloud_fakes.FakeFilesystem.CLOUD',
                    FakeFilesystem()) as remote_fs:
      <Do something with remote files>
  """

  LOCAL = None
  CLOUD = None

  def __init__(self):
    self.files = {}

  def Save(self, filename, descriptor):
    self.files[filename] = descriptor

  def Load(self, filename):
    return self.files[filename]
class FakeCloudStorageApi(object):
  """A fake of gcloud_util.CloudStorageApi.

  This performs simple transfers between FakeFilesystem.LOCAL and
  FakeFilesystem.CLOUD.

  To simulate working with different local filesystems, FakeFilesystem.LOCAL
  may be patched before instantiating the FakeCloudStorageApi.
  """

  def __init__(self, bucket=None):
    self._local_fs = FakeFilesystem.LOCAL
    self._cloud_fs = FakeFilesystem.CLOUD
    self._bucket = bucket

  def _RemoveBucketFromCloudName(self, cloud_name):
    """Strips a leading gs://<bucket>/ from cloud_name, if present."""
    cloud_name = cloud_name.strip()
    if cloud_name.startswith('gs://'):
      _, cloud_name = gcloud_util.ParseBucketAndPath(cloud_name, None)
    return cloud_name

  def DownloadFile(self, cloud_name, stream):
    cloud_name = self._RemoveBucketFromCloudName(cloud_name)
    stream.write(self._cloud_fs.Load(cloud_name))

  def UploadFile(self, local_name, cloud_name):
    cloud_name = self._RemoveBucketFromCloudName(cloud_name)
    self._cloud_fs.Save(cloud_name, self._local_fs.Load(local_name))

  def UploadStream(self, stream, cloud_name):
    cloud_name = self._RemoveBucketFromCloudName(cloud_name)
    self._cloud_fs.Save(cloud_name, stream.getvalue())

  def DeletePrefix(self, prefix):
    """Deletes every cloud file whose name starts with prefix.

    Bug fix: strip the bucket before comparing. Previously List() stripped
    the bucket internally but the startswith() guard used the unstripped
    prefix, so a gs://bucket/... prefix silently deleted nothing.
    """
    prefix = self._RemoveBucketFromCloudName(prefix)
    for filename in self.List(prefix):
      self._cloud_fs.files.pop(filename)

  def DeleteFile(self, cloud_name):
    cloud_name = self._RemoveBucketFromCloudName(cloud_name)
    self._cloud_fs.files.pop(cloud_name)

  def List(self, prefix):
    prefix = self._RemoveBucketFromCloudName(prefix)
    return [name for name in self._cloud_fs.files if name.startswith(prefix)]
| [
"makani.lib.python.batch_sim.gcloud_util.ParseBucketAndPath"
] | [((2469, 2517), 'makani.lib.python.batch_sim.gcloud_util.ParseBucketAndPath', 'gcloud_util.ParseBucketAndPath', (['cloud_name', 'None'], {}), '(cloud_name, None)\n', (2499, 2517), False, 'from makani.lib.python.batch_sim import gcloud_util\n')] |
import djclick as click
from django_celery_results.models import TaskResult
@click.command()
def command():
    # Purge Celery task results stuck in the STARTED state (e.g. left over
    # after a worker crash).  Deliberately a comment rather than a
    # docstring: click would surface a docstring as the command's help text.
    TaskResult.objects.filter(status="STARTED").delete()
| [
"django_celery_results.models.TaskResult.objects.filter",
"djclick.command"
] | [((79, 94), 'djclick.command', 'click.command', ([], {}), '()\n', (92, 94), True, 'import djclick as click\n'), ((114, 157), 'django_celery_results.models.TaskResult.objects.filter', 'TaskResult.objects.filter', ([], {'status': '"""STARTED"""'}), "(status='STARTED')\n", (139, 157), False, 'from django_celery_results.models import TaskResult\n')] |
"""
Plugin for manually animating
large numbers of rigs given a small number of source rigs
See README for installation instructions and usage
"""
from pymel.api.plugins import Command
import maya.OpenMayaMPx as OpenMayaMPx
from characterCrowdSrc.characterCrowd import *
# Thin MPxCommand wrappers: each class exposes one characterCrowd function
# (imported above via ``from characterCrowdSrc.characterCrowd import *``)
# as a Maya command.
class characterCrowdGui(Command):
    # Open the characterCrowd GUI window.
    def doIt(self, args):
        print("loading gui...")
        gui()
class ccPreRenderFrame(Command):
    # Per-frame pre-render hook.
    def doIt(self, args):
        preRender()
class ccPostRenderFrame(Command):
    # Per-frame post-render hook.
    def doIt(self, args):
        postRender()
class ccGenerate(Command):
    def doIt(self, args):
        generateStandin()
class ccDuplicate(Command):
    def doIt(self, args):
        duplicateStandin()
class ccSave(Command):
    def doIt(self, args):
        saveStandin()
class ccEdit(Command):
    def doIt(self, args):
        editStandin()
class ccCache(Command):
    def doIt(self, args):
        """
        Usage: ccCache      # caches selected over the entire frame range
               ccCache 2 5  # caches selected from frame 2 to 5
        """
        # Bug fix: use equality, not identity. ``args.length() is 0``
        # relies on CPython's small-int caching and emits a SyntaxWarning
        # on Python 3.8+.
        if args.length() == 0:
            return cacheStandin()
        else:
            startFrame = args.asInt(0)
            endFrame = args.asInt(1)
            cacheStandin(startFrame, endFrame)
class ccSelectAll(Command):
    # Select every characterCrowd stand-in in the scene.
    def doIt(self, args):
        selectAllStandins()
## initialize the script plug-in
def initializePlugin(mobject):
    """Register every characterCrowd command when Maya loads the plug-in."""
    plugin_fn = OpenMayaMPx.MFnPlugin(mobject, "CampbellMorgan", "0.01")
    for command_cls in (
        characterCrowdGui,
        ccPreRenderFrame,
        ccPostRenderFrame,
        ccGenerate,
        ccDuplicate,
        ccSave,
        ccEdit,
        ccCache,
        ccSelectAll,
    ):
        command_cls.register()
    print("Loaded CharacterCrowd")
# uninitialize the script plug-in
def uninitializePlugin(mobject):
    """Deregister every characterCrowd command when the plug-in unloads."""
    plugin_fn = OpenMayaMPx.MFnPlugin(mobject)
    for command_cls in (
        characterCrowdGui,
        ccPreRenderFrame,
        ccPostRenderFrame,
        ccGenerate,
        ccDuplicate,
        ccSave,
        ccEdit,
        ccCache,
        ccSelectAll,
    ):
        command_cls.deregister()
    print("Unloaded CharacterCrowd")
| [
"maya.OpenMayaMPx.MFnPlugin"
] | [((1405, 1461), 'maya.OpenMayaMPx.MFnPlugin', 'OpenMayaMPx.MFnPlugin', (['mobject', '"""CampbellMorgan"""', '"""0.01"""'], {}), "(mobject, 'CampbellMorgan', '0.01')\n", (1426, 1461), True, 'import maya.OpenMayaMPx as OpenMayaMPx\n'), ((1826, 1856), 'maya.OpenMayaMPx.MFnPlugin', 'OpenMayaMPx.MFnPlugin', (['mobject'], {}), '(mobject)\n', (1847, 1856), True, 'import maya.OpenMayaMPx as OpenMayaMPx\n')] |
# coding: utf-8
"""
Decision Lens API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import dlxapi
from dlxapi.api.spreadsheet_api import SpreadsheetApi # noqa: E501
from dlxapi.rest import ApiException
class TestSpreadsheetApi(unittest.TestCase):
    """SpreadsheetApi unit test stubs"""
    # NOTE(review): these are codegen-produced placeholders -- every test body
    # is still `pass` and needs a real implementation.
    def setUp(self):
        # Fresh API client per test; no teardown state is kept.
        self.api = dlxapi.api.spreadsheet_api.SpreadsheetApi()  # noqa: E501
    def tearDown(self):
        pass
    def test_create_spreadsheet(self):
        """Test case for create_spreadsheet
        """
        pass
    def test_create_spreadsheet_for_kloudless_file(self):
        """Test case for create_spreadsheet_for_kloudless_file
        """
        pass
    def test_delete_spreadsheet(self):
        """Test case for delete_spreadsheet
        Delete spreadsheet and mappings  # noqa: E501
        """
        pass
    def test_get_mappings_for_spreadsheet(self):
        """Test case for get_mappings_for_spreadsheet
        """
        pass
    def test_get_spreadsheet(self):
        """Test case for get_spreadsheet
        """
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"dlxapi.api.spreadsheet_api.SpreadsheetApi"
] | [((1365, 1380), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1378, 1380), False, 'import unittest\n'), ((580, 623), 'dlxapi.api.spreadsheet_api.SpreadsheetApi', 'dlxapi.api.spreadsheet_api.SpreadsheetApi', ([], {}), '()\n', (621, 623), False, 'import dlxapi\n')] |
import tkinter as tk
import tkinter.ttk as ttk
from .pixel_canvas import PixelCanvas
class DemoWindow(ttk.Frame):
    """Interactive demo window: draw on the left canvas, see the model's
    reconstruction (plus up to three per-window VAE reconstructions) refresh
    on a timer.

    model_wrapper must expose infer(images) returning the 6-tuple unpacked in
    _reconstruct_image() -- presumably an AIR-style model; confirm with caller.
    """
    def __init__(self, master, model_wrapper,
                 canvas_size=50, window_size=28,
                 refresh_period=50, test_image=None, **kw):
        # canvas_size: side length (pixels of the model grid) of the main
        # canvases; window_size: side length of the per-window canvases;
        # refresh_period: inference refresh interval in milliseconds.
        ttk.Frame.__init__(self, master=master, **kw)
        self.master = master
        self.model_wrapper = model_wrapper
        self.canvas_size = canvas_size
        self.window_size = window_size
        self.refresh_period = refresh_period
        self._create_interface()
        if test_image is not None:
            self.cnv_orig.set_image(test_image)
        # Grid weights: two large canvas columns plus a narrow window column.
        self.columnconfigure(0, weight=410, minsize=215)
        self.columnconfigure(1, weight=410, minsize=210)
        self.columnconfigure(2, weight=140, minsize=65)
        self.rowconfigure(0, weight=0, minsize=50)
        self.rowconfigure(1, weight=1, minsize=220)
        self.rowconfigure(2, weight=0, minsize=0)
        # Grab focus shortly after startup, then kick off the refresh loop.
        self.master.after(50, lambda: master.focus_force())
        self.master.after(100, self._reconstruct_image)
    def _create_interface(self):
        """Build every widget and wire the control variables to callbacks."""
        # --- top control strip -------------------------------------------
        self.frm_controls = ttk.Frame(self, padding=(10, 15, 10, 10))
        self.frm_controls.grid(row=0, column=0, columnspan=3, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.lbl_draw_mode = ttk.Label(self.frm_controls, text="Drawing Mode:")
        self.lbl_line_width = ttk.Label(self.frm_controls, text="Line Width:")
        self.lbl_refresh_rate = ttk.Label(self.frm_controls, text="Refresh (ms):")
        # 1 = draw, 0 = erase (see _set_draw_mode).
        self.var_draw_mode = tk.IntVar(value=1)
        self.rad_draw = ttk.Radiobutton(self.frm_controls, text="Draw", variable=self.var_draw_mode, value=1)
        self.rad_erase = ttk.Radiobutton(self.frm_controls, text="Erase", variable=self.var_draw_mode, value=0)
        self.btn_clear = ttk.Button(
            self.frm_controls, text="Clear Image",
            command=lambda: self.cnv_orig.clear_image()
        )
        self.var_width = tk.StringVar(self.frm_controls)
        self.spn_width = tk.Spinbox(
            self.frm_controls, values=(1, 2, 3, 4, 5), width=10,
            state="readonly", textvariable=self.var_width
        )
        self.var_rate = tk.StringVar(self.frm_controls)
        self.spn_rate = tk.Spinbox(
            self.frm_controls, values=(10, 20, 50, 100, 200, 500, 1000), width=10,
            state="readonly", textvariable=self.var_rate
        )
        self.var_bbox = tk.IntVar(value=1)
        self.cbx_bbox = ttk.Checkbutton(self.frm_controls, text="Bounding Boxes", variable=self.var_bbox)
        self.lbl_draw_mode.grid(row=0, column=0, columnspan=2, sticky=(tk.N, tk.W))
        self.lbl_line_width.grid(row=0, column=3, sticky=(tk.N, tk.W))
        self.lbl_refresh_rate.grid(row=0, column=4, sticky=(tk.N, tk.W))
        self.rad_draw.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.rad_erase.grid(row=1, column=1, sticky=(tk.N, tk.S, tk.W, tk.E), padx=(0, 20))
        self.btn_clear.grid(row=1, column=2, sticky=(tk.N, tk.S, tk.W, tk.E), padx=(0, 20))
        self.spn_width.grid(row=1, column=3, sticky=(tk.N, tk.S, tk.W, tk.E), padx=(0, 20))
        self.spn_rate.grid(row=1, column=4, sticky=(tk.N, tk.S, tk.W, tk.E), padx=(0, 20))
        self.cbx_bbox.grid(row=1, column=5, sticky=(tk.N, tk.S, tk.W, tk.E))
        # Control variables drive the actual state via write-traces.
        self.var_draw_mode.trace("w", lambda *_: self._set_draw_mode(self.var_draw_mode.get() == 1))
        self.var_width.trace("w", lambda *_: self.cnv_orig.set_line_width(int(self.var_width.get())))
        self.var_rate.trace("w", lambda *_: self._set_refresh_period(int(self.var_rate.get())))
        self.var_bbox.trace("w", lambda *_: self._set_bbox_visibility(self.var_bbox.get() == 1))
        # --- left: editable original image -------------------------------
        self.frm_canvas_orig = ttk.Frame(self, padding=(10, 10, 5, 10))
        self.frm_canvas_orig.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.frm_canvas_orig.columnconfigure(0, weight=1, minsize=200)
        self.frm_canvas_orig.rowconfigure(0, weight=0, minsize=20)
        self.frm_canvas_orig.rowconfigure(1, weight=1, minsize=200)
        self.lbl_orig = ttk.Label(self.frm_canvas_orig, text="Original Image (draw here):")
        self.cnv_orig = PixelCanvas(
            self.frm_canvas_orig, self.canvas_size, self.canvas_size, drawable=True,
            highlightthickness=0, borderwidth=0, width=400, height=400
        )
        self.lbl_orig.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.cnv_orig.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        # --- middle: model reconstruction --------------------------------
        self.frm_canvas_rec = ttk.Frame(self, padding=(5, 10, 5, 10))
        self.frm_canvas_rec.grid(row=1, column=1, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.frm_canvas_rec.columnconfigure(0, weight=1, minsize=200)
        self.frm_canvas_rec.rowconfigure(0, weight=0, minsize=20)
        self.frm_canvas_rec.rowconfigure(1, weight=1, minsize=200)
        self.lbl_rec = ttk.Label(self.frm_canvas_rec, text="Reconstructed Image:")
        self.cnv_rec = PixelCanvas(
            self.frm_canvas_rec, self.canvas_size, self.canvas_size, drawable=False,
            highlightthickness=0, borderwidth=0, width=400, height=400
        )
        self.lbl_rec.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.cnv_rec.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        # --- right: up to three per-window VAE reconstructions -----------
        self.frm_windows = ttk.Frame(self, padding=(0, 0, 0, 0))
        self.frm_windows.grid(row=1, column=2, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.frm_windows.columnconfigure(0, weight=1)
        self.frm_canvas_win, self.lbl_win, self.cnv_win = [], [], []
        for i in range(3):
            self.frm_windows.rowconfigure(i, weight=1)
            frm_canvas_win = ttk.Frame(
                self.frm_windows,
                padding=(5, 10 if i == 0 else 0, 10, 10 if i == 2 else 0)
            )
            frm_canvas_win.grid(row=i, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
            frm_canvas_win.columnconfigure(0, weight=1, minsize=50)
            frm_canvas_win.rowconfigure(0, weight=0, minsize=20)
            frm_canvas_win.rowconfigure(1, weight=1, minsize=50)
            lbl_win = ttk.Label(
                frm_canvas_win, text="VAE Rec. #{0}:".format(i+1)
            )
            cnv_win = PixelCanvas(
                frm_canvas_win, self.window_size, self.window_size, drawable=False,
                highlightthickness=0, borderwidth=0, width=120, height=120
            )
            lbl_win.grid(row=0, column=0, sticky=(tk.S, tk.W))
            cnv_win.grid(row=1, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
            self.frm_canvas_win.append(frm_canvas_win)
            self.lbl_win.append(lbl_win)
            self.cnv_win.append(cnv_win)
        # --- status bar ---------------------------------------------------
        self.lbl_status = ttk.Label(self, borderwidth=1, relief="sunken", padding=(5, 2))
        self.lbl_status.grid(row=2, column=0, columnspan=3, sticky=(tk.N, tk.S, tk.W, tk.E))
        # Middle/right mouse button clears the drawing canvas.
        self.cnv_orig.bind("<Button-2>", lambda *_: self.cnv_orig.clear_image())
        self.cnv_orig.bind("<Button-3>", lambda *_: self.cnv_orig.clear_image())
        # Setting the variables fires the traces above, syncing initial state.
        self.var_draw_mode.set(1)
        self.var_width.set("3")
        self.var_rate.set("50")
        self.var_bbox.set(1)
    def _reconstruct_image(self):
        """Run one inference pass and repaint all canvases; re-schedules itself."""
        dig, pos, rec, win, lat, loss = self.model_wrapper.infer(
            [self.cnv_orig.get_image()]
        )
        self.cnv_rec.set_image(rec[0])
        self.cnv_rec.set_bbox_positions(pos[0])
        self.cnv_orig.set_bbox_positions(pos[0])
        for i in range(len(self.cnv_win)):
            if i < len(win[0]):
                self.cnv_win[i].set_image(win[0][i])
                # Pad with off-screen boxes so only box #i is highlighted.
                self.cnv_win[i].set_bbox_positions(
                    [[0.0, -2.0, -2.0]] * i + [[0.99, 0.0, 0.0]]
                )
            else:
                # Fewer detected windows than canvases: blank the extras.
                self.cnv_win[i].clear_image()
                self.cnv_win[i].set_bbox_positions([])
        self.lbl_status.configure(
            text="Reconstruction loss (negative log-likelihood): {0:.3f}".format(
                abs(loss[0])
            )
        )
        # Schedule the next refresh; period may have been changed via the UI.
        self.master.after(self.refresh_period, self._reconstruct_image)
    def _set_refresh_period(self, value):
        # Takes effect on the next scheduled refresh.
        self.refresh_period = value
    def _set_bbox_visibility(self, visible):
        self.cnv_orig.set_bbox_visibility(visible)
        self.cnv_rec.set_bbox_visibility(visible)
    def _set_draw_mode(self, draw):
        # Cursor shape doubles as mode feedback: cross = draw, icon = erase.
        self.cnv_orig.set_erasing_mode(not draw)
        self.cnv_orig.config(cursor=("cross" if draw else "icon"))
| [
"tkinter.IntVar",
"tkinter.ttk.Checkbutton",
"tkinter.ttk.Radiobutton",
"tkinter.ttk.Frame",
"tkinter.ttk.Label",
"tkinter.StringVar",
"tkinter.ttk.Frame.__init__",
"tkinter.Spinbox"
] | [((282, 327), 'tkinter.ttk.Frame.__init__', 'ttk.Frame.__init__', (['self'], {'master': 'master'}), '(self, master=master, **kw)\n', (300, 327), True, 'import tkinter.ttk as ttk\n'), ((1145, 1186), 'tkinter.ttk.Frame', 'ttk.Frame', (['self'], {'padding': '(10, 15, 10, 10)'}), '(self, padding=(10, 15, 10, 10))\n', (1154, 1186), True, 'import tkinter.ttk as ttk\n'), ((1312, 1362), 'tkinter.ttk.Label', 'ttk.Label', (['self.frm_controls'], {'text': '"""Drawing Mode:"""'}), "(self.frm_controls, text='Drawing Mode:')\n", (1321, 1362), True, 'import tkinter.ttk as ttk\n'), ((1393, 1441), 'tkinter.ttk.Label', 'ttk.Label', (['self.frm_controls'], {'text': '"""Line Width:"""'}), "(self.frm_controls, text='Line Width:')\n", (1402, 1441), True, 'import tkinter.ttk as ttk\n'), ((1474, 1524), 'tkinter.ttk.Label', 'ttk.Label', (['self.frm_controls'], {'text': '"""Refresh (ms):"""'}), "(self.frm_controls, text='Refresh (ms):')\n", (1483, 1524), True, 'import tkinter.ttk as ttk\n'), ((1554, 1572), 'tkinter.IntVar', 'tk.IntVar', ([], {'value': '(1)'}), '(value=1)\n', (1563, 1572), True, 'import tkinter as tk\n'), ((1597, 1686), 'tkinter.ttk.Radiobutton', 'ttk.Radiobutton', (['self.frm_controls'], {'text': '"""Draw"""', 'variable': 'self.var_draw_mode', 'value': '(1)'}), "(self.frm_controls, text='Draw', variable=self.var_draw_mode,\n value=1)\n", (1612, 1686), True, 'import tkinter.ttk as ttk\n'), ((1708, 1799), 'tkinter.ttk.Radiobutton', 'ttk.Radiobutton', (['self.frm_controls'], {'text': '"""Erase"""', 'variable': 'self.var_draw_mode', 'value': '(0)'}), "(self.frm_controls, text='Erase', variable=self.\n var_draw_mode, value=0)\n", (1723, 1799), True, 'import tkinter.ttk as ttk\n'), ((1974, 2005), 'tkinter.StringVar', 'tk.StringVar', (['self.frm_controls'], {}), '(self.frm_controls)\n', (1986, 2005), True, 'import tkinter as tk\n'), ((2031, 2146), 'tkinter.Spinbox', 'tk.Spinbox', (['self.frm_controls'], {'values': '(1, 2, 3, 4, 5)', 'width': '(10)', 'state': '"""readonly"""', 
'textvariable': 'self.var_width'}), "(self.frm_controls, values=(1, 2, 3, 4, 5), width=10, state=\n 'readonly', textvariable=self.var_width)\n", (2041, 2146), True, 'import tkinter as tk\n'), ((2200, 2231), 'tkinter.StringVar', 'tk.StringVar', (['self.frm_controls'], {}), '(self.frm_controls)\n', (2212, 2231), True, 'import tkinter as tk\n'), ((2256, 2387), 'tkinter.Spinbox', 'tk.Spinbox', (['self.frm_controls'], {'values': '(10, 20, 50, 100, 200, 500, 1000)', 'width': '(10)', 'state': '"""readonly"""', 'textvariable': 'self.var_rate'}), "(self.frm_controls, values=(10, 20, 50, 100, 200, 500, 1000),\n width=10, state='readonly', textvariable=self.var_rate)\n", (2266, 2387), True, 'import tkinter as tk\n'), ((2442, 2460), 'tkinter.IntVar', 'tk.IntVar', ([], {'value': '(1)'}), '(value=1)\n', (2451, 2460), True, 'import tkinter as tk\n'), ((2485, 2571), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', (['self.frm_controls'], {'text': '"""Bounding Boxes"""', 'variable': 'self.var_bbox'}), "(self.frm_controls, text='Bounding Boxes', variable=self.\n var_bbox)\n", (2500, 2571), True, 'import tkinter.ttk as ttk\n'), ((3746, 3786), 'tkinter.ttk.Frame', 'ttk.Frame', (['self'], {'padding': '(10, 10, 5, 10)'}), '(self, padding=(10, 10, 5, 10))\n', (3755, 3786), True, 'import tkinter.ttk as ttk\n'), ((4102, 4169), 'tkinter.ttk.Label', 'ttk.Label', (['self.frm_canvas_orig'], {'text': '"""Original Image (draw here):"""'}), "(self.frm_canvas_orig, text='Original Image (draw here):')\n", (4111, 4169), True, 'import tkinter.ttk as ttk\n'), ((4558, 4597), 'tkinter.ttk.Frame', 'ttk.Frame', (['self'], {'padding': '(5, 10, 5, 10)'}), '(self, padding=(5, 10, 5, 10))\n', (4567, 4597), True, 'import tkinter.ttk as ttk\n'), ((4908, 4967), 'tkinter.ttk.Label', 'ttk.Label', (['self.frm_canvas_rec'], {'text': '"""Reconstructed Image:"""'}), "(self.frm_canvas_rec, text='Reconstructed Image:')\n", (4917, 4967), True, 'import tkinter.ttk as ttk\n'), ((5350, 5387), 'tkinter.ttk.Frame', 'ttk.Frame', 
(['self'], {'padding': '(0, 0, 0, 0)'}), '(self, padding=(0, 0, 0, 0))\n', (5359, 5387), True, 'import tkinter.ttk as ttk\n'), ((6743, 6806), 'tkinter.ttk.Label', 'ttk.Label', (['self'], {'borderwidth': '(1)', 'relief': '"""sunken"""', 'padding': '(5, 2)'}), "(self, borderwidth=1, relief='sunken', padding=(5, 2))\n", (6752, 6806), True, 'import tkinter.ttk as ttk\n'), ((5705, 5795), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.frm_windows'], {'padding': '(5, 10 if i == 0 else 0, 10, 10 if i == 2 else 0)'}), '(self.frm_windows, padding=(5, 10 if i == 0 else 0, 10, 10 if i ==\n 2 else 0))\n', (5714, 5795), True, 'import tkinter.ttk as ttk\n')] |
from queue import Queue
import time
from settings import USE_DB, DB_DB, DB_DUMP_TABLE, DB_ACCT_TABLE, REQUEST_SPACING
import logging
from . import helper
import sqlite3
import threading
class Site(object):
    '''
    Site - parent class used for a generic
    'Queue' structure with a few helper methods
    and features. Implements the following methods:
        empty() - Is the Queue empty
        get(): Get the next item in the queue
        put(item): Puts an item in the queue
        tail(): Shows the last item in the queue
        peek(): Shows the next item in the queue
        length(): Returns the length of the queue
        clear(): Clears the queue
        list(): Lists the contents of the Queue
        download(url): Returns the content from the URL
    '''
    # A hand-rolled list-backed queue is used instead of queue.Queue because
    # the stdlib queue offers no peek() method (see original author's note).
    def __init__(self, queue=None):
        # BUG FIX: the original assigned self.queue only when queue was None,
        # so passing an explicit queue left the attribute unset entirely and
        # every later method raised AttributeError.
        self.queue = [] if queue is None else queue

    def empty(self):
        """Return True when the queue holds no items."""
        return len(self.queue) == 0

    def get(self):
        """Pop and return the oldest item, or None when the queue is empty."""
        if self.empty():
            return None
        return self.queue.pop(0)

    def put(self, item):
        """Append an item to the tail of the queue."""
        self.queue.append(item)

    def peek(self):
        """Return the next item without removing it (None when empty)."""
        return self.queue[0] if not self.empty() else None

    def tail(self):
        """Return the most recently queued item (None when empty)."""
        return self.queue[-1] if not self.empty() else None

    def length(self):
        """Return the number of queued items."""
        return len(self.queue)

    def clear(self):
        """Drop every queued item."""
        self.queue = []

    def list(self):
        """Print every queued entry, one per line."""
        print('\n'.join(url for url in self.queue))

    def monitor(self, t_lock, db_lock, db_client):
        """Poll forever: drain the queue, match pastes, optionally persist hits.

        Relies on self.update(), self.get_paste_text() and self.sleep being
        provided by the concrete subclass -- TODO confirm against subclasses.
        """
        self.update()
        while True:
            while not self.empty():
                paste = self.get()
                self.ref_id = paste.id
                logging.info('[*] Checking + Spacer ' + paste.url)
                paste.text = self.get_paste_text(paste)
                # Throttle outbound requests between fetches.
                time.sleep(REQUEST_SPACING)
                interesting = helper.run_match(paste)
                if interesting:
                    logging.info('[*] FOUND ' + (paste.type).upper() + ' ' + paste.url)
                    if USE_DB:
                        db_lock.acquire()
                        try:
                            cursor = db_client.cursor()
                            cursor.execute('''INSERT INTO %s (
                                text,
                                emails,
                                hashes,
                                num_emails,
                                num_hashes,
                                type,
                                db_keywords,
                                url,
                                author
                            ) VALUES (
                                ?, ?, ?, ?, ?, ?, ?, ?, ?
                            )''' % (DB_DUMP_TABLE), (paste.text, str(paste.emails), str(paste.hashes), paste.num_emails, paste.num_hashes, paste.type, str(paste.db_keywords), str("https://pastebin.com/"+paste.id), paste.author,))
                        except Exception:
                            # Narrowed from a bare `except:` so SystemExit and
                            # KeyboardInterrupt still propagate.
                            logging.info('[*] ERROR: Failed to save paste. Manual review: ' + paste.url)
                        finally:
                            # Always release the lock, even on unexpected errors.
                            db_lock.release()
            self.update()
            while self.empty():
                logging.debug('[*] No results... sleeping')
                time.sleep(self.sleep)
                self.update()
| [
"logging.info",
"logging.debug",
"time.sleep"
] | [((2059, 2109), 'logging.info', 'logging.info', (["('[*] Checking + Spacer ' + paste.url)"], {}), "('[*] Checking + Spacer ' + paste.url)\n", (2071, 2109), False, 'import logging\n'), ((2184, 2211), 'time.sleep', 'time.sleep', (['REQUEST_SPACING'], {}), '(REQUEST_SPACING)\n', (2194, 2211), False, 'import time\n'), ((3580, 3623), 'logging.debug', 'logging.debug', (['"""[*] No results... sleeping"""'], {}), "('[*] No results... sleeping')\n", (3593, 3623), False, 'import logging\n'), ((3641, 3663), 'time.sleep', 'time.sleep', (['self.sleep'], {}), '(self.sleep)\n', (3651, 3663), False, 'import time\n'), ((3383, 3459), 'logging.info', 'logging.info', (["('[*] ERROR: Failed to save paste. Manual review: ' + paste.url)"], {}), "('[*] ERROR: Failed to save paste. Manual review: ' + paste.url)\n", (3395, 3459), False, 'import logging\n')] |
from django.contrib import admin
# Register your models here.
from .models import *
# Register every app model with the default admin site, in the original order.
for _model in (Author, Follower, Post, Comment, Request, Inbox, Likes,
               Liked, Usermod, Friend):
    admin.site.register(_model)
"django.contrib.admin.site.register"
] | [((86, 113), 'django.contrib.admin.site.register', 'admin.site.register', (['Author'], {}), '(Author)\n', (105, 113), False, 'from django.contrib import admin\n'), ((114, 143), 'django.contrib.admin.site.register', 'admin.site.register', (['Follower'], {}), '(Follower)\n', (133, 143), False, 'from django.contrib import admin\n'), ((144, 169), 'django.contrib.admin.site.register', 'admin.site.register', (['Post'], {}), '(Post)\n', (163, 169), False, 'from django.contrib import admin\n'), ((170, 198), 'django.contrib.admin.site.register', 'admin.site.register', (['Comment'], {}), '(Comment)\n', (189, 198), False, 'from django.contrib import admin\n'), ((199, 227), 'django.contrib.admin.site.register', 'admin.site.register', (['Request'], {}), '(Request)\n', (218, 227), False, 'from django.contrib import admin\n'), ((228, 254), 'django.contrib.admin.site.register', 'admin.site.register', (['Inbox'], {}), '(Inbox)\n', (247, 254), False, 'from django.contrib import admin\n'), ((255, 281), 'django.contrib.admin.site.register', 'admin.site.register', (['Likes'], {}), '(Likes)\n', (274, 281), False, 'from django.contrib import admin\n'), ((282, 308), 'django.contrib.admin.site.register', 'admin.site.register', (['Liked'], {}), '(Liked)\n', (301, 308), False, 'from django.contrib import admin\n'), ((309, 337), 'django.contrib.admin.site.register', 'admin.site.register', (['Usermod'], {}), '(Usermod)\n', (328, 337), False, 'from django.contrib import admin\n'), ((338, 365), 'django.contrib.admin.site.register', 'admin.site.register', (['Friend'], {}), '(Friend)\n', (357, 365), False, 'from django.contrib import admin\n')] |
#!/usr/bin/env python
import subprocess
import re
import os
import errno
import collections
import sys
class Platform(object):
    """Marker base class for the per-architecture build configurations below."""
    pass
sdk_re = re.compile(r'.*-sdk ([a-zA-Z0-9.]*)')
def sdkinfo(sdkname):
    """Return the key/value pairs reported by `xcodebuild -sdk <name> -version`."""
    info = {}
    proc = subprocess.Popen(['xcodebuild', '-sdk', sdkname, '-version'],
                            stdout=subprocess.PIPE)
    for raw_line in proc.stdout:
        parts = raw_line.strip().split(': ', 1)
        if len(parts) != 2:
            continue
        key, value = parts
        info[key] = value
    return info
desktop_sdk_info = sdkinfo('macosx')
def latest_sdks():
    """Scan `xcodebuild -showsdks` output; return the last OS X SDK version seen."""
    latest_desktop = None
    proc = subprocess.Popen(['xcodebuild', '-showsdks'], stdout=subprocess.PIPE)
    for line in proc.stdout:
        match = sdk_re.match(line)
        # Later matches overwrite earlier ones, so the last listed SDK wins.
        if match and 'OS X' in line:
            latest_desktop = match.group(1)
    return latest_desktop
desktop_sdk = latest_sdks()
class desktop_platform_32(Platform):
    """Build configuration for 32-bit (i386) macOS."""
    sdk='macosx'
    arch = 'i386'
    name = 'mac32'
    triple = 'i386-apple-darwin10'
    sdkroot = desktop_sdk_info['Path']
    # Preprocessor guard wrapped around generated headers for this arch.
    prefix = "#if defined(__i386__) && !defined(__x86_64__)\n\n"
    suffix = "\n\n#endif"
class desktop_platform_64(Platform):
    """Build configuration for 64-bit (x86_64) macOS."""
    sdk='macosx'
    arch = 'x86_64'
    name = 'mac'
    triple = 'x86_64-apple-darwin10'
    sdkroot = desktop_sdk_info['Path']
    # Preprocessor guard wrapped around generated headers for this arch.
    prefix = "#if !defined(__i386__) && defined(__x86_64__)\n\n"
    suffix = "\n\n#endif"
def move_file(src_dir, dst_dir, filename, file_suffix=None, prefix='', suffix=''):
    """Copy src_dir/filename into dst_dir, optionally renaming and wrapping it.

    file_suffix: inserted before the extension (``foo.h`` -> ``foo_<arch>.h``).
    prefix/suffix: text written before/after the file contents (e.g. the
    per-architecture preprocessor guards).
    """
    # Create the destination lazily.  Using EAFP instead of the original
    # exists()/makedirs() pair removes the race between check and creation
    # (and finally uses the `errno` module imported at the top of the file).
    try:
        os.makedirs(dst_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    out_filename = filename
    if file_suffix:
        split_name = os.path.splitext(filename)
        out_filename = "%s_%s%s" % (split_name[0], file_suffix, split_name[1])
    with open(os.path.join(src_dir, filename)) as in_file:
        with open(os.path.join(dst_dir, out_filename), 'w') as out_file:
            if prefix:
                out_file.write(prefix)
            out_file.write(in_file.read())
            if suffix:
                out_file.write(suffix)
headers_seen = collections.defaultdict(set)
def move_source_tree(src_dir, dest_dir, dest_include_dir, arch=None, prefix=None, suffix=None):
    """Walk src_dir, copying sources into dest_dir and headers into
    dest_include_dir (suffixed by architecture and wrapped in arch guards).

    The 'x86' subdirectory is special-cased: its files are emitted twice,
    once per architecture, each wrapped in the matching preprocessor guard.
    """
    for root, dirs, files in os.walk(src_dir, followlinks=True):
        relroot = os.path.relpath(root, src_dir)

        def move_dir(arch, prefix='', suffix='', files=()):
            # `files` default changed from a mutable [] to an immutable ()
            # to avoid the shared-mutable-default pitfall; it is only read.
            for file in files:
                if file.endswith('.h'):
                    # Headers go to the include dir, tagged by architecture.
                    if dest_include_dir and arch:
                        headers_seen[file].add(arch)
                    move_file(root, dest_include_dir, file, arch, prefix=prefix, suffix=suffix)
                elif dest_dir:
                    # Non-headers mirror the source layout under dest_dir.
                    outroot = os.path.join(dest_dir, relroot)
                    move_file(root, outroot, file, prefix=prefix, suffix=suffix)

        if relroot == '.':
            move_dir(arch=arch,
                     files=files,
                     prefix=prefix,
                     suffix=suffix)
        elif relroot == 'x86':
            move_dir(arch='i386',
                     prefix="#if defined(__i386__) && !defined(__x86_64__)\n\n",
                     suffix="\n\n#endif",
                     files=files)
            move_dir(arch='x86_64',
                     prefix="#if !defined(__i386__) && defined(__x86_64__)\n\n",
                     suffix="\n\n#endif",
                     files=files)
def build_target(platform):
    """Configure the project inside a per-platform build dir and harvest the
    generated headers into ../osx/include, wrapped in arch guards."""
    def xcrun_cmd(cmd):
        # Resolve a toolchain binary path for this platform's SDK.
        return subprocess.check_output(['xcrun', '-sdk', platform.sdkroot, '-find', cmd]).strip()
    build_dir = 'build_' + platform.name
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    env = dict(CC=xcrun_cmd('clang'),
               LD=xcrun_cmd('ld'),
               CFLAGS='-arch %s -isysroot %s -mmacosx-version-min=10.6' % (platform.arch, platform.sdkroot))
    working_dir=os.getcwd()
    try:
        # configure must run from inside the build directory; paths below
        # are therefore relative to build_dir.
        os.chdir(build_dir)
        subprocess.check_call(['../configure', '-host', platform.triple], env=env)
        move_source_tree('.', None, '../osx/include',
                         arch=platform.arch,
                         prefix=platform.prefix,
                         suffix=platform.suffix)
        move_source_tree('./include', None, '../osx/include',
                         arch=platform.arch,
                         prefix=platform.prefix,
                         suffix=platform.suffix)
    finally:
        # Always restore the original working directory.
        os.chdir(working_dir)
# NOTE(review): apparent dead code -- at import time headers_seen is empty and
# the computed values are discarded (same statement exists inside main()).
# Also uses Python-2-only dict.iteritems().
for header_name, archs in headers_seen.iteritems():
    basename, suffix = os.path.splitext(header_name)
def main():
    """Build both macOS architectures and emit per-arch dispatch headers."""
    move_source_tree('src', 'osx/src', 'osx/include')
    move_source_tree('include', None, 'osx/include')
    build_target(desktop_platform_32)
    build_target(desktop_platform_64)
    # For each header generated per-arch, write a dispatch header that
    # includes the right arch-suffixed variant.  (Python-2 iteritems().)
    for header_name, archs in headers_seen.iteritems():
        basename, suffix = os.path.splitext(header_name)
        with open(os.path.join('osx/include', header_name), 'w') as header:
            for arch in archs:
                header.write('#include <%s_%s%s>\n' % (basename, arch, suffix))
# Script entry point.
if __name__ == '__main__':
    main()
| [
"subprocess.check_output",
"os.path.exists",
"os.makedirs",
"re.compile",
"subprocess.check_call",
"subprocess.Popen",
"os.path.splitext",
"os.path.join",
"os.getcwd",
"os.chdir",
"collections.defaultdict",
"os.walk",
"os.path.relpath"
] | [((147, 183), 're.compile', 're.compile', (['""".*-sdk ([a-zA-Z0-9.]*)"""'], {}), "('.*-sdk ([a-zA-Z0-9.]*)')\n", (157, 183), False, 'import re\n'), ((1997, 2025), 'collections.defaultdict', 'collections.defaultdict', (['set'], {}), '(set)\n', (2020, 2025), False, 'import collections\n'), ((2152, 2186), 'os.walk', 'os.walk', (['src_dir'], {'followlinks': '(True)'}), '(src_dir, followlinks=True)\n', (2159, 2186), False, 'import os\n'), ((237, 327), 'subprocess.Popen', 'subprocess.Popen', (["['xcodebuild', '-sdk', sdkname, '-version']"], {'stdout': 'subprocess.PIPE'}), "(['xcodebuild', '-sdk', sdkname, '-version'], stdout=\n subprocess.PIPE)\n", (253, 327), False, 'import subprocess\n'), ((556, 625), 'subprocess.Popen', 'subprocess.Popen', (["['xcodebuild', '-showsdks']"], {'stdout': 'subprocess.PIPE'}), "(['xcodebuild', '-showsdks'], stdout=subprocess.PIPE)\n", (572, 625), False, 'import subprocess\n'), ((1443, 1466), 'os.path.exists', 'os.path.exists', (['dst_dir'], {}), '(dst_dir)\n', (1457, 1466), False, 'import os\n'), ((1476, 1496), 'os.makedirs', 'os.makedirs', (['dst_dir'], {}), '(dst_dir)\n', (1487, 1496), False, 'import os\n'), ((1572, 1598), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1588, 1598), False, 'import os\n'), ((2206, 2236), 'os.path.relpath', 'os.path.relpath', (['root', 'src_dir'], {}), '(root, src_dir)\n', (2221, 2236), False, 'import os\n'), ((3637, 3662), 'os.path.exists', 'os.path.exists', (['build_dir'], {}), '(build_dir)\n', (3651, 3662), False, 'import os\n'), ((3676, 3698), 'os.makedirs', 'os.makedirs', (['build_dir'], {}), '(build_dir)\n', (3687, 3698), False, 'import os\n'), ((3913, 3924), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3922, 3924), False, 'import os\n'), ((4944, 4973), 'os.path.splitext', 'os.path.splitext', (['header_name'], {}), '(header_name)\n', (4960, 4973), False, 'import os\n'), ((1694, 1725), 'os.path.join', 'os.path.join', (['src_dir', 'filename'], {}), '(src_dir, 
filename)\n', (1706, 1725), False, 'import os\n'), ((3950, 3969), 'os.chdir', 'os.chdir', (['build_dir'], {}), '(build_dir)\n', (3958, 3969), False, 'import os\n'), ((3982, 4056), 'subprocess.check_call', 'subprocess.check_call', (["['../configure', '-host', platform.triple]"], {'env': 'env'}), "(['../configure', '-host', platform.triple], env=env)\n", (4003, 4056), False, 'import subprocess\n'), ((4520, 4541), 'os.chdir', 'os.chdir', (['working_dir'], {}), '(working_dir)\n', (4528, 4541), False, 'import os\n'), ((4634, 4663), 'os.path.splitext', 'os.path.splitext', (['header_name'], {}), '(header_name)\n', (4650, 4663), False, 'import os\n'), ((1757, 1792), 'os.path.join', 'os.path.join', (['dst_dir', 'out_filename'], {}), '(dst_dir, out_filename)\n', (1769, 1792), False, 'import os\n'), ((3501, 3575), 'subprocess.check_output', 'subprocess.check_output', (["['xcrun', '-sdk', platform.sdkroot, '-find', cmd]"], {}), "(['xcrun', '-sdk', platform.sdkroot, '-find', cmd])\n", (3524, 3575), False, 'import subprocess\n'), ((4992, 5032), 'os.path.join', 'os.path.join', (['"""osx/include"""', 'header_name'], {}), "('osx/include', header_name)\n", (5004, 5032), False, 'import os\n'), ((2739, 2770), 'os.path.join', 'os.path.join', (['dest_dir', 'relroot'], {}), '(dest_dir, relroot)\n', (2751, 2770), False, 'import os\n')] |
import pandas as pd
import dash
from dash.dependencies import Input, Output, State
import plotly.express as px
import dash_html_components as html
import dash_core_components as dcc
import dash_bootstrap_components as dbc
# self packages
from .data_generator import load_transactions, comparisons_df
from .nav_bar import nav_bar_template
# Click history shared across callbacks; grows for the lifetime of the app.
all_click = []
transactions = load_transactions()
filter_options = {}
# Columns exposed as dropdown filters in the UI.
cols_for_filter = ['region', 'street', 'propertyType', 'tenure', 'project']
for col in cols_for_filter:
    col_options = []
    unique_values = transactions[col].unique()
    # 'All' sentinel disables filtering on this column (see make_map).
    col_options.append({'label': 'All', 'value': 'All'})
    for val in unique_values:
        col_options.append({'label': val, 'value': val})
    filter_options[col] = col_options
# Takes in preprocessed click data from the maps and returns the rendered
# HTML comparison table; with default=True an empty (invisible) table is
# returned so the layout placeholder exists before any clicks happen.
def render_table(click_data = None, default=False, max_rows=26):
    if default:
        return html.Table(
            id = 'comparison_table'
        )
    df = comparisons_df()
    # Only the four most recent clicks are compared.
    click_data = click_data[-4:]
    for data in click_data:
        df = df.append(data)
    # Nested-list index labels the rows Property 0..N for the transpose below.
    df.index = [[f'Property {i}' for i in range(len(df))]]
    rownames = df.columns
    # Transpose so each clicked property becomes a column and the original
    # column names become an 'info' row-label column.
    df = df.T
    df['info'] = rownames
    # Property 0 is the placeholder row from comparisons_df(); drop it.
    df.drop(columns=['Property 0'], inplace=True)
    # Move the 'info' column to the front.
    columns = list(df.columns)
    columns = columns[-1:] + columns[:-1]
    df = df[columns]
    return html.Table(
        # Header
        [html.Tr([html.Th(col) for col in df.columns]) ] +
        # Body
        [html.Tr([
            html.Td(df.iloc[i][col]) for col in df.columns
        ]) for i in range(min(len(df), max_rows))],
        id = 'comparison_table',
        className = 'table table-bordered active'
    )
)
def render_suggestion(click_data = None, default=False, max_rows=26):
    """Render a small table of up to two other transactions on the same street
    as the most recently clicked property (default=True gives an empty
    placeholder table)."""
    if default:
        return html.Table(
            id = 'suggestion_table'
        )
    columns_to_show = ['region', 'street', 'area', 'propertyType', 'nettPrice']
    transactions = load_transactions()
    transactions = transactions[columns_to_show]
    # Only the latest click drives the suggestions.
    click_data = click_data[-1]
    street = click_data['street'].item()
    project = click_data['project'].item()
    price = click_data['price'].item()
    # Same street, different (non-zero) price; cap at two suggestions.
    df = transactions.loc[(transactions['street'] == street) & (transactions['nettPrice'] != price) & (transactions['nettPrice'] != 0.0)].head(2)
    if df.empty:
        # NOTE(review): 'Sugeested' is a typo in a user-facing heading.
        return [
            html.H3(f'Sugeested properties in the same street: {street}'),
            html.H5('There are no available properties in the same area')
        ]
    # Same transpose trick as render_table: properties become columns.
    df.index = [[f'Property {i}' for i in range(len(df))]]
    rownames = df.columns
    df = df.T
    df['info'] = rownames
    columns = list(df.columns)
    columns = columns[-1:] + columns[:-1]
    df = df[columns]
    return [
        html.H3(f'Sugeested properties in the same street: {street}'),
        html.Table(
            # Header
            [html.Tr([html.Th(col) for col in df.columns]) ] +
            # Body
            [html.Tr([
                html.Td(df.iloc[i][col]) for col in df.columns
            ]) for i in range(min(len(df), max_rows))],
            id = 'suggestion_table',
            className = 'table table-bordered active'
        )]
# inits transactions dash app that links to flask app
def init_transactions(server):
dashApp = dash.Dash(
server=server,
routes_pathname_prefix='/transactions/',
external_stylesheets=[dbc.themes.BOOTSTRAP]
)
dashApp.index_string = nav_bar_template
# Create Layout
dashApp.layout = html.Div([
html.H1('Transactions Dashboard', style={'text-align': 'center'}),
html.Div([
html.H2(children='Transactions Map', style = {'text-align': 'left'}),
html.Div(children=''' map to visualize transactions '''),
html.Br(),
html.Div([
dcc.Dropdown(
id='regionDropdown',
options = filter_options['region'],
value = 'All',
placeholder='Select a regiion'
),
dcc.Dropdown(
id='streetDropdown',
options = filter_options['street'],
value = 'All',
placeholder='Select a street'
),
dcc.Dropdown(
id='propertyTypeDropdown',
options = filter_options['propertyType'],
value = 'All',
placeholder='Select a property type'
),
dcc.Dropdown(
id='tenureDropdown',
options = filter_options['tenure'],
value = 'All',
placeholder='Select length of tenure'
),
dcc.Dropdown(
id='projectDropdown',
options = filter_options['project'],
value = 'All',
placeholder='Select a project'
),
html.Button('Search', id='search-filter')
]),
html.Br(),
dcc.Loading(
id = 'loading-map',
type = 'default',
children = dcc.Graph(id='transactions_map', figure={})
)
]),
html.Div(
children = render_table(default=True),
id = 'comparison_table_div',
),
html.Div(
children = render_suggestion(default=True),
id = 'suggestion_table_div',
),
html.Div(id='hidden-container')
], className = 'container')
@dashApp.callback(
Output(component_id='transactions_map', component_property='figure'),
[
Input(component_id='search-filter', component_property='n_clicks'),
Input(component_id='transactions_map', component_property='figure'),
],
[
State(component_id='regionDropdown', component_property='value'),
State(component_id='streetDropdown', component_property='value'),
State(component_id='propertyTypeDropdown', component_property='value'),
State(component_id='tenureDropdown', component_property='value'),
State(component_id='projectDropdown', component_property='value'),
]
)
# debug here, add additional arguements to function
def make_map(n_clicks, figure, region, street, propertyType, tenure, project):
transactions = load_transactions()
filters = [region, street, propertyType, tenure, project]
for i in range(len(cols_for_filter)):
if filters[i] != 'All':
transactions = transactions[transactions[cols_for_filter[i]] == filters[i]]
if transactions.empty:
fig = px.scatter_mapbox(lat=['1.3521'], lon=['103.8198'])
else:
fig = px.scatter_mapbox(transactions, lat='x', lon='y', hover_name='project', hover_data=['price', 'region', 'street', 'area'],
custom_data=['noOfUnits', 'propertyType', 'floorRange', 'project', 'tenure'], zoom=12, height=450)
fig.update_layout(mapbox_style='open-street-map')
fig.update_layout(clickmode='event+select')
fig.update_layout(margin={'r':0,'t':0,'l':0,'b':0})
return fig
@dashApp.callback([
Output(component_id='comparison_table_div', component_property='children'),
Output(component_id='suggestion_table_div', component_property='children'),
],
Input(component_id='transactions_map', component_property='clickData'),
)
def display_click_data(click):
if click is None:
return render_table(default=True)
# preprocess the click data from maps
points = click['points'][0]
customdata = points['customdata']
data = {
'x': points['lat'],
'y': points['lon'],
'noOfUnits': customdata[0],
'propertyType': customdata[1],
'floorRange': customdata[2],
'project': customdata[3],
'tenure': customdata[4],
'price': customdata[5],
'region': customdata[6],
'street': customdata[7],
'area': customdata[8]
}
data = {k: [str(v)] for k, v in data.items()}
data = pd.DataFrame.from_dict(data)
all_click.append(data)
return render_table(click_data=all_click), render_suggestion(click_data=all_click)
return dashApp.server
| [
"dash_html_components.Table",
"dash_html_components.Button",
"dash.dependencies.Output",
"dash_html_components.H3",
"dash_html_components.Br",
"dash_html_components.H5",
"pandas.DataFrame.from_dict",
"dash.dependencies.Input",
"dash_html_components.Td",
"dash_html_components.Th",
"dash_html_comp... | [((3326, 3440), 'dash.Dash', 'dash.Dash', ([], {'server': 'server', 'routes_pathname_prefix': '"""/transactions/"""', 'external_stylesheets': '[dbc.themes.BOOTSTRAP]'}), "(server=server, routes_pathname_prefix='/transactions/',\n external_stylesheets=[dbc.themes.BOOTSTRAP])\n", (3335, 3440), False, 'import dash\n'), ((990, 1023), 'dash_html_components.Table', 'html.Table', ([], {'id': '"""comparison_table"""'}), "(id='comparison_table')\n", (1000, 1023), True, 'import dash_html_components as html\n'), ((1868, 1901), 'dash_html_components.Table', 'html.Table', ([], {'id': '"""suggestion_table"""'}), "(id='suggestion_table')\n", (1878, 1901), True, 'import dash_html_components as html\n'), ((2832, 2893), 'dash_html_components.H3', 'html.H3', (['f"""Sugeested properties in the same street: {street}"""'], {}), "(f'Sugeested properties in the same street: {street}')\n", (2839, 2893), True, 'import dash_html_components as html\n'), ((5684, 5752), 'dash.dependencies.Output', 'Output', ([], {'component_id': '"""transactions_map"""', 'component_property': '"""figure"""'}), "(component_id='transactions_map', component_property='figure')\n", (5690, 5752), False, 'from dash.dependencies import Input, Output, State\n'), ((8434, 8462), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data'], {}), '(data)\n', (8456, 8462), True, 'import pandas as pd\n'), ((7566, 7636), 'dash.dependencies.Input', 'Input', ([], {'component_id': '"""transactions_map"""', 'component_property': '"""clickData"""'}), "(component_id='transactions_map', component_property='clickData')\n", (7571, 7636), False, 'from dash.dependencies import Input, Output, State\n'), ((2441, 2502), 'dash_html_components.H3', 'html.H3', (['f"""Sugeested properties in the same street: {street}"""'], {}), "(f'Sugeested properties in the same street: {street}')\n", (2448, 2502), True, 'import dash_html_components as html\n'), ((2516, 2577), 'dash_html_components.H5', 'html.H5', (['"""There are no 
available properties in the same area"""'], {}), "('There are no available properties in the same area')\n", (2523, 2577), True, 'import dash_html_components as html\n'), ((3573, 3638), 'dash_html_components.H1', 'html.H1', (['"""Transactions Dashboard"""'], {'style': "{'text-align': 'center'}"}), "('Transactions Dashboard', style={'text-align': 'center'})\n", (3580, 3638), True, 'import dash_html_components as html\n'), ((5587, 5618), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""hidden-container"""'}), "(id='hidden-container')\n", (5595, 5618), True, 'import dash_html_components as html\n'), ((6828, 6879), 'plotly.express.scatter_mapbox', 'px.scatter_mapbox', ([], {'lat': "['1.3521']", 'lon': "['103.8198']"}), "(lat=['1.3521'], lon=['103.8198'])\n", (6845, 6879), True, 'import plotly.express as px\n'), ((6912, 7146), 'plotly.express.scatter_mapbox', 'px.scatter_mapbox', (['transactions'], {'lat': '"""x"""', 'lon': '"""y"""', 'hover_name': '"""project"""', 'hover_data': "['price', 'region', 'street', 'area']", 'custom_data': "['noOfUnits', 'propertyType', 'floorRange', 'project', 'tenure']", 'zoom': '(12)', 'height': '(450)'}), "(transactions, lat='x', lon='y', hover_name='project',\n hover_data=['price', 'region', 'street', 'area'], custom_data=[\n 'noOfUnits', 'propertyType', 'floorRange', 'project', 'tenure'], zoom=\n 12, height=450)\n", (6929, 7146), True, 'import plotly.express as px\n'), ((5776, 5842), 'dash.dependencies.Input', 'Input', ([], {'component_id': '"""search-filter"""', 'component_property': '"""n_clicks"""'}), "(component_id='search-filter', component_property='n_clicks')\n", (5781, 5842), False, 'from dash.dependencies import Input, Output, State\n'), ((5856, 5923), 'dash.dependencies.Input', 'Input', ([], {'component_id': '"""transactions_map"""', 'component_property': '"""figure"""'}), "(component_id='transactions_map', component_property='figure')\n", (5861, 5923), False, 'from dash.dependencies import Input, Output, State\n'), 
((5958, 6022), 'dash.dependencies.State', 'State', ([], {'component_id': '"""regionDropdown"""', 'component_property': '"""value"""'}), "(component_id='regionDropdown', component_property='value')\n", (5963, 6022), False, 'from dash.dependencies import Input, Output, State\n'), ((6036, 6100), 'dash.dependencies.State', 'State', ([], {'component_id': '"""streetDropdown"""', 'component_property': '"""value"""'}), "(component_id='streetDropdown', component_property='value')\n", (6041, 6100), False, 'from dash.dependencies import Input, Output, State\n'), ((6114, 6184), 'dash.dependencies.State', 'State', ([], {'component_id': '"""propertyTypeDropdown"""', 'component_property': '"""value"""'}), "(component_id='propertyTypeDropdown', component_property='value')\n", (6119, 6184), False, 'from dash.dependencies import Input, Output, State\n'), ((6198, 6262), 'dash.dependencies.State', 'State', ([], {'component_id': '"""tenureDropdown"""', 'component_property': '"""value"""'}), "(component_id='tenureDropdown', component_property='value')\n", (6203, 6262), False, 'from dash.dependencies import Input, Output, State\n'), ((6276, 6341), 'dash.dependencies.State', 'State', ([], {'component_id': '"""projectDropdown"""', 'component_property': '"""value"""'}), "(component_id='projectDropdown', component_property='value')\n", (6281, 6341), False, 'from dash.dependencies import Input, Output, State\n'), ((7391, 7465), 'dash.dependencies.Output', 'Output', ([], {'component_id': '"""comparison_table_div"""', 'component_property': '"""children"""'}), "(component_id='comparison_table_div', component_property='children')\n", (7397, 7465), False, 'from dash.dependencies import Input, Output, State\n'), ((7475, 7549), 'dash.dependencies.Output', 'Output', ([], {'component_id': '"""suggestion_table_div"""', 'component_property': '"""children"""'}), "(component_id='suggestion_table_div', component_property='children')\n", (7481, 7549), False, 'from dash.dependencies import Input, Output, 
State\n'), ((3671, 3737), 'dash_html_components.H2', 'html.H2', ([], {'children': '"""Transactions Map"""', 'style': "{'text-align': 'left'}"}), "(children='Transactions Map', style={'text-align': 'left'})\n", (3678, 3737), True, 'import dash_html_components as html\n'), ((3753, 3805), 'dash_html_components.Div', 'html.Div', ([], {'children': '""" map to visualize transactions """'}), "(children=' map to visualize transactions ')\n", (3761, 3805), True, 'import dash_html_components as html\n'), ((3823, 3832), 'dash_html_components.Br', 'html.Br', ([], {}), '()\n', (3830, 3832), True, 'import dash_html_components as html\n'), ((5129, 5138), 'dash_html_components.Br', 'html.Br', ([], {}), '()\n', (5136, 5138), True, 'import dash_html_components as html\n'), ((1491, 1503), 'dash_html_components.Th', 'html.Th', (['col'], {}), '(col)\n', (1498, 1503), True, 'import dash_html_components as html\n'), ((1578, 1602), 'dash_html_components.Td', 'html.Td', (['df.iloc[i][col]'], {}), '(df.iloc[i][col])\n', (1585, 1602), True, 'import dash_html_components as html\n'), ((2950, 2962), 'dash_html_components.Th', 'html.Th', (['col'], {}), '(col)\n', (2957, 2962), True, 'import dash_html_components as html\n'), ((3037, 3061), 'dash_html_components.Td', 'html.Td', (['df.iloc[i][col]'], {}), '(df.iloc[i][col])\n', (3044, 3061), True, 'import dash_html_components as html\n'), ((3873, 3990), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""regionDropdown"""', 'options': "filter_options['region']", 'value': '"""All"""', 'placeholder': '"""Select a regiion"""'}), "(id='regionDropdown', options=filter_options['region'], value=\n 'All', placeholder='Select a regiion')\n", (3885, 3990), True, 'import dash_core_components as dcc\n'), ((4105, 4221), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""streetDropdown"""', 'options': "filter_options['street']", 'value': '"""All"""', 'placeholder': '"""Select a street"""'}), "(id='streetDropdown', 
options=filter_options['street'], value=\n 'All', placeholder='Select a street')\n", (4117, 4221), True, 'import dash_core_components as dcc\n'), ((4336, 4471), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""propertyTypeDropdown"""', 'options': "filter_options['propertyType']", 'value': '"""All"""', 'placeholder': '"""Select a property type"""'}), "(id='propertyTypeDropdown', options=filter_options[\n 'propertyType'], value='All', placeholder='Select a property type')\n", (4348, 4471), True, 'import dash_core_components as dcc\n'), ((4586, 4710), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""tenureDropdown"""', 'options': "filter_options['tenure']", 'value': '"""All"""', 'placeholder': '"""Select length of tenure"""'}), "(id='tenureDropdown', options=filter_options['tenure'], value=\n 'All', placeholder='Select length of tenure')\n", (4598, 4710), True, 'import dash_core_components as dcc\n'), ((4825, 4944), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""projectDropdown"""', 'options': "filter_options['project']", 'value': '"""All"""', 'placeholder': '"""Select a project"""'}), "(id='projectDropdown', options=filter_options['project'], value\n ='All', placeholder='Select a project')\n", (4837, 4944), True, 'import dash_core_components as dcc\n'), ((5059, 5100), 'dash_html_components.Button', 'html.Button', (['"""Search"""'], {'id': '"""search-filter"""'}), "('Search', id='search-filter')\n", (5070, 5100), True, 'import dash_html_components as html\n'), ((5262, 5305), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""transactions_map"""', 'figure': '{}'}), "(id='transactions_map', figure={})\n", (5271, 5305), True, 'import dash_core_components as dcc\n')] |
import os
import uuid
import numpy as np
from tqdm import tqdm
import pickle
from utils.config import opt
from .voc_eval import voc_eval
devkit_path = opt.voc_data_dir[:-8]
year = opt.year
def do_python_eval(classes, image_set, output_dir='output'):
    """Run PASCAL VOC detection evaluation over per-class result files.

    classes: iterable of class names ('__background__' is skipped).
    image_set: VOC image-set name (e.g. 'test') used to locate result files.
    output_dir: directory where per-class precision/recall pickles are written.

    Prints per-class AP and the mean AP; returns None.
    """
    # Annotation XML path template; '{}' is filled with the image id by voc_eval.
    annopath = os.path.join(
        devkit_path,
        'VOC' + year,
        'Annotations',
        '{}.xml')
    imagesetfile = os.path.join(
        devkit_path,
        'VOC' + year,
        'ImageSets',
        'Main',
        image_set + '.txt')
    # voc_eval caches parsed annotations here so they are read only once.
    cachedir = os.path.join(devkit_path, 'annotations_cache')
    aps = []
    # The PASCAL VOC metric changed in 2010: older sets use the 11-point AP.
    use_07_metric = True if int(year) < 2010 else False
    print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No'))
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    for i, cls in enumerate(classes):
        if cls == '__background__':
            continue
        filename = get_voc_results_file_template(image_set).format(cls)
        # IoU threshold fixed at the standard VOC value of 0.5.
        rec, prec, ap = voc_eval(
            filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5,
            use_07_metric=use_07_metric)
        aps += [ap]
        print('AP for {} = {:.4f}'.format(cls, ap))
        # Persist the per-class precision/recall curve for later plotting.
        with open(os.path.join(output_dir, cls + '_pr.pkl'), 'wb') as f:
            pickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
    print('Mean AP = {:.4f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('Results:')
    for ap in aps:
        print('{:.3f}'.format(ap))
    print('{:.3f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('')
    print('--------------------------------------------------------------')
    print('Results computed with the **unofficial** Python eval code.')
    print('Results should be very close to the official MATLAB eval code.')
    print('Recompute with `./tools/reval.py --matlab ...` for your paper.')
    print('-- Thanks, The Management')
    print('--------------------------------------------------------------')
def get_comp_id():
    """Return the competition id used to prefix VOC results file names.

    Always 'comp4'.  The original computed an unused uuid salt for an
    optional salted id that was commented out; that dead code is removed.
    """
    return 'comp4'
def get_voc_results_file_template(image_set):
    """Return the per-class results path template for `image_set`.

    The returned path contains a '{:s}' placeholder for the class name,
    e.g. VOCdevkit/results/VOC2007/Main/<comp_id>_det_test_aeroplane.txt.
    Creates the results directory on first use.
    """
    results_dir = os.path.join(devkit_path, 'results', 'VOC' + year, 'Main')
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    template_name = '{}_det_{}_{{:s}}.txt'.format(get_comp_id(), image_set)
    return os.path.join(results_dir, template_name)
def write_voc_results_file(image_index, classes, all_boxes, image_set):
    """Write one VOC-format detection results file per class.

    all_boxes is indexed as all_boxes[class_index][image_index] and each entry
    is an (N, 5) array of [x1, y1, x2, y2, score] rows (or empty).  One line
    is written per detection: "<image_id> <score> <x1> <y1> <x2> <y2>".
    """
    for cls_ind, cls in enumerate(classes):
        if cls == '__background__':
            continue
        filename = get_voc_results_file_template(image_set).format(cls)
        print('Writing {} VOC results file: {}'.format(cls, filename))
        with open(filename, 'wt') as f:
            for im_ind, index in enumerate(image_index):
                dets = all_boxes[cls_ind][im_ind]
                # BUG FIX: `dets == []` compares element-wise when dets is a
                # numpy array (its truth value is ambiguous and raises);
                # len() correctly detects emptiness for both lists and arrays.
                if len(dets) == 0:
                    continue
                # the VOCdevkit expects 1-based indices
                for k in range(dets.shape[0]):
                    f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
                            format(index, dets[k, -1],
                                   dets[k, 0] + 1, dets[k, 1] + 1,
                                   dets[k, 2] + 1, dets[k, 3] + 1))
def evaluate_detections(image_index, classes, all_boxes, output_dir, image_set):
    """Write VOC-format result files for `all_boxes`, then evaluate them.

    all_boxes is indexed as all_boxes[class_index][image_index]; see
    write_voc_results_file and do_python_eval for details.
    """
    write_voc_results_file(image_index, classes, all_boxes, image_set)
    do_python_eval(classes, image_set, output_dir)
| [
"os.path.exists",
"numpy.mean",
"pickle.dump",
"os.makedirs",
"os.path.join",
"uuid.uuid4",
"os.path.isdir",
"os.mkdir"
] | [((269, 333), 'os.path.join', 'os.path.join', (['devkit_path', "('VOC' + year)", '"""Annotations"""', '"""{}.xml"""'], {}), "(devkit_path, 'VOC' + year, 'Annotations', '{}.xml')\n", (281, 333), False, 'import os\n'), ((386, 471), 'os.path.join', 'os.path.join', (['devkit_path', "('VOC' + year)", '"""ImageSets"""', '"""Main"""', "(image_set + '.txt')"], {}), "(devkit_path, 'VOC' + year, 'ImageSets', 'Main', image_set + '.txt'\n )\n", (398, 471), False, 'import os\n'), ((523, 569), 'os.path.join', 'os.path.join', (['devkit_path', '"""annotations_cache"""'], {}), "(devkit_path, 'annotations_cache')\n", (535, 569), False, 'import os\n'), ((2330, 2388), 'os.path.join', 'os.path.join', (['devkit_path', '"""results"""', "('VOC' + year)", '"""Main"""'], {}), "(devkit_path, 'results', 'VOC' + year, 'Main')\n", (2342, 2388), False, 'import os\n'), ((2465, 2496), 'os.path.join', 'os.path.join', (['filedir', 'filename'], {}), '(filedir, filename)\n', (2477, 2496), False, 'import os\n'), ((759, 784), 'os.path.isdir', 'os.path.isdir', (['output_dir'], {}), '(output_dir)\n', (772, 784), False, 'import os\n'), ((794, 814), 'os.mkdir', 'os.mkdir', (['output_dir'], {}), '(output_dir)\n', (802, 814), False, 'import os\n'), ((2019, 2031), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2029, 2031), False, 'import uuid\n'), ((2400, 2423), 'os.path.exists', 'os.path.exists', (['filedir'], {}), '(filedir)\n', (2414, 2423), False, 'import os\n'), ((2433, 2453), 'os.makedirs', 'os.makedirs', (['filedir'], {}), '(filedir)\n', (2444, 2453), False, 'import os\n'), ((1289, 1341), 'pickle.dump', 'pickle.dump', (["{'rec': rec, 'prec': prec, 'ap': ap}", 'f'], {}), "({'rec': rec, 'prec': prec, 'ap': ap}, f)\n", (1300, 1341), False, 'import pickle\n'), ((1378, 1390), 'numpy.mean', 'np.mean', (['aps'], {}), '(aps)\n', (1385, 1390), True, 'import numpy as np\n'), ((1517, 1529), 'numpy.mean', 'np.mean', (['aps'], {}), '(aps)\n', (1524, 1529), True, 'import numpy as np\n'), ((1222, 1263), 
'os.path.join', 'os.path.join', (['output_dir', "(cls + '_pr.pkl')"], {}), "(output_dir, cls + '_pr.pkl')\n", (1234, 1263), False, 'import os\n')] |
"""Indicators to monitor"""
import click
from configparser import NoOptionError
from query import Query
from config import CONFIG
class BaseIndicator(object):
    """Base class for an InfluxDB-backed monitoring indicator.

    Subclasses override the class attributes below to describe which metric
    is queried and how its value is compared against the configured
    threshold when checking for alerts.
    """
    client = Query()
    # name of the metric in influxdb
    name = 'base_indicator'
    # unit (displays in alerts)
    unit = ''
    # alert when value gt (>) than threshold or lt (<)
    comparison = 'gt'
    # timeframe to compute mean of indicator values on
    timeframe = '10m'
    # some filters to pass to the influx db query (where clause)
    filters = None
    # divide the raw value from influx by this (eg convert bytes to Mb, Gb...)
    divider = 1

    def __init__(self):
        try:
            self.threshold = float(CONFIG.get('thresholds', self.name))
        except NoOptionError:
            raise click.ClickException('No threshold configured for indicator %s' % self.name)

    def get_value(self, host):
        """Get the value from influx for this indicator, or None if no data."""
        value = self.client.query_last_mean(
            self.name,
            host,
            timeframe=self.timeframe,
            filters=self.filters
        )
        # BUG FIX: the original `if value:` silently dropped legitimate 0
        # readings; only a missing reading (None) should yield None.
        if value is not None:
            return value / self.divider
        return None

    def is_alert(self, host, value=None):
        """Is this indicator in alert state?"""
        # BUG FIX: `if not value` re-queried influx when a value of 0 was
        # explicitly passed in; only fetch when no value was supplied.
        if value is None:
            value = self.get_value(host)
        # BUG FIX: comparing None against a float threshold raises TypeError
        # on Python 3; treat "no data" as "not alerting".
        if value is None:
            return False
        if self.comparison == 'gt' and value > self.threshold:
            return True
        elif self.comparison == 'lt' and value < self.threshold:
            return True
        else:
            return False
class LoadIndicator(BaseIndicator):
    """Long-term system load; alerts when load rises above the threshold."""
    # NOTE(review): all values below repeat the BaseIndicator defaults; they
    # are kept explicit so each indicator's configuration is self-documenting.
    name = 'load_longterm'
    unit = ''
    comparison = 'gt'
    timeframe = '10m'
    filters = None
    divider = 1
class FreeRAMIndicator(BaseIndicator):
    """Free memory in megabytes; alerts when it drops below the threshold."""
    name = 'memory_value'
    unit = 'Mb'
    comparison = 'lt'
    timeframe = '10m'
    # Only look at the 'free' memory series.
    filters = {
        'type_instance': 'free'
    }
    # The raw metric is divided by 1e6 (bytes -> megabytes).
    divider = 1000000
class FreeDiskIndicator(BaseIndicator):
    """Free disk space in megabytes; alerts when it drops below the threshold."""
    name = 'df_value'
    unit = 'Mb'
    comparison = 'lt'
    timeframe = '10m'
    # Only look at the 'free' disk-space series.
    filters = {
        'type_instance': 'free'
    }
    # The raw metric is divided by 1e6 (bytes -> megabytes).
    divider = 1000000
| [
"click.ClickException",
"config.CONFIG.get",
"query.Query"
] | [((177, 184), 'query.Query', 'Query', ([], {}), '()\n', (182, 184), False, 'from query import Query\n'), ((702, 737), 'config.CONFIG.get', 'CONFIG.get', (['"""thresholds"""', 'self.name'], {}), "('thresholds', self.name)\n", (712, 737), False, 'from config import CONFIG\n'), ((787, 863), 'click.ClickException', 'click.ClickException', (["('No threshold configured for indicator %s' % self.name)"], {}), "('No threshold configured for indicator %s' % self.name)\n", (807, 863), False, 'import click\n')] |
from setuptools import setup
import glob
# Packaging metadata for the C. elegans liftover utility; installs a
# `liftover` console script pointing at liftover.liftover:main.
setup(name='liftover.py',
      version='0.1',
      packages=['liftover'],
      description='C. elegans liftover utility',
      url='https://github.com/AndersenLab/liftover-utils',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      entry_points="""
      [console_scripts]
      liftover = liftover.liftover:main
      """,
      install_requires=["docopt"],
      # Ship the bundled chromosome-difference sequence files plus the Perl
      # remapping helper alongside the package.
      data_files=[('CHROMOSOME_DIFFERENCES', glob.glob("data/CHROMOSOME_DIFFERENCES/sequence*")),
                  'remap_gff_between_releases.pl'],
      zip_safe=False)
"glob.glob"
] | [((478, 528), 'glob.glob', 'glob.glob', (['"""data/CHROMOSOME_DIFFERENCES/sequence*"""'], {}), "('data/CHROMOSOME_DIFFERENCES/sequence*')\n", (487, 528), False, 'import glob\n')] |
# Copyright 2018 U.C. Berkeley RISE Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cloudpickle as cp
import pyarrow as pa
import codecs
from io import BytesIO
import numpy as np
from .functions_pb2 import *
from . import shared
SER_FORMAT = 'raw_unicode_escape'
class Serializer():
    """Abstract interface for converting values to and from wire format.

    Concrete serializers implement the four hooks below; the base class
    itself refuses to be constructed directly.
    """

    def __init__(self):
        raise NotImplementedError('Cannot instantiate abstract class.')

    def _serialize(self, msg):
        """Hook: encode ``msg`` for transport.  No-op placeholder here."""
        pass

    def _deserialize(self, msg):
        """Hook: decode a transported ``msg``.  No-op placeholder here."""
        pass

    def dump(self, msg):
        """Hook: fully serialize ``msg`` to bytes.  No-op placeholder here."""
        pass

    def load(self, msg):
        """Hook: fully deserialize ``msg`` to a value.  No-op placeholder here."""
        pass
class DefaultSerializer(Serializer):
    """Pass-through serializer: _serialize/_deserialize return the message
    unchanged, while dump/load pickle it with cloudpickle."""

    def __init__(self):
        pass

    def _serialize(self, msg):
        # BUG FIX: the original signature was `def _serialize(msg)` (missing
        # `self`), so any instance call `obj._serialize(x)` raised TypeError.
        return msg

    def _deserialize(self, msg):
        return msg

    def dump(self, msg):
        return cp.dumps(msg)

    def load(self, msg):
        return cp.loads(msg)
class StringSerializer(Serializer):
    """Serializer that ships cloudpickled payloads as escaped text strings.

    _serialize/_deserialize convert between bytes and str using the
    module-wide SER_FORMAT codec; dump/load additionally pickle/unpickle.
    """

    def __init__(self):
        pass

    def _serialize(self, msg):
        # bytes -> str via the raw-unicode-escape codec
        return codecs.decode(msg, SER_FORMAT)

    def _deserialize(self, msg):
        # str -> bytes via the raw-unicode-escape codec
        return codecs.encode(msg, SER_FORMAT)

    def dump(self, msg):
        pickled = cp.dumps(msg)
        return self._serialize(pickled)

    def load(self, msg):
        raw = self._deserialize(msg)
        return cp.loads(raw)
# TODO: how can we make serializers pluggable?
class NumpySerializer(DefaultSerializer):
    """Serializer for numpy arrays backed by pyarrow's (de)serialization.

    NOTE(review): pa.serialize/pa.deserialize were deprecated upstream in
    pyarrow 2.0 -- confirm the pinned pyarrow version still provides them.
    """

    def __init__(self):
        pass

    def dump(self, msg):
        # Arrow-serialize, then flatten the buffer to plain bytes for transport.
        return pa.serialize(msg).to_buffer().to_pybytes()

    def load(self, msg):
        return pa.deserialize(msg)
# Module-level singleton serializer instances shared by the helpers below.
numpy_ser = NumpySerializer()
default_ser = DefaultSerializer()
string_ser = StringSerializer()
# Functions are shipped with the default (cloudpickle) serializer.
function_ser = default_ser
def get_serializer(kind):
    """Return the serializer instance for a protobuf serialization `kind`.

    Unknown kinds fall back to the default (cloudpickle) serializer, matching
    the explicit DEFAULT case.
    """
    known = {
        NUMPY: numpy_ser,
        STRING: string_ser,
        DEFAULT: default_ser,
    }
    return known.get(kind, default_ser)
def serialize_val(val, valobj=None, serialize=True):
    """Wrap `val` in a Value protobuf message.

    valobj: optional pre-allocated Value message to fill in place.
    serialize: when False, return the Value message itself instead of its
    serialized bytes.

    FluentFutures are replaced by a FluentReference carrying their object id
    so the receiver resolves them lazily; numpy arrays use the arrow
    serializer (and tag the message type as NUMPY); everything else is
    cloudpickled via the default serializer.
    """
    if not valobj:
        valobj = Value()
    if isinstance(val, shared.FluentFuture):
        # Send a by-reference placeholder instead of the unresolved future.
        valobj.body = default_ser.dump(shared.FluentReference(val.obj_id,
            True, LWW))
    elif isinstance(val, np.ndarray):
        valobj.body = numpy_ser.dump(val)
        valobj.type = NUMPY
    else:
        valobj.body = default_ser.dump(val)
    if not serialize:
        return valobj
    return valobj.SerializeToString()
def deserialize_val(val):
    """Parse serialized Value bytes and decode the body per its type tag.

    NOTE(review): an unrecognized type tag falls through every branch and
    returns None -- confirm that is the intended behavior.
    """
    v = Value()
    v.ParseFromString(val)
    if v.type == DEFAULT:
        return default_ser.load(v.body)
    elif v.type == STRING:
        return string_ser.load(v.body)
    elif v.type == NUMPY:
        return numpy_ser.load(v.body)
| [
"cloudpickle.dumps",
"cloudpickle.loads",
"pyarrow.serialize",
"pyarrow.deserialize",
"codecs.encode",
"codecs.decode"
] | [((1288, 1301), 'cloudpickle.dumps', 'cp.dumps', (['msg'], {}), '(msg)\n', (1296, 1301), True, 'import cloudpickle as cp\n'), ((1343, 1356), 'cloudpickle.loads', 'cp.loads', (['msg'], {}), '(msg)\n', (1351, 1356), True, 'import cloudpickle as cp\n'), ((1479, 1509), 'codecs.decode', 'codecs.decode', (['msg', 'SER_FORMAT'], {}), '(msg, SER_FORMAT)\n', (1492, 1509), False, 'import codecs\n'), ((1559, 1589), 'codecs.encode', 'codecs.encode', (['msg', 'SER_FORMAT'], {}), '(msg, SER_FORMAT)\n', (1572, 1589), False, 'import codecs\n'), ((1989, 2008), 'pyarrow.deserialize', 'pa.deserialize', (['msg'], {}), '(msg)\n', (2003, 2008), True, 'import pyarrow as pa\n'), ((1647, 1660), 'cloudpickle.dumps', 'cp.dumps', (['msg'], {}), '(msg)\n', (1655, 1660), True, 'import cloudpickle as cp\n'), ((1905, 1922), 'pyarrow.serialize', 'pa.serialize', (['msg'], {}), '(msg)\n', (1917, 1922), True, 'import pyarrow as pa\n')] |
from PIL import Image, ImageDraw, ImageFilter
from PIL import ImageFont
from os import listdir
from os.path import isfile, join
# QR code collage: tile the QR images found in "QRcodefloder" onto A4-sized
# pages (72 dpi), labelling each tile with the first 7 characters of its
# file name, and save the pages as QR1.jpg, QR2.jpg, ...
mypath = "QRcodefloder"
# im = Image.open("advanceduse.png")
# NOTE(review): `list` shadows the built-in list type for the whole script.
list = []
# Collect every regular file in the folder (all assumed to be QR images).
onlyfiles = [ f for f in listdir(mypath) if isfile(join(mypath,f)) ]
print(onlyfiles)
for onlyfile in onlyfiles:
    list.append(Image.open("QRcodefloder\\"+onlyfile))
# Tiles are assumed square, all the same size as the first image.
squard = list[0].size[0]
num = 0
# Emit up to 14 pages, or stop early once every image has been placed.
for i in range(1,15):
    a4im = Image.new('RGB',
               (595, 842), # A4 at 72dpi
               (255, 255, 255)) # White
    # Black square pasted behind each QR tile to act as a 1px border.
    image2 = Image.new('RGB', (squard, squard), (0, 0, 0))
    draw = ImageDraw.Draw(a4im)
    font = ImageFont.truetype('arial.ttf', 7)
    width, height = image2.size
    print(list[0].size)
    crop_width, crop_height = a4im.size
    # Walk the page in tile-sized steps; positions are nudged by left//squard
    # and top//squard pixels so adjacent tile borders overlap.
    for left in range(0, crop_width, width):
        for top in range(0, crop_height, height):
            print(num)
            if num == len(list):
                break
            print(left,top)
            # Skip tiles that would overflow the A4 page bounds.
            if left+squard > 595:
                break
            if top+squard > 842:
                break
            # print(left // 165)
            a4im.paste(image2, (left - left // squard, top - top // squard))
            print(left - left // squard, top - top // squard)
            a4im.paste(list[num], (left+1 - left // squard, top+1 - top // squard))
            draw.text((left + 20 - left // squard, top - top // squard), onlyfiles[num][:7], font=font, fill="#000000")
            # print(onlyfiles[num])
            num += 1
    # a4im.paste(im, im.getbbox()) # Not centered, top-left corner
    # NOTE(review): Image.filter returns a new image; this result is
    # discarded, so the saved page is NOT edge-enhanced.
    a4im.filter(ImageFilter.EDGE_ENHANCE)
    a4im.save("QR"+str(i)+".jpg", quality=100)
    a4im.show()
    if num == len(list):
        break
| [
"os.listdir",
"PIL.Image.open",
"PIL.Image.new",
"os.path.join",
"PIL.ImageFont.truetype",
"PIL.ImageDraw.Draw"
] | [((448, 493), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(595, 842)', '(255, 255, 255)'], {}), "('RGB', (595, 842), (255, 255, 255))\n", (457, 493), False, 'from PIL import Image, ImageDraw, ImageFilter\n'), ((573, 618), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(squard, squard)', '(0, 0, 0)'], {}), "('RGB', (squard, squard), (0, 0, 0))\n", (582, 618), False, 'from PIL import Image, ImageDraw, ImageFilter\n'), ((630, 650), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['a4im'], {}), '(a4im)\n', (644, 650), False, 'from PIL import Image, ImageDraw, ImageFilter\n'), ((662, 696), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""arial.ttf"""', '(7)'], {}), "('arial.ttf', 7)\n", (680, 696), False, 'from PIL import ImageFont\n'), ((234, 249), 'os.listdir', 'listdir', (['mypath'], {}), '(mypath)\n', (241, 249), False, 'from os import listdir\n'), ((338, 377), 'PIL.Image.open', 'Image.open', (["('QRcodefloder\\\\' + onlyfile)"], {}), "('QRcodefloder\\\\' + onlyfile)\n", (348, 377), False, 'from PIL import Image, ImageDraw, ImageFilter\n'), ((260, 275), 'os.path.join', 'join', (['mypath', 'f'], {}), '(mypath, f)\n', (264, 275), False, 'from os.path import isfile, join\n')] |
import os
try:
from xdebug.unittesting import XdebugDeferrableTestCase
except:
from SublimeTextXdebug.xdebug.unittesting import XdebugDeferrableTestCase
class TestBreakpointStep(XdebugDeferrableTestCase):
    """End-to-end tests for step_into / step_out / step_over after a breakpoint.

    Each test method is a generator: yielded conditions are polled by
    XdebugDeferrableTestCase until truthy, letting the test wait for the
    debugger views to update between commands.
    """
    # PHP fixture exercised by all three tests; the asserted line numbers
    # (4, 5, 11, 12) refer to lines in this file.
    breakpoint_step_file = 'breakpoint_step.php'
    breakpoint_step_file_local_path = os.path.join(XdebugDeferrableTestCase.local_path, breakpoint_step_file)
    def test_step_into(self):
        """Two step_into commands advance through lines 11 -> 4 -> 5."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 11)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        # Before any request: breakpoint listed, context/stack still empty.
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 11'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)
        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        # Stopped at the breakpoint (line 11) in the {main} frame.
        self.assertViewContains(context_view, '$greeting = <uninitialized>')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:11, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot view contents so the update after stepping is detectable.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)
        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_into'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # First step_into enters greet() at line 4; locals still unset.
        self.assertViewContains(context_view, '$greet = <uninitialized>')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:4, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)
        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_into'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # Second step_into: $greet is now assigned, execution is at line 5.
        self.assertViewContains(context_view, '$greet = (string) Hi')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:5, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
    def test_step_out(self):
        """Stepping out of greet() returns to the {main} frame at line 12."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 5)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        # Before any request: breakpoint listed, context/stack still empty.
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 5'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)
        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        # Stopped inside greet() at line 5 with both locals assigned.
        self.assertViewContains(context_view, '$greet = (string) Hi')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:5, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot view contents so the update after stepping is detectable.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)
        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_out'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # Back in {main} at line 12 with greet()'s return value assigned.
        self.assertViewContains(context_view, '$greeting = (string) Hello Stranger!')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:12, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
    def test_step_over(self):
        """Stepping over the greet() call stays in {main}, moving 11 -> 12."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 11)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        # Before any request: breakpoint listed, context/stack still empty.
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 11'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)
        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        # Stopped at the breakpoint (line 11) in the {main} frame.
        self.assertViewContains(context_view, '$greeting = <uninitialized>')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:11, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot view contents so the update after stepping is detectable.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)
        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_over'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # The greet() call completed without entering it; now at line 12.
        self.assertViewContains(context_view, '$greeting = (string) Hello Stranger!')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:12, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
| [
"os.path.join"
] | [((302, 373), 'os.path.join', 'os.path.join', (['XdebugDeferrableTestCase.local_path', 'breakpoint_step_file'], {}), '(XdebugDeferrableTestCase.local_path, breakpoint_step_file)\n', (314, 373), False, 'import os\n')] |
import unittest
from wework import settings, wechat
class TestSettings(unittest.TestCase):
    """Tests for wework.settings.init configuration validation."""
    def test_init(self):
        """A complete, valid configuration yields a WorkWechatApi client."""
        t = settings.init(
            CROP_ID='a',
            PROVIDER_SECRET='a',
            REGISTER_URL='www.quseit.com/',
            HELPER='wegq.DjangoHelper'
        )
        self.assertTrue(isinstance(t, wechat.WorkWechatApi))
    def test_error(self):
        """Invalid configurations must raise settings.InitError."""
        # Case 1: REGISTER_URL is missing entirely.
        with self.assertRaises(settings.InitError):
            settings.init(
                CROP_ID='a',
                PROVIDER_SECRET='a',
                HELPER='wegq.DjangoHelper'
            )
        # Case 2: differs from test_init only by REGISTER_URL lacking the
        # trailing slash -- presumably init() requires it; confirm in settings.
        with self.assertRaises(settings.InitError):
            settings.init(
                CROP_ID='a',
                PROVIDER_SECRET='a',
                REGISTER_URL='www.quseit.com',
                HELPER='wegq.DjangoHelper'
            )
        # Case 3: HELPER given as a class object instead of a dotted-path string.
        with self.assertRaises(settings.InitError):
            settings.init(
                CROP_ID='a',
                PROVIDER_SECRET='a',
                REGISTER_URL='www.quseit.com',
                HELPER=type('MyHelper', (object, ), {}),
            )
"wework.settings.init"
] | [((130, 242), 'wework.settings.init', 'settings.init', ([], {'CROP_ID': '"""a"""', 'PROVIDER_SECRET': '"""a"""', 'REGISTER_URL': '"""www.quseit.com/"""', 'HELPER': '"""wegq.DjangoHelper"""'}), "(CROP_ID='a', PROVIDER_SECRET='a', REGISTER_URL=\n 'www.quseit.com/', HELPER='wegq.DjangoHelper')\n", (143, 242), False, 'from wework import settings, wechat\n'), ((448, 523), 'wework.settings.init', 'settings.init', ([], {'CROP_ID': '"""a"""', 'PROVIDER_SECRET': '"""a"""', 'HELPER': '"""wegq.DjangoHelper"""'}), "(CROP_ID='a', PROVIDER_SECRET='a', HELPER='wegq.DjangoHelper')\n", (461, 523), False, 'from wework import settings, wechat\n'), ((651, 762), 'wework.settings.init', 'settings.init', ([], {'CROP_ID': '"""a"""', 'PROVIDER_SECRET': '"""a"""', 'REGISTER_URL': '"""www.quseit.com"""', 'HELPER': '"""wegq.DjangoHelper"""'}), "(CROP_ID='a', PROVIDER_SECRET='a', REGISTER_URL=\n 'www.quseit.com', HELPER='wegq.DjangoHelper')\n", (664, 762), False, 'from wework import settings, wechat\n')] |
from distutils.core import setup

# Command-line helpers installed onto the user's PATH.
_SCRIPTS = [
    'bin/now',
    'bin/when',
    'bin/ts',
    'bin/daysago',
    'bin/hoursago',
]

setup(
    name='timetools',
    version='1.0.0',
    description='CL tools for timestamps',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/mchalek/timetools',
    scripts=_SCRIPTS,
)
| [
"distutils.core.setup"
] | [((34, 286), 'distutils.core.setup', 'setup', ([], {'name': '"""timetools"""', 'version': '"""1.0.0"""', 'description': '"""CL tools for timestamps"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/mchalek/timetools"""', 'scripts': "['bin/now', 'bin/when', 'bin/ts', 'bin/daysago', 'bin/hoursago']"}), "(name='timetools', version='1.0.0', description=\n 'CL tools for timestamps', author='<NAME>', author_email='<EMAIL>', url\n ='https://github.com/mchalek/timetools', scripts=['bin/now', 'bin/when',\n 'bin/ts', 'bin/daysago', 'bin/hoursago'])\n", (39, 286), False, 'from distutils.core import setup\n')] |
from unittest import TestCase
from parameterized import parameterized
from tests.test_utils import mock_request_handler
from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login
class WebpackSuffixesTest(TestCase):
    """Checks that remove_webpack_suffixes strips only webpack content hashes."""

    def _assert_normalized(self, raw, expected):
        # Single place for the normalize-then-compare pattern used below.
        self.assertEqual(expected, remove_webpack_suffixes(raw))

    def test_remove_webpack_suffixes_when_css(self):
        self._assert_normalized('js/chunk-login-vendors.59040343.css',
                                'js/chunk-login-vendors.css')

    def test_remove_webpack_suffixes_when_js(self):
        self._assert_normalized('js/login.be16f278.js', 'js/login.js')

    def test_remove_webpack_suffixes_when_js_map(self):
        self._assert_normalized('js/login.be16f278.js.map', 'js/login.js.map')

    def test_remove_webpack_suffixes_when_favicon(self):
        # A favicon's numeric segment is not a webpack hash; it must survive.
        self._assert_normalized('favicon.123.ico', 'favicon.123.ico')

    def test_remove_webpack_suffixes_when_no_suffixes(self):
        self._assert_normalized('css/chunk-login-vendors.css',
                                'css/chunk-login-vendors.css')

    def test_remove_webpack_suffixes_when_no_extension(self):
        self._assert_normalized('data/some_file', 'data/some_file')
class LoginResourcesTest(TestCase):
    """is_allowed_during_login() should whitelist only login-page assets."""

    # NOTE(review): the entries below are parenthesized strings, not 1-tuples;
    # parameterized appears to accept bare strings as single-argument cases —
    # confirm before converting them to tuples.
    @parameterized.expand([
        ('/favicon.ico'),
        ('login.html'),
        ('/js/login.be16f278.js'),
        ('/js/login.be16f278.js.map'),
        ('/js/chunk-login-vendors.18e22e7f.js'),
        ('/js/chunk-login-vendors.18e22e7f.js.map'),
        ('/img/titleBackground_login.a6c36d4c.jpg'),
        ('/css/login.8e74be0f.css'),
        ('/fonts/roboto-latin-400.60fa3c06.woff'),
        ('/fonts/roboto-latin-400.479970ff.woff2'),
        ('/fonts/roboto-latin-500.020c97dc.woff2'),
        ('/fonts/roboto-latin-500.87284894.woff')
    ])
    def test_is_allowed_during_login_when_allowed(self, resource):
        # Every static asset of the login page must be reachable pre-auth.
        request_handler = mock_request_handler(method='GET')
        allowed = is_allowed_during_login(resource, 'login.html', request_handler)
        self.assertTrue(allowed, 'Resource ' + resource + ' should be allowed, but was not')

    def test_is_allowed_during_login_when_prohibited(self):
        # Any page other than the login page is blocked before authentication.
        request_handler = mock_request_handler(method='GET')
        resource = 'admin.html'
        allowed = is_allowed_during_login(resource, 'login.html', request_handler)
        self.assertFalse(allowed, 'Resource ' + resource + ' should NOT be allowed, but WAS')
| [
"web.web_auth_utils.is_allowed_during_login",
"tests.test_utils.mock_request_handler",
"web.web_auth_utils.remove_webpack_suffixes",
"parameterized.parameterized.expand"
] | [((1413, 1865), 'parameterized.parameterized.expand', 'parameterized.expand', (["['/favicon.ico', 'login.html', '/js/login.be16f278.js',\n '/js/login.be16f278.js.map', '/js/chunk-login-vendors.18e22e7f.js',\n '/js/chunk-login-vendors.18e22e7f.js.map',\n '/img/titleBackground_login.a6c36d4c.jpg', '/css/login.8e74be0f.css',\n '/fonts/roboto-latin-400.60fa3c06.woff',\n '/fonts/roboto-latin-400.479970ff.woff2',\n '/fonts/roboto-latin-500.020c97dc.woff2',\n '/fonts/roboto-latin-500.87284894.woff']"], {}), "(['/favicon.ico', 'login.html', '/js/login.be16f278.js',\n '/js/login.be16f278.js.map', '/js/chunk-login-vendors.18e22e7f.js',\n '/js/chunk-login-vendors.18e22e7f.js.map',\n '/img/titleBackground_login.a6c36d4c.jpg', '/css/login.8e74be0f.css',\n '/fonts/roboto-latin-400.60fa3c06.woff',\n '/fonts/roboto-latin-400.479970ff.woff2',\n '/fonts/roboto-latin-500.020c97dc.woff2',\n '/fonts/roboto-latin-500.87284894.woff'])\n", (1433, 1865), False, 'from parameterized import parameterized\n'), ((315, 377), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""js/chunk-login-vendors.59040343.css"""'], {}), "('js/chunk-login-vendors.59040343.css')\n", (338, 377), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((519, 566), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""js/login.be16f278.js"""'], {}), "('js/login.be16f278.js')\n", (542, 566), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((697, 748), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""js/login.be16f278.js.map"""'], {}), "('js/login.be16f278.js.map')\n", (720, 748), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((884, 926), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""favicon.123.ico"""'], {}), "('favicon.123.ico')\n", (907, 926), False, 'from 
web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((1066, 1120), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""css/chunk-login-vendors.css"""'], {}), "('css/chunk-login-vendors.css')\n", (1089, 1120), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((1273, 1314), 'web.web_auth_utils.remove_webpack_suffixes', 'remove_webpack_suffixes', (['"""data/some_file"""'], {}), "('data/some_file')\n", (1296, 1314), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((2057, 2091), 'tests.test_utils.mock_request_handler', 'mock_request_handler', ([], {'method': '"""GET"""'}), "(method='GET')\n", (2077, 2091), False, 'from tests.test_utils import mock_request_handler\n'), ((2111, 2175), 'web.web_auth_utils.is_allowed_during_login', 'is_allowed_during_login', (['resource', '"""login.html"""', 'request_handler'], {}), "(resource, 'login.html', request_handler)\n", (2134, 2175), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n'), ((2356, 2390), 'tests.test_utils.mock_request_handler', 'mock_request_handler', ([], {'method': '"""GET"""'}), "(method='GET')\n", (2376, 2390), False, 'from tests.test_utils import mock_request_handler\n'), ((2442, 2506), 'web.web_auth_utils.is_allowed_during_login', 'is_allowed_during_login', (['resource', '"""login.html"""', 'request_handler'], {}), "(resource, 'login.html', request_handler)\n", (2465, 2506), False, 'from web.web_auth_utils import remove_webpack_suffixes, is_allowed_during_login\n')] |
# Generated by Django 3.0.5 on 2020-07-03 20:33
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the ``content_type`` field from the ``comment`` model.

    Reverses the field added in migration 0019 of the ``home`` app.
    """
    dependencies = [
        ('home', '0019_comment_content_type'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='comment',
            name='content_type',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((226, 291), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""comment"""', 'name': '"""content_type"""'}), "(model_name='comment', name='content_type')\n", (248, 291), False, 'from django.db import migrations\n')] |
from datetime import datetime
class Price:
    """A price quote for a single symbol over one candle interval.

    The class-level values act as defaults; any keyword passed to the
    constructor becomes an instance attribute of the same name, shadowing
    the default. Unknown keywords are accepted as-is.
    """

    # Defaults shared by every instance until overridden per instance.
    date: datetime = datetime(1, 1, 1)
    currency: str = 'BRL'
    symbol: str = ''
    current: float = 0
    open: float = 0
    close: float = 0
    low: float = 0
    high: float = 0
    volume: float = 0
    interval: str = ''

    def __init__(self, **kwargs):
        # Every supplied keyword becomes an instance attribute, shadowing
        # the class-level default of the same name.
        self.__dict__.update(kwargs)
| [
"datetime.datetime"
] | [((66, 83), 'datetime.datetime', 'datetime', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (74, 83), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
"""
pybitcoin
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
import os
import json
import hashlib
import ecdsa
from binascii import hexlify, unhexlify
from ecdsa.keys import SigningKey
from .key_formatting import compress, encode_privkey, get_privkey_format
from .b58check import b58check_encode, b58check_decode
from .public_key import ECPublicKey
from .utils import (
random_secret_exponent, is_secret_exponent, PUBLIC_KEY_MAGIC_BYTE
)
class ECPrivateKey():
    """An ECDSA private key on the secp256k1 curve, as used by Bitcoin.

    Wraps ``ecdsa.keys.SigningKey`` and adds the serialization formats
    common in Bitcoin tooling: raw binary, hex, WIF, PEM and DER, plus a
    lazily derived ``ECPublicKey``.
    """
    # secp256k1 is Bitcoin's curve; SHA-256 is the message-digest function.
    _curve = ecdsa.curves.SECP256k1
    _hash_function = hashlib.sha256
    # 0x00 = Bitcoin main-net pay-to-pubkey-hash version byte.
    _pubkeyhash_version_byte = 0
    def __init__(self, private_key=None, compressed=True):
        """ Takes in a private key/secret exponent.

        :param private_key: key material in any format understood by
            ``encode_privkey`` (hex, WIF, binary, decimal...). When falsy,
            a fresh random secret exponent is generated.
        :param compressed: whether serialized forms use the compressed
            public-key convention.
        :raises IndexError: if the exponent falls outside [1, curve order).
        """
        self._compressed = compressed
        if not private_key:
            secret_exponent = random_secret_exponent(self._curve.order)
        else:
            secret_exponent = encode_privkey(private_key, 'decimal')
        # make sure that: 1 <= secret_exponent < curve_order
        # NOTE(review): the two adjacent literals concatenate without a
        # space ("...range.Must be..."); confirm and fix the message.
        if not is_secret_exponent(secret_exponent, self._curve.order):
            raise IndexError(
                ("Secret exponent is outside of the valid range."
                 "Must be >= 1 and < the curve order."))
        self._ecdsa_private_key = ecdsa.keys.SigningKey.from_secret_exponent(
            secret_exponent, self._curve, self._hash_function
        )
    @classmethod
    def wif_version_byte(cls):
        """Return the WIF version byte (subclass override or pubkeyhash+128)."""
        if hasattr(cls, '_wif_version_byte'):
            return cls._wif_version_byte
        return (cls._pubkeyhash_version_byte + 128) % 256
    def to_bin(self):
        """Return the key as raw bytes (with 0x01 suffix when compressed)."""
        if self._compressed:
            return encode_privkey(
                self._ecdsa_private_key.to_string(), 'bin_compressed')
        else:
            return self._ecdsa_private_key.to_string()
    def to_hex(self):
        """Return the key hex-encoded."""
        if self._compressed:
            return encode_privkey(
                self._ecdsa_private_key.to_string(), 'hex_compressed')
        else:
            return hexlify(self.to_bin())
    def to_wif(self):
        """Return the key in Wallet Import Format (base58check)."""
        if self._compressed:
            return encode_privkey(
                self._ecdsa_private_key.to_string(), 'wif_compressed', vbyte=self._pubkeyhash_version_byte)
        else:
            return b58check_encode(
                self.to_bin(), version_byte=self.wif_version_byte())
    def to_pem(self):
        """Return the key PEM-encoded (delegated to the ecdsa library)."""
        return self._ecdsa_private_key.to_pem()
    def to_der(self):
        """Return the key DER-encoded, as hex."""
        return hexlify(self._ecdsa_private_key.to_der())
    def public_key(self):
        """Return (and cache) the corresponding ECPublicKey."""
        # lazily calculate and set the public key
        if not hasattr(self, '_public_key'):
            ecdsa_public_key = self._ecdsa_private_key.get_verifying_key()
            # Prefix the uncompressed point with the 0x04 magic byte.
            # NOTE(review): under Python 3, to_string() returns bytes, so this
            # %-format would embed repr(bytes); this module looks Python 2
            # era — confirm the target interpreter.
            bin_public_key_string = "%s%s" % (
                PUBLIC_KEY_MAGIC_BYTE, ecdsa_public_key.to_string())
            if self._compressed:
                bin_public_key_string = compress(bin_public_key_string)
            # create the public key object from the public key string
            self._public_key = ECPublicKey(
                bin_public_key_string,
                version_byte=self._pubkeyhash_version_byte)
        # return the public key object
        return self._public_key
| [
"ecdsa.keys.SigningKey.from_secret_exponent"
] | [((1326, 1423), 'ecdsa.keys.SigningKey.from_secret_exponent', 'ecdsa.keys.SigningKey.from_secret_exponent', (['secret_exponent', 'self._curve', 'self._hash_function'], {}), '(secret_exponent, self._curve,\n self._hash_function)\n', (1368, 1423), False, 'import ecdsa\n')] |
import json
import requests
from utils.RequestsUtil import RequestsUtil
from config.Conf import ConfYaml
import os
request_util = RequestsUtil()
conf = ConfYaml()
def login():
    """POST fixed credentials to the authorizations endpoint; return the response."""
    endpoint = os.path.join(conf.get_yaml_data(), "authorizations/")
    credentials = {"username": "python", "password": "<PASSWORD>"}
    return request_util.requests_api(endpoint, method="post", json=credentials)
def info(token):
    """Fetch the current user's profile using a JWT Authorization header."""
    endpoint = os.path.join(conf.get_yaml_data(), "user/")
    auth_headers = {"Authorization": "JWT " + token}
    return request_util.requests_api(endpoint, headers=auth_headers)
def cate():
    """List the first page of SKUs under category 115."""
    endpoint = os.path.join(conf.get_yaml_data(), "categories/115/skus/")
    # NOTE(review): "ording" looks like a typo for "ordering" — confirm the
    # parameter name against the API before changing it.
    query = {"page": "1", "page_size": "10", "ording": "create_time"}
    return request_util.requests_api(endpoint, json=query)
def cart(token):
    """Add one unit of SKU 3 to the authenticated user's cart."""
    endpoint = os.path.join(conf.get_yaml_data(), "cart/")
    payload = {"sku_id": "3", "count": "1", "selected": "true"}
    auth_headers = {"Authorization": "JWT " + token}
    return request_util.requests_api(endpoint, method="post", headers=auth_headers, json=payload)
def order(token):
    """Create an order (address 1, pay method 1) and print the JSON response.

    Unlike the sibling helpers, this one prints the body instead of
    returning it.
    """
    # Build the URL from the configured base like every sibling helper does,
    # instead of hard-coding the host (the old literal duplicated the base
    # URL shown in the commented-out lines of login/info/cart).
    url = os.path.join(conf.get_yaml_data(), "orders/")
    data = {"address": "1", "pay_method": "1"}
    headers = {
        "Authorization": "JWT " + token
    }
    r = requests.post(url, data=data, headers=headers)
    print(r.json())
if __name__=="__main__":
    # Log in first to obtain a JWT token.
    r = login()
    print(r)
    # Extract the token from the response body.
    str_token = r['body']['token']
    # Hit the user-profile endpoint with the token.
    info_data = info(str_token)
    print(info_data)
    # Use distinct result names so the helper functions are not shadowed by
    # their own return values (the original rebound `cate` and `cart`,
    # making the functions uncallable afterwards); this also matches the
    # `info_data` naming above.
    cate_data = cate()
    print("cate接口相关数据:{}".format(cate_data))
    cart_data = cart(str_token)
    print("cart接口相关数据:{}".format(cart_data))
    # order(str_token)
"requests.post",
"config.Conf.ConfYaml",
"utils.RequestsUtil.RequestsUtil"
] | [((132, 146), 'utils.RequestsUtil.RequestsUtil', 'RequestsUtil', ([], {}), '()\n', (144, 146), False, 'from utils.RequestsUtil import RequestsUtil\n'), ((154, 164), 'config.Conf.ConfYaml', 'ConfYaml', ([], {}), '()\n', (162, 164), False, 'from config.Conf import ConfYaml\n'), ((1662, 1708), 'requests.post', 'requests.post', (['url'], {'data': 'data', 'headers': 'headers'}), '(url, data=data, headers=headers)\n', (1675, 1708), False, 'import requests\n')] |
#!/usr/bin/env python3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import torch as th
import torchvision
from tqdm import tqdm
def main(args):
    """Train a 784-500-10 network on (Fashion)MNIST.

    The structure — a free-phase relaxation, a weakly clamped phase scaled
    by ``beta``, and a contrastive weight update from the two fixed points —
    appears to implement equilibrium propagation (Scellier & Bengio); confirm
    against the original paper before relying on that name.
    """
    trainloader, testloader = get_loaders(args.batch_size, args.fashion)
    # Dynamics step size, clamping strength, and per-layer learning rates.
    epsilon = 0.5
    beta = 1.0
    alpha1 = 0.1
    alpha2 = 0.05
    # Glorot-style uniform init: bound = sqrt(2 / (fan_in + fan_out)).
    a = np.sqrt(2.0 / (784 + 500))
    W1 = np.random.uniform(-a, a, (784, 500))
    b1 = np.random.uniform(-a, a, 500)
    a = np.sqrt(2.0 / (500 + 10))
    W2 = np.random.uniform(-a, a, (500, 10))
    b2 = np.random.uniform(-a, a, 10)
    # Persistent (hidden, output) state per batch, reused across epochs so
    # relaxation can warm-start from the previous fixed point.
    states = [(np.random.uniform(0, 1., (args.batch_size, 500)), \
        np.random.uniform(0, 1., (args.batch_size, 10))) for _ in range(len(trainloader))]
    for epoch in range(args.epochs):
        running_loss = running_energy = running_true_positive = 0.
        for i, (x, labels) in enumerate(tqdm(trainloader, desc=f"Epoch {epoch}")):
            x, labels = x.view(-1, 784).numpy(), labels.numpy()
            h, y = states[i]
            # Free phase: relax (h, y) toward an energy minimum with the
            # output unclamped (20 gradient-descent steps on the energy).
            for j in range(20):
                dh = d_rho(h) * (x @ W1 + y @ W2.T + b1) - h
                dy = d_rho(y) * (h @ W2 + b2) - y
                h = rho(h + epsilon * dh)
                y = rho(y + epsilon * dy)
            '''
            energy = (np.square(h).sum() + np.square(y).sum() \
                    - (W1 * (x.T @ h)).sum() - (W2 * (h.T @ y)).sum()) / 2 \
                    - (h @ b1).sum() - (y @ b2).sum())
            print(np.round(energy, 4), np.round(np.linalg.norm(dh), 4))
            '''
            # Remember the free fixed point; it seeds the next epoch too.
            h_free, y_free = np.copy(h), np.copy(y)
            states[i] = h_free, y_free
            # One-hot targets for this batch.
            t = np.zeros((x.shape[0], 10))
            t[np.arange(t.shape[0]), labels] = 1
            # Weakly clamped phase: same dynamics plus beta*(t - y) nudging
            # the output toward the target (only 4 steps from the free state).
            for j in range(4):
                dh = d_rho(h) * (x @ W1 + y @ W2.T + b1) - h
                dy = d_rho(y) * (h @ W2 + b2) - y + beta * (t - y)
                h = rho(h + epsilon * dh)
                y = rho(y + epsilon * dy)
            '''
            energy = (np.square(h).sum() + np.square(y).sum() \
                    - (W1 * (x.T @ h)).sum() - (W2 * (h.T @ y)).sum()) / 2 \
                    - (h @ b1).sum() - (y @ b2).sum()
            print(np.round(energy, 4), np.round(np.linalg.norm(dh), 4))
            '''
            h_clamped = np.copy(h)
            y_clamped = np.copy(y)
            # Contrastive update: (clamped correlations - free correlations),
            # scaled by the layer learning rate and averaged over the batch.
            W1 += alpha1 / beta * (rho(x.T) @ rho(h_clamped) - rho(x.T) @ rho(h_free)) / args.batch_size
            W2 += alpha2 / beta * (rho(h_clamped.T) @ rho(y_clamped) - rho(h_free.T) @ rho(y_free)) / args.batch_size
            b1 += alpha1 / beta * (rho(h_clamped) - rho(h_free)).mean(0)
            b2 += alpha2 / beta * (rho(y_clamped) - rho(y_free)).mean(0)
            # Track the free-phase energy, squared error and hit count.
            running_energy += (np.square(h_free).sum() + np.square(y_free).sum() \
                    - (W1 * (x.T @ h_free)).sum() - (W2 * (h_free.T @ y_free)).sum()) / 2 \
                    - (h_free @ b1).sum() - (y_free @ b2).sum()
            running_loss += np.square(t - y_free).sum()
            running_true_positive += np.count_nonzero(np.argmax(y_free, 1) == labels)
        # Per-sample averages over the whole epoch.
        energy_avg = running_energy / (len(trainloader) * args.batch_size)
        accuracy_avg = running_true_positive / (len(trainloader) * args.batch_size)
        loss_avg = running_loss / (len(trainloader) * args.batch_size)
        print(f"Energy: {energy_avg}, Accuracy: {accuracy_avg}, Loss: {loss_avg}")
def rho(x):
    """Hard-sigmoid activation: element-wise clip of x to [0, 1].

    Returns a new array; ``np.clip`` already allocates a fresh output when
    no ``out=`` argument is given, so the extra ``np.copy`` the original
    wrapped around it was redundant and has been removed.
    """
    return np.clip(x, 0., 1.)
def d_rho(x):
    """Indicator of the hard-sigmoid's linear region: 1 where 0 <= x <= 1."""
    above_floor = (x >= 0.)
    below_ceiling = (x <= 1.)
    return above_floor * below_ceiling
def get_loaders(batch_size, fashion=False):
    """Return (train, test) DataLoaders for MNIST or FashionMNIST.

    Downloads the dataset into ./data on first use. Only ToTensor is
    applied, so pixel values are scaled into [0, 1] — matching the range
    of the hard-sigmoid state used by the training dynamics.
    """
    mnist = torchvision.datasets.MNIST
    if fashion:
        mnist = torchvision.datasets.FashionMNIST
    transform = torchvision.transforms.Compose(
        [torchvision.transforms.ToTensor(),])
    # Shuffle only the training split; evaluation order stays fixed.
    trainloader = th.utils.data.DataLoader(
        mnist(root="./data", train=True, download=True, transform=transform),
        batch_size=batch_size,
        shuffle=True,
        num_workers=2)
    testloader = th.utils.data.DataLoader(
        mnist(root="./data", train=False, download=True, transform=transform),
        batch_size=batch_size,
        shuffle=False,
        num_workers=2)
    return trainloader, testloader
if __name__ == "__main__":
    # Command-line configuration for the training run.
    cli = argparse.ArgumentParser()
    cli.add_argument("--epochs", type=int, default=1000)
    cli.add_argument("--batch-size", type=int, default=20)
    cli.add_argument("--fashion", action="store_true", default=False)
    main(cli.parse_args())
| [
"numpy.clip",
"numpy.copy",
"numpy.sqrt",
"argparse.ArgumentParser",
"tqdm.tqdm",
"numpy.argmax",
"numpy.square",
"numpy.zeros",
"numpy.random.uniform",
"torchvision.transforms.ToTensor",
"numpy.arange"
] | [((398, 424), 'numpy.sqrt', 'np.sqrt', (['(2.0 / (784 + 500))'], {}), '(2.0 / (784 + 500))\n', (405, 424), True, 'import numpy as np\n'), ((434, 470), 'numpy.random.uniform', 'np.random.uniform', (['(-a)', 'a', '(784, 500)'], {}), '(-a, a, (784, 500))\n', (451, 470), True, 'import numpy as np\n'), ((480, 509), 'numpy.random.uniform', 'np.random.uniform', (['(-a)', 'a', '(500)'], {}), '(-a, a, 500)\n', (497, 509), True, 'import numpy as np\n'), ((519, 544), 'numpy.sqrt', 'np.sqrt', (['(2.0 / (500 + 10))'], {}), '(2.0 / (500 + 10))\n', (526, 544), True, 'import numpy as np\n'), ((554, 589), 'numpy.random.uniform', 'np.random.uniform', (['(-a)', 'a', '(500, 10)'], {}), '(-a, a, (500, 10))\n', (571, 589), True, 'import numpy as np\n'), ((599, 627), 'numpy.random.uniform', 'np.random.uniform', (['(-a)', 'a', '(10)'], {}), '(-a, a, 10)\n', (616, 627), True, 'import numpy as np\n'), ((4371, 4396), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4394, 4396), False, 'import argparse\n'), ((3583, 3603), 'numpy.clip', 'np.clip', (['x', '(0.0)', '(1.0)'], {}), '(x, 0.0, 1.0)\n', (3590, 3603), True, 'import numpy as np\n'), ((644, 693), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1.0)', '(args.batch_size, 500)'], {}), '(0, 1.0, (args.batch_size, 500))\n', (661, 693), True, 'import numpy as np\n'), ((708, 756), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1.0)', '(args.batch_size, 10)'], {}), '(0, 1.0, (args.batch_size, 10))\n', (725, 756), True, 'import numpy as np\n'), ((936, 976), 'tqdm.tqdm', 'tqdm', (['trainloader'], {'desc': 'f"""Epoch {epoch}"""'}), "(trainloader, desc=f'Epoch {epoch}')\n", (940, 976), False, 'from tqdm import tqdm\n'), ((1751, 1777), 'numpy.zeros', 'np.zeros', (['(x.shape[0], 10)'], {}), '((x.shape[0], 10))\n', (1759, 1777), True, 'import numpy as np\n'), ((2447, 2457), 'numpy.copy', 'np.copy', (['h'], {}), '(h)\n', (2454, 2457), True, 'import numpy as np\n'), ((2482, 2492), 'numpy.copy', 'np.copy', 
(['y'], {}), '(y)\n', (2489, 2492), True, 'import numpy as np\n'), ((3859, 3892), 'torchvision.transforms.ToTensor', 'torchvision.transforms.ToTensor', ([], {}), '()\n', (3890, 3892), False, 'import torchvision\n'), ((1672, 1682), 'numpy.copy', 'np.copy', (['h'], {}), '(h)\n', (1679, 1682), True, 'import numpy as np\n'), ((1684, 1694), 'numpy.copy', 'np.copy', (['y'], {}), '(y)\n', (1691, 1694), True, 'import numpy as np\n'), ((1792, 1813), 'numpy.arange', 'np.arange', (['t.shape[0]'], {}), '(t.shape[0])\n', (1801, 1813), True, 'import numpy as np\n'), ((3123, 3144), 'numpy.square', 'np.square', (['(t - y_free)'], {}), '(t - y_free)\n', (3132, 3144), True, 'import numpy as np\n'), ((3205, 3225), 'numpy.argmax', 'np.argmax', (['y_free', '(1)'], {}), '(y_free, 1)\n', (3214, 3225), True, 'import numpy as np\n'), ((2895, 2912), 'numpy.square', 'np.square', (['h_free'], {}), '(h_free)\n', (2904, 2912), True, 'import numpy as np\n'), ((2921, 2938), 'numpy.square', 'np.square', (['y_free'], {}), '(y_free)\n', (2930, 2938), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# mypy: ignore-errors
#
#
# Speedo
#
#
import os
from setuptools import setup, find_namespace_packages
from speedo_common.version import speedo_version as __version__
def get_requirements(filename: str):
    """Read *filename* and return its lines, whitespace-stripped, as a list.

    Blank lines are kept (as empty strings), matching a plain line-by-line
    read of a requirements file.
    """
    with open(filename, "r") as handle:
        return [line.strip() for line in handle]
# Dispatch on SPEEDO_TARGET: one source tree builds either the client
# library or the server package; both ship the shared speedo_common code.
if os.getenv("SPEEDO_TARGET", "") == "client":
    setup(
        name="Speedo - python client library",
        version=__version__,
        description="Speedo - python client library",
        url="https://www.nokx.ch",
        author="nokx",
        author_email="<EMAIL>",
        # Client namespace packages plus the shared common code.
        packages=find_namespace_packages(include=["speedo_client.*"])
        + find_namespace_packages(include=["speedo_common.*"])
        + ["speedo_client", "speedo_common"],
        install_requires=get_requirements("requirements_client.txt"),
        python_requires=">=3",
    )
elif os.getenv("SPEEDO_TARGET", "") == "server":
    setup(
        name="Speedo - server",
        version=__version__,
        description="Speedo - a fast RESTful web API",
        url="https://www.nokx.ch",
        author="nokx",
        author_email="<EMAIL>",
        # The server target additionally installs the launcher script and
        # ships the alembic migration config as package data.
        scripts=["scripts/speedo"],
        package_data={"": ["alembic.ini"]},
        packages=find_namespace_packages(include=["speedo_server.*"])
        + find_namespace_packages(include=["speedo_common.*"])
        + ["speedo_server", "speedo_common"],
        install_requires=get_requirements("requirements_server.txt"),
        python_requires=">=3",
    )
else:
    # No (or unknown) target selected: fail loudly with setup instructions.
    raise EnvironmentError(
        "Please target the following environment variable"
        "\n\n\t"
        "$ export SPEEDO_TARGET=server # or"
        "\n\t"
        "$ export SPEEDO_TARGET=client"
        "\n\n"
        "before running the installation"
        "\n\n\t"
        "$ python setup.py install --prefix=/tmp/destination"
        "\n\t"
        "$ # you can use pip instead (use a virtualenv)"
        "\n\t"
        "$ pip install ."
        "\n"
    )
| [
"setuptools.find_namespace_packages",
"os.getenv"
] | [((468, 498), 'os.getenv', 'os.getenv', (['"""SPEEDO_TARGET"""', '""""""'], {}), "('SPEEDO_TARGET', '')\n", (477, 498), False, 'import os\n'), ((1034, 1064), 'os.getenv', 'os.getenv', (['"""SPEEDO_TARGET"""', '""""""'], {}), "('SPEEDO_TARGET', '')\n", (1043, 1064), False, 'import os\n'), ((760, 812), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'include': "['speedo_client.*']"}), "(include=['speedo_client.*'])\n", (783, 812), False, 'from setuptools import setup, find_namespace_packages\n'), ((823, 875), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'include': "['speedo_common.*']"}), "(include=['speedo_common.*'])\n", (846, 875), False, 'from setuptools import setup, find_namespace_packages\n'), ((1392, 1444), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'include': "['speedo_server.*']"}), "(include=['speedo_server.*'])\n", (1415, 1444), False, 'from setuptools import setup, find_namespace_packages\n'), ((1455, 1507), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'include': "['speedo_common.*']"}), "(include=['speedo_common.*'])\n", (1478, 1507), False, 'from setuptools import setup, find_namespace_packages\n')] |
""" This script demonstrates the use of a convolutional LSTM network.
This network is used to predict the next frame of an artificially
generated movie which contains moving squares.
"""
from keras.models import Sequential
from keras.layers.convolutional import Conv3D
from keras.layers.convolutional_recurrent import ConvLSTM2D
from keras.layers.normalization import BatchNormalization
import numpy as np
import pylab as plt
import skimage.transform
import tensorflow as tf
import keras
import keras.backend as K
import keras.layers as KL
import keras.engine as KE
import keras.models as KM
from keras.models import *
from keras.layers import *
from keras.optimizers import *
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras import backend as keras
def getBiConvLSTM2d(input_image, filters, kernel_size, name):
    """Apply a bidirectional ConvLSTM2D followed by batch normalization.

    Returns the normalized sequence output (return_sequences=True keeps
    the time dimension).
    """
    recurrent = KL.ConvLSTM2D(
        filters=filters,
        kernel_size=kernel_size,
        activation='relu',
        padding='same',
        return_sequences=True,
    )
    features = KL.Bidirectional(recurrent)(input_image)
    return KL.BatchNormalization(name="batchNormL_" + name)(features)
def unet(inputs):
    """Build the U-Net segmentation head on top of *inputs*.

    Standard encoder/decoder with skip connections: four 2x-downsampling
    stages (64->128->256->512 filters), a 1024-filter bottleneck, then four
    upsampling stages each concatenated with the matching encoder output.
    Returns a single-channel sigmoid map.
    """
    # --- Encoder ---
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    drop4 = Dropout(0.5)(conv4)
    # NOTE(review): debug print left in — consider removing once verified.
    print(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
    # --- Bottleneck ---
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)
    # --- Decoder: upsample, concatenate with the encoder skip, convolve ---
    up6 = Conv2D(512, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    # NOTE(review): debug prints left in — consider removing once verified.
    print(drop4)
    print(up6)
    merge6 = concatenate([drop4,up6], axis = 3)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
    conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
    up7 = Conv2D(256, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3,up7], axis = 3)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
    conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
    up8 = Conv2D(128, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2,up8], axis = 3)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
    conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
    up9 = Conv2D(64, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1,up9], axis = 3)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
    conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    # Final 1x1 convolution collapses to one sigmoid channel.
    conv10 = Conv2D(1, 1, activation = 'sigmoid')(conv9)
    return conv10
def LSTMUnet():
    '''
    Build and compile the ConvLSTM + U-Net model.

    Input: a sequence of 8 frames, each 256x256 with 1 channel
    (shape 8*256*256*1). Three stacked bidirectional ConvLSTM2D blocks
    extract temporal features, Conv3D layers collapse the sequence to a
    single frame, and the U-Net head produces the segmentation map.
    '''
    row = 256
    col = 256
    input_image = KL.Input(shape=[8, row,col,1], name="input_image")
    # Three stacked bidirectional ConvLSTM blocks over the frame sequence.
    L4 = input_image
    for i in range(3):
        L4 = getBiConvLSTM2d(L4,filters=20, kernel_size=(3, 3),name='top'+str(i))
    # Conv3D layers mix across time and reduce to a single channel.
    L5 = KL.Conv3D(filters=8, kernel_size=(3, 3, 8),
               activation='relu',
               padding='same', data_format='channels_last')(L4)
    # L5 = KL.BatchNormalization(name="batchNormL_sel5")(L5)
    L6 = KL.Conv3D(filters=1, kernel_size=(3, 3, 4),
               activation='relu',
               padding='same', data_format='channels_last')(L5)
    L6 = KL.BatchNormalization(name="batchNormL_sel5")(L6)
    # channels_first here treats the former time axis as channels,
    # collapsing the sequence dimension before the reshape.
    L7 = KL.Conv3D(filters=1, kernel_size=(3, 3, 8),
               activation='relu',
               padding='same', data_format='channels_first')(L6)
    # Drop the sequence axis: a single (row, col, 1) frame feeds the U-Net.
    L7 = Reshape((row,col,1))(L7)
    seg = unet(L7)
    model = KM.Model(input_image,seg)
    model.compile(loss='mean_squared_error', optimizer='adadelta')
    return model
| [
"keras.layers.ConvLSTM2D",
"keras.layers.Input",
"keras.models.Model",
"keras.layers.BatchNormalization",
"keras.layers.Conv3D"
] | [((4372, 4424), 'keras.layers.Input', 'KL.Input', ([], {'shape': '[8, row, col, 1]', 'name': '"""input_image"""'}), "(shape=[8, row, col, 1], name='input_image')\n", (4380, 4424), True, 'import keras.layers as KL\n'), ((5197, 5223), 'keras.models.Model', 'KM.Model', (['input_image', 'seg'], {}), '(input_image, seg)\n', (5205, 5223), True, 'import keras.models as KM\n'), ((1002, 1050), 'keras.layers.BatchNormalization', 'KL.BatchNormalization', ([], {'name': "('batchNormL_' + name)"}), "(name='batchNormL_' + name)\n", (1023, 1050), True, 'import keras.layers as KL\n'), ((4559, 4671), 'keras.layers.Conv3D', 'KL.Conv3D', ([], {'filters': '(8)', 'kernel_size': '(3, 3, 8)', 'activation': '"""relu"""', 'padding': '"""same"""', 'data_format': '"""channels_last"""'}), "(filters=8, kernel_size=(3, 3, 8), activation='relu', padding=\n 'same', data_format='channels_last')\n", (4568, 4671), True, 'import keras.layers as KL\n'), ((4774, 4886), 'keras.layers.Conv3D', 'KL.Conv3D', ([], {'filters': '(1)', 'kernel_size': '(3, 3, 4)', 'activation': '"""relu"""', 'padding': '"""same"""', 'data_format': '"""channels_last"""'}), "(filters=1, kernel_size=(3, 3, 4), activation='relu', padding=\n 'same', data_format='channels_last')\n", (4783, 4886), True, 'import keras.layers as KL\n'), ((4927, 4972), 'keras.layers.BatchNormalization', 'KL.BatchNormalization', ([], {'name': '"""batchNormL_sel5"""'}), "(name='batchNormL_sel5')\n", (4948, 4972), True, 'import keras.layers as KL\n'), ((4987, 5100), 'keras.layers.Conv3D', 'KL.Conv3D', ([], {'filters': '(1)', 'kernel_size': '(3, 3, 8)', 'activation': '"""relu"""', 'padding': '"""same"""', 'data_format': '"""channels_first"""'}), "(filters=1, kernel_size=(3, 3, 8), activation='relu', padding=\n 'same', data_format='channels_first')\n", (4996, 5100), True, 'import keras.layers as KL\n'), ((866, 983), 'keras.layers.ConvLSTM2D', 'KL.ConvLSTM2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size', 'activation': '"""relu"""', 'padding': 
'"""same"""', 'return_sequences': '(True)'}), "(filters=filters, kernel_size=kernel_size, activation='relu',\n padding='same', return_sequences=True)\n", (879, 983), True, 'import keras.layers as KL\n')] |
# ======================================================
# Packaging Python Projects Tutorial : https://packaging.python.org/tutorials/packaging-projects/
#
# Installation : # pip install -i https://test.pypi.org/simple/ utils-edchin
#
#
# ===================================================================
# Importing
if __name__ == "__main__":
    # -- Run tests: demo the pyxlib helpers, then the DataProcessor --
    import numpy as np
    import utils_edchin.pyxlib as edc_xlib  # import class
    toolbox = edc_xlib()  # create instance
    sample_values = [-10, 2, 5, 3, 8, 4, 7, 5, 10, 99, 1000]
    quartiles = toolbox.quartileSet(sample_values)
    print('\nList :', sample_values)
    print('Quartile limit-Lower:', quartiles[0])
    print('               Upper:',quartiles[1])
    print('     Outliers :', toolbox.listOutliers(sample_values))
    print('  w/o Outliers :', toolbox.removeOutliers(sample_values))
    # Compare the local variance helper against numpy's.
    print('        my Var :', toolbox.variance_edc(toolbox.removeOutliers(sample_values)))
    print('     Numpy.Var :', np.var(toolbox.removeOutliers(sample_values)))
    sample_word = 'Pennsylvania'
    print('\n', sample_word,'string reversed =', toolbox.str_reverse(sample_word))
    import pandas
    import utils_edchin.DataProcessor as edc_dp  # import class
    processor = edc_dp()  # create instance
    frame_in = pandas.DataFrame({"zip":[45763, 73627, 78632, 22374, 31455], "abbrev": ["OH", "MI", "SD", "PR", "PA"]})
    frame_out = processor.add_state_names(frame_in)  # use it
    print('\nInput:\n', frame_in.head())
    print('Output:\n', frame_out.head())
| [
"utils_edchin.pyxlib",
"pandas.DataFrame",
"utils_edchin.DataProcessor"
] | [((478, 488), 'utils_edchin.pyxlib', 'edc_xlib', ([], {}), '()\n', (486, 488), True, 'import utils_edchin.pyxlib as edc_xlib\n'), ((1178, 1186), 'utils_edchin.DataProcessor', 'edc_dp', ([], {}), '()\n', (1184, 1186), True, 'import utils_edchin.DataProcessor as edc_dp\n'), ((1218, 1327), 'pandas.DataFrame', 'pandas.DataFrame', (["{'zip': [45763, 73627, 78632, 22374, 31455], 'abbrev': ['OH', 'MI', 'SD',\n 'PR', 'PA']}"], {}), "({'zip': [45763, 73627, 78632, 22374, 31455], 'abbrev': [\n 'OH', 'MI', 'SD', 'PR', 'PA']})\n", (1234, 1327), False, 'import pandas\n')] |
import unittest
from datetime import datetime, timedelta
import json
from ..main import create_app
from base64 import b64encode
from ..models import *
class ApiTestCase(unittest.TestCase):
    """Integration tests for the v1 REST API: observation uploads, sensor
    mappings, areas, materials and classroom listings."""

    def setUp(self):
        """Create a test app/client and a fresh database schema."""
        # Configure app and create a test_client
        app = create_app('app.config.TestConfig')
        app.app_context().push()
        self.app = app.test_client()
        # propagate the exceptions to the test client
        self.app.testing = True
        app.config['DEBUG'] = True
        # Authorization header to use when authorized.
        # BUG FIX: base64.b64encode() takes bytes and returns bytes, so the
        # credentials must be encoded first and the digest decoded back to
        # str before it is joined with the "Basic " scheme prefix.
        self.authorized_headers = {
            'Authorization': 'Basic ' + b64encode(b'testuser:testpass').decode('ascii')
        }
        db.create_all()

    def tearDown(self):
        """Drop all tables so every test starts from an empty database."""
        db.session.remove()
        db.drop_all()

    def api_post_json(self, path, data, auth=False):
        """POST ``data`` as JSON to ``/api/v1/<path>``; add auth headers when asked."""
        if auth:
            headers = self.authorized_headers
        else:
            headers = None
        return self.app.post('/api/v1/' + path,
                             data=data, follow_redirects=True,
                             content_type='application/json',
                             headers=headers)

    def test_upload_without_auth(self):
        """Unauthenticated observation uploads must be rejected with 401."""
        radio_ob = dict(
            classroom_id=1,
            local_id=1,
            remote_id=2,
            observed_at=datetime.now().isoformat(),
        )
        event_data = json.dumps([radio_ob])
        result = self.api_post_json('radio_observations', event_data)
        self.assertEqual(result.status_code, 401)

    def test_upload_with_auth(self):
        """Authenticated observation uploads are accepted and persisted."""
        m = SensorMapping(1, 1, datetime.now(), None, MappingType.child, 1)
        db.session.add(m)
        m = SensorMapping(1, 2, datetime.now(), None, MappingType.child, 2)
        db.session.add(m)
        db.session.commit()
        radio_ob = dict(
            classroom_id=1,
            local_id=1,
            remote_id=2,
            observed_at=datetime.now().isoformat(),
        )
        event_data = json.dumps([radio_ob])
        result = self.api_post_json('radio_observations', event_data, True)
        self.assertEqual(result.status_code, 201)
        events = RadioObservation.query.all()
        self.assertEqual(len(events), 1)

    def test_upload_duplicate(self):
        """Re-uploading an identical observation must not create a duplicate row."""
        m = SensorMapping(1, 1, datetime.now(), None, MappingType.child, 1)
        db.session.add(m)
        m = SensorMapping(1, 2, datetime.now(), None, MappingType.child, 2)
        db.session.add(m)
        db.session.commit()
        now = datetime.now()
        radio_ob = dict(
            classroom_id=1,
            local_id=1,
            remote_id=2,
            observed_at=now.isoformat(),
        )
        event_data = json.dumps([radio_ob])
        result = self.api_post_json('radio_observations', event_data, True)
        radio_ob2 = dict(
            classroom_id=1,
            local_id=2,
            remote_id=1,
            observed_at=now.isoformat(),
        )
        # radio_ob is posted a second time here; only radio_ob2 should be new.
        # (An unused one_hour_ago computation was removed.)
        event_data = json.dumps([radio_ob, radio_ob2])
        result = self.api_post_json('radio_observations', event_data, True)
        self.assertEqual(result.status_code, 201)
        events = RadioObservation.query.all()
        self.assertEqual(len(events), 2)

    def test_mapping_create(self):
        """A single sensor mapping can be created via the API."""
        mapping_item = dict(
            classroom_id=1,
            sensor_id=1,
            entity_type='child',
            entity_id=5,  # child_id
        )
        result = self.api_post_json('sensor_mappings', json.dumps([mapping_item]), True)
        self.assertEqual(result.status_code, 201)
        mappings = SensorMapping.query.all()
        self.assertEqual(len(mappings), 1)

    def test_mapping_update_same_sensor_id(self):
        """Remapping a sensor ends the sensor's previous mapping."""
        # When updating the mapping of a sensor, existing mappings to that
        # sensor should be ended
        mappings = [
            dict(
                classroom_id=1,
                sensor_id=1,
                entity_type='child',
                entity_id=5),
            dict(
                classroom_id=1,
                sensor_id=1,
                entity_type='child',
                entity_id=6)]
        result = self.api_post_json('sensor_mappings', json.dumps(mappings), True)
        self.assertEqual(result.status_code, 201)
        mappings = SensorMapping.query.all()
        self.assertEqual(len(mappings), 2)
        mappings = SensorMapping.query.filter_by(end_time=None).all()
        self.assertEqual(len(mappings), 1)
        self.assertEqual(mappings[0].entity_id, 6)

    def test_mapping_update_same_entity(self):
        """Remapping an entity ends the entity's previous mapping."""
        # When updating a mapping to an entity, existing mappings to that
        # entity should be ended
        mappings = [
            dict(
                classroom_id=1,
                sensor_id=1,
                entity_type='child',
                entity_id=5),
            dict(
                classroom_id=1,
                sensor_id=2,
                entity_type='child',
                entity_id=5)]
        result = self.api_post_json('sensor_mappings', json.dumps(mappings), True)
        self.assertEqual(result.status_code, 201)
        mappings = SensorMapping.query.all()
        self.assertEqual(len(mappings), 2)
        mappings = SensorMapping.query.filter_by(end_time=None).all()
        self.assertEqual(len(mappings), 1)
        self.assertEqual(mappings[0].sensor_id, 2)

    def test_get_mappings(self):
        """Mappings for a classroom can be listed with an authorized GET."""
        m = SensorMapping(1, 1, datetime.now(), None, MappingType.child, 1)
        db.session.add(m)
        db.session.commit()
        result = self.app.get('/api/v1/sensor_mappings?classroom_id=1', headers=self.authorized_headers)
        self.assertEqual(result.status_code, 200)
        mappings = json.loads(result.data)
        self.assertEqual(len(mappings), 1)

    def test_area_create(self):
        """An area can be created and is persisted with its name."""
        area = dict(
            classroom_id=1,
            name='test',
        )
        result = self.api_post_json('areas', json.dumps(area), True)
        self.assertEqual(result.status_code, 201)
        areas = Area.query.all()
        self.assertEqual(len(areas), 1)
        self.assertEqual(areas[0].name, "test")

    def test_material_create(self):
        """A material can be created and is persisted with its name."""
        material = dict(
            classroom_id=1,
            name='red rods',
            lesson_id=5
        )
        result = self.api_post_json('materials', json.dumps(material), True)
        self.assertEqual(result.status_code, 201)
        materials = Material.query.all()
        self.assertEqual(len(materials), 1)
        self.assertEqual(materials[0].name, "red rods")

    def test_classrooms_index(self):
        """The classrooms index returns the seeded test classroom."""
        result = self.app.get('/api/v1/classrooms', headers=self.authorized_headers)
        self.assertEqual(result.status_code, 200)
        classrooms = json.loads(result.data)
        self.assertEqual(len(classrooms), 1)
        self.assertEqual(classrooms[0].get('name'), "test classroom")
if __name__ == '__main__':
    # Run this module's test suite when executed directly.
    unittest.main()
| [
"json.loads",
"base64.b64encode",
"json.dumps",
"datetime.datetime.now",
"unittest.main",
"datetime.timedelta"
] | [((6897, 6912), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6910, 6912), False, 'import unittest\n'), ((1331, 1353), 'json.dumps', 'json.dumps', (['[radio_ob]'], {}), '([radio_ob])\n', (1341, 1353), False, 'import json\n'), ((1929, 1951), 'json.dumps', 'json.dumps', (['[radio_ob]'], {}), '([radio_ob])\n', (1939, 1951), False, 'import json\n'), ((2450, 2464), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2462, 2464), False, 'from datetime import datetime, timedelta\n'), ((2640, 2662), 'json.dumps', 'json.dumps', (['[radio_ob]'], {}), '([radio_ob])\n', (2650, 2662), False, 'import json\n'), ((2964, 2997), 'json.dumps', 'json.dumps', (['[radio_ob, radio_ob2]'], {}), '([radio_ob, radio_ob2])\n', (2974, 2997), False, 'import json\n'), ((5686, 5709), 'json.loads', 'json.loads', (['result.data'], {}), '(result.data)\n', (5696, 5709), False, 'import json\n'), ((6725, 6748), 'json.loads', 'json.loads', (['result.data'], {}), '(result.data)\n', (6735, 6748), False, 'import json\n'), ((1544, 1558), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1556, 1558), False, 'from datetime import datetime, timedelta\n'), ((1646, 1660), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1658, 1660), False, 'from datetime import datetime, timedelta\n'), ((2235, 2249), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2247, 2249), False, 'from datetime import datetime, timedelta\n'), ((2337, 2351), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2349, 2351), False, 'from datetime import datetime, timedelta\n'), ((2769, 2787), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (2778, 2787), False, 'from datetime import datetime, timedelta\n'), ((3463, 3489), 'json.dumps', 'json.dumps', (['[mapping_item]'], {}), '([mapping_item])\n', (3473, 3489), False, 'import json\n'), ((4162, 4182), 'json.dumps', 'json.dumps', (['mappings'], {}), '(mappings)\n', (4172, 4182), False, 'import json\n'), ((5016, 
5036), 'json.dumps', 'json.dumps', (['mappings'], {}), '(mappings)\n', (5026, 5036), False, 'import json\n'), ((5414, 5428), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5426, 5428), False, 'from datetime import datetime, timedelta\n'), ((5915, 5931), 'json.dumps', 'json.dumps', (['area'], {}), '(area)\n', (5925, 5931), False, 'import json\n'), ((6312, 6332), 'json.dumps', 'json.dumps', (['material'], {}), '(material)\n', (6322, 6332), False, 'import json\n'), ((633, 663), 'base64.b64encode', 'b64encode', (['"""testuser:testpass"""'], {}), "('testuser:testpass')\n", (642, 663), False, 'from base64 import b64encode\n'), ((1271, 1285), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1283, 1285), False, 'from datetime import datetime, timedelta\n'), ((1870, 1884), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1882, 1884), False, 'from datetime import datetime, timedelta\n')] |
from setuptools import setup, find_packages

# The PyPI long description is the README followed by the changelog.
with open('README.md') as readme_file:
    README = readme_file.read()
with open('HISTORY.md') as history_file:
    HISTORY = history_file.read()

# Static package metadata; runtime dependencies are declared separately below.
setup_args = {
    'name': 'DisBot',
    'version': '1.0',
    'description': 'Tool to create a quick new discord.py bot',
    'long_description_content_type': 'text/markdown',
    'long_description': README + '\n\n' + HISTORY,
    'license': 'MIT',
    'packages': find_packages(),
    'author': '<NAME>',
    'author_email': '<EMAIL>',
    'keywords': ['discord.py', 'discord template', 'discord bot template', 'Disbot'],
    'url': 'https://github.com/GSri30/Disbot',
    'download_url': 'https://pypi.org/project/DisBot',
    'include_package_data': True,
    'entry_points': {
        'console_scripts': [
            'disbot-admin=Disbot.cli:Main'
        ]
    },
}

install_requires = [
    'click',
    'python-dotenv',
    'discord.py',
]

if __name__ == "__main__":
    setup(**setup_args, install_requires=install_requires)
"setuptools.find_packages",
"setuptools.setup"
] | [((941, 995), 'setuptools.setup', 'setup', ([], {'install_requires': 'install_requires'}), '(**setup_args, install_requires=install_requires)\n', (946, 995), False, 'from setuptools import setup, find_packages\n'), ((436, 451), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (449, 451), False, 'from setuptools import setup, find_packages\n')] |
import asyncio
import logging
import json
import websockets
from typing import Any, Callable, Sequence
from .constants import (
ChannelType
)
# Avanza's CometD push endpoint; all realtime subscriptions flow over it.
WEBSOCKET_URL = 'wss://www.avanza.se/_push/cometd'

# Module-level logger used for message tracing and connection warnings.
logger = logging.getLogger("avanza_socket")
class AvanzaSocket:
    """Minimal CometD-over-websocket client for Avanza's push service.

    Drives the handshake/connect lifecycle, remembers active subscriptions
    so they can be re-established after a reconnect, and dispatches
    incoming channel messages to per-subscription callbacks.
    """

    def __init__(self, push_subscription_id, cookies):
        self._socket = None
        self._client_id = None
        # Outgoing message ids are 1-based and strictly increasing.
        self._message_count = 1
        self._push_subscription_id = push_subscription_id
        self._connected = False
        # subscription string -> {'callback': fn, 'client_id': id}
        self._subscriptions = {}
        self._cookies = cookies
        self._subscribe_event = None

    async def init(self):
        """Open the websocket in the background and wait until connected."""
        asyncio.ensure_future(self.__create_socket())

        await self.__wait_for_websocket_to_be_connected()

    async def __wait_for_websocket_to_be_connected(self):
        """Poll until the handshake completed or raise after ~10 seconds."""
        timeout_count = 40
        timeout_value = 0.250
        # Waits for a maximum of 10 seconds for the connection to be complete
        for _ in range(0, timeout_count):
            if self._connected:
                return
            await asyncio.sleep(timeout_value)

        raise TimeoutError('\
            We weren\'t able to connect \
            to the websocket within the expected timeframe \
        ')

    async def __create_socket(self):
        """Hold the websocket open, handshake, then pump incoming messages."""
        async with websockets.connect(
            WEBSOCKET_URL,
            extra_headers={'Cookie': self._cookies}
        ) as self._socket:
            await self.__send_handshake_message()
            await self.__socket_message_handler()

    async def __send_handshake_message(self):
        """Send the /meta/handshake request carrying the push subscription id."""
        await self.__send({
            'advice': {
                'timeout': 60000,
                'interval': 0
            },
            'channel': '/meta/handshake',
            'ext': {'subscriptionId': self._push_subscription_id},
            'minimumVersion': '1.0',
            'supportedConnectionTypes': [
                'websocket',
                'long-polling',
                'callback-polling'
            ],
            'version': '1.0'
        })

    async def __send_connect_message(self):
        """Send a /meta/connect keep-alive for the current client id."""
        await self.__send({
            'channel': '/meta/connect',
            'clientId': self._client_id,
            'connectionType': 'websocket',
            'id': self._message_count
        })

    async def __socket_subscribe(
        self,
        subscription_string,
        callback: Callable[[str, dict], Any],
        wait_for_reply_timeout_seconds=None
    ):
        """Register ``callback`` for ``subscription_string`` and subscribe.

        BUG FIX: ``wait_for_reply_timeout_seconds`` now defaults to ``None``
        (do not wait for the ack).  The re-subscription path after a
        reconnect calls this method without a timeout argument, which
        previously raised ``TypeError`` for a missing positional argument.
        """
        if self._subscribe_event is None:
            self._subscribe_event = asyncio.Event()

        self._subscriptions[subscription_string] = {
            'callback': callback
        }
        self._subscribe_event.clear()
        await self.__send({
            'channel': '/meta/subscribe',
            'clientId': self._client_id,
            'subscription': subscription_string
        })

        # Wait for subscription ack message.
        if wait_for_reply_timeout_seconds is not None:
            try:
                await asyncio.wait_for(self._subscribe_event.wait(), timeout=wait_for_reply_timeout_seconds)
            except asyncio.TimeoutError:
                logger.warning('timeout waiting for subscription reply!')

    async def __send(self, message):
        """Wrap ``message`` in a CometD envelope, send it and bump the id."""
        wrapped_message = [
            {
                **message,
                'id': str(self._message_count)
            }
        ]
        logger.info(f'Outgoing message: {wrapped_message}')
        await self._socket.send(json.dumps(wrapped_message))
        self._message_count = self._message_count + 1

    async def __handshake(self, message: dict):
        """Handle the /meta/handshake reply; retry when the server advises."""
        if message.get('successful', False):
            self._client_id = message.get('clientId')
            await self.__send({
                'advice': {'timeout': 0},
                'channel': '/meta/connect',
                'clientId': self._client_id,
                'connectionType': 'websocket'
            })
            return

        advice = message.get('advice')
        if advice and advice.get('reconnect') == 'handshake':
            await self.__send_handshake_message()

    async def __connect(self, message: dict):
        """Handle the /meta/connect reply and keep the long poll alive."""
        successful = message.get('successful', False)
        advice = message.get('advice', {})
        reconnect = advice.get('reconnect') == 'retry'
        interval = advice.get('interval')
        connect_successful = successful and (
            not advice or (reconnect and interval >= 0)
        )

        if connect_successful:
            await self.__send({
                'channel': '/meta/connect',
                'clientId': self._client_id,
                'connectionType': 'websocket'
            })
            if not self._connected:
                self._connected = True
                await self.__resubscribe_existing_subscriptions()
        elif self._client_id:
            await self.__send_connect_message()

    async def __resubscribe_existing_subscriptions(self):
        """Re-subscribe every channel registered under an older client id."""
        for key, value in self._subscriptions.items():
            if value.get('client_id') != self._client_id:
                # Do not block on the ack during bulk re-subscription; the
                # default timeout of None skips the wait.
                await self.__socket_subscribe(
                    key,
                    value['callback']
                )

    async def __disconnect(self, message):
        # The server disconnected us; start over with a fresh handshake.
        await self.__send_handshake_message()

    async def __register_subscription(self, message):
        """Record the server ack for a /meta/subscribe request."""
        subscription = message.get('subscription')
        if subscription is None:
            raise ValueError('No subscription channel found on subscription message')
        self._subscriptions[subscription]['client_id'] = self._client_id
        self._subscribe_event.set()

    async def __socket_message_handler(self):
        """Dispatch incoming messages to meta handlers or user callbacks."""
        message_action = {
            '/meta/disconnect': self.__disconnect,
            '/meta/handshake': self.__handshake,
            '/meta/connect': self.__connect,
            '/meta/subscribe': self.__register_subscription
        }
        async for message in self._socket:
            message = json.loads(message)[0]
            message_channel = message.get('channel')
            error = message.get('error')

            logger.info(f'Incoming message: {message}')
            if error:
                logger.error(error)

            action = message_action.get(message_channel)
            # Use user subscribed action
            if action is None:
                callback = self._subscriptions[message_channel]['callback']
                callback(message)
            else:
                await action(message)

    async def subscribe_to_id(
        self,
        channel: ChannelType,
        id: str,
        callback: Callable[[str, dict], Any],
        wait_for_reply_timeout_seconds,
    ):
        """Subscribe to a single id; see :meth:`subscribe_to_ids`."""
        return await self.subscribe_to_ids(channel, [id], callback, wait_for_reply_timeout_seconds)

    async def subscribe_to_ids(
        self,
        channel: ChannelType,
        ids: Sequence[str],
        callback: Callable[[str, dict], Any],
        wait_for_reply_timeout_seconds
    ):
        """Subscribe ``callback`` to ``channel`` for the given ``ids``.

        Only the ORDERS/DEALS/POSITIONS channels accept more than one id.
        """
        valid_channels_for_multiple_ids = [
            ChannelType.ORDERS,
            ChannelType.DEALS,
            ChannelType.POSITIONS
        ]
        if (
            len(ids) > 1 and
            channel not in valid_channels_for_multiple_ids
        ):
            raise ValueError(f'Multiple ids is not supported for channels other than {valid_channels_for_multiple_ids}')

        subscription_string = f'/{channel.value}/{",".join(ids)}'
        await self.__socket_subscribe(subscription_string, callback, wait_for_reply_timeout_seconds)
| [
"logging.getLogger",
"json.loads",
"json.dumps",
"asyncio.Event",
"websockets.connect",
"asyncio.sleep"
] | [((209, 243), 'logging.getLogger', 'logging.getLogger', (['"""avanza_socket"""'], {}), "('avanza_socket')\n", (226, 243), False, 'import logging\n'), ((1283, 1357), 'websockets.connect', 'websockets.connect', (['WEBSOCKET_URL'], {'extra_headers': "{'Cookie': self._cookies}"}), "(WEBSOCKET_URL, extra_headers={'Cookie': self._cookies})\n", (1301, 1357), False, 'import websockets\n'), ((2520, 2535), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (2533, 2535), False, 'import asyncio\n'), ((1052, 1080), 'asyncio.sleep', 'asyncio.sleep', (['timeout_value'], {}), '(timeout_value)\n', (1065, 1080), False, 'import asyncio\n'), ((3456, 3483), 'json.dumps', 'json.dumps', (['wrapped_message'], {}), '(wrapped_message)\n', (3466, 3483), False, 'import json\n'), ((5952, 5971), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (5962, 5971), False, 'import json\n')] |
import logging
import os
import xml.etree.ElementTree as ET
from collections import defaultdict
from event.io.dataset.base import (
Span,
DataLoader,
DEDocument,
RelationMention,
)
class RichERE(DataLoader):
    """Loader that pairs Rich ERE annotation XML files with source documents."""

    def __init__(self, params, corpus, with_doc=False):
        super().__init__(params, corpus, with_doc)
        self.params = params

    def parse_ere(self, ere_file, doc):
        """Parse one Rich ERE XML file and add its annotations to ``doc``."""
        root = ET.parse(ere_file).getroot()

        doc_info = root.attrib

        # NOTE(review): these look like plain attribute assignments shadowing
        # what may be intended setter methods on DEDocument -- confirm the API.
        doc.set_id = doc_info['doc_id']
        doc.set_doc_type = doc_info['source_type']

        for entity_node in root.find('entities'):
            entity_ids = []

            ent = doc.add_entity(entity_node.attrib['type'],
                                 entity_node.attrib['id'])

            for entity_mention in entity_node.findall('entity_mention'):
                ent_info = entity_mention.attrib
                entity_ids.append(ent_info['id'])

                entity_text = entity_mention.find('mention_text').text
                # BUG FIX: offsets arrive as XML attribute strings; convert to
                # int and pass a (begin, end) span, matching the filler and
                # predicate handling below.
                begin = int(ent_info['offset'])
                length = int(ent_info['length'])
                entity_span = Span(begin, begin + length)

                doc.add_entity_mention(
                    ent, entity_span, entity_text,
                    ent_info['id'],
                    noun_type=ent_info['noun_type'],
                    entity_type=ent_info.get('type', None),
                )

        for filler in root.find('fillers'):
            filler_info = filler.attrib

            b = int(filler_info['offset'])
            l = int(filler_info['length'])

            doc.add_filler(
                Span(b, b + l), filler.text,
                eid=filler_info['id'], filler_type=filler_info['type']
            )

        for event_node in root.find('hoppers'):
            evm_ids = []

            event = doc.add_hopper(event_node.attrib['id'])

            for event_mention in event_node.findall('event_mention'):
                evm_info = event_mention.attrib
                evm_ids.append(evm_info['id'])

                trigger = event_mention.find('trigger')
                trigger_text = trigger.text
                # BUG FIX: the attributes are strings, so the previous
                # ``offset + length`` concatenated text (e.g. '10' + '5' ->
                # '105') instead of computing the end offset.
                offset = int(trigger.attrib['offset'])
                length = int(trigger.attrib['length'])

                evm = doc.add_predicate(
                    event, Span(offset, offset + length), trigger_text,
                    eid=evm_info['id'],
                    frame_type=evm_info['type'] + '_' + evm_info['subtype'],
                    realis=evm_info['realis'])

                for em_arg in event_mention.findall('em_arg'):
                    arg_info = em_arg.attrib

                    arg_ent_mention = None
                    if 'entity_mention_id' in arg_info:
                        arg_ent_mention = arg_info['entity_mention_id']
                    if 'filler_id' in arg_info:
                        arg_ent_mention = arg_info['filler_id']

                    role = arg_info['role']

                    doc.add_argument_mention(evm, arg_ent_mention, role)

        for relation_node in root.find('relations'):
            relation_info = relation_node.attrib
            relation = doc.add_relation(
                relation_info['id'],
                relation_type=relation_info['type'] + '_' + relation_info[
                    'subtype']
            )

            for rel_mention_node in relation_node.findall('relation_mention'):
                rel_mention_id = rel_mention_node.attrib['id']
                rel_realis = rel_mention_node.attrib['realis']

                args = {}
                for mention_part in rel_mention_node:
                    if mention_part.tag.startswith('rel_arg'):
                        if 'entity_mention_id' in mention_part.attrib:
                            ent_id = mention_part.attrib['entity_mention_id']
                        else:
                            ent_id = mention_part.attrib['filler_id']

                        role = mention_part.attrib['role']
                        args[role] = ent_id

                trigger = rel_mention_node.find('trigger')
                if trigger is not None:
                    trigger_text = trigger.text
                    # BUG FIX: convert to ints and derive the end offset so
                    # the span is (begin, end) like everywhere else here.
                    trigger_begin = int(trigger.attrib['offset'])
                    trigger_end = trigger_begin + int(trigger.attrib['length'])
                else:
                    trigger_text = ''
                    trigger_begin = None
                    trigger_end = None

                rel_mention = RelationMention(
                    rel_mention_id, Span(trigger_begin, trigger_end),
                    trigger_text, rel_realis
                )

                for role, ent in args.items():
                    rel_mention.add_arg(role, ent)

                relation.add_mention(rel_mention)

    def read_rich_ere(self, corpus, source_path, l_ere_path, ranges):
        """Build a DEDocument from a source text plus its ERE annotation files."""
        with open(source_path) as source:
            text = source.read()
            doc = DEDocument(corpus, text, ranges, self.params.ignore_quote)
            for ere_path in l_ere_path:
                with open(ere_path) as ere:
                    logging.info("Processing: " + os.path.basename(ere_path))
                    self.parse_ere(ere, doc)
            return doc

    def get_doc(self):
        """Yield one DEDocument per source file that has ERE annotations."""
        super().get_doc()

        sources = {}
        eres = defaultdict(list)
        annotate_ranges = defaultdict(list)

        for fn in os.listdir(self.params.ere):
            basename = fn.replace(self.params.ere_ext, '')
            if self.params.ere_split:
                # Annotation files may cover sub-ranges: "<doc>_<begin>-<end>".
                parts = basename.split('_')
                r = [int(p) for p in parts[-1].split('-')]
                # NOTE(review): ranges are keyed by the pre-split name but
                # looked up below by the split name -- confirm intended.
                annotate_ranges[basename].append(r)
                basename = '_'.join(parts[:-1])

            ere_fn = os.path.join(self.params.ere, fn)
            eres[basename].append(ere_fn)

        for fn in os.listdir(self.params.source):
            txt_base = fn.replace(self.params.src_ext, '')
            if txt_base in eres:
                sources[txt_base] = os.path.join(self.params.source, fn)

        if not os.path.exists(self.params.out_dir):
            os.makedirs(self.params.out_dir)

        for basename, source in sources.items():
            l_ere = eres[basename]
            ranges = annotate_ranges[basename]
            doc = self.read_rich_ere(self.corpus, source, l_ere, ranges)
            yield doc
| [
"os.path.exists",
"os.listdir",
"xml.etree.ElementTree.parse",
"os.makedirs",
"os.path.join",
"collections.defaultdict",
"event.io.dataset.base.Span",
"os.path.basename",
"event.io.dataset.base.DEDocument"
] | [((5264, 5281), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5275, 5281), False, 'from collections import defaultdict\n'), ((5308, 5325), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5319, 5325), False, 'from collections import defaultdict\n'), ((5345, 5372), 'os.listdir', 'os.listdir', (['self.params.ere'], {}), '(self.params.ere)\n', (5355, 5372), False, 'import os\n'), ((5791, 5821), 'os.listdir', 'os.listdir', (['self.params.source'], {}), '(self.params.source)\n', (5801, 5821), False, 'import os\n'), ((4880, 4938), 'event.io.dataset.base.DEDocument', 'DEDocument', (['corpus', 'text', 'ranges', 'self.params.ignore_quote'], {}), '(corpus, text, ranges, self.params.ignore_quote)\n', (4890, 4938), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument, RelationMention\n'), ((5696, 5729), 'os.path.join', 'os.path.join', (['self.params.ere', 'fn'], {}), '(self.params.ere, fn)\n', (5708, 5729), False, 'import os\n'), ((6004, 6039), 'os.path.exists', 'os.path.exists', (['self.params.out_dir'], {}), '(self.params.out_dir)\n', (6018, 6039), False, 'import os\n'), ((6053, 6085), 'os.makedirs', 'os.makedirs', (['self.params.out_dir'], {}), '(self.params.out_dir)\n', (6064, 6085), False, 'import os\n'), ((419, 437), 'xml.etree.ElementTree.parse', 'ET.parse', (['ere_file'], {}), '(ere_file)\n', (427, 437), True, 'import xml.etree.ElementTree as ET\n'), ((1048, 1092), 'event.io.dataset.base.Span', 'Span', (["ent_info['offset']", "ent_info['length']"], {}), "(ent_info['offset'], ent_info['length'])\n", (1052, 1092), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument, RelationMention\n'), ((1567, 1581), 'event.io.dataset.base.Span', 'Span', (['b', '(b + l)'], {}), '(b, b + l)\n', (1571, 1581), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument, RelationMention\n'), ((5951, 5987), 'os.path.join', 'os.path.join', (['self.params.source', 'fn'], {}), 
'(self.params.source, fn)\n', (5963, 5987), False, 'import os\n'), ((2252, 2281), 'event.io.dataset.base.Span', 'Span', (['offset', '(offset + length)'], {}), '(offset, offset + length)\n', (2256, 2281), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument, RelationMention\n'), ((4469, 4501), 'event.io.dataset.base.Span', 'Span', (['trigger_begin', 'trigger_len'], {}), '(trigger_begin, trigger_len)\n', (4473, 4501), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument, RelationMention\n'), ((5073, 5099), 'os.path.basename', 'os.path.basename', (['ere_path'], {}), '(ere_path)\n', (5089, 5099), False, 'import os\n')] |
"""
File: examples/util/rectangular_binner.py
Author: <NAME>
Date: 22 Sep 2018
Description: Example script showing the use of the RectangularBinner class.
"""
from __future__ import division
import numpy as np
import matplotlib.pyplot as pl
from pylinex import RectangularBinner
# Plot styling and binning configuration.
fontsize = 24
num_fine_points = 1000
num_coarse_bins = 20
wavelength = 0.4

# Fine grid on (-1, 1) with the endpoints dropped, plus an oscillatory signal.
fine_x = np.linspace(-1, 1, num_fine_points)[1:-1]
fine_error = np.ones_like(fine_x)
fine_y = np.sin(2 * np.pi * fine_x / wavelength) * np.sinh(fine_x)
bin_edges = np.linspace(-1, 1, num_coarse_bins + 1)
weights = np.ones_like(fine_y)

# Bin the curve (and its error bars) onto the coarse grid.
binner = RectangularBinner(fine_x, bin_edges)
coarse_x = binner.binned_x_values
(coarse_y, coarse_weights) = binner.bin(fine_y, weights=weights,
    return_weights=True)
coarse_error = binner.bin_error(fine_error, weights=weights,
    return_weights=False)

# Overlay the unbinned and binned curves.
fig = pl.figure(figsize=(12, 9))
ax = fig.add_subplot(111)
ax.plot(fine_x, fine_y, label='unbinned')
ax.plot(coarse_x, coarse_y, label='binned')
ax.legend(fontsize=fontsize)
ax.tick_params(labelsize=fontsize, width=2.5, length=7.5, which='major')
ax.tick_params(labelsize=fontsize, width=1.5, length=4.5, which='minor')
pl.show()
| [
"numpy.ones_like",
"numpy.sinh",
"numpy.linspace",
"matplotlib.pyplot.figure",
"numpy.sin",
"pylinex.RectangularBinner",
"matplotlib.pyplot.show"
] | [((431, 457), 'numpy.ones_like', 'np.ones_like', (['old_x_values'], {}), '(old_x_values)\n', (443, 457), True, 'import numpy as np\n'), ((566, 606), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(num_new_x_values + 1)'], {}), '(-1, 1, num_new_x_values + 1)\n', (577, 606), True, 'import numpy as np\n'), ((617, 643), 'numpy.ones_like', 'np.ones_like', (['old_y_values'], {}), '(old_y_values)\n', (629, 643), True, 'import numpy as np\n'), ((654, 702), 'pylinex.RectangularBinner', 'RectangularBinner', (['old_x_values', 'new_x_bin_edges'], {}), '(old_x_values, new_x_bin_edges)\n', (671, 702), False, 'from pylinex import RectangularBinner\n'), ((925, 951), 'matplotlib.pyplot.figure', 'pl.figure', ([], {'figsize': '(12, 9)'}), '(figsize=(12, 9))\n', (934, 951), True, 'import matplotlib.pyplot as pl\n'), ((1259, 1268), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (1266, 1268), True, 'import matplotlib.pyplot as pl\n'), ((376, 412), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'num_old_x_values'], {}), '(-1, 1, num_old_x_values)\n', (387, 412), True, 'import numpy as np\n'), ((478, 523), 'numpy.sin', 'np.sin', (['(2 * np.pi * old_x_values / wavelength)'], {}), '(2 * np.pi * old_x_values / wavelength)\n', (484, 523), True, 'import numpy as np\n'), ((526, 547), 'numpy.sinh', 'np.sinh', (['old_x_values'], {}), '(old_x_values)\n', (533, 547), True, 'import numpy as np\n')] |
from setuptools import setup

# Read packaging metadata from the repository.  Context managers close the
# file handles instead of leaking them until garbage collection.
with open('LICENSE') as license_file:
    __license__ = license_file.read()
with open('VERSION') as version_file:
    # Strip whitespace so a trailing newline in VERSION cannot leak into
    # the version string.
    version = version_file.read().strip()
with open('README.md') as readme_file:
    long_description = readme_file.read()

setup(
    name='bulmate',
    version=version,
    author='gardsted',
    author_email='<EMAIL>',
    license='MIT',
    url='https://github.com/gardsted/bulmate/',
    description='Bulmate is a Python library for dominating CCCP with Bulma',
    long_description=long_description,
    long_description_content_type='text/markdown',
    keywords='framework templating template html xhtml python html5 css bulma',
    python_requires='>=3.5.*',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Text Processing :: Markup :: HTML',
    ],
    packages=['bulmate'],
    # BUG FIX: "requirements" is not a setup() keyword and was silently
    # ignored, so the dependencies were never installed; setuptools expects
    # install_requires.
    install_requires=["cccp", "dominate"],
    include_package_data=True,
)
| [
"setuptools.setup"
] | [((146, 1049), 'setuptools.setup', 'setup', ([], {'name': '"""bulmate"""', 'version': 'version', 'author': '"""gardsted"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'url': '"""https://github.com/gardsted/bulmate/"""', 'description': '"""Bulmate is a Python library for dominating CCCP with Bulma"""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'keywords': '"""framework templating template html xhtml python html5 css bulma"""', 'python_requires': '""">=3.5.*"""', 'classifiers': "['Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Text Processing :: Markup :: HTML']", 'packages': "['bulmate']", 'requirements': "['cccp', 'dominate']", 'include_package_data': '(True)'}), "(name='bulmate', version=version, author='gardsted', author_email=\n '<EMAIL>', license='MIT', url='https://github.com/gardsted/bulmate/',\n description=\n 'Bulmate is a Python library for dominating CCCP with Bulma',\n long_description=long_description, long_description_content_type=\n 'text/markdown', keywords=\n 'framework templating template html xhtml python html5 css bulma',\n python_requires='>=3.5.*', classifiers=[\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Text Processing :: Markup :: HTML'], packages=['bulmate'],\n requirements=['cccp', 'dominate'], include_package_data=True)\n", (151, 1049), False, 'from setuptools import setup\n')] |
class Calculador_de_impostos:
    """Delegates tax calculation to a strategy object and prints the result."""

    def realiza_calculo(self, orcamento, imposto):
        """Compute the tax for *orcamento* via *imposto* and echo it to stdout."""
        print(imposto.calcula(orcamento))
if __name__ == '__main__':
    from orcamento import Orcamento, Item
    from impostos import ISS, ICMS, ICPP, IKCV

    # Build a sample budget with three items.
    orcamento = Orcamento()
    for descricao, valor in (('ITEM 1', 50), ('ITEM 2', 200), ('ITEM 3', 250)):
        orcamento.adiciona_item(Item(descricao, valor))

    calculadora = Calculador_de_impostos()
    print('ISS e ICMS')
    calculadora.realiza_calculo(orcamento, ICMS(ISS()))
    print('ICPP e IKCV')
    calculadora.realiza_calculo(orcamento, IKCV(ICPP()))
| [
"orcamento.Item",
"impostos.ISS",
"impostos.ICPP",
"orcamento.Orcamento"
] | [((305, 316), 'orcamento.Orcamento', 'Orcamento', ([], {}), '()\n', (314, 316), False, 'from orcamento import Orcamento, Item\n'), ((345, 363), 'orcamento.Item', 'Item', (['"""ITEM 1"""', '(50)'], {}), "('ITEM 1', 50)\n", (349, 363), False, 'from orcamento import Orcamento, Item\n'), ((393, 412), 'orcamento.Item', 'Item', (['"""ITEM 2"""', '(200)'], {}), "('ITEM 2', 200)\n", (397, 412), False, 'from orcamento import Orcamento, Item\n'), ((442, 461), 'orcamento.Item', 'Item', (['"""ITEM 3"""', '(250)'], {}), "('ITEM 3', 250)\n", (446, 461), False, 'from orcamento import Orcamento, Item\n'), ((601, 606), 'impostos.ISS', 'ISS', ([], {}), '()\n', (604, 606), False, 'from impostos import ISS, ICMS, ICPP, IKCV\n'), ((694, 700), 'impostos.ICPP', 'ICPP', ([], {}), '()\n', (698, 700), False, 'from impostos import ISS, ICMS, ICPP, IKCV\n')] |
import sys
sys.path.extend(['..'])
import tensorflow as tf
# TF1-style session setup: device-placement logging disabled, and GPU memory
# allocated on demand (allow_growth) instead of grabbing the whole card upfront.
config = tf.ConfigProto(log_device_placement=False)
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
from generator.generate_code import *
from nltk.translate.bleu_score import corpus_bleu
from config.config import *
from base.BaseModel import *
from utils.tokenizer import *
def evaluate_model(input_path, model_path, tokenizer, max_length=48, display=False):
    '''
    Evaluate model by comparing actual vs predictions via the BLEU scoring criteria

    :param input_path: input path containing images + gui code pairs to evaluate model on
    :param model_path: path to model files (expects one ``*.json`` architecture
        file and one ``*.hdf5`` weights file in that directory)
    :param tokenizer: a Keras Tokenizer object fit on vocab
    :param max_length: context length
    :param display: bool on whether to print out DSL code predictions and actual labels to standard output
    :return: 4-ngram BLEU score, list of actual DSL code, list of predicted DSL code
    '''
    model_json_path = glob.glob(os.path.join(model_path, '*.json'))[0]
    model_weights_path = glob.glob(os.path.join(model_path, '*.hdf5'))[0]
    with open(model_json_path, 'r') as fh:
        model_json = fh.read()
    model = model_from_json(model_json)
    model.load_weights(model_weights_path)
    print('Successfully loaded model and model weights...')

    images, texts = load_data(input_path)
    actual, predictions = [], []
    # images and texts are parallel lists: one screenshot per DSL source.
    for image, text in zip(images, texts):
        predicted_code = generate_code(model, image, tokenizer, max_length, display)
        if display:
            print('\n\nActual---->\n\n' + text)
        # corpus_bleu expects a list of reference token lists per candidate.
        actual.append([text.split()])
        predictions.append(predicted_code.split())
    bleu = corpus_bleu(actual, predictions)
    return bleu, actual, predictions
if __name__ == '__main__':
    argv = sys.argv[1:]
    if len(argv) != 1:
        print('Need to supply an argument specifying model path')
        sys.exit(0)
    model_path = argv[0]
    test_dir = '../data/test/'
    vocab_path = '../data/code.vocab'
    tokenizer = tokenizer(vocab_path)
    bleu, actual, predictions = evaluate_model(test_dir, model_path, tokenizer, CONTEXT_LENGTH, display=False)

    # Calculate cumulative n-gram BLEU scores from BLEU-1 to BLEU-4.
    # By default corpus_bleu() computes the cumulative 4-gram score (BLEU-4);
    # 4-gram is the most strict and corresponds best to human translations.
    # Compute each score ONCE and reuse it for both stdout and the report file
    # (previously every score was computed twice, doubling the evaluation cost).
    ngram_weights = {
        'BLEU-1': (1.0, 0, 0, 0),
        'BLEU-2': (0.5, 0.5, 0, 0),
        'BLEU-3': (0.3, 0.3, 0.3, 0),
        'BLEU-4': (0.25, 0.25, 0.25, 0.25),
    }
    scores = {name: corpus_bleu(actual, predictions, weights=w) for name, w in ngram_weights.items()}
    for name, score in scores.items():
        print('%s: %f' % (name, score))

    bleu_score_path = os.path.join(model_path, 'bleu_score.txt')
    with open(bleu_score_path, 'w') as fh:
        fh.write('Test set dir: %s\n' % test_dir)
        for name, score in scores.items():
            fh.write('%s: %f \n' % (name, score))
| [
"tensorflow.ConfigProto",
"tensorflow.Session",
"sys.path.extend",
"nltk.translate.bleu_score.corpus_bleu"
] | [((11, 34), 'sys.path.extend', 'sys.path.extend', (["['..']"], {}), "(['..'])\n", (26, 34), False, 'import sys\n'), ((69, 111), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'log_device_placement': '(False)'}), '(log_device_placement=False)\n', (83, 111), True, 'import tensorflow as tf\n'), ((158, 183), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (168, 183), True, 'import tensorflow as tf\n'), ((1750, 1782), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {}), '(actual, predictions)\n', (1761, 1782), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((2727, 2783), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(1.0, 0, 0, 0)'}), '(actual, predictions, weights=(1.0, 0, 0, 0))\n', (2738, 2783), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((2810, 2868), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.5, 0.5, 0, 0)'}), '(actual, predictions, weights=(0.5, 0.5, 0, 0))\n', (2821, 2868), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((2895, 2955), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.3, 0.3, 0.3, 0)'}), '(actual, predictions, weights=(0.3, 0.3, 0.3, 0))\n', (2906, 2955), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((2982, 3048), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.25, 0.25, 0.25, 0.25)'}), '(actual, predictions, weights=(0.25, 0.25, 0.25, 0.25))\n', (2993, 3048), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((3244, 3300), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(1.0, 0, 0, 0)'}), '(actual, predictions, weights=(1.0, 0, 0, 0))\n', (3255, 3300), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((3337, 3395), 
'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.5, 0.5, 0, 0)'}), '(actual, predictions, weights=(0.5, 0.5, 0, 0))\n', (3348, 3395), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((3432, 3492), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.3, 0.3, 0.3, 0)'}), '(actual, predictions, weights=(0.3, 0.3, 0.3, 0))\n', (3443, 3492), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((3529, 3595), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['actual', 'predictions'], {'weights': '(0.25, 0.25, 0.25, 0.25)'}), '(actual, predictions, weights=(0.25, 0.25, 0.25, 0.25))\n', (3540, 3595), False, 'from nltk.translate.bleu_score import corpus_bleu\n')] |
# Deploy idle CPU alarms to stop EC2 instances
import boto3
account_id = boto3.client("sts").get_caller_identity().get("Account")
region = boto3.session.Session().region_name
client = boto3.client("cloudwatch")
ec = boto3.client("ec2")

# NOTE(review): describe_instances() is paginated — instances beyond the first
# page are not covered here; confirm whether pagination is needed.
reservations = ec.describe_instances()

# Instances whose Name tag contains any of these substrings get no alarm.
exceptions = ["prf-"]

for r in reservations["Reservations"]:
    for i in r["Instances"]:
        instance_id = i["InstanceId"]
        # BUG FIX: `iname` used to keep its value from the previous instance
        # (or be undefined on the very first one) whenever an instance had no
        # Name tag; reset it per instance and tolerate missing Tags entirely.
        iname = ""
        for t in i.get("Tags", []):
            if t["Key"] == "Name":
                iname = t["Value"]
                break
        if any(e in iname for e in exceptions):
            continue
        alarm_name = "CPU Alarm " + iname + instance_id
        # Stop the instance when max CPU stays <= 1% for two 1-hour periods.
        client.put_metric_alarm(
            AlarmName=alarm_name,
            MetricName="CPUUtilization",
            Namespace="AWS/EC2",
            Statistic="Maximum",
            ComparisonOperator="LessThanOrEqualToThreshold",
            Threshold=1.0,
            Period=60 * 60,  # in seconds
            EvaluationPeriods=2,
            Dimensions=[{"Name": "InstanceId", "Value": instance_id}],
            Unit="Percent",
            ActionsEnabled=True,
            AlarmActions=[
                ":".join(
                    [
                        "arn:aws:swf",
                        region,
                        account_id,
                        "action/actions/AWS_EC2.InstanceId.Stop/1.0",
                    ]
                )
            ],
        )
| [
"boto3.session.Session",
"boto3.client"
] | [((185, 211), 'boto3.client', 'boto3.client', (['"""cloudwatch"""'], {}), "('cloudwatch')\n", (197, 211), False, 'import boto3\n'), ((217, 236), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (229, 236), False, 'import boto3\n'), ((140, 163), 'boto3.session.Session', 'boto3.session.Session', ([], {}), '()\n', (161, 163), False, 'import boto3\n'), ((74, 93), 'boto3.client', 'boto3.client', (['"""sts"""'], {}), "('sts')\n", (86, 93), False, 'import boto3\n')] |
import pytest
import tempfile
import re
import os
import shutil
from raiden.utils import get_contract_path
from raiden.utils.solc import compile_files_cwd
from raiden.exceptions import ContractVersionMismatch
from raiden.blockchain.abi import CONTRACT_VERSION_RE, CONTRACT_MANAGER, CONTRACT_CHANNEL_MANAGER
def replace_contract_version(file_path, new_version):
    """Rewrite the contract-version line in ``file_path`` with ``new_version``.

    Lines matching ``CONTRACT_VERSION_RE`` have their version string replaced;
    the result is written to a temporary file and then copied back in place.
    """
    version_re = re.compile(CONTRACT_VERSION_RE)
    # Keep the temp file in a context manager so its descriptor is closed
    # (previously it was never closed). copy2 must happen before the
    # NamedTemporaryFile exits, because the file is deleted on close.
    with open(file_path, 'r') as original, tempfile.NamedTemporaryFile() as replaced:
        for line in original.readlines():
            if version_re.match(line):
                # `+` on the last component: without it only the first
                # character of the patch version was replaced, mangling
                # multi-digit patch numbers (e.g. "0.3.12" -> "<new>2").
                line = re.sub(r'[0-9]+\.[0-9]+\.[0-9_]+', new_version, line)
            replaced.write(line.encode())
        replaced.flush()
        shutil.copy2(replaced.name, file_path)
class TempSolidityDir:
    """A copy of a Solidity source directory placed under a pytest tmpdir.

    The copy's path is exposed as ``self.name``.
    """

    def __init__(self, original_directory, tmpdir):
        target = tmpdir.mkdir(os.path.basename(original_directory))
        self.name = target.strpath
        # shutil.copytree() refuses to copy into an existing directory,
        # so remove the (empty) directory that mkdir() just created.
        os.rmdir(self.name)
        shutil.copytree(original_directory, self.name)
@pytest.mark.parametrize('number_of_nodes', [1])
@pytest.mark.parametrize('channels_per_node', [0])
def test_deploy_contract(raiden_network, deploy_client, tmpdir):
    """Deploying a Registry with a different version than the configured one must fail.

    This test makes sense only for the geth backend; tester uses a mocked
    Registry class.
    """
    registry_source = get_contract_path('Registry.sol')
    # Solidity uses the first 40 characters of a file path as a library symbol,
    # so the patched sources are compiled from a short temporary copy of the
    # whole contracts directory rather than from a deep path inside the repo.
    contracts_copy = TempSolidityDir(os.path.dirname(registry_source), tmpdir)
    patched_registry = os.path.join(contracts_copy.name, 'Registry.sol')
    CONTRACT_MANAGER.get_contract_abi(CONTRACT_CHANNEL_MANAGER)
    replace_contract_version(patched_registry, '0.0.31415')
    compiled = compile_files_cwd([patched_registry])
    proxy = deploy_client.deploy_solidity_contract(
        'Registry',
        compiled,
        dict(),
        None,
        contract_path=patched_registry,
    )
    deployed_address = proxy.contract_address
    app0 = raiden_network[0]
    # Registering the mismatching contract must raise on the client side.
    with pytest.raises(ContractVersionMismatch):
        app0.raiden.chain.registry(deployed_address)
| [
"raiden.utils.get_contract_path",
"re.compile",
"shutil.copy2",
"os.path.join",
"raiden.utils.solc.compile_files_cwd",
"shutil.copytree",
"pytest.mark.parametrize",
"os.rmdir",
"os.path.dirname",
"pytest.raises",
"os.path.basename",
"raiden.blockchain.abi.CONTRACT_MANAGER.get_contract_abi",
... | [((1103, 1150), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""number_of_nodes"""', '[1]'], {}), "('number_of_nodes', [1])\n", (1126, 1150), False, 'import pytest\n'), ((1152, 1201), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""channels_per_node"""', '[0]'], {}), "('channels_per_node', [0])\n", (1175, 1201), False, 'import pytest\n'), ((382, 413), 're.compile', 're.compile', (['CONTRACT_VERSION_RE'], {}), '(CONTRACT_VERSION_RE)\n', (392, 413), False, 'import re\n'), ((1475, 1508), 'raiden.utils.get_contract_path', 'get_contract_path', (['"""Registry.sol"""'], {}), "('Registry.sol')\n", (1492, 1508), False, 'from raiden.utils import get_contract_path\n'), ((2150, 2193), 'os.path.join', 'os.path.join', (['temp_dir.name', '"""Registry.sol"""'], {}), "(temp_dir.name, 'Registry.sol')\n", (2162, 2193), False, 'import os\n'), ((2199, 2258), 'raiden.blockchain.abi.CONTRACT_MANAGER.get_contract_abi', 'CONTRACT_MANAGER.get_contract_abi', (['CONTRACT_CHANNEL_MANAGER'], {}), '(CONTRACT_CHANNEL_MANAGER)\n', (2232, 2258), False, 'from raiden.blockchain.abi import CONTRACT_VERSION_RE, CONTRACT_MANAGER, CONTRACT_CHANNEL_MANAGER\n'), ((2342, 2385), 'raiden.utils.solc.compile_files_cwd', 'compile_files_cwd', (['[replaced_registry_path]'], {}), '([replaced_registry_path])\n', (2359, 2385), False, 'from raiden.utils.solc import compile_files_cwd\n'), ((476, 505), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (503, 505), False, 'import tempfile\n'), ((739, 777), 'shutil.copy2', 'shutil.copy2', (['replaced.name', 'file_path'], {}), '(replaced.name, file_path)\n', (751, 777), False, 'import shutil\n'), ((968, 987), 'os.rmdir', 'os.rmdir', (['self.name'], {}), '(self.name)\n', (976, 987), False, 'import os\n'), ((1053, 1099), 'shutil.copytree', 'shutil.copytree', (['original_directory', 'self.name'], {}), '(original_directory, self.name)\n', (1068, 1099), False, 'import shutil\n'), ((2081, 2111), 'os.path.dirname', 
'os.path.dirname', (['contract_path'], {}), '(contract_path)\n', (2096, 2111), False, 'import os\n'), ((2663, 2701), 'pytest.raises', 'pytest.raises', (['ContractVersionMismatch'], {}), '(ContractVersionMismatch)\n', (2676, 2701), False, 'import pytest\n'), ((886, 922), 'os.path.basename', 'os.path.basename', (['original_directory'], {}), '(original_directory)\n', (902, 922), False, 'import os\n'), ((610, 665), 're.sub', 're.sub', (['"""[0-9]+\\\\.[0-9]+\\\\.[0-9\\\\_]"""', 'new_version', 'line'], {}), "('[0-9]+\\\\.[0-9]+\\\\.[0-9\\\\_]', new_version, line)\n", (616, 665), False, 'import re\n')] |
# Copyright (c) 2020, <NAME>, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of <NAME>, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <NAME>, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Script to test the simplified box flipping task using a hard-coded time-based policy
"""
import math
import rcsenv
import torch as to
import pyrado
from pyrado.domain_randomization.domain_parameter import UniformDomainParam
from pyrado.domain_randomization.domain_randomizer import DomainRandomizer
from pyrado.environment_wrappers.domain_randomization import DomainRandWrapperLive
from pyrado.environments.rcspysim.mini_golf import MiniGolfIKSim, MiniGolfJointCtrlSim
from pyrado.policies.features import FeatureStack, const_feat
from pyrado.policies.feed_back.linear import LinearPolicy
from pyrado.policies.feed_forward.dummy import IdlePolicy
from pyrado.policies.feed_forward.poly_time import PolySplineTimePolicy
from pyrado.policies.special.environment_specific import create_mg_joint_pos_policy
from pyrado.sampling.rollout import after_rollout_query, rollout
from pyrado.utils.data_types import RenderMode
from pyrado.utils.input_output import print_cbt
# Set the RCS environment log level (2) once at import time, before any
# simulation environment is constructed below.
rcsenv.setLogLevel(2)
def create_idle_setup(physicsEngine: str, dt: float, max_steps: int, checkJointLimits: bool):
    """Create the IK-based mini golf environment paired with a do-nothing policy."""
    sim_kwargs = dict(
        usePhysicsNode=True,
        physicsEngine=physicsEngine,
        dt=dt,
        max_steps=max_steps,
        checkJointLimits=checkJointLimits,
        fixedInitState=True,
        observeForceTorque=True,
    )
    env = MiniGolfIKSim(**sim_kwargs)
    # IdlePolicy issues no actions, i.e. the robot does not move at all.
    policy = IdlePolicy(env.spec)
    return env, policy
def create_pst_setup(physicsEngine: str, dt: float, max_steps: int, checkJointLimits: bool):
    """Create the IK-based mini golf environment driven by a polynomial spline time policy."""
    relativeZdTask = True
    print_cbt(f"relativeZdTask = {relativeZdTask}", "c", bright=True)

    env = MiniGolfIKSim(
        relativeZdTask=relativeZdTask,
        usePhysicsNode=True,
        physicsEngine=physicsEngine,
        dt=dt,
        max_steps=max_steps,
        checkJointLimits=checkJointLimits,
        fixedInitState=True,
        observeForceTorque=False,
        collisionAvoidanceIK=True,
    )

    # Boundary conditions (position row, velocity row) for the spline policy.
    if relativeZdTask:
        # Task space: Zd (rel), Y (rel), Zdist (abs), PHI (abs), THETA (abs)
        t_end = 0.6
        init_cond = [
            [-100.0, 0.0, 0.01, math.pi / 2, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
        ]
        final_cond = [
            [0.0, 0.0, 0.01, math.pi / 2, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
        ]
    else:
        # Task space: X (abs), Y (rel), Zdist (abs), PHI (abs), THETA (abs)
        t_end = 3.0
        init_cond = [
            [0.3, 0.0, 0.01, math.pi / 2, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
        ]
        final_cond = [
            [0.9, 0.0, 0.005, math.pi / 2, 0.0],  # math.pi / 2 - 0.4
            [0.0, 0.0, 0.0, 0.0, 0.0],
        ]

    policy = PolySplineTimePolicy(
        env.spec,
        dt,
        t_end=t_end,
        cond_lvl="vel",
        cond_final=final_cond,
        cond_init=init_cond,
        overtime_behavior="hold",
    )
    return env, policy
def create_lin_setup(physicsEngine: str, dt: float, max_steps: int, checkJointLimits: bool):
    """Create the IK-based mini golf environment with a constant-feature linear policy."""
    env = MiniGolfIKSim(
        usePhysicsNode=True,
        physicsEngine=physicsEngine,
        dt=dt,
        max_steps=max_steps,
        checkJointLimits=checkJointLimits,
        fixedInitState=True,
    )
    feats = FeatureStack([const_feat])
    policy = LinearPolicy(env.spec, feats)
    # Fixed policy parameters; original note: X (abs), Y (rel), Z (abs), C (abs)
    policy.param_values = to.tensor([0.6, 0.0, 0.03])
    return env, policy
def create_time_setup(physicsEngine: str, dt: float, max_steps: int, checkJointLimits: bool):
    """Create the joint-space-controlled mini golf environment with the scripted strike policy."""
    sim_kwargs = dict(
        usePhysicsNode=True,
        physicsEngine=physicsEngine,
        dt=dt,
        max_steps=max_steps,
        checkJointLimits=checkJointLimits,
        fixedInitState=True,
        collisionAvoidanceIK=False,
        graphFileName="gMiniGolf_gt.xml",
        physicsConfigFile="pMiniGolf_gt.xml",
    )
    env = MiniGolfJointCtrlSim(**sim_kwargs)
    policy = create_mg_joint_pos_policy(env, t_strike_end=0.5)
    return env, policy
if __name__ == "__main__":
# Choose setup
setup_type = "pst" # idle, pst, lin, or time
physicsEngine = "Bullet" # Bullet or Vortex
dt = 1 / 100.0
max_steps = int(8 / dt)
checkJointLimits = True
randomize = False
if setup_type == "idle":
env, policy = create_idle_setup(physicsEngine, dt, max_steps, checkJointLimits)
elif setup_type == "pst":
env, policy = create_pst_setup(physicsEngine, dt, max_steps, checkJointLimits)
elif setup_type == "lin":
env, policy = create_lin_setup(physicsEngine, dt, max_steps, checkJointLimits)
elif setup_type == "time":
env, policy = create_time_setup(physicsEngine, dt, max_steps, checkJointLimits)
else:
raise pyrado.ValueErr(given=setup_type, eq_constraint="idle, pst, lin, or time")
if randomize:
dp_nom = env.get_nominal_domain_param()
randomizer = DomainRandomizer(
UniformDomainParam(
name="ball_restitution",
mean=dp_nom["ball_restitution"],
halfspan=dp_nom["ball_restitution"],
),
UniformDomainParam(
name="ball_radius", mean=dp_nom["ball_radius"], halfspan=dp_nom["ball_radius"] / 5, clip_lo=5e-3
),
UniformDomainParam(name="ball_mass", mean=dp_nom["ball_mass"], halfspan=dp_nom["ball_mass"] / 2, clip_lo=0),
UniformDomainParam(name="club_mass", mean=dp_nom["club_mass"], halfspan=dp_nom["club_mass"] / 5),
UniformDomainParam(
name="ball_friction_coefficient",
mean=dp_nom["ball_friction_coefficient"],
halfspan=dp_nom["ball_friction_coefficient"] / 4,
clip_lo=0,
),
UniformDomainParam(
name="ball_rolling_friction_coefficient",
mean=dp_nom["ball_rolling_friction_coefficient"],
halfspan=dp_nom["ball_rolling_friction_coefficient"] / 3,
clip_lo=0,
),
UniformDomainParam(
name="ground_friction_coefficient",
mean=dp_nom["ground_friction_coefficient"],
halfspan=dp_nom["ground_friction_coefficient"] / 4,
clip_lo=0,
),
UniformDomainParam(name="ball_slip", mean=dp_nom["ball_slip"], halfspan=dp_nom["ball_slip"] / 2, clip_lo=0),
UniformDomainParam(
name="ground_slip", mean=dp_nom["ground_slip"], halfspan=dp_nom["ground_slip"] / 2, clip_lo=0
),
UniformDomainParam(name="obstacleleft_pos_offset_x", mean=0, halfspan=0.03),
UniformDomainParam(name="obstacleleft_pos_offset_y", mean=0, halfspan=0.03),
UniformDomainParam(name="obstacleleft_rot_offset_c", mean=0 / 180 * math.pi, halfspan=5 / 180 * math.pi),
UniformDomainParam(name="obstacleright_pos_offset_x", mean=0, halfspan=0.03),
UniformDomainParam(name="obstacleright_pos_offset_y", mean=0, halfspan=0.03),
UniformDomainParam(name="obstacleright_rot_offset_c", mean=0 / 180 * math.pi, halfspan=5 / 180 * math.pi),
)
env = DomainRandWrapperLive(env, randomizer)
# Simulate and plot
print(env.obs_space)
done, param, state = False, None, None
while not done:
ro = rollout(
env,
policy,
render_mode=RenderMode(text=False, video=True),
eval=True,
max_steps=max_steps,
reset_kwargs=dict(domain_param=param, init_state=state),
stop_on_done=False,
)
print_cbt(f"Return: {ro.undiscounted_return()}", "g", bright=True)
done, state, param = after_rollout_query(env, policy, ro)
| [
"pyrado.utils.input_output.print_cbt",
"pyrado.utils.data_types.RenderMode",
"pyrado.policies.feed_forward.dummy.IdlePolicy",
"pyrado.policies.special.environment_specific.create_mg_joint_pos_policy",
"pyrado.environments.rcspysim.mini_golf.MiniGolfJointCtrlSim",
"pyrado.policies.feed_forward.poly_time.Po... | [((2703, 2724), 'rcsenv.setLogLevel', 'rcsenv.setLogLevel', (['(2)'], {}), '(2)\n', (2721, 2724), False, 'import rcsenv\n'), ((2856, 3037), 'pyrado.environments.rcspysim.mini_golf.MiniGolfIKSim', 'MiniGolfIKSim', ([], {'usePhysicsNode': '(True)', 'physicsEngine': 'physicsEngine', 'dt': 'dt', 'max_steps': 'max_steps', 'checkJointLimits': 'checkJointLimits', 'fixedInitState': '(True)', 'observeForceTorque': '(True)'}), '(usePhysicsNode=True, physicsEngine=physicsEngine, dt=dt,\n max_steps=max_steps, checkJointLimits=checkJointLimits, fixedInitState=\n True, observeForceTorque=True)\n', (2869, 3037), False, 'from pyrado.environments.rcspysim.mini_golf import MiniGolfIKSim, MiniGolfJointCtrlSim\n'), ((3126, 3146), 'pyrado.policies.feed_forward.dummy.IdlePolicy', 'IdlePolicy', (['env.spec'], {}), '(env.spec)\n', (3136, 3146), False, 'from pyrado.policies.feed_forward.dummy import IdlePolicy\n'), ((3342, 3407), 'pyrado.utils.input_output.print_cbt', 'print_cbt', (['f"""relativeZdTask = {relativeZdTask}"""', '"""c"""'], {'bright': '(True)'}), "(f'relativeZdTask = {relativeZdTask}', 'c', bright=True)\n", (3351, 3407), False, 'from pyrado.utils.input_output import print_cbt\n'), ((3418, 3661), 'pyrado.environments.rcspysim.mini_golf.MiniGolfIKSim', 'MiniGolfIKSim', ([], {'relativeZdTask': 'relativeZdTask', 'usePhysicsNode': '(True)', 'physicsEngine': 'physicsEngine', 'dt': 'dt', 'max_steps': 'max_steps', 'checkJointLimits': 'checkJointLimits', 'fixedInitState': '(True)', 'observeForceTorque': '(False)', 'collisionAvoidanceIK': '(True)'}), '(relativeZdTask=relativeZdTask, usePhysicsNode=True,\n physicsEngine=physicsEngine, dt=dt, max_steps=max_steps,\n checkJointLimits=checkJointLimits, fixedInitState=True,\n observeForceTorque=False, collisionAvoidanceIK=True)\n', (3431, 3661), False, 'from pyrado.environments.rcspysim.mini_golf import MiniGolfIKSim, MiniGolfJointCtrlSim\n'), ((4961, 5012), 
'pyrado.policies.feed_forward.poly_time.PolySplineTimePolicy', 'PolySplineTimePolicy', (['env.spec', 'dt'], {}), '(env.spec, dt, **policy_hparam)\n', (4981, 5012), False, 'from pyrado.policies.feed_forward.poly_time import PolySplineTimePolicy\n'), ((5167, 5323), 'pyrado.environments.rcspysim.mini_golf.MiniGolfIKSim', 'MiniGolfIKSim', ([], {'usePhysicsNode': '(True)', 'physicsEngine': 'physicsEngine', 'dt': 'dt', 'max_steps': 'max_steps', 'checkJointLimits': 'checkJointLimits', 'fixedInitState': '(True)'}), '(usePhysicsNode=True, physicsEngine=physicsEngine, dt=dt,\n max_steps=max_steps, checkJointLimits=checkJointLimits, fixedInitState=True\n )\n', (5180, 5323), False, 'from pyrado.environments.rcspysim.mini_golf import MiniGolfIKSim, MiniGolfJointCtrlSim\n'), ((5481, 5508), 'torch.tensor', 'to.tensor', (['[0.6, 0.0, 0.03]'], {}), '([0.6, 0.0, 0.03])\n', (5490, 5508), True, 'import torch as to\n'), ((5702, 5970), 'pyrado.environments.rcspysim.mini_golf.MiniGolfJointCtrlSim', 'MiniGolfJointCtrlSim', ([], {'usePhysicsNode': '(True)', 'physicsEngine': 'physicsEngine', 'dt': 'dt', 'max_steps': 'max_steps', 'checkJointLimits': 'checkJointLimits', 'fixedInitState': '(True)', 'collisionAvoidanceIK': '(False)', 'graphFileName': '"""gMiniGolf_gt.xml"""', 'physicsConfigFile': '"""pMiniGolf_gt.xml"""'}), "(usePhysicsNode=True, physicsEngine=physicsEngine, dt=\n dt, max_steps=max_steps, checkJointLimits=checkJointLimits,\n fixedInitState=True, collisionAvoidanceIK=False, graphFileName=\n 'gMiniGolf_gt.xml', physicsConfigFile='pMiniGolf_gt.xml')\n", (5722, 5970), False, 'from pyrado.environments.rcspysim.mini_golf import MiniGolfIKSim, MiniGolfJointCtrlSim\n'), ((6070, 6119), 'pyrado.policies.special.environment_specific.create_mg_joint_pos_policy', 'create_mg_joint_pos_policy', (['env'], {'t_strike_end': '(0.5)'}), '(env, t_strike_end=0.5)\n', (6096, 6119), False, 'from pyrado.policies.special.environment_specific import create_mg_joint_pos_policy\n'), ((5427, 5453), 
'pyrado.policies.features.FeatureStack', 'FeatureStack', (['[const_feat]'], {}), '([const_feat])\n', (5439, 5453), False, 'from pyrado.policies.features import FeatureStack, const_feat\n'), ((9317, 9355), 'pyrado.environment_wrappers.domain_randomization.DomainRandWrapperLive', 'DomainRandWrapperLive', (['env', 'randomizer'], {}), '(env, randomizer)\n', (9338, 9355), False, 'from pyrado.environment_wrappers.domain_randomization import DomainRandWrapperLive\n'), ((9859, 9895), 'pyrado.sampling.rollout.after_rollout_query', 'after_rollout_query', (['env', 'policy', 'ro'], {}), '(env, policy, ro)\n', (9878, 9895), False, 'from pyrado.sampling.rollout import after_rollout_query, rollout\n'), ((7077, 7194), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_restitution"""', 'mean': "dp_nom['ball_restitution']", 'halfspan': "dp_nom['ball_restitution']"}), "(name='ball_restitution', mean=dp_nom['ball_restitution'],\n halfspan=dp_nom['ball_restitution'])\n", (7095, 7194), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((7267, 7389), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_radius"""', 'mean': "dp_nom['ball_radius']", 'halfspan': "(dp_nom['ball_radius'] / 5)", 'clip_lo': '(0.005)'}), "(name='ball_radius', mean=dp_nom['ball_radius'], halfspan\n =dp_nom['ball_radius'] / 5, clip_lo=0.005)\n", (7285, 7389), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((7427, 7539), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_mass"""', 'mean': "dp_nom['ball_mass']", 'halfspan': "(dp_nom['ball_mass'] / 2)", 'clip_lo': '(0)'}), "(name='ball_mass', mean=dp_nom['ball_mass'], halfspan=\n dp_nom['ball_mass'] / 2, clip_lo=0)\n", (7445, 7539), False, 'from pyrado.domain_randomization.domain_parameter import 
UniformDomainParam\n'), ((7548, 7649), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""club_mass"""', 'mean': "dp_nom['club_mass']", 'halfspan': "(dp_nom['club_mass'] / 5)"}), "(name='club_mass', mean=dp_nom['club_mass'], halfspan=\n dp_nom['club_mass'] / 5)\n", (7566, 7649), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((7658, 7823), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_friction_coefficient"""', 'mean': "dp_nom['ball_friction_coefficient']", 'halfspan': "(dp_nom['ball_friction_coefficient'] / 4)", 'clip_lo': '(0)'}), "(name='ball_friction_coefficient', mean=dp_nom[\n 'ball_friction_coefficient'], halfspan=dp_nom[\n 'ball_friction_coefficient'] / 4, clip_lo=0)\n", (7676, 7823), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((7906, 8095), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_rolling_friction_coefficient"""', 'mean': "dp_nom['ball_rolling_friction_coefficient']", 'halfspan': "(dp_nom['ball_rolling_friction_coefficient'] / 3)", 'clip_lo': '(0)'}), "(name='ball_rolling_friction_coefficient', mean=dp_nom[\n 'ball_rolling_friction_coefficient'], halfspan=dp_nom[\n 'ball_rolling_friction_coefficient'] / 3, clip_lo=0)\n", (7924, 8095), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8178, 8349), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ground_friction_coefficient"""', 'mean': "dp_nom['ground_friction_coefficient']", 'halfspan': "(dp_nom['ground_friction_coefficient'] / 4)", 'clip_lo': '(0)'}), "(name='ground_friction_coefficient', mean=dp_nom[\n 'ground_friction_coefficient'], halfspan=dp_nom[\n 'ground_friction_coefficient'] / 4, clip_lo=0)\n", (8196, 8349), False, 'from 
pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8432, 8544), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ball_slip"""', 'mean': "dp_nom['ball_slip']", 'halfspan': "(dp_nom['ball_slip'] / 2)", 'clip_lo': '(0)'}), "(name='ball_slip', mean=dp_nom['ball_slip'], halfspan=\n dp_nom['ball_slip'] / 2, clip_lo=0)\n", (8450, 8544), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8553, 8671), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""ground_slip"""', 'mean': "dp_nom['ground_slip']", 'halfspan': "(dp_nom['ground_slip'] / 2)", 'clip_lo': '(0)'}), "(name='ground_slip', mean=dp_nom['ground_slip'], halfspan\n =dp_nom['ground_slip'] / 2, clip_lo=0)\n", (8571, 8671), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8710, 8785), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleleft_pos_offset_x"""', 'mean': '(0)', 'halfspan': '(0.03)'}), "(name='obstacleleft_pos_offset_x', mean=0, halfspan=0.03)\n", (8728, 8785), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8799, 8874), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleleft_pos_offset_y"""', 'mean': '(0)', 'halfspan': '(0.03)'}), "(name='obstacleleft_pos_offset_y', mean=0, halfspan=0.03)\n", (8817, 8874), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((8888, 8996), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleleft_rot_offset_c"""', 'mean': '(0 / 180 * math.pi)', 'halfspan': '(5 / 180 * math.pi)'}), "(name='obstacleleft_rot_offset_c', mean=0 / 180 * math.pi,\n halfspan=5 / 180 * math.pi)\n", (8906, 8996), False, 
'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((9006, 9082), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleright_pos_offset_x"""', 'mean': '(0)', 'halfspan': '(0.03)'}), "(name='obstacleright_pos_offset_x', mean=0, halfspan=0.03)\n", (9024, 9082), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((9096, 9172), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleright_pos_offset_y"""', 'mean': '(0)', 'halfspan': '(0.03)'}), "(name='obstacleright_pos_offset_y', mean=0, halfspan=0.03)\n", (9114, 9172), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((9186, 9296), 'pyrado.domain_randomization.domain_parameter.UniformDomainParam', 'UniformDomainParam', ([], {'name': '"""obstacleright_rot_offset_c"""', 'mean': '(0 / 180 * math.pi)', 'halfspan': '(5 / 180 * math.pi)'}), "(name='obstacleright_rot_offset_c', mean=0 / 180 * math.\n pi, halfspan=5 / 180 * math.pi)\n", (9204, 9296), False, 'from pyrado.domain_randomization.domain_parameter import UniformDomainParam\n'), ((9552, 9586), 'pyrado.utils.data_types.RenderMode', 'RenderMode', ([], {'text': '(False)', 'video': '(True)'}), '(text=False, video=True)\n', (9562, 9586), False, 'from pyrado.utils.data_types import RenderMode\n'), ((6884, 6958), 'pyrado.ValueErr', 'pyrado.ValueErr', ([], {'given': 'setup_type', 'eq_constraint': '"""idle, pst, lin, or time"""'}), "(given=setup_type, eq_constraint='idle, pst, lin, or time')\n", (6899, 6958), False, 'import pyrado\n')] |
# @l2g 43 python3
# [43] Multiply Strings
# Difficulty: Medium
# https://leetcode.com/problems/multiply-strings
#
# Given two non-negative integers num1 and num2 represented as strings,
# return the product of num1 and num2,also represented as a string.
# Note: You must not use any built-in BigInteger library or convert the inputs to integer directly.
#
# Example 1:
# Input: num1 = "2", num2 = "3"
# Output: "6"
# Example 2:
# Input: num1 = "123", num2 = "456"
# Output: "56088"
#
#
# Constraints:
#
# 1 <= num1.length, num2.length <= 200
# num1 and num2 consist of digits only.
# Both num1 and num2 do not contain any leading zero, except the number 0 itself.
#
#
class Solution:
def multiply(self, num1: str, num2: str) -> str:
ans = []
for i, n1 in enumerate(reversed(num1)):
for j, n2 in enumerate(reversed(num2)):
f_num = (ord(n1) - ord("0")) * (10 ** i)
s_num = (ord(n2) - ord("0")) * (10 ** j)
ans.append(f_num * s_num)
return str(sum(ans))
if __name__ == "__main__":
import os
import pytest
pytest.main([os.path.join("tests", "test_43.py")])
| [
"os.path.join"
] | [((1123, 1158), 'os.path.join', 'os.path.join', (['"""tests"""', '"""test_43.py"""'], {}), "('tests', 'test_43.py')\n", (1135, 1158), False, 'import os\n')] |
from instauto.api.client import ApiClient
from instauto.helpers.post import unlike_post
client = ApiClient.initiate_from_file('.instauto.save')
unlike_post(client, "media_id")
| [
"instauto.helpers.post.unlike_post",
"instauto.api.client.ApiClient.initiate_from_file"
] | [((99, 145), 'instauto.api.client.ApiClient.initiate_from_file', 'ApiClient.initiate_from_file', (['""".instauto.save"""'], {}), "('.instauto.save')\n", (127, 145), False, 'from instauto.api.client import ApiClient\n'), ((146, 177), 'instauto.helpers.post.unlike_post', 'unlike_post', (['client', '"""media_id"""'], {}), "(client, 'media_id')\n", (157, 177), False, 'from instauto.helpers.post import unlike_post\n')] |
import random
import sys
import time
from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, \
scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers
from simulator.constants.keys import scenario_id_key, random_seed_key, draw_graph_key
from simulator.helper.parser import get_parser
from simulator.helper.plot import chose_draw_plot
from simulator.helper.simulation import get_default_params
from simulator.helper.environment import get_environment_simulation
if __name__ == '__main__':
params = get_default_params()
args = get_parser().parse_args()
for arg in vars(args):
v = getattr(args, arg)
if arg in params and v is not None:
params[arg] = v
random.seed(params[random_seed_key])
t_start = time.time()
env_dic = get_environment_simulation(params)
if params[scenario_id_key] == -1:
stats_result = scx_base_just_a_flu.launch_run(params, env_dic)
elif params[scenario_id_key] == 0: # Total lockdown
stats_result = sc0_base_lockdown.launch_run(params, env_dic)
elif params[scenario_id_key] == 1: # Lockdown removal after N days
stats_result = sc1_simple_lockdown_removal.launch_run(params, env_dic)
elif params[scenario_id_key] == 2: # Yoyo lockdown removal
stats_result = sc2_yoyo_lockdown_removal.launch_run(params, env_dic)
elif params[scenario_id_key] == 3: # Yoyo lockdown removal
stats_result = sc3_loose_lockdown.launch_run(params, env_dic)
elif params[scenario_id_key] == 4: # Rogue citizen
stats_result = sc4_rogue_citizen.launch_run(params, env_dic)
elif params[scenario_id_key] == 5: # Rogue block
stats_result = sc5_rogue_neighborhood.launch_run(params, env_dic)
elif params[scenario_id_key] == 6: # Rogue block
stats_result = sc6_travelers.launch_run(params, env_dic)
else:
sys.exit(0)
print("It took : %.2f seconds" % (time.time() - t_start))
chose_draw_plot(params[draw_graph_key], stats_result) | [
"scenario.example.sc2_yoyo_lockdown_removal.launch_run",
"scenario.example.sc4_rogue_citizen.launch_run",
"scenario.example.sc6_travelers.launch_run",
"random.seed",
"simulator.helper.simulation.get_default_params",
"simulator.helper.parser.get_parser",
"scenario.example.sc1_simple_lockdown_removal.laun... | [((597, 617), 'simulator.helper.simulation.get_default_params', 'get_default_params', ([], {}), '()\n', (615, 617), False, 'from simulator.helper.simulation import get_default_params\n'), ((790, 826), 'random.seed', 'random.seed', (['params[random_seed_key]'], {}), '(params[random_seed_key])\n', (801, 826), False, 'import random\n'), ((842, 853), 'time.time', 'time.time', ([], {}), '()\n', (851, 853), False, 'import time\n'), ((869, 903), 'simulator.helper.environment.get_environment_simulation', 'get_environment_simulation', (['params'], {}), '(params)\n', (895, 903), False, 'from simulator.helper.environment import get_environment_simulation\n'), ((2034, 2087), 'simulator.helper.plot.chose_draw_plot', 'chose_draw_plot', (['params[draw_graph_key]', 'stats_result'], {}), '(params[draw_graph_key], stats_result)\n', (2049, 2087), False, 'from simulator.helper.plot import chose_draw_plot\n'), ((965, 1012), 'scenario.example.scx_base_just_a_flu.launch_run', 'scx_base_just_a_flu.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (995, 1012), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((630, 642), 'simulator.helper.parser.get_parser', 'get_parser', ([], {}), '()\n', (640, 642), False, 'from simulator.helper.parser import get_parser\n'), ((1093, 1138), 'scenario.example.sc0_base_lockdown.launch_run', 'sc0_base_lockdown.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (1121, 1138), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1234, 1289), 'scenario.example.sc1_simple_lockdown_removal.launch_run', 'sc1_simple_lockdown_removal.launch_run', (['params', 
'env_dic'], {}), '(params, env_dic)\n', (1272, 1289), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((2005, 2016), 'time.time', 'time.time', ([], {}), '()\n', (2014, 2016), False, 'import time\n'), ((1377, 1430), 'scenario.example.sc2_yoyo_lockdown_removal.launch_run', 'sc2_yoyo_lockdown_removal.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (1413, 1430), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1518, 1564), 'scenario.example.sc3_loose_lockdown.launch_run', 'sc3_loose_lockdown.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (1547, 1564), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1644, 1689), 'scenario.example.sc4_rogue_citizen.launch_run', 'sc4_rogue_citizen.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (1672, 1689), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1767, 1817), 'scenario.example.sc5_rogue_neighborhood.launch_run', 'sc5_rogue_neighborhood.launch_run', (['params', 'env_dic'], {}), '(params, env_dic)\n', (1800, 1817), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1895, 1936), 'scenario.example.sc6_travelers.launch_run', 'sc6_travelers.launch_run', 
(['params', 'env_dic'], {}), '(params, env_dic)\n', (1919, 1936), False, 'from scenario.example import sc1_simple_lockdown_removal, sc2_yoyo_lockdown_removal, sc0_base_lockdown, scx_base_just_a_flu, sc3_loose_lockdown, sc4_rogue_citizen, sc5_rogue_neighborhood, sc6_travelers\n'), ((1955, 1966), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1963, 1966), False, 'import sys\n')] |
# Create your views here.
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
from django.template import loader
from django.forms.utils import ErrorList
from django.http import HttpResponse
from .origo import Origo_Thread
from .origo_category import Origo_Category_Thread
from .supply_it import Supply_it_Thread
from .furlongflooring import FF_Thread
from .reydonsports import RDS_Thread
from .reydonsports_category import RDS_Category_Thread
from .totalimports import TotalImports_Thread
from .totalimports_category import TotalImports_Category_Thread
from os.path import join, dirname
# from .origo import scrape_status as origo_scrape_status
import glob, os, zipfile, openpyxl, xlsxwriter
from os import path
from django.contrib.auth.decorators import login_required
from bs4 import BeautifulSoup
import requests, time, math
from datetime import datetime
# from filewrap import Filewrapper
# dotenv_path = join(dirname(__file__), '.env')
# load_dotenv(dotenv_path)
cur_path = dirname(__file__)
root_path = cur_path[:cur_path.rfind(os.path.sep)]
# root_path = root_path[:root_path.rfind(os.path.sep)]
cur_site = ""
# t_origo = None
t_origo = []
t_origo_cat = None
t_supply_it = None
t_ff = None
t_rds = []
t_rds_cat = None
t_totalimports = []
t_totalimports_cat = None
t_totalimports_delay = []
# sites = [{"url": "https://origo-online.origo.ie", "short": "origo"}, {"url": "https://www.supply-it.ie/", "short": "supply_it"}, {"url": "https://online.furlongflooring.com/", "short": "furlongflooring"}]
# sites = [{"url": "https://www.reydonsports.com/", "short": "reydonsports"}]
# sites = [{"url": "https://www.supply-it.ie/", "short": "supply_it"}]
sites = [{"url": "http://totalimports.ie/", "short": "totalimports"}]
# sites = [{"url": "https://origo-online.origo.ie", "short": "origo"}]
# sites = [{"url": "https://online.furlongflooring.com/", "short": "furlongflooring"}]
scrape_status = None
THREAD_COUNT = 5
ALLOW_DELAY = 120
@login_required
def index(request):
global sites
context = {}
context['sites'] = sites
html_template = loader.get_template( 'main/index.html' )
return HttpResponse(html_template.render(context, request))
@login_required
def start_scrape(request):
global t_origo, t_supply_it, t_ff, t_rds, t_totalimports, t_totalimports_cat, t_totalimports_delay, cur_site, stock_scrape
print("start_scrape")
cur_site = request.GET["site"]
scrape_type = request.GET["scrape_type"]
if cur_site == "origo":
if len(t_origo) == 0 and t_origo_cat == None:
stock_scrape = 0
if scrape_type == "stock": stock_scrape = 1
origo_category_scrape(stock_scrape)
# totalimports_scrape(stock_scrape)
# if t_origo == None or t_origo.status == "scraping is ended":
# t_origo = Origo_Thread(scrape_type)
# t_origo.start()
elif cur_site == "supply_it":
if t_supply_it == None:
t_supply_it = Supply_it_Thread(scrape_type)
t_supply_it.start()
elif cur_site == "furlongflooring":
if t_ff == None or t_ff.status == "scraping is ended":
t_ff = FF_Thread(scrape_type)
t_ff.start()
elif cur_site == "reydonsports":
if len(t_rds) == 0 and t_rds_cat == None:
stock_scrape = 0
if scrape_type == "stock": stock_scrape = 1
reydonsports_scrape(stock_scrape)
elif cur_site == "totalimports":
if len(t_totalimports) == 0 and t_totalimports_cat == None:
stock_scrape = 0
if scrape_type == "stock": stock_scrape = 1
totalimports_category_scrape(stock_scrape)
# totalimports_scrape(stock_scrape)
return HttpResponse(root_path)
@login_required
def get_scraping_status(request):
global t_origo, t_origo_cat, t_supply_it, t_ff, t_rds, t_rds_cat, t_totalimports, t_totalimports_cat, t_totalimports_delay, stock_scrape, scrape_status
res = ""
cur_site = request.GET["site"]
if cur_site == "origo" :
# res = t_origo.status
if len(t_origo) > 0:
scrape_status = ""
for tt in t_origo:
try:
scrape_status += tt.status + "\n"
except:
scrape_status += "\n"
# scrape_status = "\n".join([tt.status for tt in t_origo if tt != None])
i = 0
for t in t_origo:
i += 1
try:
if t.status != "ended":
break
except:
pass
if i == len(t_origo):
# generate .xlsx file name
timestamp = datetime.now().strftime("%Y-%m%d-%H%M%S")
xlsfile_name = 'products-' + timestamp + '.xlsx'
if stock_scrape == 1: xlsfile_name = 'stock-' + timestamp + '.xlsx'
xlsfile_name = join(root_path, "xls", "origo", xlsfile_name)
workbook = xlsxwriter.Workbook(xlsfile_name)
worksheet = workbook.add_worksheet()
row_num = 0
for j in range(THREAD_COUNT):
tmp_wb_obj = openpyxl.load_workbook(join(root_path, "xls", "origo", str(j) + "-temp.xlsx"))
sheet = tmp_wb_obj.active
for k, row in enumerate(sheet.iter_rows(values_only=True)):
if k == 0:
if j == 0:
# Write Header
for val, col in zip(row, range(len(row))):
worksheet.write(0, col, val)
else:
row_num += 1
for val, col in zip(row, range(len(row))):
worksheet.write(row_num, col, val)
tmp_wb_obj.close()
workbook.close()
scrape_status = "scraping is ended"
break
elif t_origo_cat != None:
scrape_status = t_origo_cat.status
if scrape_status == "ended":
t_origo_cat = None
origo_scrape(stock_scrape)
# totalimports_scrape()
res = scrape_status
if scrape_status == "scraping is ended":
t_origo.clear()
elif cur_site == "supply_it" :
res = t_supply_it.status
elif cur_site == "furlongflooring" :
res = t_ff.status
elif cur_site == "reydonsports" :
if len(t_rds) > 0:
scrape_status = ""
for tt in t_rds:
try:
scrape_status += tt.status + "\n"
except:
scrape_status += "\n"
# scrape_status = "\n".join([tt.status for tt in t_rds if tt != None])
i = 0
for t in t_rds:
i += 1
try:
if t.status != "ended":
break
except:
pass
if i == len(t_rds):
# generate .xlsx file name
timestamp = datetime.now().strftime("%Y-%m%d-%H%M%S")
xlsfile_name = 'products-' + timestamp + '.xlsx'
if stock_scrape == 1: xlsfile_name = 'stock-' + timestamp + '.xlsx'
xlsfile_name = join(root_path, "xls", "reydonsports", xlsfile_name)
workbook = xlsxwriter.Workbook(xlsfile_name)
worksheet = workbook.add_worksheet()
row_num = 0
for j in range(THREAD_COUNT):
tmp_wb_obj = openpyxl.load_workbook(join(root_path, "xls", "reydonsports", str(j) + "-temp.xlsx"))
sheet = tmp_wb_obj.active
for k, row in enumerate(sheet.iter_rows(values_only=True)):
if k == 0:
if j == 0:
# Write Header
for val, col in zip(row, range(len(row))):
worksheet.write(0, col, val)
else:
row_num += 1
for val, col in zip(row, range(len(row))):
worksheet.write(row_num, col, val)
tmp_wb_obj.close()
workbook.close()
scrape_status = "scraping is ended"
break
elif t_rds_cat != None:
scrape_status = t_rds_cat.status
if scrape_status == "ended":
t_rds_cat = None
reydonsports_scrape(stock_scrape)
# totalimports_scrape()
res = scrape_status
if scrape_status == "scraping is ended":
t_rds.clear()
elif cur_site == "totalimports" :
if len(t_totalimports) > 0:
# check if thread works fine
pre_scrape_status = []
if scrape_status != None: pre_scrape_status = scrape_status.split("\n")
scrape_status = ""
for tt, i in zip(t_totalimports, range(len(t_totalimports))):
if tt.status != "ended" and len(pre_scrape_status) > i and pre_scrape_status[i] == tt.status:
t_totalimports_delay[i] += 1
if t_totalimports_delay[i] >= ALLOW_DELAY:
totalimports_thread_start(i, stock_scrape)
else:
t_totalimports_delay[i] = 0
try:
scrape_status += tt.status + "\n"
except:
scrape_status += "\n"
# scrape_status = "\n".join([tt.status for tt in t_totalimports if tt != None])
i = 0
for t in t_totalimports:
i += 1
try:
if t.status != "ended":
break
except:
pass
if i == len(t_totalimports):
# generate .xlsx file name
timestamp = datetime.now().strftime("%Y-%m%d-%H%M%S")
xlsfile_name = 'products-' + timestamp + '.xlsx'
if stock_scrape == 1: xlsfile_name = 'stock-' + timestamp + '.xlsx'
xlsfile_name = join(root_path, "xls", "totalimports", xlsfile_name)
workbook = xlsxwriter.Workbook(xlsfile_name)
worksheet = workbook.add_worksheet()
row_num = 0
for j in range(THREAD_COUNT):
tmp_wb_obj = openpyxl.load_workbook(join(root_path, "xls", "totalimports", str(j) + "-temp.xlsx"))
sheet = tmp_wb_obj.active
for k, row in enumerate(sheet.iter_rows(values_only=True)):
if k == 0:
if j == 0:
# Write Header
for val, col in zip(row, range(len(row))):
worksheet.write(0, col, val)
else:
row_num += 1
for val, col in zip(row, range(len(row))):
worksheet.write(row_num, col, val)
tmp_wb_obj.close()
workbook.close()
scrape_status = "scraping is ended"
break
elif t_totalimports_cat != None:
scrape_status = t_totalimports_cat.status
if scrape_status == "ended":
t_totalimports_cat = None
# reydonsports_scrape()
totalimports_scrape(stock_scrape)
res = scrape_status
if scrape_status == "scraping is ended":
t_totalimports.clear()
return HttpResponse(res)
@login_required
def get_xls_list(request):
global root_path
res = ""
for site in sites:
products_arr = []
stock_arr = []
for file in glob.glob(join(root_path, "xls", site["short"], "products-2*.xlsx")):
products_arr.append(file[file.rfind(os.path.sep) + 10 : -5])
for file in glob.glob(join(root_path, "xls", site["short"], "stock-2*.xlsx")):
stock_arr.append(file[file.rfind(os.path.sep) + 7 : -5])
products_arr.sort(reverse=True)
stock_arr.sort(reverse=True)
if res != "": res += ", "
res += '"' + site["short"] + '": {"full": "' + '_'.join(products_arr) + '", "stock": "' + '_'.join(stock_arr) + '"}'
res = '{' + res + '}'
return HttpResponse(res)
@login_required
def download(request):
# Create file_name & file_path
site = request.GET["site"]
stock = request.GET["stock"]
diff = request.GET["diff"]
recent = request.GET["recent"]
compare = request.GET["compare"]
file_prefix = "products-"
if stock == "1" : file_prefix = "stock-"
file_name = file_prefix
if diff == "1" : file_name += "diff-"
file_name += recent
if diff == "1" : file_name += "_" + compare
zipfile_name = site + "-" + file_name + ".zip"
file_name += ".xlsx"
file_path = []
if diff =="1":
file_path.append(os.path.join(root_path, "xls", site, file_prefix + "add-" + recent + "_" + compare + ".xlsx"))
file_path.append(os.path.join(root_path, "xls", site, file_prefix + "remove-" + recent + "_" + compare + ".xlsx"))
zipfile_name = site + "-" + file_prefix + "compare-" + recent + "_" + compare + ".zip"
else:
file_path.append(os.path.join(root_path, "xls", site, file_name))
response = HttpResponse(content_type='application/zip')
zf = zipfile.ZipFile(response, 'w')
for file in file_path:
# Generate if there is no different .xlsx file
if diff == "1" and not path.exists(file) :
compare_xlsx(site, stock, recent, compare)
with open(file, 'rb') as fh:
zf.writestr(file[file.rfind(os.path.sep) + 1:], fh.read())
# return as zipfile
response['Content-Disposition'] = f'attachment; filename={zipfile_name}'
return response
@login_required
def compare_xlsx(site, stock, recent, compare) :
global root_path
# fields = ['id', 'category', 'title', 'stock', 'list price', 'nett price', 'description', 'URL', 'image']
fields = []
file_prefix = "products-"
if stock == "1":
# fields = ['id', 'stock']
file_prefix = "stock-"
add_file_name = file_prefix + "add-" + recent + "_" + compare + ".xlsx"
remove_file_name = file_prefix + "remove-" + recent + "_" + compare + ".xlsx"
older_products = {}
newer_products = {}
wb_obj = openpyxl.load_workbook(join(root_path, "xls", site, file_prefix + compare + ".xlsx"))
sheet = wb_obj.active
older_products = {}
for i, row in enumerate(sheet.iter_rows(values_only=True)):
if i == 0:
fields = row
else:
try:
if row[0] in older_products: continue
except:
pass
older_products[row[0]] = row
wb_obj = openpyxl.load_workbook(join(root_path, "xls", site, file_prefix + recent + ".xlsx"))
sheet = wb_obj.active
newer_products = {}
for i, row in enumerate(sheet.iter_rows(values_only=True)):
if i > 0:
try:
if row[0] in newer_products: continue
except:
pass
newer_products[row[0]] = row
older_products_2 = older_products.copy()
for row in older_products_2:
try:
if row in newer_products:
del older_products[row]
del newer_products[row]
except:
pass
workbook = xlsxwriter.Workbook(join(root_path, "xls", site, add_file_name))
worksheet = workbook.add_worksheet("Add")
i = -1
for val in fields:
i += 1
worksheet.write(0, i, val)
i = 0
for row in newer_products:
i += 1
j = -1
for val in newer_products[row]:
j += 1
worksheet.write(i, j, val)
workbook.close()
workbook = xlsxwriter.Workbook(join(root_path, "xls", site, remove_file_name))
worksheet = workbook.add_worksheet("Remove")
i = -1
for val in fields:
i += 1
worksheet.write(0, i, val)
i = 0
for row in older_products:
i += 1
j = -1
for val in older_products[row]:
j += 1
worksheet.write(i, j, val)
workbook.close()
def status_publishing(text) :
global scrape_status
scrape_status = text
def reydonsports_category_scrape(stock_scrape=0):
global t_rds_cat, t_rds
t_rds_cat = RDS_Category_Thread(stock_scrape)
t_rds_cat.start()
def reydonsports_scrape(stock_scrape=0):
global t_rds
products_url_txt = open("reydonsports_products_url.txt","r")
lines = len(products_url_txt.readlines())
start_index = 0
for i in range(THREAD_COUNT):
end_index = start_index + math.ceil(lines / THREAD_COUNT)
if end_index > lines + 1: end_index = lines + 1
th = RDS_Thread(i, start_index, end_index, stock_scrape)
th.start()
t_rds.append(th)
start_index = end_index
def totalimports_category_scrape(stock_scrape=0):
global t_totalimports_cat, t_totalimports
t_totalimports_cat = TotalImports_Category_Thread(stock_scrape)
t_totalimports_cat.start()
def totalimports_thread_start(thread_index, stock_scrape=0):
global t_totalimports, t_totalimports_delay
products_url_txt = open("totalimports_products_url.txt","r")
lines = len(products_url_txt.readlines())
start_index = 0
for i in range(THREAD_COUNT):
end_index = start_index + math.ceil(lines / THREAD_COUNT)
if end_index > lines + 1: end_index = lines + 1
if i == thread_index :
th = TotalImports_Thread(i, start_index, end_index, stock_scrape)
th.start()
if thread_index < len(t_totalimports):
t_totalimports[thread_index] = th
t_totalimports_delay[thread_index] = 0
else:
t_totalimports.append(th)
t_totalimports_delay.append(0)
break
start_index = end_index
def totalimports_scrape(stock_scrape=0):
for i in range(THREAD_COUNT):
totalimports_thread_start(i, stock_scrape)
def origo_category_scrape(stock_scrape=0):
global t_origo_cat, t_origo
t_origo_cat = Origo_Category_Thread(stock_scrape)
t_origo_cat.start()
def origo_scrape(stock_scrape=0):
global t_origo
products_url_txt = open("origo_products_url.txt","r")
lines = len(products_url_txt.readlines())
start_index = 0
for i in range(THREAD_COUNT):
end_index = start_index + math.ceil(lines / THREAD_COUNT)
if end_index > lines + 1: end_index = lines + 1
th = RDS_Thread(i, start_index, end_index, stock_scrape)
th.start()
t_origo.append(th)
start_index = end_index | [
"os.path.exists",
"math.ceil",
"zipfile.ZipFile",
"django.http.HttpResponse",
"os.path.join",
"xlsxwriter.Workbook",
"os.path.dirname",
"datetime.datetime.now",
"django.template.loader.get_template"
] | [((1075, 1092), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (1082, 1092), False, 'from os.path import join, dirname\n'), ((2161, 2199), 'django.template.loader.get_template', 'loader.get_template', (['"""main/index.html"""'], {}), "('main/index.html')\n", (2180, 2199), False, 'from django.template import loader\n'), ((3807, 3830), 'django.http.HttpResponse', 'HttpResponse', (['root_path'], {}), '(root_path)\n', (3819, 3830), False, 'from django.http import HttpResponse\n'), ((12512, 12529), 'django.http.HttpResponse', 'HttpResponse', (['res'], {}), '(res)\n', (12524, 12529), False, 'from django.http import HttpResponse\n'), ((13295, 13312), 'django.http.HttpResponse', 'HttpResponse', (['res'], {}), '(res)\n', (13307, 13312), False, 'from django.http import HttpResponse\n'), ((14331, 14375), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""application/zip"""'}), "(content_type='application/zip')\n", (14343, 14375), False, 'from django.http import HttpResponse\n'), ((14385, 14415), 'zipfile.ZipFile', 'zipfile.ZipFile', (['response', '"""w"""'], {}), "(response, 'w')\n", (14400, 14415), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((15417, 15478), 'os.path.join', 'join', (['root_path', '"""xls"""', 'site', "(file_prefix + compare + '.xlsx')"], {}), "(root_path, 'xls', site, file_prefix + compare + '.xlsx')\n", (15421, 15478), False, 'from os.path import join, dirname\n'), ((15844, 15904), 'os.path.join', 'join', (['root_path', '"""xls"""', 'site', "(file_prefix + recent + '.xlsx')"], {}), "(root_path, 'xls', site, file_prefix + recent + '.xlsx')\n", (15848, 15904), False, 'from os.path import join, dirname\n'), ((16473, 16516), 'os.path.join', 'join', (['root_path', '"""xls"""', 'site', 'add_file_name'], {}), "(root_path, 'xls', site, add_file_name)\n", (16477, 16516), False, 'from os.path import join, dirname\n'), ((16926, 16972), 'os.path.join', 'join', (['root_path', '"""xls"""', 'site', 
'remove_file_name'], {}), "(root_path, 'xls', site, remove_file_name)\n", (16930, 16972), False, 'from os.path import join, dirname\n'), ((12728, 12785), 'os.path.join', 'join', (['root_path', '"""xls"""', "site['short']", '"""products-2*.xlsx"""'], {}), "(root_path, 'xls', site['short'], 'products-2*.xlsx')\n", (12732, 12785), False, 'from os.path import join, dirname\n'), ((12891, 12945), 'os.path.join', 'join', (['root_path', '"""xls"""', "site['short']", '"""stock-2*.xlsx"""'], {}), "(root_path, 'xls', site['short'], 'stock-2*.xlsx')\n", (12895, 12945), False, 'from os.path import join, dirname\n'), ((13918, 14015), 'os.path.join', 'os.path.join', (['root_path', '"""xls"""', 'site', "(file_prefix + 'add-' + recent + '_' + compare + '.xlsx')"], {}), "(root_path, 'xls', site, file_prefix + 'add-' + recent + '_' +\n compare + '.xlsx')\n", (13930, 14015), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((14038, 14138), 'os.path.join', 'os.path.join', (['root_path', '"""xls"""', 'site', "(file_prefix + 'remove-' + recent + '_' + compare + '.xlsx')"], {}), "(root_path, 'xls', site, file_prefix + 'remove-' + recent + '_' +\n compare + '.xlsx')\n", (14050, 14138), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((14266, 14313), 'os.path.join', 'os.path.join', (['root_path', '"""xls"""', 'site', 'file_name'], {}), "(root_path, 'xls', site, file_name)\n", (14278, 14313), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((17865, 17896), 'math.ceil', 'math.ceil', (['(lines / THREAD_COUNT)'], {}), '(lines / THREAD_COUNT)\n', (17874, 17896), False, 'import requests, time, math\n'), ((18620, 18651), 'math.ceil', 'math.ceil', (['(lines / THREAD_COUNT)'], {}), '(lines / THREAD_COUNT)\n', (18629, 18651), False, 'import requests, time, math\n'), ((19702, 19733), 'math.ceil', 'math.ceil', (['(lines / THREAD_COUNT)'], {}), '(lines / THREAD_COUNT)\n', (19711, 19733), False, 'import requests, time, math\n'), ((14530, 14547), 'os.path.exists', 
'path.exists', (['file'], {}), '(file)\n', (14541, 14547), False, 'from os import path\n'), ((5034, 5079), 'os.path.join', 'join', (['root_path', '"""xls"""', '"""origo"""', 'xlsfile_name'], {}), "(root_path, 'xls', 'origo', xlsfile_name)\n", (5038, 5079), False, 'from os.path import join, dirname\n'), ((5112, 5145), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (['xlsfile_name'], {}), '(xlsfile_name)\n', (5131, 5145), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((4800, 4814), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4812, 4814), False, 'from datetime import datetime\n'), ((7691, 7743), 'os.path.join', 'join', (['root_path', '"""xls"""', '"""reydonsports"""', 'xlsfile_name'], {}), "(root_path, 'xls', 'reydonsports', xlsfile_name)\n", (7695, 7743), False, 'from os.path import join, dirname\n'), ((7776, 7809), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (['xlsfile_name'], {}), '(xlsfile_name)\n', (7795, 7809), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((10840, 10892), 'os.path.join', 'join', (['root_path', '"""xls"""', '"""totalimports"""', 'xlsfile_name'], {}), "(root_path, 'xls', 'totalimports', xlsfile_name)\n", (10844, 10892), False, 'from os.path import join, dirname\n'), ((10925, 10958), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (['xlsfile_name'], {}), '(xlsfile_name)\n', (10944, 10958), False, 'import glob, os, zipfile, openpyxl, xlsxwriter\n'), ((7457, 7471), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7469, 7471), False, 'from datetime import datetime\n'), ((10606, 10620), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10618, 10620), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 vi:et
import sys, io, os, logging
import re
import json
import tempfile
import subprocess
import urllib.request, urllib.error
logger = logging.getLogger(__name__)
__all__ = (
"get_remote",
"create_release",
"download_default_release_asset",
"upload_release_asset",
"update_release_with_mirror_urls",
"get_repo_releases",
)
def get_remote(target_repo, cfg_root):
"""
Get the organization/repo_name from a repo
"""
remote = cfg_root.get("remote", "origin")
cmd = ["git", "config", f"remote.{remote}.url"]
res = subprocess.run(cmd, stdout=subprocess.PIPE, cwd=target_repo)
url = res.stdout.rstrip().decode()
m = re.match(r"^<EMAIL>:(?P<repo>\S+)\.git$", url)
if m is not None:
return m.group("repo")
m = re.match(r"^https://github.com/(?P<repo>\S+)\.git$", url)
if m is not None:
return m.group("repo")
raise ValueError(url)
def create_release(github_repo, git_tag, gh_token, cfg_root):
"""
Create a new release within the target repository.
:return: release metadata with id on success.
"""
logger.info("Creating a new release for %s at revision %s", github_repo, git_tag)
url = "https://api.github.com/repos/{}/releases".format(github_repo)
headers = {
"Authorization": "token {}".format(gh_token),
"Content-Type": "application/json",
}
root = {
"tag_name": git_tag,
"name": git_tag,
"draft": False,
"prerelease": False,
}
payload = json.dumps(root).encode("utf-8")
req = urllib.request.Request(url, headers=headers, method="POST", data=payload)
with urllib.request.urlopen(req) as resp:
if resp.getcode() != 201:
raise RuntimeError(
"Bad response: {} / {}".format(resp.getcode(), resp.read())
)
ret = json.loads(resp.read().decode("utf-8"))
logger.debug("ret: %s", ret)
release_id = ret["id"]
logger.info("Release (id:%s) successfully createad.", release_id)
return release_id
def download_default_release_asset(github_repo, release_id, gh_token, target_dir):
"""
Download the default asset of a given release
:return: relative path to downloaded file.
"""
logger.info("Downloading release default artifact...")
url = f"https://api.github.com/repos/{github_repo}/releases/{release_id}"
headers = {
"Authorization": f"token {gh_token}",
"Content-Type": "application/octet-stream",
}
req = urllib.request.Request(url, headers=headers, method="GET")
with urllib.request.urlopen(req) as resp:
if resp.getcode() != 200:
raise RuntimeError(
"Bad response: {} / {}".format(resp.getcode(), resp.read())
)
ret = json.loads(resp.read().decode("utf-8"))
logger.debug("ret: %s", ret)
pnpv = "{}-{}".format(github_repo.split("/")[-1], ret["tag_name"])
asset_name = f"{pnpv}.zip"
downloaded_asset_path = os.path.join(target_dir, asset_name)
urllib.request.urlretrieve(
ret["zipball_url"], downloaded_asset_path, #reporthook=...
)
return downloaded_asset_path
def upload_release_asset(github_repo, release_id, asset_path, gh_token):
"""
Uploads a release asset to a target release.
:return: Download link of the uploaded asset.
"""
logger.info("Uploading default release asset to sct-data/%s", github_repo)
asset_name = os.path.basename(asset_path)
url = f"https://uploads.github.com/repos/{github_repo}/releases/{release_id}/assets?name={asset_name}"
headers = {
"Authorization": f"token {gh_token}",
"Content-Type": "application/octet-stream",
}
with io.open(asset_path, "rb") as fi:
payload = fi.read()
req = urllib.request.Request(url, headers=headers, method="POST", data=payload)
with urllib.request.urlopen(req) as resp:
if resp.getcode() != 201:
raise RuntimeError(
"Bad response: {} / {}".format(resp.getcode(), resp.read())
)
ret = json.loads(resp.read().decode("utf-8"))
logger.debug("ret: %s", ret)
logger.info("Release asset uploaded successfully.")
return ret["browser_download_url"]
def update_release_with_mirror_urls(github_repo, release_id, gh_token, urls):
    """
    Include osf download url (in case osf upload was performed) to the Github release
    :return: the updated release object decoded from the GitHub API response.
    """
    logger.info("Uploading release with OSF download url.")
    url = f"https://api.github.com/repos/{github_repo}/releases/{release_id}"
    headers = {
        "Authorization": f"token {gh_token}",
        "Content-Type": "application/json",
    }
    body = "Asset also available at {}".format(urls)
    root = {"body": body}
    # Fix: the GitHub "update a release" endpoint expects a JSON object such
    # as {"body": "..."}. Previously json.dumps(body) serialized the bare
    # string and left `root` unused, producing an invalid payload.
    payload = json.dumps(root).encode("utf-8")
    req = urllib.request.Request(url, headers=headers, method="PATCH", data=payload)
    with urllib.request.urlopen(req) as resp:
        if resp.getcode() != 200:
            raise RuntimeError(
                "Bad response: {} / {}".format(resp.getcode(), resp.read())
            )
        ret = json.loads(resp.read().decode("utf-8"))
    return ret
def get_org_repos(org):
    """Return the names of all repositories of a GitHub organization."""
    url = f"https://api.github.com/orgs/{org}/repos"
    headers = {
        "Content-Type": "application/json",
    }
    request = urllib.request.Request(url, headers=headers, method="GET")
    with urllib.request.urlopen(request) as resp:
        status = resp.getcode()
        if status != 200:
            raise RuntimeError("Bad response: {} / {}".format(status, resp.read()))
        repos = json.loads(resp.read().decode("utf-8"))
    logger.debug("ret: %s", repos)
    return [repo["name"] for repo in repos]
def get_repo_tags(github_repo):
    """Return the tag names of a GitHub repository."""
    url = f"https://api.github.com/repos/{github_repo}/tags"
    headers = {
        "Content-Type": "application/json",
    }
    request = urllib.request.Request(url, headers=headers, method="GET")
    with urllib.request.urlopen(request) as resp:
        status = resp.getcode()
        if status != 200:
            raise RuntimeError("Bad response: {} / {}".format(status, resp.read()))
        tags = json.loads(resp.read().decode("utf-8"))
    logger.debug("ret: %s", tags)
    return [tag["name"] for tag in tags]
def get_repo_releases(github_repo):
    """Return ({tag_name: release_id}, {release_id: release_dict}) for a repo."""
    url = f"https://api.github.com/repos/{github_repo}/releases"
    request = urllib.request.Request(url)
    with urllib.request.urlopen(request) as resp:
        status = resp.getcode()
        if status != 200:
            raise RuntimeError("Bad response: {} / {}".format(status, resp.read()))
        releases = json.loads(resp.read().decode("utf-8"))
    tag_to_id = {rel["tag_name"]: rel["id"] for rel in releases}
    id_to_release = {rel["id"]: rel for rel in releases}
    return tag_to_id, id_to_release
| [
"logging.getLogger",
"subprocess.run",
"re.match",
"os.path.join",
"io.open",
"json.dumps",
"os.path.basename"
] | [((181, 208), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (198, 208), False, 'import sys, io, os, logging\n'), ((591, 651), 'subprocess.run', 'subprocess.run', (['cmd'], {'stdout': 'subprocess.PIPE', 'cwd': 'target_repo'}), '(cmd, stdout=subprocess.PIPE, cwd=target_repo)\n', (605, 651), False, 'import subprocess\n'), ((700, 747), 're.match', 're.match', (['"""^<EMAIL>:(?P<repo>\\\\S+)\\\\.git$"""', 'url'], {}), "('^<EMAIL>:(?P<repo>\\\\S+)\\\\.git$', url)\n", (708, 747), False, 'import re\n'), ((809, 867), 're.match', 're.match', (['"""^https://github.com/(?P<repo>\\\\S+)\\\\.git$"""', 'url'], {}), "('^https://github.com/(?P<repo>\\\\S+)\\\\.git$', url)\n", (817, 867), False, 'import re\n'), ((3507, 3535), 'os.path.basename', 'os.path.basename', (['asset_path'], {}), '(asset_path)\n', (3523, 3535), False, 'import sys, io, os, logging\n'), ((3034, 3070), 'os.path.join', 'os.path.join', (['target_dir', 'asset_name'], {}), '(target_dir, asset_name)\n', (3046, 3070), False, 'import sys, io, os, logging\n'), ((3769, 3794), 'io.open', 'io.open', (['asset_path', '"""rb"""'], {}), "(asset_path, 'rb')\n", (3776, 3794), False, 'import sys, io, os, logging\n'), ((1538, 1554), 'json.dumps', 'json.dumps', (['root'], {}), '(root)\n', (1548, 1554), False, 'import json\n'), ((4828, 4844), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (4838, 4844), False, 'import json\n')] |
from pygsti.report.table import ReportTable
from ..util import BaseCase
class TableInstanceTester(BaseCase):
custom_headings = {
'html': 'test',
'python': 'test',
'latex': 'test'
}
def setUp(self):
self.table = ReportTable(self.custom_headings, ['Normal'] * 4) # Four formats
def test_element_accessors(self):
self.table.add_row(['1.0'], ['Normal'])
self.assertTrue('1.0' in self.table)
self.assertEqual(len(self.table), self.table.num_rows)
row_by_key = self.table.row(key=self.table.row_names[0])
row_by_idx = self.table.row(index=0)
self.assertEqual(row_by_key, row_by_idx)
col_by_key = self.table.col(key=self.table.col_names[0])
col_by_idx = self.table.col(index=0)
self.assertEqual(col_by_key, col_by_idx)
def test_to_string(self):
s = str(self.table)
# TODO assert correctness
def test_render_HTML(self):
self.table.add_row(['1.0'], ['Normal'])
self.table.add_row(['1.0'], ['Normal'])
render = self.table.render('html')
# TODO assert correctness
def test_render_LaTeX(self):
self.table.add_row(['1.0'], ['Normal'])
self.table.add_row(['1.0'], ['Normal'])
render = self.table.render('latex')
# TODO assert correctness
def test_finish(self):
self.table.add_row(['1.0'], ['Normal'])
self.table.finish()
# TODO assert correctness
def test_render_raises_on_unknown_format(self):
with self.assertRaises(NotImplementedError):
self.table.render('foobar')
def test_raise_on_invalid_accessor(self):
# XXX are these neccessary? EGN: maybe not - checks invalid inputs, which maybe shouldn't need testing?
with self.assertRaises(KeyError):
self.table['foobar']
with self.assertRaises(KeyError):
self.table.row(key='foobar') # invalid key
with self.assertRaises(ValueError):
self.table.row(index=100000) # out of bounds
with self.assertRaises(ValueError):
self.table.row() # must specify key or index
with self.assertRaises(ValueError):
self.table.row(key='foobar', index=1) # cannot specify key and index
with self.assertRaises(KeyError):
self.table.col(key='foobar') # invalid key
with self.assertRaises(ValueError):
self.table.col(index=100000) # out of bounds
with self.assertRaises(ValueError):
self.table.col() # must specify key or index
with self.assertRaises(ValueError):
self.table.col(key='foobar', index=1) # cannot specify key and index
class CustomHeadingTableTester(TableInstanceTester):
    """Repeats the instance tests on a table built with explicit custom headings."""

    def setUp(self):
        self.table = ReportTable([0.1], ['Normal'], self.custom_headings)

    def test_labels(self):
        self.table.add_row(['1.0'], ['Normal'])
        self.assertTrue('1.0' in self.table)
        row_labels = list(self.table.keys())
        self.assertEqual(row_labels, self.table.row_names)
        self.assertEqual(len(row_labels), self.table.num_rows)
        self.assertTrue(row_labels[0] in self.table)
        # Column labels are read off the first row's mapping.
        first_row = self.table[row_labels[0]]
        col_labels = list(first_row.keys())
        self.assertEqual(col_labels, self.table.col_names)
        self.assertEqual(len(col_labels), self.table.num_cols)
class CustomHeadingNoFormatTableTester(TableInstanceTester):
    # Re-runs the inherited TableInstanceTester suite on a table constructed
    # with custom headings only (the formats argument is explicitly None).
    def setUp(self):
        self.table = ReportTable(self.custom_headings, None)
| [
"pygsti.report.table.ReportTable"
] | [((258, 307), 'pygsti.report.table.ReportTable', 'ReportTable', (['self.custom_headings', "(['Normal'] * 4)"], {}), "(self.custom_headings, ['Normal'] * 4)\n", (269, 307), False, 'from pygsti.report.table import ReportTable\n'), ((2820, 2872), 'pygsti.report.table.ReportTable', 'ReportTable', (['[0.1]', "['Normal']", 'self.custom_headings'], {}), "([0.1], ['Normal'], self.custom_headings)\n", (2831, 2872), False, 'from pygsti.report.table import ReportTable\n'), ((3523, 3562), 'pygsti.report.table.ReportTable', 'ReportTable', (['self.custom_headings', 'None'], {}), '(self.custom_headings, None)\n', (3534, 3562), False, 'from pygsti.report.table import ReportTable\n')] |
from django.contrib.auth.models import User
from datetime import datetime, timedelta
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.views.generic.simple import direct_to_template
from schedule.models import Event
from schedule.periods import Period
def site_index(request, template_name='index.html'):
    """Render the landing page with users available now and upcoming office hours.

    NOTE: the entire local namespace is handed to the template via locals(),
    so every local variable name in this function is part of the template
    contract — do not rename locals casually.
    """
    # most future office hours to show
    MAX_FUTURE_OFFICE_HOURS = 30
    # furthest into the future to display office hours
    MAX_FUTURE_DAYS = 30
    users_available_now = User.objects.filter(profile__is_available=True)
    events = Event.objects.all()
    # Period covering the next minute: anyone with an occurrence in it is
    # considered to be holding office hours right now.
    now = Period(events=events, start=datetime.now(),
                    end=datetime.now() + timedelta(minutes=1))
    occurences = now.get_occurrences()
    # NOTE(review): the list + map concatenation below implies Python 2
    # semantics (map returning a list); under Python 3 this would raise
    # TypeError — confirm the target runtime.
    users_holding_office_hours_now = map(lambda x: x.event.creator, occurences)
    users = set(list(users_available_now) + users_holding_office_hours_now)
    # Occurrences within the next MAX_FUTURE_DAYS days, capped at
    # MAX_FUTURE_OFFICE_HOURS entries.
    future = Period(events=events, start=datetime.now(),
                    end=datetime.now() + timedelta(days=MAX_FUTURE_DAYS))
    upcoming_office_hours = future.get_occurrences()
    upcoming_office_hours = upcoming_office_hours[:MAX_FUTURE_OFFICE_HOURS]
    return direct_to_template(request, template_name, locals())
def about(request):
    # Static "about" page; no extra template context required.
    return direct_to_template(request, 'about.html')
| [
"schedule.models.Event.objects.all",
"django.views.generic.simple.direct_to_template",
"django.contrib.auth.models.User.objects.filter",
"datetime.datetime.now",
"datetime.timedelta"
] | [((647, 694), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'profile__is_available': '(True)'}), '(profile__is_available=True)\n', (666, 694), False, 'from django.contrib.auth.models import User\n'), ((708, 727), 'schedule.models.Event.objects.all', 'Event.objects.all', ([], {}), '()\n', (725, 727), False, 'from schedule.models import Event\n'), ((1393, 1434), 'django.views.generic.simple.direct_to_template', 'direct_to_template', (['request', '"""about.html"""'], {}), "(request, 'about.html')\n", (1411, 1434), False, 'from django.views.generic.simple import direct_to_template\n'), ((766, 780), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (778, 780), False, 'from datetime import datetime, timedelta\n'), ((1078, 1092), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1090, 1092), False, 'from datetime import datetime, timedelta\n'), ((803, 817), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (815, 817), False, 'from datetime import datetime, timedelta\n'), ((820, 840), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (829, 840), False, 'from datetime import datetime, timedelta\n'), ((1118, 1132), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1130, 1132), False, 'from datetime import datetime, timedelta\n'), ((1135, 1166), 'datetime.timedelta', 'timedelta', ([], {'days': 'MAX_FUTURE_DAYS'}), '(days=MAX_FUTURE_DAYS)\n', (1144, 1166), False, 'from datetime import datetime, timedelta\n')] |
from keras.models import Model
from keras.layers import Input
from keras.layers.core import Activation
from keras.layers.convolutional import Conv3D, Deconv3D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
def generator(phase_train=True, params=None):
    """
    Returns a Generator Model with input params and phase_train
    Args:
        phase_train (boolean): training phase or not
        params (dict): Dictionary with model parameters; defaults to
            {'z_size': 200, 'strides': (2, 2, 2), 'kernel_size': (4, 4, 4)}
    Returns:
        model (keras.Model): Keras Generator model
    """
    if params is None:
        # Avoid a mutable default argument; these mirror the historical defaults.
        params = {'z_size': 200, 'strides': (2, 2, 2), 'kernel_size': (4, 4, 4)}
    z_size = params['z_size']
    strides = params['strides']
    kernel_size = params['kernel_size']

    def deconv_block(x, filters, block_strides, padding, activation):
        # One Deconv3D -> BatchNormalization (honoring phase_train) -> activation.
        x = Deconv3D(filters=filters, kernel_size=kernel_size,
                     strides=block_strides, kernel_initializer='glorot_normal',
                     bias_initializer='zeros', padding=padding)(x)
        x = BatchNormalization()(x, training=phase_train)
        return Activation(activation=activation)(x)

    inputs = Input(shape=(1, 1, 1, z_size))
    # First block projects the latent vector with stride 1 / 'valid' padding;
    # the remaining blocks upsample with the configured strides.
    g = deconv_block(inputs, 512, (1, 1, 1), 'valid', 'relu')
    g = deconv_block(g, 256, strides, 'same', 'relu')
    g = deconv_block(g, 128, strides, 'same', 'relu')
    g = deconv_block(g, 64, strides, 'same', 'relu')
    g = deconv_block(g, 1, strides, 'same', 'sigmoid')

    model = Model(inputs=inputs, outputs=g)
    model.summary()
    return model
def discriminator(phase_train=True, params=None):
    """
    Returns a Discriminator Model with input params and phase_train
    Args:
        phase_train (boolean): training phase or not
        params (dict): Dictionary with model parameters; defaults to
            {'cube_len': 64, 'strides': (2, 2, 2), 'kernel_size': (4, 4, 4),
             'leak_value': 0.2}
    Returns:
        model (keras.Model): Keras Discriminator model
    """
    if params is None:
        # Avoid a mutable default argument; these mirror the historical defaults.
        params = {'cube_len': 64, 'strides': (2, 2, 2),
                  'kernel_size': (4, 4, 4), 'leak_value': 0.2}
    cube_len = params['cube_len']
    strides = params['strides']
    kernel_size = params['kernel_size']
    leak_value = params['leak_value']

    def conv_block(x, filters, block_strides, padding, activation_layer):
        # One Conv3D -> BatchNormalization (honoring phase_train) -> activation.
        x = Conv3D(filters=filters, kernel_size=kernel_size,
                   strides=block_strides, kernel_initializer='glorot_normal',
                   bias_initializer='zeros', padding=padding)(x)
        x = BatchNormalization()(x, training=phase_train)
        return activation_layer(x)

    inputs = Input(shape=(cube_len, cube_len, cube_len, 1))
    # Four leaky-ReLU downsampling blocks, then a stride-1 'valid' sigmoid head.
    d = conv_block(inputs, 64, strides, 'same', LeakyReLU(leak_value))
    d = conv_block(d, 128, strides, 'same', LeakyReLU(leak_value))
    d = conv_block(d, 256, strides, 'same', LeakyReLU(leak_value))
    d = conv_block(d, 512, strides, 'same', LeakyReLU(leak_value))
    d = conv_block(d, 1, (1, 1, 1), 'valid', Activation(activation='sigmoid'))

    model = Model(inputs=inputs, outputs=d)
    model.summary()
return model | [
"keras.layers.core.Activation",
"keras.layers.normalization.BatchNormalization",
"keras.layers.convolutional.Conv3D",
"keras.layers.convolutional.Deconv3D",
"keras.layers.Input",
"keras.models.Model",
"keras.layers.advanced_activations.LeakyReLU"
] | [((762, 792), 'keras.layers.Input', 'Input', ([], {'shape': '(1, 1, 1, z_size)'}), '(shape=(1, 1, 1, z_size))\n', (767, 792), False, 'from keras.layers import Input\n'), ((2269, 2301), 'keras.models.Model', 'Model', ([], {'inputs': 'inputs', 'outputs': 'g5'}), '(inputs=inputs, outputs=g5)\n', (2274, 2301), False, 'from keras.models import Model\n'), ((2903, 2949), 'keras.layers.Input', 'Input', ([], {'shape': '(cube_len, cube_len, cube_len, 1)'}), '(shape=(cube_len, cube_len, cube_len, 1))\n', (2908, 2949), False, 'from keras.layers import Input\n'), ((4384, 4416), 'keras.models.Model', 'Model', ([], {'inputs': 'inputs', 'outputs': 'd5'}), '(inputs=inputs, outputs=d5)\n', (4389, 4416), False, 'from keras.models import Model\n'), ((803, 956), 'keras.layers.convolutional.Deconv3D', 'Deconv3D', ([], {'filters': '(512)', 'kernel_size': 'kernel_size', 'strides': '(1, 1, 1)', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""valid"""'}), "(filters=512, kernel_size=kernel_size, strides=(1, 1, 1),\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'valid')\n", (811, 956), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((1001, 1021), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1019, 1021), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1057, 1086), 'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""relu"""'}), "(activation='relu')\n", (1067, 1086), False, 'from keras.layers.core import Activation\n'), ((1101, 1251), 'keras.layers.convolutional.Deconv3D', 'Deconv3D', ([], {'filters': '(256)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=256, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", 
(1109, 1251), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((1292, 1312), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1310, 1312), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1348, 1377), 'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""relu"""'}), "(activation='relu')\n", (1358, 1377), False, 'from keras.layers.core import Activation\n'), ((1392, 1542), 'keras.layers.convolutional.Deconv3D', 'Deconv3D', ([], {'filters': '(128)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=128, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (1400, 1542), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((1583, 1603), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1601, 1603), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1639, 1668), 'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""relu"""'}), "(activation='relu')\n", (1649, 1668), False, 'from keras.layers.core import Activation\n'), ((1683, 1832), 'keras.layers.convolutional.Deconv3D', 'Deconv3D', ([], {'filters': '(64)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=64, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (1691, 1832), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((1873, 1893), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1891, 1893), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1929, 1958), 
'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""relu"""'}), "(activation='relu')\n", (1939, 1958), False, 'from keras.layers.core import Activation\n'), ((1973, 2121), 'keras.layers.convolutional.Deconv3D', 'Deconv3D', ([], {'filters': '(1)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=1, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (1981, 2121), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((2162, 2182), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (2180, 2182), False, 'from keras.layers.normalization import BatchNormalization\n'), ((2218, 2250), 'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""sigmoid"""'}), "(activation='sigmoid')\n", (2228, 2250), False, 'from keras.layers.core import Activation\n'), ((2960, 3107), 'keras.layers.convolutional.Conv3D', 'Conv3D', ([], {'filters': '(64)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=64, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (2966, 3107), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((3152, 3172), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (3170, 3172), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3208, 3229), 'keras.layers.advanced_activations.LeakyReLU', 'LeakyReLU', (['leak_value'], {}), '(leak_value)\n', (3217, 3229), False, 'from keras.layers.advanced_activations import LeakyReLU\n'), ((3244, 3392), 'keras.layers.convolutional.Conv3D', 'Conv3D', ([], {'filters': '(128)', 'kernel_size': 
'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=128, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (3250, 3392), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((3433, 3453), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (3451, 3453), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3489, 3510), 'keras.layers.advanced_activations.LeakyReLU', 'LeakyReLU', (['leak_value'], {}), '(leak_value)\n', (3498, 3510), False, 'from keras.layers.advanced_activations import LeakyReLU\n'), ((3525, 3673), 'keras.layers.convolutional.Conv3D', 'Conv3D', ([], {'filters': '(256)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=256, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'same')\n", (3531, 3673), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((3714, 3734), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (3732, 3734), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3770, 3791), 'keras.layers.advanced_activations.LeakyReLU', 'LeakyReLU', (['leak_value'], {}), '(leak_value)\n', (3779, 3791), False, 'from keras.layers.advanced_activations import LeakyReLU\n'), ((3806, 3954), 'keras.layers.convolutional.Conv3D', 'Conv3D', ([], {'filters': '(512)', 'kernel_size': 'kernel_size', 'strides': 'strides', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""same"""'}), "(filters=512, kernel_size=kernel_size, strides=strides,\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 
'same')\n", (3812, 3954), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((3995, 4015), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (4013, 4015), False, 'from keras.layers.normalization import BatchNormalization\n'), ((4051, 4072), 'keras.layers.advanced_activations.LeakyReLU', 'LeakyReLU', (['leak_value'], {}), '(leak_value)\n', (4060, 4072), False, 'from keras.layers.advanced_activations import LeakyReLU\n'), ((4087, 4236), 'keras.layers.convolutional.Conv3D', 'Conv3D', ([], {'filters': '(1)', 'kernel_size': 'kernel_size', 'strides': '(1, 1, 1)', 'kernel_initializer': '"""glorot_normal"""', 'bias_initializer': '"""zeros"""', 'padding': '"""valid"""'}), "(filters=1, kernel_size=kernel_size, strides=(1, 1, 1),\n kernel_initializer='glorot_normal', bias_initializer='zeros', padding=\n 'valid')\n", (4093, 4236), False, 'from keras.layers.convolutional import Conv3D, Deconv3D\n'), ((4277, 4297), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (4295, 4297), False, 'from keras.layers.normalization import BatchNormalization\n'), ((4333, 4365), 'keras.layers.core.Activation', 'Activation', ([], {'activation': '"""sigmoid"""'}), "(activation='sigmoid')\n", (4343, 4365), False, 'from keras.layers.core import Activation\n')] |
# Add paths toward dependecies in different subdirectories
import os
import sys
sys.path.append(os.path.abspath('./drone'))
sys.path.append(os.path.abspath('./log'))
# Add dependencies
from drone_list import DroneList
from environment import Environment
from setup_logging import LogsConfig
# Module-level logger obtained from the project's logging configuration.
logsConfig = LogsConfig()
logger = logsConfig.logger('EnvironmentApi')
def api_environment_set_mode(data):
    """Reset the drone fleet and apply the requested environment mode."""
    mode = data['mode_chosen']
    number_drones = data['number_of_drone']
    # Drop any existing drones before switching modes.
    DroneList.delete_drones()
    Environment.set_mode(mode)
    in_simulation = Environment.is_in_simulation()
    # Both modes recreate the fleet; simulation mode additionally launches it.
    DroneList.createDrones(int(number_drones), mode)
    if in_simulation:
        Environment.launch_simulation(number_drones)
def api_environment_set_real_position(data):
    # Replace the stored initial drone positions with the freshly received
    # list. ("posisitions" typo is part of the DroneList attribute name.)
    DroneList.initial_posisitions.clear()
DroneList.initial_posisitions.extend(data) | [
"environment.Environment.is_in_simulation",
"environment.Environment.launch_simulation",
"setup_logging.LogsConfig",
"drone_list.DroneList.initial_posisitions.extend",
"drone_list.DroneList.initial_posisitions.clear",
"drone_list.DroneList.delete_drones",
"environment.Environment.set_mode",
"os.path.a... | [((306, 318), 'setup_logging.LogsConfig', 'LogsConfig', ([], {}), '()\n', (316, 318), False, 'from setup_logging import LogsConfig\n'), ((96, 122), 'os.path.abspath', 'os.path.abspath', (['"""./drone"""'], {}), "('./drone')\n", (111, 122), False, 'import os\n'), ((140, 164), 'os.path.abspath', 'os.path.abspath', (['"""./log"""'], {}), "('./log')\n", (155, 164), False, 'import os\n'), ((481, 506), 'drone_list.DroneList.delete_drones', 'DroneList.delete_drones', ([], {}), '()\n', (504, 506), False, 'from drone_list import DroneList\n'), ((511, 537), 'environment.Environment.set_mode', 'Environment.set_mode', (['mode'], {}), '(mode)\n', (531, 537), False, 'from environment import Environment\n'), ((547, 577), 'environment.Environment.is_in_simulation', 'Environment.is_in_simulation', ([], {}), '()\n', (575, 577), False, 'from environment import Environment\n'), ((807, 844), 'drone_list.DroneList.initial_posisitions.clear', 'DroneList.initial_posisitions.clear', ([], {}), '()\n', (842, 844), False, 'from drone_list import DroneList\n'), ((849, 891), 'drone_list.DroneList.initial_posisitions.extend', 'DroneList.initial_posisitions.extend', (['data'], {}), '(data)\n', (885, 891), False, 'from drone_list import DroneList\n'), ((645, 689), 'environment.Environment.launch_simulation', 'Environment.launch_simulation', (['number_drones'], {}), '(number_drones)\n', (674, 689), False, 'from environment import Environment\n')] |
import ansiblemetrics.utils as utils
from ansiblemetrics.ansible_metric import AnsibleMetric
class NumIncludedVars(AnsibleMetric):
    """Measures the number of included variables in a playbook."""

    def count(self):
        """Return the number of included variables.
        Example
        -------
        .. highlight:: python
        .. code-block:: python
            from ansiblemetrics.general.num_included_vars import NumIncludedVars
            playbook = '''
            - name: Include a play after another play
              include_vars: myvars.yaml
            '''
            NumIncludedVars(playbook).count()
            >> 1
        Returns
        -------
        int
            number of included variables
        """
        # Count every occurrence of the 'include_vars' key in the playbook.
        playbook_keys = utils.all_keys(self.playbook)
        return sum(1 for key in playbook_keys if key == 'include_vars')
| [
"ansiblemetrics.utils.all_keys"
] | [((815, 837), 'ansiblemetrics.utils.all_keys', 'utils.all_keys', (['script'], {}), '(script)\n', (829, 837), True, 'import ansiblemetrics.utils as utils\n')] |
from allennlp.data.vocabulary import Vocabulary
from content_analyzer.models.rnn_classifier import RnnClassifier
from allennlp.data.tokenizers.word_tokenizer import WordTokenizer
from content_analyzer.data.dataset_readers.twitter import TwitterNLPDatasetReader
from allennlp.data.token_indexers import PretrainedBertIndexer
from allennlp.modules.token_embedders import PretrainedBertEmbedder
from allennlp.modules.text_field_embedders import BasicTextFieldEmbedder
from allennlp.modules.seq2vec_encoders import Seq2VecEncoder, PytorchSeq2VecWrapper
import torch
from allennlp.predictors import Predictor
from allennlp.predictors.text_classifier import TextClassifierPredictor
import overrides
from allennlp.common.util import JsonDict
# BERT wordpiece indexer / word tokenizer / tweet dataset reader.
indexer = PretrainedBertIndexer('bert-base-uncased')
wt = WordTokenizer()
tdr = TwitterNLPDatasetReader({"tokens": indexer}, wt)
# Bidirectional single-layer GRU sized to BERT's 768-dim hidden states.
GRU_args = {
    "bidirectional": True,
    "input_size": 768,
    "hidden_size": 768,
    "num_layers": 1,
}
print("vocab")
vocab = Vocabulary.from_files("out/flood_model/vocabulary")
print("embedder")
token_embedder = PretrainedBertEmbedder("bert-base-uncased")
text_embedder = BasicTextFieldEmbedder({"tokens": token_embedder}, allow_unmatched_keys = True)
print("encoder")
seq2vec = PytorchSeq2VecWrapper(torch.nn.GRU(batch_first=True, **GRU_args))
print("model")
model = RnnClassifier(vocab, text_embedder, seq2vec)
print("model state")
# Restore the best checkpoint's weights into the freshly built model.
with open("out/flood_model/best.th", 'rb') as f:
    state_dict = torch.load(f)
model.load_state_dict(state_dict)
# Run one demo prediction through the trained classifier.
predictor = TextClassifierPredictor(model, tdr)
prediction = predictor.predict("five people missing according to state police. if you have any information please contact us.")
print(prediction) | [
"allennlp.data.token_indexers.PretrainedBertIndexer",
"allennlp.modules.text_field_embedders.BasicTextFieldEmbedder",
"content_analyzer.data.dataset_readers.twitter.TwitterNLPDatasetReader",
"torch.load",
"allennlp.data.vocabulary.Vocabulary.from_files",
"allennlp.modules.token_embedders.PretrainedBertEmb... | [((746, 788), 'allennlp.data.token_indexers.PretrainedBertIndexer', 'PretrainedBertIndexer', (['"""bert-base-uncased"""'], {}), "('bert-base-uncased')\n", (767, 788), False, 'from allennlp.data.token_indexers import PretrainedBertIndexer\n'), ((794, 809), 'allennlp.data.tokenizers.word_tokenizer.WordTokenizer', 'WordTokenizer', ([], {}), '()\n', (807, 809), False, 'from allennlp.data.tokenizers.word_tokenizer import WordTokenizer\n'), ((816, 864), 'content_analyzer.data.dataset_readers.twitter.TwitterNLPDatasetReader', 'TwitterNLPDatasetReader', (["{'tokens': indexer}", 'wt'], {}), "({'tokens': indexer}, wt)\n", (839, 864), False, 'from content_analyzer.data.dataset_readers.twitter import TwitterNLPDatasetReader\n'), ((999, 1050), 'allennlp.data.vocabulary.Vocabulary.from_files', 'Vocabulary.from_files', (['"""out/flood_model/vocabulary"""'], {}), "('out/flood_model/vocabulary')\n", (1020, 1050), False, 'from allennlp.data.vocabulary import Vocabulary\n'), ((1086, 1129), 'allennlp.modules.token_embedders.PretrainedBertEmbedder', 'PretrainedBertEmbedder', (['"""bert-base-uncased"""'], {}), "('bert-base-uncased')\n", (1108, 1129), False, 'from allennlp.modules.token_embedders import PretrainedBertEmbedder\n'), ((1146, 1223), 'allennlp.modules.text_field_embedders.BasicTextFieldEmbedder', 'BasicTextFieldEmbedder', (["{'tokens': token_embedder}"], {'allow_unmatched_keys': '(True)'}), "({'tokens': token_embedder}, allow_unmatched_keys=True)\n", (1168, 1223), False, 'from allennlp.modules.text_field_embedders import BasicTextFieldEmbedder\n'), ((1342, 1386), 'content_analyzer.models.rnn_classifier.RnnClassifier', 'RnnClassifier', (['vocab', 'text_embedder', 'seq2vec'], {}), '(vocab, text_embedder, seq2vec)\n', (1355, 1386), False, 'from content_analyzer.models.rnn_classifier import RnnClassifier\n'), ((1540, 1575), 'allennlp.predictors.text_classifier.TextClassifierPredictor', 'TextClassifierPredictor', (['model', 
'tdr'], {}), '(model, tdr)\n', (1563, 1575), False, 'from allennlp.predictors.text_classifier import TextClassifierPredictor\n'), ((1275, 1317), 'torch.nn.GRU', 'torch.nn.GRU', ([], {'batch_first': '(True)'}), '(batch_first=True, **GRU_args)\n', (1287, 1317), False, 'import torch\n'), ((1475, 1488), 'torch.load', 'torch.load', (['f'], {}), '(f)\n', (1485, 1488), False, 'import torch\n')] |
import math
import numpy as np
from scipy.special import expit, logit
import matplotlib.pyplot as plt
from mmur.viz import _set_plot_style
COLORS = _set_plot_style()
def plot_logstic_dgp(N=500, figsize=None):
"""Plot example of DGP as used in mmur.generators.LogisticGenerator.
Parameters
----------
N : int
number of points to generate in plot
figsize : tuple, default=None
figure passed to plt.subplots, default size is (12, 7)
Returns
-------
fig : matplotlib.figure.Figure
ax : matplotlib.axes._subplots.AxesSubplot
"""
betas = np.array((0.5, 1.2))
X = np.ones((N, 2))
X[:, 1] = np.random.uniform(-10., 10.1, size=N)
L = X.dot(betas)
gt_proba = expit(L)
proba_noisy = expit(L + np.random.normal(0, 0.5, size=N))
y = np.random.binomial(1, proba_noisy)
figsize = figsize or (12, 7)
fig, ax = plt.subplots(figsize=figsize)
sidx = np.argsort(X[:, 1])
x = X[sidx, 1]
ax.plot(x, gt_proba[sidx], label='true P', lw=2)
ax.scatter(x, proba_noisy[sidx], c='grey', marker='x', label='noisy P')
ax.scatter(x, y[sidx], c=COLORS[2], marker='x', s=50, label='y')
ax.legend(fontsize=14)
ax.set_ylabel('probability', fontsize=14)
ax.set_xlabel('X', fontsize=14)
ax.set_title('Logistic data generating process', fontsize=16)
return fig, ax
def plot_probas(
probas, ground_truth, n_sets=None, alt_label=None, axs=None
):
"""Plot sorted probabilities compared to ground truth probability.
Parameters
---------
probas : np.ndarray[float]
the classifier probabilities of shape (holdout_samples, n_sets)
ground_truth : np.ndarray[float]
ground truth probabilities, 1d array
n_sets : int, float, default=None
number of columns in proba to plot. If int it is interpreted as the
number of columns. If a float as a fraction of the columns. Default
is max(0.1 * probas.shape[1], 30)
alt_label : str, default=None
label for the source of probabilities, default is 'holdout'
axs : np.ndarray[matplotlib.axes._subplots.AxesSubplot], default=None
an array containing the axes to plot on, must be 1d and of length >= 2
Returns
-------
fig : matplotlib.figure.Figure, optional
the figure is returned when ``axs`` is None
axs : matplotlib.axes._subplots.AxesSubplot
the created or passed axes object
"""
if probas.ndim == 1:
probas = probas[:, None]
alt_label = alt_label or 'holdout'
if axs is None:
fig, axs = plt.subplots(figsize=(14, 7), nrows=1, ncols=2)
else:
fig = None
n_cols = probas.shape[1]
if isinstance(n_sets, int):
n_sets = max(n_cols, n_sets)
elif isinstance(n_sets, float):
n_sets = max(math.floor(n_sets * n_cols), n_cols)
else:
n_sets = max(math.floor(0.1 * probas.shape[1]), min(30, n_cols))
sorted_gt = np.sort(ground_truth)
xvals = logit(sorted_gt)
for i in range(n_sets - 1):
sarr = np.sort(probas[:, i])
axs[0].plot(xvals, sarr, c='grey', alpha=0.5)
axs[1].plot(sorted_gt, sarr, c='grey', alpha=0.5)
# plot outside loop for easier labelling
sarr = np.sort(probas[:, -1])
axs[0].plot(xvals, sarr, c='grey', alpha=0.5, label=alt_label)
axs[1].plot(sorted_gt, sarr, c='grey', alpha=0.5, label=alt_label)
# plot DGP
axs[0].plot(
xvals,
sorted_gt,
c='red',
ls='--',
lw=2,
zorder=10,
label='DGP',
)
axs[0].set_title('Probabilities', fontsize=18)
axs[0].set_ylabel('proba', fontsize=18)
axs[0].set_xlabel('DGP linear estimate', fontsize=18)
axs[0].tick_params(labelsize=16)
axs[0].legend(fontsize=18)
# plot DGP
axs[1].plot(
ground_truth,
ground_truth,
c='red',
ls='--',
lw=2,
zorder=10,
label='DGP'
)
axs[1].set_title('Q-Q ', fontsize=18)
axs[1].set_ylabel('proba -- ground truth', fontsize=18)
axs[1].set_xlabel('proba -- draws', fontsize=18)
axs[1].tick_params(labelsize=16)
axs[1].legend(fontsize=18)
if fig is not None:
fig.tight_layout()
return fig, axs
return axs
| [
"numpy.random.normal",
"numpy.ones",
"math.floor",
"numpy.sort",
"scipy.special.expit",
"numpy.array",
"numpy.argsort",
"scipy.special.logit",
"mmur.viz._set_plot_style",
"numpy.random.uniform",
"matplotlib.pyplot.subplots",
"numpy.random.binomial"
] | [((150, 167), 'mmur.viz._set_plot_style', '_set_plot_style', ([], {}), '()\n', (165, 167), False, 'from mmur.viz import _set_plot_style\n'), ((599, 619), 'numpy.array', 'np.array', (['(0.5, 1.2)'], {}), '((0.5, 1.2))\n', (607, 619), True, 'import numpy as np\n'), ((628, 643), 'numpy.ones', 'np.ones', (['(N, 2)'], {}), '((N, 2))\n', (635, 643), True, 'import numpy as np\n'), ((658, 696), 'numpy.random.uniform', 'np.random.uniform', (['(-10.0)', '(10.1)'], {'size': 'N'}), '(-10.0, 10.1, size=N)\n', (675, 696), True, 'import numpy as np\n'), ((732, 740), 'scipy.special.expit', 'expit', (['L'], {}), '(L)\n', (737, 740), False, 'from scipy.special import expit, logit\n'), ((811, 845), 'numpy.random.binomial', 'np.random.binomial', (['(1)', 'proba_noisy'], {}), '(1, proba_noisy)\n', (829, 845), True, 'import numpy as np\n'), ((894, 923), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (906, 923), True, 'import matplotlib.pyplot as plt\n'), ((935, 954), 'numpy.argsort', 'np.argsort', (['X[:, 1]'], {}), '(X[:, 1])\n', (945, 954), True, 'import numpy as np\n'), ((2953, 2974), 'numpy.sort', 'np.sort', (['ground_truth'], {}), '(ground_truth)\n', (2960, 2974), True, 'import numpy as np\n'), ((2987, 3003), 'scipy.special.logit', 'logit', (['sorted_gt'], {}), '(sorted_gt)\n', (2992, 3003), False, 'from scipy.special import expit, logit\n'), ((3243, 3265), 'numpy.sort', 'np.sort', (['probas[:, -1]'], {}), '(probas[:, -1])\n', (3250, 3265), True, 'import numpy as np\n'), ((2583, 2630), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(14, 7)', 'nrows': '(1)', 'ncols': '(2)'}), '(figsize=(14, 7), nrows=1, ncols=2)\n', (2595, 2630), True, 'import matplotlib.pyplot as plt\n'), ((3052, 3073), 'numpy.sort', 'np.sort', (['probas[:, i]'], {}), '(probas[:, i])\n', (3059, 3073), True, 'import numpy as np\n'), ((769, 801), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.5)'], {'size': 'N'}), '(0, 0.5, size=N)\n', 
(785, 801), True, 'import numpy as np\n'), ((2816, 2843), 'math.floor', 'math.floor', (['(n_sets * n_cols)'], {}), '(n_sets * n_cols)\n', (2826, 2843), False, 'import math\n'), ((2884, 2917), 'math.floor', 'math.floor', (['(0.1 * probas.shape[1])'], {}), '(0.1 * probas.shape[1])\n', (2894, 2917), False, 'import math\n')] |
import unittest
from SubTypeTree import SubTypeTreeFactory
from SubTypeTree import VenderTreeFactory
from SubTypeTree import SubTypeTree
from SubTypeTree import VenderTree
from SubTypeTree import GitHubVenderTree
from SubTypeTree import StandardTree
from SubTypeTree import ParsonalTree
from SubTypeTree import UnregisteredTree
class TestStandardTree_BlackBox(unittest.TestCase):
def test_Values(self):
tree_list = ['html']
tree = StandardTree(tree_list)
self.assertEqual(None, tree.GetFacet())
self.assertEqual(tree_list, tree.TreeList)
| [
"SubTypeTree.StandardTree"
] | [((451, 474), 'SubTypeTree.StandardTree', 'StandardTree', (['tree_list'], {}), '(tree_list)\n', (463, 474), False, 'from SubTypeTree import StandardTree\n')] |
# -*- coding: utf-8 -*-
from django.forms.models import modelform_factory
try:
from unittest.case import TestCase, expectedFailure
except ImportError:
from django.utils.unittest.case import TestCase, expectedFailure
from django_ace import AceWidget
from editregions.contrib.embeds.forms import JavaScriptEditorForm, StylesheetAssetForm, JavascriptAssetForm
from editregions.contrib.embeds.models import JavaScript, JavascriptAsset, StylesheetAsset
class JavaScriptEditorFormTestCase(TestCase):
def test_init(self):
form = modelform_factory(model=JavaScript, form=JavaScriptEditorForm,
fields=['content'])()
self.assertIsInstance(form.fields['content'].widget, AceWidget)
self.assertEqual(form.fields['content'].widget.mode, 'javascript')
self.assertEqual(form.fields['content'].widget.theme, 'chrome')
class StylesheetAssetFormTestCase(TestCase):
def test_init(self):
form = StylesheetAssetForm()
self.assertEqual(form.only_patterns, ('editregions/embeds/*.css',))
self.assertEqual(form.fields['local'].choices, [('', '---------')])
def test_found_patterns(self):
form = StylesheetAssetForm(only_patterns=('test-*.css',))
self.assertEqual(form.only_patterns, ('test-*.css',))
expected = sorted(list(form.fields['local'].choices))
self.assertEqual(expected, [
('test-1.css', 'test-1.css'), ('test-2.css', 'test-2.css')
])
@expectedFailure
def test_skipping_fields(self):
"""I dunno why this still has local in it afterwards. hmmms."""
form = modelform_factory(model=StylesheetAsset,
form=StylesheetAssetForm, fields=[],
exclude=['local'])()
self.assertNotIn('local', form.fields)
class JavascriptAssetFormTestCase(TestCase):
def test_init(self):
form = JavascriptAssetForm()
self.assertEqual(form.only_patterns, ('editregions/embeds/*.js',))
self.assertEqual(form.fields['local'].choices, [('', '---------')])
def test_found_patterns(self):
form = JavascriptAssetForm(only_patterns=('test-*.js',))
self.assertEqual(form.only_patterns, ('test-*.js',))
expected = sorted(list(form.fields['local'].choices))
self.assertEqual(expected, [
('test-1.js', 'test-1.js')
])
@expectedFailure
def test_skipping_fields(self):
"""I dunno why this still has local in it afterwards. hmmms."""
form = modelform_factory(model=JavascriptAsset,
form=JavascriptAssetForm, fields=[],
exclude=['local'])()
self.assertNotIn('local', form.fields)
| [
"editregions.contrib.embeds.forms.JavascriptAssetForm",
"editregions.contrib.embeds.forms.StylesheetAssetForm",
"django.forms.models.modelform_factory"
] | [((968, 989), 'editregions.contrib.embeds.forms.StylesheetAssetForm', 'StylesheetAssetForm', ([], {}), '()\n', (987, 989), False, 'from editregions.contrib.embeds.forms import JavaScriptEditorForm, StylesheetAssetForm, JavascriptAssetForm\n'), ((1193, 1243), 'editregions.contrib.embeds.forms.StylesheetAssetForm', 'StylesheetAssetForm', ([], {'only_patterns': "('test-*.css',)"}), "(only_patterns=('test-*.css',))\n", (1212, 1243), False, 'from editregions.contrib.embeds.forms import JavaScriptEditorForm, StylesheetAssetForm, JavascriptAssetForm\n'), ((1931, 1952), 'editregions.contrib.embeds.forms.JavascriptAssetForm', 'JavascriptAssetForm', ([], {}), '()\n', (1950, 1952), False, 'from editregions.contrib.embeds.forms import JavaScriptEditorForm, StylesheetAssetForm, JavascriptAssetForm\n'), ((2155, 2204), 'editregions.contrib.embeds.forms.JavascriptAssetForm', 'JavascriptAssetForm', ([], {'only_patterns': "('test-*.js',)"}), "(only_patterns=('test-*.js',))\n", (2174, 2204), False, 'from editregions.contrib.embeds.forms import JavaScriptEditorForm, StylesheetAssetForm, JavascriptAssetForm\n'), ((544, 631), 'django.forms.models.modelform_factory', 'modelform_factory', ([], {'model': 'JavaScript', 'form': 'JavaScriptEditorForm', 'fields': "['content']"}), "(model=JavaScript, form=JavaScriptEditorForm, fields=[\n 'content'])\n", (561, 631), False, 'from django.forms.models import modelform_factory\n'), ((1632, 1733), 'django.forms.models.modelform_factory', 'modelform_factory', ([], {'model': 'StylesheetAsset', 'form': 'StylesheetAssetForm', 'fields': '[]', 'exclude': "['local']"}), "(model=StylesheetAsset, form=StylesheetAssetForm, fields=[\n ], exclude=['local'])\n", (1649, 1733), False, 'from django.forms.models import modelform_factory\n'), ((2560, 2661), 'django.forms.models.modelform_factory', 'modelform_factory', ([], {'model': 'JavascriptAsset', 'form': 'JavascriptAssetForm', 'fields': '[]', 'exclude': "['local']"}), "(model=JavascriptAsset, 
form=JavascriptAssetForm, fields=[\n ], exclude=['local'])\n", (2577, 2661), False, 'from django.forms.models import modelform_factory\n')] |
from __future__ import print_function
import sys
import os
import re
import numpy as np
import subprocess
from matplotlib import pyplot as plt
inputpath = os.path.join(os.path.realpath('..'),'INPUT/')
print("Initialising")
fig, ax = plt.subplots()
n=0
for filenum in ['INPUT/0.txt','INPUT/1.txt','INPUT/2.txt']:
os.rename(filenum, 'INPUT/equilibrium.map')
subprocess.call(["csphoenix"])
os.rename('INPUT/equilibrium.map', filenum)
n_variable = 8
n_multiplier = n_variable * 8
omegafile = 'OUTPUT/omega_csp'
omega_min = -2.0
omega_max = 2.0
gamma_min = -0.1
gamma_max = 0.1
with open(omegafile, 'r') as f:
line = f.readline()
[m, nr] = map(int, line.split())
print('M = ', m)
print('NR = ', nr)
n_output = m * n_multiplier * nr
r = np.zeros(n_output)
q = np.zeros(n_output)
gamma = np.zeros(n_output)
omega = np.zeros(n_output)
i = 0
for line in f:
[rf, qf, omegaf, gammaf] = map(float, line.split())
#print(rf, qf, gammaf, omegaf)
r[i] = rf
q[i] = qf
gamma[i] = gammaf
omega[i] = omegaf
i = i + 1
f.close()
plt.scatter(r, omega, s=0.5, marker='x', label='flow='+str(n))
n=n+1
inner = 0.0
outer = 1.0
## NAME THE OUTPUT FILES
plt.xlim([np.min(r),np.max(r)])
plt.xlabel('s')
plt.ylim([omega_min,omega_max])
plt.ylabel('$\omega / \omega_{A0}$')
ax.legend()
plt.title('Continuous Spectrum Frequency')
plt.figure()
plt.show()
#inner = 0.0
#outer = 1.0
## NAME THE OUTPUT FILES
#plt.xlim([np.min(r),np.max(r)])
#plt.xlabel('s')
#plt.ylim([omega_min,omega_max])
#plt.ylabel('$\omega / \omega_{A0}$')
#ax.legend()
#plt.title('Continuous Spectrum Frequency')
#plt.savefig("/SecondDisk/PHOENIX_RUNS/NSTX/OVERPLOTnumeric012.png")
#print("Frequency continuum plot done")
| [
"matplotlib.pyplot.title",
"matplotlib.pyplot.ylabel",
"os.rename",
"matplotlib.pyplot.xlabel",
"numpy.max",
"os.path.realpath",
"matplotlib.pyplot.figure",
"numpy.zeros",
"subprocess.call",
"numpy.min",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((234, 248), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (246, 248), True, 'from matplotlib import pyplot as plt\n'), ((1454, 1464), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1462, 1464), True, 'from matplotlib import pyplot as plt\n'), ((169, 191), 'os.path.realpath', 'os.path.realpath', (['""".."""'], {}), "('..')\n", (185, 191), False, 'import os\n'), ((314, 357), 'os.rename', 'os.rename', (['filenum', '"""INPUT/equilibrium.map"""'], {}), "(filenum, 'INPUT/equilibrium.map')\n", (323, 357), False, 'import os\n'), ((359, 389), 'subprocess.call', 'subprocess.call', (["['csphoenix']"], {}), "(['csphoenix'])\n", (374, 389), False, 'import subprocess\n'), ((391, 434), 'os.rename', 'os.rename', (['"""INPUT/equilibrium.map"""', 'filenum'], {}), "('INPUT/equilibrium.map', filenum)\n", (400, 434), False, 'import os\n'), ((1296, 1311), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""s"""'], {}), "('s')\n", (1306, 1311), True, 'from matplotlib import pyplot as plt\n'), ((1313, 1345), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[omega_min, omega_max]'], {}), '([omega_min, omega_max])\n', (1321, 1345), True, 'from matplotlib import pyplot as plt\n'), ((1346, 1384), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\omega / \\\\omega_{A0}$"""'], {}), "('$\\\\omega / \\\\omega_{A0}$')\n", (1356, 1384), True, 'from matplotlib import pyplot as plt\n'), ((1397, 1439), 'matplotlib.pyplot.title', 'plt.title', (['"""Continuous Spectrum Frequency"""'], {}), "('Continuous Spectrum Frequency')\n", (1406, 1439), True, 'from matplotlib import pyplot as plt\n'), ((1441, 1453), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1451, 1453), True, 'from matplotlib import pyplot as plt\n'), ((775, 793), 'numpy.zeros', 'np.zeros', (['n_output'], {}), '(n_output)\n', (783, 793), True, 'import numpy as np\n'), ((803, 821), 'numpy.zeros', 'np.zeros', (['n_output'], {}), '(n_output)\n', (811, 821), True, 'import numpy as np\n'), ((835, 853), 
'numpy.zeros', 'np.zeros', (['n_output'], {}), '(n_output)\n', (843, 853), True, 'import numpy as np\n'), ((867, 885), 'numpy.zeros', 'np.zeros', (['n_output'], {}), '(n_output)\n', (875, 885), True, 'import numpy as np\n'), ((1273, 1282), 'numpy.min', 'np.min', (['r'], {}), '(r)\n', (1279, 1282), True, 'import numpy as np\n'), ((1283, 1292), 'numpy.max', 'np.max', (['r'], {}), '(r)\n', (1289, 1292), True, 'import numpy as np\n')] |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
from typing import Any, Dict, Generator, cast
import pytest
from _pytest.fixtures import SubRequest
from _pytest.monkeypatch import MonkeyPatch
from omegaconf import OmegaConf
from torchgeo.datamodules import ChesapeakeCVPRDataModule
from torchgeo.trainers.chesapeake import ChesapeakeCVPRSegmentationTask
from .test_utils import FakeTrainer, mocked_log
class TestChesapeakeCVPRSegmentationTask:
@pytest.fixture(scope="class", params=[5, 7])
def class_set(self, request: SubRequest) -> int:
return cast(int, request.param)
@pytest.fixture(scope="class")
def datamodule(self, class_set: int) -> ChesapeakeCVPRDataModule:
dm = ChesapeakeCVPRDataModule(
os.path.join("tests", "data", "chesapeake", "cvpr"),
["de-test"],
["de-test"],
["de-test"],
patch_size=32,
patches_per_tile=2,
batch_size=2,
num_workers=0,
class_set=class_set,
)
dm.prepare_data()
dm.setup()
return dm
@pytest.fixture
def config(self, class_set: int) -> Dict[str, Any]:
task_conf = OmegaConf.load(
os.path.join("conf", "task_defaults", f"chesapeake_cvpr_{class_set}.yaml")
)
task_args = OmegaConf.to_object(task_conf.experiment.module)
task_args = cast(Dict[str, Any], task_args)
return task_args
@pytest.fixture
def task(
self, config: Dict[str, Any], monkeypatch: Generator[MonkeyPatch, None, None]
) -> ChesapeakeCVPRSegmentationTask:
task = ChesapeakeCVPRSegmentationTask(**config)
trainer = FakeTrainer()
monkeypatch.setattr(task, "trainer", trainer) # type: ignore[attr-defined]
monkeypatch.setattr(task, "log", mocked_log) # type: ignore[attr-defined]
return task
def test_validation(
self, datamodule: ChesapeakeCVPRDataModule, task: ChesapeakeCVPRSegmentationTask
) -> None:
batch = next(iter(datamodule.val_dataloader()))
task.validation_step(batch, 0)
task.validation_epoch_end(0)
| [
"torchgeo.trainers.chesapeake.ChesapeakeCVPRSegmentationTask",
"os.path.join",
"omegaconf.OmegaConf.to_object",
"pytest.fixture",
"typing.cast"
] | [((511, 555), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""', 'params': '[5, 7]'}), "(scope='class', params=[5, 7])\n", (525, 555), False, 'import pytest\n'), ((655, 684), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""'}), "(scope='class')\n", (669, 684), False, 'import pytest\n'), ((624, 648), 'typing.cast', 'cast', (['int', 'request.param'], {}), '(int, request.param)\n', (628, 648), False, 'from typing import Any, Dict, Generator, cast\n'), ((1382, 1430), 'omegaconf.OmegaConf.to_object', 'OmegaConf.to_object', (['task_conf.experiment.module'], {}), '(task_conf.experiment.module)\n', (1401, 1430), False, 'from omegaconf import OmegaConf\n'), ((1451, 1482), 'typing.cast', 'cast', (['Dict[str, Any]', 'task_args'], {}), '(Dict[str, Any], task_args)\n', (1455, 1482), False, 'from typing import Any, Dict, Generator, cast\n'), ((1685, 1725), 'torchgeo.trainers.chesapeake.ChesapeakeCVPRSegmentationTask', 'ChesapeakeCVPRSegmentationTask', ([], {}), '(**config)\n', (1715, 1725), False, 'from torchgeo.trainers.chesapeake import ChesapeakeCVPRSegmentationTask\n'), ((806, 857), 'os.path.join', 'os.path.join', (['"""tests"""', '"""data"""', '"""chesapeake"""', '"""cvpr"""'], {}), "('tests', 'data', 'chesapeake', 'cvpr')\n", (818, 857), False, 'import os\n'), ((1277, 1351), 'os.path.join', 'os.path.join', (['"""conf"""', '"""task_defaults"""', 'f"""chesapeake_cvpr_{class_set}.yaml"""'], {}), "('conf', 'task_defaults', f'chesapeake_cvpr_{class_set}.yaml')\n", (1289, 1351), False, 'import os\n')] |
import logging
import os
from typing import Generic, List, Type, Any
import torch
import torch.nn as nn
from ..downloading.downloading_utils import from_cache
from ..featurization.featurization_api import T_BatchEncoding, T_Config, PretrainedFeaturizerMixin
class PretrainedModelBase(nn.Module, Generic[T_BatchEncoding, T_Config]):
def __init__(self, config: T_Config):
super().__init__()
self.config = config
def forward(self, batch: T_BatchEncoding):
raise NotImplementedError
@classmethod
def _get_archive_dict(cls) -> dict:
raise NotImplementedError
@classmethod
def get_config_cls(cls) -> Type[T_Config]:
raise NotImplementedError
@classmethod
def get_featurizer_cls(cls) -> Type[PretrainedFeaturizerMixin[Any, T_BatchEncoding, T_Config]]:
raise NotImplementedError
@classmethod
def from_pretrained(cls,
pretrained_name: str, *,
excluded: List[str] = None,
config: T_Config = None) -> "PretrainedModelBase[T_BatchEncoding, T_Config]":
archive_dict = cls._get_archive_dict()
file_path = from_cache(pretrained_name, archive_dict, 'pt')
if not file_path:
file_path = os.path.expanduser(pretrained_name)
if not os.path.exists(file_path):
raise FileNotFoundError(file_path)
if not config:
raise AttributeError('Set \'config\' attribute when using local path to weights.')
if not config:
config_cls = cls.get_config_cls()
config = config_cls.from_pretrained(pretrained_name)
model = cls(config)
model.load_weights(file_path, excluded=excluded)
return model
def init_weights(self, init_type: str):
for p in self.parameters():
if p.dim() > 1:
if init_type == 'uniform':
nn.init.xavier_uniform_(p)
elif init_type == 'normal':
nn.init.xavier_normal_(p)
else:
raise NotImplementedError()
def _remove_excluded(self, dictionary: dict, *, excluded: List[str] = None):
excluded = excluded if excluded else []
return {k: v for k, v in dictionary.items() if all(k.split('.')[0] != p for p in excluded)}
def load_weights(self, file_path: str, *, excluded: List[str] = None):
state_dict = torch.load(file_path, map_location='cpu')
state_dict = self._remove_excluded(state_dict, excluded=excluded)
result = self.load_state_dict(state_dict, strict=False)
if len(result.missing_keys) > 0:
logging.info(f'Missing keys when loading: {result.missing_keys}')
if len(result.unexpected_keys) > 0:
logging.warning(f'Unexpected keys when loading: {result.unexpected_keys}')
def save_weights(self, file_path: str, *, excluded: List[str] = None):
state_dict = self.state_dict()
state_dict = self._remove_excluded(state_dict, excluded=excluded)
torch.save(state_dict, file_path)
| [
"os.path.exists",
"torch.nn.init.xavier_uniform_",
"torch.load",
"logging.warning",
"torch.nn.init.xavier_normal_",
"torch.save",
"logging.info",
"os.path.expanduser"
] | [((2459, 2500), 'torch.load', 'torch.load', (['file_path'], {'map_location': '"""cpu"""'}), "(file_path, map_location='cpu')\n", (2469, 2500), False, 'import torch\n'), ((3086, 3119), 'torch.save', 'torch.save', (['state_dict', 'file_path'], {}), '(state_dict, file_path)\n', (3096, 3119), False, 'import torch\n'), ((1274, 1309), 'os.path.expanduser', 'os.path.expanduser', (['pretrained_name'], {}), '(pretrained_name)\n', (1292, 1309), False, 'import os\n'), ((2692, 2757), 'logging.info', 'logging.info', (['f"""Missing keys when loading: {result.missing_keys}"""'], {}), "(f'Missing keys when loading: {result.missing_keys}')\n", (2704, 2757), False, 'import logging\n'), ((2814, 2888), 'logging.warning', 'logging.warning', (['f"""Unexpected keys when loading: {result.unexpected_keys}"""'], {}), "(f'Unexpected keys when loading: {result.unexpected_keys}')\n", (2829, 2888), False, 'import logging\n'), ((1329, 1354), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (1343, 1354), False, 'import os\n'), ((1945, 1971), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (1968, 1971), True, 'import torch.nn as nn\n'), ((2036, 2061), 'torch.nn.init.xavier_normal_', 'nn.init.xavier_normal_', (['p'], {}), '(p)\n', (2058, 2061), True, 'import torch.nn as nn\n')] |
from fixture.orm import ORMFixture
from fixture.db import DbFixture
from model.group import Group
from model.contact import Contact
database = ORMFixture(host="localhost", name="addressbook", user="root", password="")
try:
l = database.get_contacts_in_group(Group(id="174"))
# l = sorted(database.get_groups_contact_added(Contact(id="1")), key=Group.id_or_max)
for item in l:
print(item)
print(len(l))
finally:
pass
| [
"model.group.Group",
"fixture.orm.ORMFixture"
] | [((144, 218), 'fixture.orm.ORMFixture', 'ORMFixture', ([], {'host': '"""localhost"""', 'name': '"""addressbook"""', 'user': '"""root"""', 'password': '""""""'}), "(host='localhost', name='addressbook', user='root', password='')\n", (154, 218), False, 'from fixture.orm import ORMFixture\n'), ((264, 279), 'model.group.Group', 'Group', ([], {'id': '"""174"""'}), "(id='174')\n", (269, 279), False, 'from model.group import Group\n')] |