code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import numpy as np
import sounddevice as sd
# Host-API index used to filter input devices in query_devices();
# assumed to be the Windows MME backend's index — TODO confirm.
MME = 0
# Global capture defaults: stereo, 16-bit integer PCM, low latency, 48 kHz.
sd.default.channels = 2
sd.default.dtype = 'int16'
sd.default.latency = 'low'
sd.default.samplerate = 48000
class PCMStream:
    """Wraps a sounddevice InputStream and exposes raw PCM bytes."""

    def __init__(self):
        # No stream until change_device() selects an input device.
        self.stream = None

    def read(self, num_bytes):
        """Read roughly ``num_bytes`` of audio and return raw PCM bytes.

        A frame is 4 bytes (2 channels x 2 bytes of int16 — see the
        module defaults), so the byte count is converted to a frame
        count first. Uses floor division instead of ``int(n / 4)`` to
        stay exact for any size.
        """
        frames = num_bytes // 4
        data = self.stream.read(frames)[0]
        # numpy int16 array -> raw PCM byte string
        return data.tobytes()

    def change_device(self, num):
        """Switch capture to device index ``num``, closing any open stream."""
        if self.stream is not None:
            self.stream.stop()
            self.stream.close()
        self.stream = sd.InputStream(device=num)
        self.stream.start()
def query_devices():
    """Return a mapping of input-device name -> sounddevice device index.

    Only devices that have input channels and belong to the MME host API
    are included (per the original note, the pip build of sounddevice
    only supports the MME api).
    """
    options = {}
    # enumerate() replaces the manual index counter; the index must track
    # the position in the full device list, not just matching devices.
    for index, item in enumerate(sd.query_devices()):
        if item.get('max_input_channels') > 0 and item.get('hostapi') == MME:
            options[item.get('name')] = index
    return options
| [
"sounddevice.InputStream",
"sounddevice.query_devices"
] | [((756, 774), 'sounddevice.query_devices', 'sd.query_devices', ([], {}), '()\n', (772, 774), True, 'import sounddevice as sd\n'), ((624, 650), 'sounddevice.InputStream', 'sd.InputStream', ([], {'device': 'num'}), '(device=num)\n', (638, 650), True, 'import sounddevice as sd\n')] |
from abc import ABC
import numpy as np
from sklearn.cluster import DBSCAN, KMeans
from typing import Callable, Optional, Union
class BaseClusterer(ABC):
    """Abstract base class for Clusterers."""
    def __call__(self, data: np.ndarray) -> np.ndarray:
        # Subclasses override this with the actual fit/predict call.
        # NOTE(review): not marked @abstractmethod, so the base class can
        # still be instantiated and this silently returns None — confirm
        # whether enforcement was intended.
        pass
class DbscanClusterer(BaseClusterer):
    """DBSCAN clustering technique.

    Thin callable wrapper around :class:`sklearn.cluster.DBSCAN`;
    invoking the instance runs ``fit_predict`` on the given data.

    Reference:
        https://scikit-learn.org/stable/modules/generated/sklearn.cluster.DBSCAN.html
    """
    def __init__(self,
                 eps: float = 0.5,
                 min_samples: int = 5,
                 metric: Union[str, Callable] = "euclidean",
                 metric_params: Optional[dict] = None,
                 algorithm: str = "auto",
                 leaf_size: int = 30,
                 p: Optional[float] = None) -> None:
        # Gather the estimator options first so the constructor call
        # stays compact and easy to scan.
        options = dict(eps=eps,
                       min_samples=min_samples,
                       metric=metric,
                       metric_params=metric_params,
                       algorithm=algorithm,
                       leaf_size=leaf_size,
                       p=p,
                       n_jobs=None)
        self._clusterer = DBSCAN(**options)

    def __call__(self, data: np.ndarray) -> np.ndarray:
        """Cluster ``data`` and return the per-sample integer labels."""
        return self._clusterer.fit_predict(data)
class KmeansClusterer(BaseClusterer):
    """KMeans clustering technique.
    Args:
    Reference:
        https://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html
    """
    def __init__(self,
                 n_clusters: int = 8,
                 init: Union[str, np.ndarray] = "k-means++",
                 n_init: int = 10,
                 max_iter: int = 300,
                 tol: float = 1e-4,
                 precompute_distances: Union[str, bool] = "auto",
                 verbose: int = 0,
                 random_state: Union[int, None] = None,
                 copy_x: bool = True,
                 algorithm: str = "auto") -> None:
        # Defaults mirror sklearn's KMeans; this wrapper only pins n_jobs.
        # NOTE(review): ``precompute_distances`` and ``n_jobs`` were
        # deprecated and later removed from sklearn.cluster.KMeans, so
        # this code likely requires an older scikit-learn — confirm the
        # pinned version before upgrading.
        self._clusterer = KMeans(n_clusters=n_clusters,
                                init=init,
                                n_init=n_init,
                                max_iter=max_iter,
                                tol=tol,
                                precompute_distances=precompute_distances,
                                verbose=verbose,
                                random_state=random_state,
                                copy_x=copy_x,
                                algorithm=algorithm,
                                n_jobs=None)
    def __call__(self, data: np.ndarray) -> np.ndarray:
        # Fit on ``data`` and return the integer cluster label per sample.
        return self._clusterer.fit_predict(data)
| [
"sklearn.cluster.KMeans",
"sklearn.cluster.DBSCAN"
] | [((842, 991), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'eps': 'eps', 'min_samples': 'min_samples', 'metric': 'metric', 'metric_params': 'metric_params', 'algorithm': 'algorithm', 'leaf_size': 'leaf_size', 'p': 'p', 'n_jobs': 'None'}), '(eps=eps, min_samples=min_samples, metric=metric, metric_params=\n metric_params, algorithm=algorithm, leaf_size=leaf_size, p=p, n_jobs=None)\n', (848, 991), False, 'from sklearn.cluster import DBSCAN, KMeans\n'), ((2031, 2258), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': 'n_clusters', 'init': 'init', 'n_init': 'n_init', 'max_iter': 'max_iter', 'tol': 'tol', 'precompute_distances': 'precompute_distances', 'verbose': 'verbose', 'random_state': 'random_state', 'copy_x': 'copy_x', 'algorithm': 'algorithm', 'n_jobs': 'None'}), '(n_clusters=n_clusters, init=init, n_init=n_init, max_iter=max_iter,\n tol=tol, precompute_distances=precompute_distances, verbose=verbose,\n random_state=random_state, copy_x=copy_x, algorithm=algorithm, n_jobs=None)\n', (2037, 2258), False, 'from sklearn.cluster import DBSCAN, KMeans\n')] |
# (C) Copyright 2015 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import opstestfw
def lagHeartbeat(**kwargs):
    """
    Library function to configure heartbeat speed on a LAG

    :param deviceObj: device object
    :type deviceObj: VSwitch device object
    :param lagId: LAG identifier
    :type lagId: int
    :param lacpFastFlag: True for LACP fast heartbeat, false for slow heartbeat
    :type lacpFastFlag: boolean
    :return: returnStruct object
    :rtype: object
    """
    # Params
    lagId = kwargs.get('lagId', None)
    deviceObj = kwargs.get('deviceObj', None)
    lacpFastFlag = kwargs.get('lacpFastFlag', True)

    # Variables
    overallBuffer = []
    finalReturnCode = 0

    def _result(code):
        # The original repeated this flatten-and-wrap pattern five times;
        # it is factored out here. Flattens everything captured so far
        # into one string and wraps it with the code in a returnStruct.
        bufferString = ""
        for curLine in overallBuffer:
            bufferString += str(curLine)
        return opstestfw.returnStruct(returnCode=code, buffer=bufferString)

    # If device, LAG Id or lacpFastFlag are not passed, return an error
    if deviceObj is None or lagId is None or lacpFastFlag is None:
        opstestfw.LogOutput('error',
                            "Need to pass deviceObj and lagId to use "
                            "this routine")
        return opstestfw.returnStruct(returnCode=1)

    # Get into vtyshelll
    returnStructure = deviceObj.VtyshShell(enter=True)
    overallBuffer.append(returnStructure.buffer())
    returnCode = returnStructure.returnCode()
    if returnCode != 0:
        opstestfw.LogOutput('error', "Failed to get vtysh prompt")
        return _result(returnCode)

    # Get into config context
    returnStructure = deviceObj.ConfigVtyShell(enter=True)
    returnCode = returnStructure.returnCode()
    overallBuffer.append(returnStructure.buffer())
    if returnCode != 0:
        opstestfw.LogOutput('error', "Failed to get vtysh config prompt")
        return _result(returnCode)

    # enter LAG configuration context
    command = "interface lag %s" % str(lagId)
    returnDevInt = deviceObj.DeviceInteract(command=command)
    returnCode = returnDevInt['returnCode']
    overallBuffer.append(returnDevInt['buffer'])
    if returnCode != 0:
        # Note: failure here is only logged; the routine continues as in
        # the original implementation.
        opstestfw.LogOutput('error', "Failed to create LAG " + str(lagId)
                            + " on device " + deviceObj.device)
    else:
        opstestfw.LogOutput('debug', "Created LAG " + str(lagId)
                            + " on device " + deviceObj.device)

    # configure LAG heartbeat settings ("no lacp rate fast" selects slow)
    command = ""
    if lacpFastFlag is False:
        command = "no "
    command += "lacp rate fast"
    returnDevInt = deviceObj.DeviceInteract(command=command)
    finalReturnCode = returnDevInt['returnCode']
    overallBuffer.append(returnDevInt['buffer'])
    # Unified log messages; also fixes the original "Configure LACP slow"
    # typo (missing 'd') in the debug message.
    speed = "fast" if lacpFastFlag is True else "slow"
    if finalReturnCode != 0:
        opstestfw.LogOutput('error',
                            "Failed to configure LACP " + speed
                            + " heartbeat on interface lag " + str(lagId)
                            + " on device " + deviceObj.device)
    else:
        opstestfw.LogOutput('debug',
                            "Configured LACP " + speed
                            + " heartbeat on interface lag " + str(lagId)
                            + " on device " + deviceObj.device)

    # exit LAG configuration context
    command = "exit"
    returnDevInt = deviceObj.DeviceInteract(command=command)
    returnCode = returnDevInt['returnCode']
    overallBuffer.append(returnDevInt['buffer'])
    if returnCode != 0:
        opstestfw.LogOutput('error', "Failed to exit LAG " + str(lagId)
                            + " configuration context")
        return _result(returnCode)

    # Get out of config context
    returnStructure = deviceObj.ConfigVtyShell(enter=False)
    returnCode = returnStructure.returnCode()
    overallBuffer.append(returnStructure.buffer())
    if returnCode != 0:
        opstestfw.LogOutput('error',
                            "Failed to get out of vtysh config context")
        return _result(returnCode)

    # Get out of vtyshell
    returnStructure = deviceObj.VtyshShell(enter=False)
    returnCode = returnStructure.returnCode()
    overallBuffer.append(returnStructure.buffer())
    if returnCode != 0:
        opstestfw.LogOutput('error', "Failed to exit vty shell")
        return _result(returnCode)

    # Compile information to return
    return _result(finalReturnCode)
| [
"opstestfw.returnStruct",
"opstestfw.LogOutput"
] | [((6583, 6654), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'finalReturnCode', 'buffer': 'bufferString'}), '(returnCode=finalReturnCode, buffer=bufferString)\n', (6605, 6654), False, 'import opstestfw\n'), ((1446, 1534), 'opstestfw.LogOutput', 'opstestfw.LogOutput', (['"""error"""', '"""Need to pass deviceObj and lagId to use this routine"""'], {}), "('error',\n 'Need to pass deviceObj and lagId to use this routine')\n", (1465, 1534), False, 'import opstestfw\n'), ((1610, 1646), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': '(1)'}), '(returnCode=1)\n', (1632, 1646), False, 'import opstestfw\n'), ((1882, 1940), 'opstestfw.LogOutput', 'opstestfw.LogOutput', (['"""error"""', '"""Failed to get vtysh prompt"""'], {}), "('error', 'Failed to get vtysh prompt')\n", (1901, 1940), False, 'import opstestfw\n'), ((2066, 2132), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'returnCode', 'buffer': 'bufferString'}), '(returnCode=returnCode, buffer=bufferString)\n', (2088, 2132), False, 'import opstestfw\n'), ((2420, 2485), 'opstestfw.LogOutput', 'opstestfw.LogOutput', (['"""error"""', '"""Failed to get vtysh config prompt"""'], {}), "('error', 'Failed to get vtysh config prompt')\n", (2439, 2485), False, 'import opstestfw\n'), ((2611, 2677), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'returnCode', 'buffer': 'bufferString'}), '(returnCode=returnCode, buffer=bufferString)\n', (2633, 2677), False, 'import opstestfw\n'), ((5188, 5254), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'returnCode', 'buffer': 'bufferString'}), '(returnCode=returnCode, buffer=bufferString)\n', (5210, 5254), False, 'import opstestfw\n'), ((5546, 5619), 'opstestfw.LogOutput', 'opstestfw.LogOutput', (['"""error"""', '"""Failed to get out of vtysh config context"""'], {}), "('error', 'Failed to get out of vtysh config context')\n", (5565, 5619), False, 'import 
opstestfw\n'), ((5773, 5839), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'returnCode', 'buffer': 'bufferString'}), '(returnCode=returnCode, buffer=bufferString)\n', (5795, 5839), False, 'import opstestfw\n'), ((6120, 6176), 'opstestfw.LogOutput', 'opstestfw.LogOutput', (['"""error"""', '"""Failed to exit vty shell"""'], {}), "('error', 'Failed to exit vty shell')\n", (6139, 6176), False, 'import opstestfw\n'), ((6302, 6368), 'opstestfw.returnStruct', 'opstestfw.returnStruct', ([], {'returnCode': 'returnCode', 'buffer': 'bufferString'}), '(returnCode=returnCode, buffer=bufferString)\n', (6324, 6368), False, 'import opstestfw\n')] |
import numpy as np
def sigmoid(t):
    """Logistic sigmoid 1 / (1 + e^-t), applied element-wise via numpy."""
    exp_neg = np.exp(-t)
    return 1 / (1 + exp_neg)
def sigmoid_derivative(p):
    """Derivative of the sigmoid, expressed in terms of its output ``p``."""
    return (1 - p) * p
class NeuralNetwork:
    """Small fully-connected feed-forward network trained with backprop.

    Architecture (from the weight construction below): input -> numNodes,
    (numLayers - 1) hidden-to-hidden layers of numNodes each, then a
    single output unit. A bias column of 1s is appended to the inputs.

    NOTE(review): quirks preserved as-is — mutable default arguments,
    training runs int(maxIter / 90) epochs rather than maxIter, and
    ``predict`` expects a single sample without the bias term.
    """
    #Do not change this function header
    def __init__(self,x=[[]],y=[],numLayers=2,numNodes=2,eta=0.001,maxIter=10000):
        # Append a constant bias column (1s) to every input row.
        self.data = np.append(x,np.ones([len(x),1]),1)
        self.labels = np.array(y)
        self.nLayers = numLayers
        self.nNodes = numNodes
        self.eta = eta  # learning rate
        self.maxIt = maxIter
        self.weights = list()
        self.outputs = list()
        # Input layer weights: (features + bias) x numNodes.
        self.weights.append(np.random.rand(len(x[0])+1,self.nNodes))
        for index in range(self.nLayers-1):
            # Hidden layers: (numNodes + bias) x numNodes.
            self.weights.append(np.random.rand(self.nNodes+1,self.nNodes))
        # Output layer: (numNodes + bias) x 1.
        self.weights.append(np.random.rand(self.nNodes+1,1))
        # NOTE(review): trains for maxIter/90 epochs, not maxIter — the
        # divisor looks like an empirical tuning constant; confirm.
        for index in range(int(self.maxIt/90)):
            self.train(self.data)
    def train(self,x=[[]]):
        # One epoch of per-sample (stochastic) forward/backward passes.
        # NOTE(review): iterates self.data/self.labels, not the ``x``
        # argument beyond its length — confirm intent.
        for index in range(len(x)):
            self.feedforward(self.data[index])
            self.backprop(self.data[index],self.labels[index])
    def predict(self,x=[]):
        # Append the bias term, run a forward pass and return the scalar
        # activation of the single output unit.
        self.feedforward(np.append(x,1))
        return self.outputs.pop()[0]
    def feedforward(self,point):
        # Recompute all layer activations for one (bias-augmented) sample.
        # Each hidden activation gets a bias 1 appended; the final output
        # does not.
        self.outputs = list()
        self.outputs.append(np.append(sigmoid(np.dot(point,self.weights[0])),1))
        for index in range(1,len(self.weights)-1):
            self.outputs.append(np.append(sigmoid(np.dot(self.outputs[index-1],self.weights[index])),1))
        self.outputs.append(sigmoid(np.dot(self.outputs[len(self.outputs)-1],self.weights[len(self.weights)-1])))
    def backprop(self, point, lable):
        # Standard backprop: walk the layers backwards accumulating
        # sensitivities (deltas), then apply the weight updates.
        sensitivity=[]
        copyOutputs=self.outputs.copy()
        output=np.array(copyOutputs.pop())
        # Output-layer delta: (target - output) * sigma'(output).
        sensitivity.append((lable-output)*sigmoid_derivative(output))
        while len(copyOutputs)!=0:
            # Propagate delta through the layer weights; [:-1] drops the
            # bias component, which has no incoming weights to update.
            sensitivity.append(np.multiply(np.dot(sensitivity[len(sensitivity)-1],self.weights[len(copyOutputs)].T),sigmoid_derivative(copyOutputs.pop()))[:-1])
        sensitivity.reverse()
        changeWeight=[]
        # Output-layer update (built transposed to match weight shape).
        changeWeight.append(np.array([np.multiply(np.multiply(self.outputs[len(sensitivity)-2],sensitivity[len(sensitivity)-1]),self.eta)]).T)
        for index in range(len(sensitivity)-2,0,-1):
            changeWeight.append(np.multiply(np.outer(self.outputs[index-1],sensitivity[index]),self.eta))
        # Input-layer update uses the raw (bias-augmented) sample.
        changeWeight.append(np.multiply(np.outer(point,sensitivity[0]),self.eta))
        # print(self.weights)
        # changeWeight was built back-to-front, hence the reversed index.
        for index in range(len(self.weights)):
            self.weights[index]+=(changeWeight[len(changeWeight)-index-1])
        # print(self.weights)
| [
"numpy.random.rand",
"numpy.exp",
"numpy.array",
"numpy.append",
"numpy.outer",
"numpy.dot"
] | [((342, 353), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (350, 353), True, 'import numpy as np\n'), ((56, 66), 'numpy.exp', 'np.exp', (['(-t)'], {}), '(-t)\n', (62, 66), True, 'import numpy as np\n'), ((746, 780), 'numpy.random.rand', 'np.random.rand', (['(self.nNodes + 1)', '(1)'], {}), '(self.nNodes + 1, 1)\n', (760, 780), True, 'import numpy as np\n'), ((1092, 1107), 'numpy.append', 'np.append', (['x', '(1)'], {}), '(x, 1)\n', (1101, 1107), True, 'import numpy as np\n'), ((675, 719), 'numpy.random.rand', 'np.random.rand', (['(self.nNodes + 1)', 'self.nNodes'], {}), '(self.nNodes + 1, self.nNodes)\n', (689, 719), True, 'import numpy as np\n'), ((2367, 2398), 'numpy.outer', 'np.outer', (['point', 'sensitivity[0]'], {}), '(point, sensitivity[0])\n', (2375, 2398), True, 'import numpy as np\n'), ((1255, 1285), 'numpy.dot', 'np.dot', (['point', 'self.weights[0]'], {}), '(point, self.weights[0])\n', (1261, 1285), True, 'import numpy as np\n'), ((2265, 2318), 'numpy.outer', 'np.outer', (['self.outputs[index - 1]', 'sensitivity[index]'], {}), '(self.outputs[index - 1], sensitivity[index])\n', (2273, 2318), True, 'import numpy as np\n'), ((1391, 1443), 'numpy.dot', 'np.dot', (['self.outputs[index - 1]', 'self.weights[index]'], {}), '(self.outputs[index - 1], self.weights[index])\n', (1397, 1443), True, 'import numpy as np\n')] |
import argparse
import os
import cv2
import numpy as np
import torch
from torch import nn
from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200
from deepface.backbones.mobilefacenet import get_mbf
from deepface.commons import functions
import gdown
# Direct OneDrive download links for pretrained ArcFace backbone
# checkpoints, keyed by '<training-set>_<architecture>'.
# NOTE(review): the 'glint360_r34' entry ends in a literal '<KEY>'
# placeholder — the real URL appears to have been redacted/truncated;
# confirm before attempting to download that model.
url={
'ms1mv3_r50':'https://eb9uqq.dm.files.1drv.com/y4mo1LyxVkMS7RwyNFyD7Oj_LrukPmnMwHsL9rjh0By0Pbgglx-f55KwzpQ7rMhHYsgqz8WXcFOFpNKcgwBwPpmd2UjEOc2JwcdRAitVfngManBko6wU-y2HTwGi--_4R9_TmfTqO4yGQEIhR9-d4LOcisKC8YzL4bth1b4tSJ8nloIIq7xGizPX3jWiYfFHzirG5-VgJ3guFBVZKE7pupRsw',
'ms1mv3_r18':'https://eb9uqq.dm.files.1drv.com/y4mpJ0NiyBPDzo_aQlh9QHwL52UljHSI60KSPv0-p2oTb4qnoUA5Cu3Ul-Tfxc8l7uyg9BYE_hoItNc9JjqYRW-qmIIM0JeMqKGjyl5sZQvwPZUxazPW8THT9CrWpwzaKkrBXFDc_uEDGAvDpaB1lhrc83aG5lBOeuI6LbtMLBHyR7TA2YdPxcIvPGnsbqjvWl1rXQFG4zD2_TxL_m4avN43Q',
'ms1mv3_r34': 'https://eb9uqq.dm.files.1drv.com/y4mU3JhshWSlooEzKRYnCPrOb1-xpZqS_Z90rOXm8D6KOL-PpOhvlsDYAgiTWkGG8TYqC2kdgr4I66XBkhEtqhptKTRFY90gnLTesR9Sw0xNGb46_ULn6IcfRMTW18uKJS2pwGpwabu7SpL3Z1EsX-gcd74M26gMJ11svjthg15CzpGQhVASMZMMfSvlUGhyP5HPFxOQi3X0cpAUMm8P9Yn8Q',
'ms1mv3_r100':'https://eb9uqq.dm.files.1drv.com/y4mNdH0KjE7_R3tIT1h86Ov1XshRRgT1BUBeVIrUgRasS5x93UeCpP023bspth03rUtIg1raK3EtRqMtrGf_DvA0pIf2RgB7FsHsBaNoJYF1JqUl7Q8qsTpYGxOaq7-ow0Hiejjz5JRU9nWOJSniOlM2STvDKZH-Zs6pHiyLEfLhikQkm8xC2SYkcas-xedihqRJCVmzTI4LfBqtFbX1nxU-Q',
'glint360_r18':'https://eb9uqq.dm.files.1drv.com/y4mn1hArpddPJw-OM6IzTll6TpxZaSVjs6HyzeYC2m-tg-v9qqBjoI37Lr20K-RNFr-9_AlbnguKxxzrC4lqSykaUNWaJhya12ZdOIIwS1h2kPGSjGJkCEyEca9YkV5Mkesiee8nHibkeLvY5uSoe5PSLtm_umgqd6l3f4-RSnP4ecGrtYM3-Jt49YgKPwDcb5hNyXVBixUqVhTmyOiw9pM3g',
'glint360_r34': 'https://eb9uqq.dm.files.1drv.com/y4mDEvblVeT<KEY>',
'glint360_r50': 'https://eb9uqq.dm.files.1drv.com/y4m7HMGc6qBhL2PwUcsjx4z-Pm57HD2Uze1oa27yGL4BXt4Ech3sIbi59XUpBJMv6kxAAxJP00W_lWyN8T8Dm2rZ8eLQVxMiNoskpN0JZOfjTeiovnhNwBsOc3RN2Y91xNqzyMPs-5GQ4qKdZ_LNlulu8wckJcWvTIFSupsLkmtnym8PnL5u7XTERhXBTgL5nwoutQg6Yvb8Ixr_5VY1m2LaQ',
'glint360_r100': 'https://eb9uqq.dm.files.1drv.com/y4m6MECUN2ituEEi6oi8ksrTVHaNKfu21zaqpVA750ynYQqsP-RSDbGFX_MyK-OdWOnFp9NZuFTU711TVGAUMbttVWclSzruJRQUEp7-D8fZLMUBPc43lXSAkReo6WCfWaHIFZltEsfO3WomoCyePTRlEgShXYxVpSnu_VDuD8_MC7WcRmBJGznahexUgSQE0NcVJDvYkq2MW1eaeEQ0T4d6Q'
}
def getmodel(name, **kwargs):
    """Instantiate a face-recognition backbone by its short name.

    ``name`` is one of r18/r34/r50/r100/r200/r2060/mbf; extra keyword
    arguments are forwarded to the backbone constructor. Raises
    ValueError for an unknown name.
    """
    # The plain iresnet variants share one constructor signature, so a
    # dispatch table replaces the original if/elif ladder.
    resnets = {
        "r18": iresnet18,
        "r34": iresnet34,
        "r50": iresnet50,
        "r100": iresnet100,
        "r200": iresnet200,
    }
    if name in resnets:
        return resnets[name](False, **kwargs)
    if name == "r2060":
        # Imported lazily, as in the original, to avoid the cost unless used.
        from deepface.backbones.iresnet2060 import iresnet2060
        return iresnet2060(False, **kwargs)
    if name == "mbf":
        fp16 = kwargs.get("fp16", False)
        num_features = kwargs.get("num_features", 512)
        return get_mbf(fp16=fp16, num_features=num_features)
    raise ValueError()
class Model_ArcFace(nn.Module):
    """Wraps a pretrained ArcFace backbone and exposes embedding helpers."""
    def __init__(self,name,weight):
        # ``name`` selects the backbone architecture (see getmodel);
        # ``weight`` is a path to a state_dict checkpoint, loaded on CPU.
        super().__init__()
        self.model= getmodel(name, fp16=False)
        self.model.load_state_dict(torch.load(weight, map_location=torch.device("cpu") ))
        self.model.eval()
    @torch.no_grad()
    def predict(self,image):
        # Batched embedding: ``image`` is assumed to be a numpy batch in
        # NHWC layout with values in [0, 255] — TODO confirm with callers.
        self.img=image
        self.img = np.transpose(self.img, (0,3, 1, 2))
        self.img = torch.from_numpy(self.img).float()
        # Normalize [0, 255] -> [-1, 1].
        self.img.div_(255).sub_(0.5).div_(0.5)
        print(self.img.shape)
        feat = self.model(self.img)
        feat=feat.numpy()
        return feat
    @torch.no_grad()
    def predict1(self,image):
        # Single-image embedding from a file path; falls back to a random
        # 112x112 image when no path is given (smoke-test convenience).
        self.img=image
        if self.img is None:
            self.img = np.random.randint(0, 255, size=(112, 112, 3), dtype=np.uint8)
        else:
            self.img = cv2.imread(self.img)
            self.img = cv2.resize(self.img, (112, 112))
        self.img = cv2.cvtColor(self.img, cv2.COLOR_BGR2RGB)
        self.img = np.transpose(self.img, (2, 0, 1))
        self.img = torch.from_numpy(self.img).unsqueeze(0).float()
        self.img.div_(255).sub_(0.5).div_(0.5)
        feat = self.model(self.img)
        feat=feat.numpy()
        # print(feat.shape)
        return feat
def loadModel_ms1mv3_r50(url = 'https://eb9uqq.dm.files.1drv.com/y4mo1LyxVkMS7RwyNFyD7Oj_LrukPmnMwHsL9rjh0By0Pbgglx-f55KwzpQ7rMhHYsgqz8WXcFOFpNKcgwBwPpmd2UjEOc2JwcdRAitVfngManBko6wU-y2HTwGi--_4R9_TmfTqO4yGQEIhR9-d4LOcisKC8YzL4bth1b4tSJ8nloIIq7xGizPX3jWiYfFHzirG5-VgJ3guFBVZKE7pupRsw'):
    """Download (if missing) the ms1mv3 r50 ArcFace weights and build the model.

    :param url: direct download link for the backbone checkpoint
    :return: Model_ArcFace instance wrapping an iresnet50 backbone
    """
    home = functions.get_deepface_home()
    file_name = "backbone.pth"
    weights_dir = home+'/.deepface/weights/ms1mv3_arcface_r50/'
    output = weights_dir+file_name
    # Bug fix: the original skipped the download whenever the weights
    # directory already existed even if the weight file itself was
    # missing, and os.mkdir failed when parent directories were absent.
    if not os.path.exists(output):
        os.makedirs(weights_dir, exist_ok=True)
        print(file_name," will be downloaded to ",output)
        gdown.download(url, output, quiet=False)
    model=Model_ArcFace('r50',output)
    return model
def loadModel(name):
    """Download (if missing) the named ArcFace checkpoint and build the model.

    :param name: key into the module-level ``url`` dict, e.g. 'glint360_r100';
                 the architecture is taken from the suffix after the last '_'.
    :return: Model_ArcFace instance
    """
    home = functions.get_deepface_home()
    file_name = "backbone.pth"
    weights_dir = home + '/.deepface/weights/'+name+"/"
    output = weights_dir+file_name
    # Bug fix: os.mkdir raised FileExistsError when the directory already
    # existed but the weight file was missing, and failed when parent
    # directories were absent; makedirs(exist_ok=True) handles both.
    if not os.path.exists(output):
        os.makedirs(weights_dir, exist_ok=True)
        print(file_name," will be downloaded to ",output)
        gdown.download(url[name], output, quiet=False)
    name_model=name.split("_")[-1]
    model= Model_ArcFace(name_model,output)
    return model
if __name__ == "__main__":
    # Smoke test: load the ms1mv3 r50 backbone and embed one face image.
    # (Fixes the last line, which was fused with dataset-extraction
    # residue and broke the file's syntax.)
    parser = argparse.ArgumentParser(description='PyTorch ArcFace Training')
    parser.add_argument('--model_name', type=str, default='glint360_r100', help='backbone network')
    parser.add_argument('--img', type=str, default='/home/quang/Documents/FACE/deepface/tests/dataset/img1.jpg')
    args = parser.parse_args()
    model_name = args.model_name
    path_img = args.img
    model = loadModel_ms1mv3_r50()
    first_parameter = next(model.parameters())
    input_shape = first_parameter.size()
    input_shape = (112, 112)
    # input_shape = model.layers[0].input_shape
    print(input_shape)
    img1 = functions.preprocess_face(path_img, input_shape)
    feat = model.predict(img1)
    print(feat.shape)
"deepface.backbones.iresnet.iresnet34",
"deepface.backbones.iresnet2060.iresnet2060",
"deepface.backbones.mobilefacenet.get_mbf",
"deepface.backbones.iresnet.iresnet18",
"torch.from_numpy",
"os.path.exists",
"argparse.ArgumentParser",
"deepface.backbones.iresnet.iresnet50",
"os.mkdir",
"gdown.down... | [((3328, 3343), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3341, 3343), False, 'import torch\n'), ((3678, 3693), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3691, 3693), False, 'import torch\n'), ((4617, 4646), 'deepface.commons.functions.get_deepface_home', 'functions.get_deepface_home', ([], {}), '()\n', (4644, 4646), False, 'from deepface.commons import functions\n'), ((5120, 5149), 'deepface.commons.functions.get_deepface_home', 'functions.get_deepface_home', ([], {}), '()\n', (5147, 5149), False, 'from deepface.commons import functions\n'), ((5591, 5654), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch ArcFace Training"""'}), "(description='PyTorch ArcFace Training')\n", (5614, 5654), False, 'import argparse\n'), ((6181, 6229), 'deepface.commons.functions.preprocess_face', 'functions.preprocess_face', (['path_img', 'input_shape'], {}), '(path_img, input_shape)\n', (6206, 6229), False, 'from deepface.commons import functions\n'), ((2367, 2393), 'deepface.backbones.iresnet.iresnet18', 'iresnet18', (['(False)'], {}), '(False, **kwargs)\n', (2376, 2393), False, 'from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200\n'), ((3415, 3451), 'numpy.transpose', 'np.transpose', (['self.img', '(0, 3, 1, 2)'], {}), '(self.img, (0, 3, 1, 2))\n', (3427, 3451), True, 'import numpy as np\n'), ((3998, 4039), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2RGB'], {}), '(self.img, cv2.COLOR_BGR2RGB)\n', (4010, 4039), False, 'import cv2\n'), ((4059, 4092), 'numpy.transpose', 'np.transpose', (['self.img', '(2, 0, 1)'], {}), '(self.img, (2, 0, 1))\n', (4071, 4092), True, 'import numpy as np\n'), ((4869, 4926), 'os.mkdir', 'os.mkdir', (["(home + '/.deepface/weights/ms1mv3_arcface_r50/')"], {}), "(home + '/.deepface/weights/ms1mv3_arcface_r50/')\n", (4877, 4926), False, 'import os\n'), ((4991, 5031), 'gdown.download', 'gdown.download', (['url', 'output'], 
{'quiet': '(False)'}), '(url, output, quiet=False)\n', (5005, 5031), False, 'import gdown\n'), ((5248, 5270), 'os.path.exists', 'os.path.exists', (['output'], {}), '(output)\n', (5262, 5270), False, 'import os\n'), ((5288, 5339), 'os.mkdir', 'os.mkdir', (["(home + '/.deepface/weights/' + name + '/')"], {}), "(home + '/.deepface/weights/' + name + '/')\n", (5296, 5339), False, 'import os\n'), ((5401, 5447), 'gdown.download', 'gdown.download', (['url[name]', 'output'], {'quiet': '(False)'}), '(url[name], output, quiet=False)\n', (5415, 5447), False, 'import gdown\n'), ((2438, 2464), 'deepface.backbones.iresnet.iresnet34', 'iresnet34', (['(False)'], {}), '(False, **kwargs)\n', (2447, 2464), False, 'from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200\n'), ((3803, 3864), 'numpy.random.randint', 'np.random.randint', (['(0)', '(255)'], {'size': '(112, 112, 3)', 'dtype': 'np.uint8'}), '(0, 255, size=(112, 112, 3), dtype=np.uint8)\n', (3820, 3864), True, 'import numpy as np\n'), ((3902, 3922), 'cv2.imread', 'cv2.imread', (['self.img'], {}), '(self.img)\n', (3912, 3922), False, 'import cv2\n'), ((3946, 3978), 'cv2.resize', 'cv2.resize', (['self.img', '(112, 112)'], {}), '(self.img, (112, 112))\n', (3956, 3978), False, 'import cv2\n'), ((4754, 4776), 'os.path.exists', 'os.path.exists', (['output'], {}), '(output)\n', (4768, 4776), False, 'import os\n'), ((4790, 4853), 'os.path.exists', 'os.path.exists', (["(home + '/.deepface/weights/ms1mv3_arcface_r50/')"], {}), "(home + '/.deepface/weights/ms1mv3_arcface_r50/')\n", (4804, 4853), False, 'import os\n'), ((2509, 2535), 'deepface.backbones.iresnet.iresnet50', 'iresnet50', (['(False)'], {}), '(False, **kwargs)\n', (2518, 2535), False, 'from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200\n'), ((3470, 3496), 'torch.from_numpy', 'torch.from_numpy', (['self.img'], {}), '(self.img)\n', (3486, 3496), False, 'import torch\n'), ((2581, 2608), 
'deepface.backbones.iresnet.iresnet100', 'iresnet100', (['(False)'], {}), '(False, **kwargs)\n', (2591, 2608), False, 'from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200\n'), ((3274, 3293), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3286, 3293), False, 'import torch\n'), ((2654, 2681), 'deepface.backbones.iresnet.iresnet200', 'iresnet200', (['(False)'], {}), '(False, **kwargs)\n', (2664, 2681), False, 'from deepface.backbones.iresnet import iresnet18, iresnet34, iresnet50, iresnet100, iresnet200\n'), ((4112, 4138), 'torch.from_numpy', 'torch.from_numpy', (['self.img'], {}), '(self.img)\n', (4128, 4138), False, 'import torch\n'), ((2791, 2819), 'deepface.backbones.iresnet2060.iresnet2060', 'iresnet2060', (['(False)'], {}), '(False, **kwargs)\n', (2802, 2819), False, 'from deepface.backbones.iresnet2060 import iresnet2060\n'), ((2960, 3005), 'deepface.backbones.mobilefacenet.get_mbf', 'get_mbf', ([], {'fp16': 'fp16', 'num_features': 'num_features'}), '(fp16=fp16, num_features=num_features)\n', (2967, 3005), False, 'from deepface.backbones.mobilefacenet import get_mbf\n')] |
from plugin.core.environment import Environment
from ConfigParser import NoOptionError, NoSectionError, ParsingError, SafeConfigParser
import logging
import os
# Module-level logger for configuration-parsing diagnostics.
log = logging.getLogger(__name__)

# Base names (without the ".ini" suffix) of configuration files that the
# ConfigurationMeta metaclass exposes as attributes.
CONFIGURATION_FILES = [
    'advanced'
]
class ConfigurationFile(object):
    """A lazily-parsed INI configuration file.

    Parsing happens on first section access; a parse failure is recorded
    so it is not retried on every access.
    """

    def __init__(self, path):
        self._path = path
        self._relpath = os.path.relpath(self._path, Environment.path.plugin_support)

        self._parser = None
        self._error = False

    def __getitem__(self, section):
        # Parse on first access, then hand back a view of the section.
        self.load()
        return ConfigurationSection(self._parser, section)

    def load(self):
        """Parse the file once; repeat calls and past failures are no-ops."""
        if self._parser or self._error:
            return

        log.debug('Parsing configuration file: %r', self._relpath)

        try:
            parser = SafeConfigParser()
            parser.read(self._path)
        except ParsingError as ex:
            log.info(ex.message)
            self._parser = None
            self._error = True
        except Exception as ex:
            log.warn('Unable to parse configuration file: %r - %s', self._relpath, ex, exc_info=True)
            self._parser = None
            self._error = True
        else:
            self._parser = parser
class ConfigurationSection(object):
    """Typed accessors for one section of a parsed configuration file."""

    def __init__(self, parser, name):
        self._parser = parser
        self._name = name

    def _get(self, func, key, default=None):
        # Fall back to ``default`` when there is no parser, the option is
        # absent, or the underlying getter reports a missing section/option.
        parser = self._parser
        if not parser or not parser.has_option(self._name, key):
            return default

        try:
            getter = getattr(parser, func)
            return getter(self._name, key)
        except (NoSectionError, NoOptionError):
            return default

    def get(self, key, default=None):
        return self._get('get', key, default)

    def get_int(self, key, default=None):
        return self._get('getint', key, default)

    def get_float(self, key, default=None):
        return self._get('getfloat', key, default)

    def get_boolean(self, key, default=None):
        return self._get('getboolean', key, default)

    def __getitem__(self, key):
        return self._parser.get(self._name, key) if self._parser else None

    def __setitem__(self, key, value):
        # Silently ignored when the file never parsed successfully.
        if self._parser:
            self._parser.set(self._name, key, value)
class ConfigurationMeta(type):
    """Metaclass that attaches one ConfigurationFile attribute per entry
    in CONFIGURATION_FILES (e.g. ``Configuration.advanced``)."""

    def __new__(cls, name, parents, dct):
        # Load configuration files
        for config_name in CONFIGURATION_FILES:
            # Build path
            path = os.path.join(Environment.path.plugin_data, '%s.ini' % config_name)

            # Parse configuration file (lazily; read happens on first access)
            dct[config_name] = ConfigurationFile(path)

        # Construct object
        # Bug fix: the loop previously reused ``name`` as its variable,
        # clobbering the class name passed to type.__new__ — the created
        # class would have been named after the last configuration file.
        return super(ConfigurationMeta, cls).__new__(cls, name, parents, dct)
class Configuration(object):
    """Plugin configuration facade; file attributes are populated lazily."""
    # Python 2 metaclass hook: ConfigurationMeta replaces each attribute
    # named in CONFIGURATION_FILES with a ConfigurationFile instance.
    __metaclass__ = ConfigurationMeta

    # Placeholder overwritten by the metaclass with the 'advanced.ini' file.
    advanced = None
| [
"logging.getLogger",
"ConfigParser.SafeConfigParser",
"os.path.join",
"os.path.relpath"
] | [((168, 195), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (185, 195), False, 'import logging\n'), ((353, 413), 'os.path.relpath', 'os.path.relpath', (['self._path', 'Environment.path.plugin_support'], {}), '(self._path, Environment.path.plugin_support)\n', (368, 413), False, 'import os\n'), ((837, 855), 'ConfigParser.SafeConfigParser', 'SafeConfigParser', ([], {}), '()\n', (853, 855), False, 'from ConfigParser import NoOptionError, NoSectionError, ParsingError, SafeConfigParser\n'), ((2542, 2601), 'os.path.join', 'os.path.join', (['Environment.path.plugin_data', "('%s.ini' % name)"], {}), "(Environment.path.plugin_data, '%s.ini' % name)\n", (2554, 2601), False, 'import os\n')] |
#!/usr/bin/env python3
"""
Lists the effective alarms.
"""
import click
from jaws_libp.clients import EffectiveAlarmConsumer
# pylint: disable=missing-function-docstring,no-value-for-parameter
@click.command()
@click.option('--monitor', is_flag=True, help="Monitor indefinitely")
@click.option('--nometa', is_flag=True, help="Exclude audit headers and timestamp")
@click.option('--export', is_flag=True, help="Dump records in AVRO JSON format")
def list_effective_alarms(monitor, nometa, export):
    """Consume and print the effective alarms, honoring the CLI flags."""
    consumer = EffectiveAlarmConsumer('list_effective_alarms.py')
    consumer.consume_then_done(monitor, nometa, export)
def click_main() -> None:
    """Console-script entry point; simply invokes the click command."""
    list_effective_alarms()


if __name__ == "__main__":
    click_main()
| [
"click.option",
"jaws_libp.clients.EffectiveAlarmConsumer",
"click.command"
] | [((202, 217), 'click.command', 'click.command', ([], {}), '()\n', (215, 217), False, 'import click\n'), ((219, 287), 'click.option', 'click.option', (['"""--monitor"""'], {'is_flag': '(True)', 'help': '"""Monitor indefinitely"""'}), "('--monitor', is_flag=True, help='Monitor indefinitely')\n", (231, 287), False, 'import click\n'), ((289, 376), 'click.option', 'click.option', (['"""--nometa"""'], {'is_flag': '(True)', 'help': '"""Exclude audit headers and timestamp"""'}), "('--nometa', is_flag=True, help=\n 'Exclude audit headers and timestamp')\n", (301, 376), False, 'import click\n'), ((373, 452), 'click.option', 'click.option', (['"""--export"""'], {'is_flag': '(True)', 'help': '"""Dump records in AVRO JSON format"""'}), "('--export', is_flag=True, help='Dump records in AVRO JSON format')\n", (385, 452), False, 'import click\n'), ((520, 570), 'jaws_libp.clients.EffectiveAlarmConsumer', 'EffectiveAlarmConsumer', (['"""list_effective_alarms.py"""'], {}), "('list_effective_alarms.py')\n", (542, 570), False, 'from jaws_libp.clients import EffectiveAlarmConsumer\n')] |
import pytest
import xarray as xr
from datatree.datatree import DataTree
from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree
from datatree.testing import assert_equal
from datatree.treenode import TreeNode
from .test_datatree import create_test_datatree
empty = xr.Dataset()
class TestCheckTreesIsomorphic:
    """Tests for ``check_isomorphic``: two trees are isomorphic when their
    node structure matches; node names only matter with
    ``require_names_equal=True``."""
    def test_not_a_tree(self):
        # Non-tree inputs are rejected outright.
        with pytest.raises(TypeError, match="not a tree"):
            check_isomorphic("s", 1)
    def test_different_widths(self):
        dt1 = DataTree.from_dict(data_objects={"a": empty})
        dt2 = DataTree.from_dict(data_objects={"b": empty, "c": empty})
        # Error message reports the differing child counts at the root.
        expected_err_str = (
            "Number of children on node 'root' of the left object: 1\n"
            "Number of children on node 'root' of the right object: 2"
        )
        with pytest.raises(TreeIsomorphismError, match=expected_err_str):
            check_isomorphic(dt1, dt2)
    def test_different_heights(self):
        dt1 = DataTree.from_dict(data_objects={"a": empty})
        dt2 = DataTree.from_dict(data_objects={"b": empty, "b/c": empty})
        expected_err_str = (
            "Number of children on node 'root/a' of the left object: 0\n"
            "Number of children on node 'root/b' of the right object: 1"
        )
        with pytest.raises(TreeIsomorphismError, match=expected_err_str):
            check_isomorphic(dt1, dt2)
    def test_names_different(self):
        dt1 = DataTree.from_dict(data_objects={"a": xr.Dataset()})
        dt2 = DataTree.from_dict(data_objects={"b": empty})
        expected_err_str = (
            "Node 'root/a' in the left object has name 'a'\n"
            "Node 'root/b' in the right object has name 'b'"
        )
        # Name mismatch only raises when names are required to be equal.
        with pytest.raises(TreeIsomorphismError, match=expected_err_str):
            check_isomorphic(dt1, dt2, require_names_equal=True)
    def test_isomorphic_names_equal(self):
        dt1 = DataTree.from_dict(
            data_objects={"a": empty, "b": empty, "b/c": empty, "b/d": empty}
        )
        dt2 = DataTree.from_dict(
            data_objects={"a": empty, "b": empty, "b/c": empty, "b/d": empty}
        )
        check_isomorphic(dt1, dt2, require_names_equal=True)
    def test_isomorphic_ordering(self):
        # Sibling order must not matter for isomorphism.
        dt1 = DataTree.from_dict(
            data_objects={"a": empty, "b": empty, "b/d": empty, "b/c": empty}
        )
        dt2 = DataTree.from_dict(
            data_objects={"a": empty, "b": empty, "b/c": empty, "b/d": empty}
        )
        check_isomorphic(dt1, dt2, require_names_equal=False)
    def test_isomorphic_names_not_equal(self):
        # Same structure, different names: fine when names are not required.
        dt1 = DataTree.from_dict(
            data_objects={"a": empty, "b": empty, "b/c": empty, "b/d": empty}
        )
        dt2 = DataTree.from_dict(
            data_objects={"A": empty, "B": empty, "B/C": empty, "B/D": empty}
        )
        check_isomorphic(dt1, dt2)
    def test_not_isomorphic_complex_tree(self):
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        # Add an extra child under set1/set2 in dt2 only, breaking isomorphism.
        dt2.set_node("set1/set2", TreeNode("set3"))
        with pytest.raises(TreeIsomorphismError, match="root/set1/set2"):
            check_isomorphic(dt1, dt2)
    def test_checking_from_root(self):
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        # Re-root dt1 under a new parent so the trees differ only above
        # the compared nodes; check_from_root=True must still catch it.
        dt1.parent = DataTree(name="real_root")
        with pytest.raises(TreeIsomorphismError):
            check_isomorphic(dt1, dt2, check_from_root=True)
class TestMapOverSubTree:
    """Tests for the ``map_over_subtree`` decorator: a Dataset->Dataset
    function is lifted to operate node-wise over one or more DataTrees."""
    def test_no_trees_passed(self):
        @map_over_subtree
        def times_ten(ds):
            return 10.0 * ds
        # At least one argument must be a tree.
        with pytest.raises(TypeError, match="Must pass at least one tree"):
            times_ten("dt")
    def test_not_isomorphic(self):
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        dt2["set4"] = None
        @map_over_subtree
        def times_ten(ds1, ds2):
            return ds1 * ds2
        # Multi-tree mapping requires isomorphic trees.
        with pytest.raises(TreeIsomorphismError):
            times_ten(dt1, dt2)
    def test_no_trees_returned(self):
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        @map_over_subtree
        def bad_func(ds1, ds2):
            return None
        with pytest.raises(TypeError, match="return value of None"):
            bad_func(dt1, dt2)
    def test_single_dt_arg(self):
        dt = create_test_datatree()
        @map_over_subtree
        def times_ten(ds):
            return 10.0 * ds
        expected = create_test_datatree(modify=lambda ds: 10.0 * ds)
        result_tree = times_ten(dt)
        assert_equal(result_tree, expected)
    def test_single_dt_arg_plus_args_and_kwargs(self):
        # Extra positional and keyword args are forwarded to every node call.
        dt = create_test_datatree()
        @map_over_subtree
        def multiply_then_add(ds, times, add=0.0):
            return (times * ds) + add
        expected = create_test_datatree(modify=lambda ds: (10.0 * ds) + 2.0)
        result_tree = multiply_then_add(dt, 10.0, add=2.0)
        assert_equal(result_tree, expected)
    def test_multiple_dt_args(self):
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        @map_over_subtree
        def add(ds1, ds2):
            return ds1 + ds2
        expected = create_test_datatree(modify=lambda ds: 2.0 * ds)
        result = add(dt1, dt2)
        assert_equal(result, expected)
    def test_dt_as_kwarg(self):
        # A tree passed as a keyword argument is mapped over too.
        dt1 = create_test_datatree()
        dt2 = create_test_datatree()
        @map_over_subtree
        def add(ds1, value=0.0):
            return ds1 + value
        expected = create_test_datatree(modify=lambda ds: 2.0 * ds)
        result = add(dt1, value=dt2)
        assert_equal(result, expected)
    def test_return_multiple_dts(self):
        # A tuple of Datasets per node yields a tuple of trees.
        dt = create_test_datatree()
        @map_over_subtree
        def minmax(ds):
            return ds.min(), ds.max()
        dt_min, dt_max = minmax(dt)
        expected_min = create_test_datatree(modify=lambda ds: ds.min())
        assert_equal(dt_min, expected_min)
        expected_max = create_test_datatree(modify=lambda ds: ds.max())
        assert_equal(dt_max, expected_max)
    def test_return_wrong_type(self):
        dt1 = create_test_datatree()
        @map_over_subtree
        def bad_func(ds1):
            return "string"
        with pytest.raises(TypeError, match="not Dataset or DataArray"):
            bad_func(dt1)
    def test_return_tuple_of_wrong_types(self):
        dt1 = create_test_datatree()
        @map_over_subtree
        def bad_func(ds1):
            return xr.Dataset(), "string"
        with pytest.raises(TypeError, match="not Dataset or DataArray"):
            bad_func(dt1)
    @pytest.mark.xfail
    def test_return_inconsistent_number_of_results(self):
        dt1 = create_test_datatree()
        @map_over_subtree
        def bad_func(ds):
            # Datasets in create_test_datatree() have different numbers of dims
            # TODO need to instead return different numbers of Dataset objects for this test to catch the intended error
            return tuple(ds.dims)
        with pytest.raises(TypeError, match="instead returns"):
            bad_func(dt1)
    def test_wrong_number_of_arguments_for_func(self):
        dt = create_test_datatree()
        @map_over_subtree
        def times_ten(ds):
            return 10.0 * ds
        with pytest.raises(
            TypeError, match="takes 1 positional argument but 2 were given"
        ):
            times_ten(dt, dt)
    def test_map_single_dataset_against_whole_tree(self):
        # A plain Dataset argument is broadcast against every node.
        dt = create_test_datatree()
        @map_over_subtree
        def nodewise_merge(node_ds, fixed_ds):
            return xr.merge([node_ds, fixed_ds])
        other_ds = xr.Dataset({"z": ("z", [0])})
        expected = create_test_datatree(modify=lambda ds: xr.merge([ds, other_ds]))
        result_tree = nodewise_merge(dt, other_ds)
        assert_equal(result_tree, expected)
    @pytest.mark.xfail
    def test_trees_with_different_node_names(self):
        # TODO test this after I've got good tests for renaming nodes
        raise NotImplementedError
    def test_dt_method(self):
        # Same mapping exposed as a DataTree method.
        dt = create_test_datatree()
        def multiply_then_add(ds, times, add=0.0):
            return times * ds + add
        expected = create_test_datatree(modify=lambda ds: (10.0 * ds) + 2.0)
        result_tree = dt.map_over_subtree(multiply_then_add, 10.0, add=2.0)
        assert_equal(result_tree, expected)
@pytest.mark.xfail
class TestMapOverSubTreeInplace:
    """Placeholder suite for in-place subtree mapping (not implemented yet)."""
    def test_map_over_subtree_inplace(self):
        # Marked xfail until the in-place variant exists.
        raise NotImplementedError
| [
"xarray.merge",
"datatree.mapping.check_isomorphic",
"xarray.Dataset",
"datatree.datatree.DataTree.from_dict",
"datatree.treenode.TreeNode",
"datatree.testing.assert_equal",
"pytest.raises",
"datatree.datatree.DataTree"
] | [((299, 311), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (309, 311), True, 'import xarray as xr\n'), ((525, 570), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty}"}), "(data_objects={'a': empty})\n", (543, 570), False, 'from datatree.datatree import DataTree\n'), ((585, 642), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'b': empty, 'c': empty}"}), "(data_objects={'b': empty, 'c': empty})\n", (603, 642), False, 'from datatree.datatree import DataTree\n'), ((991, 1036), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty}"}), "(data_objects={'a': empty})\n", (1009, 1036), False, 'from datatree.datatree import DataTree\n'), ((1051, 1110), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'b': empty, 'b/c': empty}"}), "(data_objects={'b': empty, 'b/c': empty})\n", (1069, 1110), False, 'from datatree.datatree import DataTree\n'), ((1528, 1573), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'b': empty}"}), "(data_objects={'b': empty})\n", (1546, 1573), False, 'from datatree.datatree import DataTree\n'), ((1933, 2022), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty, 'b': empty, 'b/c': empty, 'b/d': empty}"}), "(data_objects={'a': empty, 'b': empty, 'b/c': empty,\n 'b/d': empty})\n", (1951, 2022), False, 'from datatree.datatree import DataTree\n'), ((2055, 2144), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty, 'b': empty, 'b/c': empty, 'b/d': empty}"}), "(data_objects={'a': empty, 'b': empty, 'b/c': empty,\n 'b/d': empty})\n", (2073, 2144), False, 'from datatree.datatree import DataTree\n'), ((2171, 2223), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {'require_names_equal': '(True)'}), '(dt1, dt2, require_names_equal=True)\n', 
(2187, 2223), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((2279, 2368), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty, 'b': empty, 'b/d': empty, 'b/c': empty}"}), "(data_objects={'a': empty, 'b': empty, 'b/d': empty,\n 'b/c': empty})\n", (2297, 2368), False, 'from datatree.datatree import DataTree\n'), ((2401, 2490), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty, 'b': empty, 'b/c': empty, 'b/d': empty}"}), "(data_objects={'a': empty, 'b': empty, 'b/c': empty,\n 'b/d': empty})\n", (2419, 2490), False, 'from datatree.datatree import DataTree\n'), ((2517, 2570), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {'require_names_equal': '(False)'}), '(dt1, dt2, require_names_equal=False)\n', (2533, 2570), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((2633, 2722), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'a': empty, 'b': empty, 'b/c': empty, 'b/d': empty}"}), "(data_objects={'a': empty, 'b': empty, 'b/c': empty,\n 'b/d': empty})\n", (2651, 2722), False, 'from datatree.datatree import DataTree\n'), ((2755, 2844), 'datatree.datatree.DataTree.from_dict', 'DataTree.from_dict', ([], {'data_objects': "{'A': empty, 'B': empty, 'B/C': empty, 'B/D': empty}"}), "(data_objects={'A': empty, 'B': empty, 'B/C': empty,\n 'B/D': empty})\n", (2773, 2844), False, 'from datatree.datatree import DataTree\n'), ((2871, 2897), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {}), '(dt1, dt2)\n', (2887, 2897), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((3321, 3347), 'datatree.datatree.DataTree', 'DataTree', ([], {'name': '"""real_root"""'}), "(name='real_root')\n", (3329, 3347), False, 'from datatree.datatree import DataTree\n'), ((4584, 
4619), 'datatree.testing.assert_equal', 'assert_equal', (['result_tree', 'expected'], {}), '(result_tree, expected)\n', (4596, 4619), False, 'from datatree.testing import assert_equal\n'), ((4973, 5008), 'datatree.testing.assert_equal', 'assert_equal', (['result_tree', 'expected'], {}), '(result_tree, expected)\n', (4985, 5008), False, 'from datatree.testing import assert_equal\n'), ((5312, 5342), 'datatree.testing.assert_equal', 'assert_equal', (['result', 'expected'], {}), '(result, expected)\n', (5324, 5342), False, 'from datatree.testing import assert_equal\n'), ((5655, 5685), 'datatree.testing.assert_equal', 'assert_equal', (['result', 'expected'], {}), '(result, expected)\n', (5667, 5685), False, 'from datatree.testing import assert_equal\n'), ((5969, 6003), 'datatree.testing.assert_equal', 'assert_equal', (['dt_min', 'expected_min'], {}), '(dt_min, expected_min)\n', (5981, 6003), False, 'from datatree.testing import assert_equal\n'), ((6084, 6118), 'datatree.testing.assert_equal', 'assert_equal', (['dt_max', 'expected_max'], {}), '(dt_max, expected_max)\n', (6096, 6118), False, 'from datatree.testing import assert_equal\n'), ((7716, 7745), 'xarray.Dataset', 'xr.Dataset', (["{'z': ('z', [0])}"], {}), "({'z': ('z', [0])})\n", (7726, 7745), True, 'import xarray as xr\n'), ((7889, 7924), 'datatree.testing.assert_equal', 'assert_equal', (['result_tree', 'expected'], {}), '(result_tree, expected)\n', (7901, 7924), False, 'from datatree.testing import assert_equal\n'), ((8422, 8457), 'datatree.testing.assert_equal', 'assert_equal', (['result_tree', 'expected'], {}), '(result_tree, expected)\n', (8434, 8457), False, 'from datatree.testing import assert_equal\n'), ((390, 434), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""not a tree"""'}), "(TypeError, match='not a tree')\n", (403, 434), False, 'import pytest\n'), ((448, 472), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['"""s"""', '(1)'], {}), "('s', 1)\n", (464, 472), False, 'from 
datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((838, 897), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {'match': 'expected_err_str'}), '(TreeIsomorphismError, match=expected_err_str)\n', (851, 897), False, 'import pytest\n'), ((911, 937), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {}), '(dt1, dt2)\n', (927, 937), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((1310, 1369), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {'match': 'expected_err_str'}), '(TreeIsomorphismError, match=expected_err_str)\n', (1323, 1369), False, 'import pytest\n'), ((1383, 1409), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {}), '(dt1, dt2)\n', (1399, 1409), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((1749, 1808), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {'match': 'expected_err_str'}), '(TreeIsomorphismError, match=expected_err_str)\n', (1762, 1808), False, 'import pytest\n'), ((1822, 1874), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {'require_names_equal': '(True)'}), '(dt1, dt2, require_names_equal=True)\n', (1838, 1874), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((3055, 3071), 'datatree.treenode.TreeNode', 'TreeNode', (['"""set3"""'], {}), "('set3')\n", (3063, 3071), False, 'from datatree.treenode import TreeNode\n'), ((3086, 3145), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {'match': '"""root/set1/set2"""'}), "(TreeIsomorphismError, match='root/set1/set2')\n", (3099, 3145), False, 'import pytest\n'), ((3159, 3185), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {}), '(dt1, dt2)\n', (3175, 3185), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), 
((3361, 3396), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {}), '(TreeIsomorphismError)\n', (3374, 3396), False, 'import pytest\n'), ((3410, 3458), 'datatree.mapping.check_isomorphic', 'check_isomorphic', (['dt1', 'dt2'], {'check_from_root': '(True)'}), '(dt1, dt2, check_from_root=True)\n', (3426, 3458), False, 'from datatree.mapping import TreeIsomorphismError, check_isomorphic, map_over_subtree\n'), ((3619, 3680), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""Must pass at least one tree"""'}), "(TypeError, match='Must pass at least one tree')\n", (3632, 3680), False, 'import pytest\n'), ((3950, 3985), 'pytest.raises', 'pytest.raises', (['TreeIsomorphismError'], {}), '(TreeIsomorphismError)\n', (3963, 3985), False, 'import pytest\n'), ((4229, 4283), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""return value of None"""'}), "(TypeError, match='return value of None')\n", (4242, 4283), False, 'import pytest\n'), ((6291, 6349), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""not Dataset or DataArray"""'}), "(TypeError, match='not Dataset or DataArray')\n", (6304, 6349), False, 'import pytest\n'), ((6573, 6631), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""not Dataset or DataArray"""'}), "(TypeError, match='not Dataset or DataArray')\n", (6586, 6631), False, 'import pytest\n'), ((7080, 7129), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""instead returns"""'}), "(TypeError, match='instead returns')\n", (7093, 7129), False, 'import pytest\n'), ((7346, 7424), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""takes 1 positional argument but 2 were given"""'}), "(TypeError, match='takes 1 positional argument but 2 were given')\n", (7359, 7424), False, 'import pytest\n'), ((7666, 7695), 'xarray.merge', 'xr.merge', (['[node_ds, fixed_ds]'], {}), '([node_ds, fixed_ds])\n', (7674, 7695), True, 'import xarray as xr\n'), ((6536, 6548), 'xarray.Dataset', 
'xr.Dataset', ([], {}), '()\n', (6546, 6548), True, 'import xarray as xr\n'), ((1499, 1511), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (1509, 1511), True, 'import xarray as xr\n'), ((7804, 7828), 'xarray.merge', 'xr.merge', (['[ds, other_ds]'], {}), '([ds, other_ds])\n', (7812, 7828), True, 'import xarray as xr\n')] |
from os import system
from requests import get
from pyfiglet import figlet_format
from colored import fore, back, style, attr
# --- console setup: reset attributes, set colors, clear, print banner ---
attr(0)
print(back.BLACK)
print(fore.BLUE_VIOLET + style.BOLD)
system("clear")
print(figlet_format("DIRETORY BRUTE\nBY MOLEEY", width=58, justify="center", font="smslant"))
site = input("Link Do Site: ")
# Read the wordlist in a context manager so the handle is always closed
# (the original left the file open for the life of the process).
with open("wordlist.txt", "r") as txt:
    line = txt.readlines()
for diretory in line:
    # BUG FIX: readlines() keeps the trailing "\n" on every entry, so each
    # requested URL (and the printed report line) contained a newline.
    diretory = diretory.strip()
    if not diretory:
        continue  # skip blank wordlist lines
    req = get(site + diretory)
    status = req.status_code
    if status != 404:  # report everything that is not a plain "not found"
        print(f"URL: {site+diretory} Status: {status}")
| [
"os.system",
"pyfiglet.figlet_format",
"requests.get",
"colored.attr"
] | [((126, 133), 'colored.attr', 'attr', (['(0)'], {}), '(0)\n', (130, 133), False, 'from colored import fore, back, style, attr\n'), ((189, 204), 'os.system', 'system', (['"""clear"""'], {}), "('clear')\n", (195, 204), False, 'from os import system\n'), ((212, 305), 'pyfiglet.figlet_format', 'figlet_format', (['"""DIRETORY BRUTE\nBY MOLEEY"""'], {'width': '(58)', 'justify': '"""center"""', 'font': '"""smslant"""'}), '("""DIRETORY BRUTE\nBY MOLEEY""", width=58, justify=\'center\',\n font=\'smslant\')\n', (225, 305), False, 'from pyfiglet import figlet_format\n'), ((417, 437), 'requests.get', 'get', (['(site + diretory)'], {}), '(site + diretory)\n', (420, 437), False, 'from requests import get\n')] |
# -*- coding: utf-8 -*-
# Copyright (C) 2021 <NAME>
'''
MIT License
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import pynmea2
from .pipe import Pipe
class SeatalkPipe(Pipe):
    """ Transform $STALK sentences to native NMEA sentences.

    Raymarine Seatalk datagrams arrive wrapped as ``$STALK,<id>,<attr>,<d1>,...``
    hex fields; each recognised datagram id is decoded and re-emitted as the
    corresponding NMEA 0183 sentence. Unrecognised or non-$STALK input is
    passed through unchanged.
    """
    def __init__(self):
        # Last seen apparent wind angle/speed, cached so each MWV sentence
        # can carry both values even though they arrive in separate datagrams.
        self.angle = 0
        self.speed = 0
    def transform(self, sentence: pynmea2.NMEASentence) -> list[pynmea2.NMEASentence]:
        """Decode one sentence; returns a one-element list (translated NMEA
        sentence for known Seatalk datagrams, otherwise the input as-is)."""
        s = str(sentence)
        if not s.startswith('$STALK'):
            return [sentence]
        # Drop the "$STALK" tag; remaining items are hex byte fields.
        s = s.split(',')[1::]
        nmea = None
        try:
            # AWA Corresponding NMEA sentence: MWV
            if len(s) >= 4 and s[0] == '10' and s[1] == '01':
                # NOTE(review): multiplier 0xff (255) looks like it may be
                # intended as 0x100 (256) for a high-byte shift -- confirm
                # against the Seatalk datagram reference.
                angle = (int('0x'+s[3], 16) + int('0x'+s[2], 16) * 0xff) / 2
                #print ('awa', angle)
                #nmea = pynmea2.MWV(True, 'R', angle, 'N', 'A')
                # Create nmea string mwv for wind angle
                self.angle = "{:.1f}".format(angle)
                nmea = '$IIMWV,%s,R,%s,k,A' % (self.angle, self.speed)
            # AWS Corresponding NMEA sentence: MWV
            elif len(s) >= 4 and s[0] == '11' and s[1] == '01':
                speed = (int('0x' + s[2], 16) & 0x7f) + int('0x' + s[3][1], 16) / 10
                #print('aws', speed)
                #nmea = pynmea2.MWV(True, 'R', speed, 'N', 'A')
                # Create nmea string mwv for wind speed
                self.speed = "{:.1f}".format(speed)
                nmea = '$IIMWV,%s,R,%s,k,A' % (self.angle, self.speed)
            # DEPTH NMEA sentences: DPT, DBT
            elif len(s) >= 5 and s[0] == '00' and s[1] == '02':
                # Tenths of feet converted to metres (0.3048 m per foot).
                depth = (int('0x'+s[3], 16) + int('0x'+s[4], 16) * 0xff ) / 10 * 0.3048
                #print ('depth', depth)
                #nmea = pynmea2.DPT('IN', 'DPT', (str(depth)))
                # Create nmea string dpt for depth
                depth = "{:.1f}".format(depth)
                nmea = '$IIDBT,,f,%s,M,,F' % (depth)
            # Water temp Corresponding NMEA sentence: MTW
            elif len(s) >= 4 and s[0] == '27' and s[1] == '01':
                # Raw value is offset by 100 and scaled by 10 (Celsius).
                temp = ((int('0x'+s[2], 16) + int('0x'+s[3], 16) * 0xff ) - 100.)/10.
                #print ('temp', temp)
                #nmea = pynmea2.MTW(temp, 'celsius')
                # Create nmea string mtw for water temp
                temp = "{:.1f}".format(temp)
                #nmea = '$IIMTW,%s,C,%s,C,%s,C' % (temp, temp, temp)
                nmea = '$IIMDA,,I,,B,,C,%s,C,,,,C,,T,,M,,N,,M' % (temp)
            # Compass
            elif len(s) >= 4 and s[0] == '9c':
                U = int('0x' + s[1][0], 16)
                VW = int('0x' + s[2], 16)
                # NOTE(review): "(2 if 0xC == 0xC else 1)" is always 2, so this
                # term reduces to (U & 2) when (U & 0xC) is nonzero -- looks
                # like a transcription slip; verify against the Seatalk spec.
                hdg = (U & 0x3) * 90 + (VW & 0x3F) * 2 + ((U & (2 if 0xC == 0xC else 1)) if (U & 0xC) else 0)
                # print('heading', hdg)
                hdg = "{:.0f}".format(hdg)
                nmea = 'IIHDM,%s,M' % (hdg)
            # SOG Corresponding NMEA sentence: VHW
            elif len(s) >= 4 and s[0] == '20' and s[1] == '01':
                sog = ((int('0x'+s[2], 16) + int('0x'+s[3], 16) * 0xff ))/10.
                #print ('sog', sog)
                #nmea = pynmea2.VHW(sog, 'T', 'M', 'N')
                # Create nmea string vhw for speed over ground
                #nmea = '$IIVHW,%s,T,M,N,N' % (sog)
        except:
            # Deliberate best-effort: malformed datagrams are silently
            # passed through below rather than crashing the pipe.
            pass
        if nmea is not None:
            return [pynmea2.parse(nmea)]
        return [sentence]
"pynmea2.parse"
] | [((3785, 3804), 'pynmea2.parse', 'pynmea2.parse', (['nmea'], {}), '(nmea)\n', (3798, 3804), False, 'import pynmea2\n')] |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~----->>>
# _ _
# .__(.)< ?? >(.)__.
# \___) (___/
# @Time : 2022/3/20 下午10:06
# @Author : wds -->> <EMAIL>
# @File : util.py
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~----->>>
import numpy as np
import os
from sklearn.cluster import KMeans
import torch
def vision_phi(Phi, outpath='phi_output.txt', voc=None, top_n=50, topic_diversity=True):
    """Write the top words of every topic, layer by layer, to ``outpath``.

    Args:
        Phi: list of per-layer topic matrices; layer ``num`` is visualised as
            the cumulative product ``Phi[0] @ ... @ Phi[num]``.
        outpath: output text file, one line of top words per topic.
            NOTE: rewritten for every layer, so only the last layer's words
            survive on disk (unchanged from the original behaviour).
        voc: vocabulary list mapping word index -> word; required.
        top_n: number of top words written per topic.
        topic_diversity: if True, also print the fraction of unique words
            among the first 25 words of each topic, per layer.
    """
    def get_diversity(topics):
        # Ratio of unique words to total words across all topics of a layer.
        word = []
        for line in topics:
            word += line
        word_unique = np.unique(word)
        return len(word_unique) / len(word)
    if voc is not None:
        phi = 1
        for num, phi_layer in enumerate(Phi):
            phi = np.dot(phi, phi_layer)
            phi_k = phi.shape[1]
            topic_word = []
            # FIX: context manager guarantees the handle is closed even when
            # get_top_n() raises (the original leaked the open file then).
            with open(outpath, 'w') as f:
                for each in range(phi_k):
                    top_n_words = get_top_n(phi[:, each], top_n, voc)
                    topic_word.append(top_n_words.split()[:25])
                    f.write(top_n_words)
                    f.write('\n')
            if topic_diversity:
                td_value = get_diversity(topic_word)
                print('topic diversity at layer {}: {}'.format(num, td_value))
    else:
        print('voc need !!')
def to_list(data, device='cuda:0'):
    """Expand a batch of bag-of-words count rows into token index sequences.

    For every row ``i`` of ``data``, the result holds a 1-D tensor on
    ``device`` in which token index ``j`` occurs ``data[i, j]`` times,
    in ascending index order.
    """
    expanded = []
    for row in data:
        positions = torch.where(row > 0)[0]
        flat = [tok for tok in positions for _ in range(row[tok])]
        expanded.append(torch.tensor(flat, device=device))
    return expanded
def get_top_n(phi, top_n, voc):
    """Return the ``top_n`` highest-weighted vocabulary words as a single
    string in which every word is followed by one space."""
    order = np.argsort(-phi)  # descending weights
    words = [voc[order[rank]] for rank in range(top_n)]
    return ''.join(w + ' ' for w in words)
def normalization(data):
    """Min-max scale every row of ``data`` into [0, 1]."""
    row_min = np.min(data, axis=1, keepdims=True)
    row_max = np.max(data, axis=1, keepdims=True)
    return (data - row_min) / (row_max - row_min)
def standardization(data):
    """Z-score every row: subtract the row mean, divide by the row std."""
    return (data - np.mean(data, axis=1, keepdims=True)) / np.std(data, axis=1, keepdims=True)
def cluster_kmeans(x, n=50):
    """Cluster the rows of ``x`` into ``n`` groups with KMeans.

    Returns:
        ndarray of shape (n, d): the fitted cluster centroids.
    """
    # x_norm = standardization(x)
    # FIX: ``n_jobs`` was deprecated in scikit-learn 0.23 and removed in 1.0;
    # passing it raises TypeError on any modern release, so it is dropped.
    kmeans = KMeans(n_clusters=n, random_state=0).fit(x)
    cluster_center = kmeans.cluster_centers_  ### n, d
    return cluster_center
def pac_vis(path):
    # Placeholder for a (presumably PCA-based) visualisation routine --
    # intentionally not implemented yet.
    pass
"sklearn.cluster.KMeans",
"numpy.mean",
"numpy.unique",
"numpy.min",
"numpy.max",
"numpy.argsort",
"numpy.dot",
"numpy.std",
"torch.where"
] | [((1697, 1713), 'numpy.argsort', 'np.argsort', (['(-phi)'], {}), '(-phi)\n', (1707, 1713), True, 'import numpy as np\n'), ((2065, 2101), 'numpy.mean', 'np.mean', (['data'], {'axis': '(1)', 'keepdims': '(True)'}), '(data, axis=1, keepdims=True)\n', (2072, 2101), True, 'import numpy as np\n'), ((2114, 2149), 'numpy.std', 'np.std', (['data'], {'axis': '(1)', 'keepdims': '(True)'}), '(data, axis=1, keepdims=True)\n', (2120, 2149), True, 'import numpy as np\n'), ((650, 665), 'numpy.unique', 'np.unique', (['word'], {}), '(word)\n', (659, 665), True, 'import numpy as np\n'), ((1888, 1923), 'numpy.max', 'np.max', (['data'], {'axis': '(1)', 'keepdims': '(True)'}), '(data, axis=1, keepdims=True)\n', (1894, 1923), True, 'import numpy as np\n'), ((1926, 1961), 'numpy.min', 'np.min', (['data'], {'axis': '(1)', 'keepdims': '(True)'}), '(data, axis=1, keepdims=True)\n', (1932, 1961), True, 'import numpy as np\n'), ((814, 836), 'numpy.dot', 'np.dot', (['phi', 'phi_layer'], {}), '(phi, phi_layer)\n', (820, 836), True, 'import numpy as np\n'), ((1486, 1510), 'torch.where', 'torch.where', (['(data[i] > 0)'], {}), '(data[i] > 0)\n', (1497, 1510), False, 'import torch\n'), ((1981, 2016), 'numpy.min', 'np.min', (['data'], {'axis': '(1)', 'keepdims': '(True)'}), '(data, axis=1, keepdims=True)\n', (1987, 2016), True, 'import numpy as np\n'), ((2258, 2305), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': 'n', 'random_state': '(0)', 'n_jobs': '(-1)'}), '(n_clusters=n, random_state=0, n_jobs=-1)\n', (2264, 2305), False, 'from sklearn.cluster import KMeans\n')] |
"""
Original: <NAME>
New Author: <NAME>
"""
from __future__ import print_function
import httplib2
import os
import re
import time
import base64
from apiclient import discovery
from apiclient import errors
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
SCOPES = 'https://mail.google.com/'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Gmail API Python Quickstart'
# Parse the standard oauth2client command-line flags; fall back to None
# when argparse is unavailable (Google quickstart boilerplate).
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None
def get_credentials():
    """Return valid user OAuth2 credentials, running the auth flow if needed.

    Credentials are cached in ``~/.credentials/gmail-python-quickstart.json``;
    when that file is missing or the stored credentials are invalid, the
    OAuth2 flow is run and the fresh credentials are written back.
    """
    cred_dir = os.path.join(os.path.expanduser('~'), '.credentials')
    if not os.path.exists(cred_dir):
        os.makedirs(cred_dir)
    cred_path = os.path.join(cred_dir, 'gmail-python-quickstart.json')
    store = Storage(cred_path)
    credentials = store.get()
    # Early return on a valid cached credential; otherwise run the flow.
    if credentials and not credentials.invalid:
        return credentials
    flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
    flow.user_agent = APPLICATION_NAME
    if flags:
        credentials = tools.run_flow(flow, store, flags)
    else:  # Needed only for compatibility with Python 2.6
        credentials = tools.run(flow, store)
    print('Storing credentials to ' + cred_path)
    return credentials
def main():
    """Entry point: currently only exercises the OAuth2 credential flow."""
    credentials = get_credentials()
# Run only when executed as a script.
if __name__ == '__main__':
    main()
| [
"os.path.exists",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.join",
"oauth2client.client.flow_from_clientsecrets",
"oauth2client.tools.run",
"oauth2client.file.Storage",
"oauth2client.tools.run_flow",
"os.path.expanduser"
] | [((714, 737), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (732, 737), False, 'import os\n'), ((759, 797), 'os.path.join', 'os.path.join', (['home_dir', '""".credentials"""'], {}), "(home_dir, '.credentials')\n", (771, 797), False, 'import os\n'), ((899, 959), 'os.path.join', 'os.path.join', (['credential_dir', '"""gmail-python-quickstart.json"""'], {}), "(credential_dir, 'gmail-python-quickstart.json')\n", (911, 959), False, 'import os\n'), ((1008, 1032), 'oauth2client.file.Storage', 'Storage', (['credential_path'], {}), '(credential_path)\n', (1015, 1032), False, 'from oauth2client.file import Storage\n'), ((809, 839), 'os.path.exists', 'os.path.exists', (['credential_dir'], {}), '(credential_dir)\n', (823, 839), False, 'import os\n'), ((849, 876), 'os.makedirs', 'os.makedirs', (['credential_dir'], {}), '(credential_dir)\n', (860, 876), False, 'import os\n'), ((1125, 1183), 'oauth2client.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (['CLIENT_SECRET_FILE', 'SCOPES'], {}), '(CLIENT_SECRET_FILE, SCOPES)\n', (1155, 1183), False, 'from oauth2client import client\n'), ((574, 624), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[tools.argparser]'}), '(parents=[tools.argparser])\n', (597, 624), False, 'import argparse\n'), ((1271, 1305), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store', 'flags'], {}), '(flow, store, flags)\n', (1285, 1305), False, 'from oauth2client import tools\n'), ((1394, 1416), 'oauth2client.tools.run', 'tools.run', (['flow', 'store'], {}), '(flow, store)\n', (1403, 1416), False, 'from oauth2client import tools\n')] |
from inspect import getargspec
from uuid import uuid4
from utils.logging import log_error
# Registry of available actions, populated by the @Action decorator:
# name -> {"function", "static_params", "dynamic_params"}.
ACTIONS = {}
# Parameter names that are supplied at run time, never from stored config.
DYNAMIC_PARAMS = ["user", "parser_return", "execution_state", "resend", "delay"]


class ActionConfig(object):
    """Helpers for building and executing action configuration dicts.

    A config is ``{"action_name", "params", "action_id"}`` where ``params``
    holds only static parameters; dynamic parameters are injected at
    execution time by :meth:`do_action`.
    """

    @staticmethod
    def do_action(action_config, user, parser_return=None, execution_state=[],
                  resend=False, delay=True):
        """Execute the configured action, merging in whichever dynamic
        parameters its registered function accepts.

        NOTE(review): the mutable default ``execution_state=[]`` is shared
        across calls; safe only if action functions never mutate it -- kept
        for interface compatibility.
        """
        passed_dynamic_params = {"user": user, "parser_return": parser_return,
                                 "execution_state": execution_state,
                                 "resend": resend, "delay": delay}
        action_name = action_config["action_name"]
        # Copy so the stored config's params dict is never mutated.
        params = dict(action_config["params"])
        dynamic_params = ActionConfig.get_dynamic_params(action_name)
        for param_name in passed_dynamic_params:
            if param_name in dynamic_params:
                params[param_name] = passed_dynamic_params[param_name]
        return ACTIONS[action_name]["function"](**params)

    @staticmethod
    def get_dynamic_params(action_name):
        """Names of run-time parameters accepted by the named action."""
        return ACTIONS[action_name]["dynamic_params"]

    @staticmethod
    def get_static_params(action_name):
        """Names of configuration parameters required by the named action."""
        return ACTIONS[action_name]["static_params"]

    @staticmethod
    def create_config(action_name, params_dict=None, existing_id=None, **params):
        """Validate params against the action's signature and build a config.

        Raises:
            Exception: if a reserved dynamic name appears in params, a
                required static param is missing, or an unknown param is
                supplied.
        """
        static_params = ActionConfig.get_static_params(action_name)
        if params_dict is not None:
            params = params_dict
        for key in DYNAMIC_PARAMS:
            if key in params:
                # BUG FIX: the original format string was "% is ..." -- "% i"
                # is an integer conversion with the space flag, so this path
                # raised a formatting TypeError instead of the intended error.
                raise Exception("%s is reserved dynamic keyword, yet appears in params" % (key))
        for key in static_params:
            if key not in params:
                raise Exception("%s missing in config for action %s" % (key, action_name))
        for key in params:
            if key not in static_params:
                raise Exception("Key %s is invalid for action %s" % (key, action_name))
        if existing_id is not None:
            _id = existing_id
        elif 'database_id' in params:
            _id = params['database_id']
        else:
            _id = uuid4()
        return {"action_name": action_name, "params": params, "action_id": _id}
def Action(name):
    """ Registers any function with this decorator onto the ACTIONS dict
        under the key of it's name; this decorator also registers the
        parameters that the function takes, sorting them under
        static params (those passed in from the database configuration) and/or
        dynamic params (those passed in at run time); dynamic params
        are those from the list DYNAMIC_PARAMS.
        Additionally adds a try-except block around the entire function
        call to prevent bad database config from breaking the codebase.
        NOTE: Any function with the @Action(name) decorator must
        still be imported by the project to be reachable."""
    # FIX: inspect.getargspec was deprecated and removed in Python 3.11;
    # getfullargspec is the drop-in replacement (index [0] is still the
    # positional argument names).
    from inspect import getfullargspec

    class ActionDecorator(object):
        def __init__(self, fn):
            self.fn = fn
            # Inspect once (the original called getargspec twice), then
            # split declared argument names into static vs dynamic.
            arg_names = getfullargspec(fn)[0]
            static_params = [arg for arg in arg_names if arg not in DYNAMIC_PARAMS]
            dynamic_params = [arg for arg in arg_names if arg in DYNAMIC_PARAMS]
            ACTIONS[name] = {"function": fn, "static_params": static_params,
                             "dynamic_params": dynamic_params}

        def __call__(self, *args, **kwargs):
            # Guard so a misbehaving action logs instead of crashing callers.
            try:
                return self.fn(*args, **kwargs)
            except Exception as e:
                log_error(e, 'Error in action ' + name)

    return ActionDecorator
"inspect.getargspec",
"utils.logging.log_error",
"uuid.uuid4"
] | [((2093, 2100), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2098, 2100), False, 'from uuid import uuid4\n'), ((3479, 3518), 'utils.logging.log_error', 'log_error', (['e', "('Error in action ' + name)"], {}), "(e, 'Error in action ' + name)\n", (3488, 3518), False, 'from utils.logging import log_error\n'), ((3032, 3046), 'inspect.getargspec', 'getargspec', (['fn'], {}), '(fn)\n', (3042, 3046), False, 'from inspect import getargspec\n'), ((3125, 3139), 'inspect.getargspec', 'getargspec', (['fn'], {}), '(fn)\n', (3135, 3139), False, 'from inspect import getargspec\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from FER.em_network.models.model import TimeDistributed
class PhaseNet(nn.Module):
def __init__(self):
super(PhaseNet, self).__init__()
self.group1 = nn.Sequential(
nn.Conv2d(12, 24, kernel_size=(5, 5), stride=1, padding=(1, 1)),
nn.BatchNorm2d(24),
nn.ReLU(inplace=True),
nn.MaxPool2d((1, 2)))
self.group2 = nn.Sequential(
nn.Conv2d(24, 48, kernel_size=(3, 5), stride=1, padding=(1, 1)),
nn.BatchNorm2d(48),
nn.ReLU(inplace=True),
nn.MaxPool2d((2, 2)))
self.group3 = nn.Sequential(
nn.Conv2d(48, 96, kernel_size=(5, 5), stride=1, padding=(1, 1)),
nn.BatchNorm2d(96),
nn.ReLU(inplace=True),
nn.MaxPool2d((1, 2)),
)
def forward(self, x):
x = self.group1(x)
x = self.group2(x)
x = self.group3(x)
return x
class ImageNet(nn.Module):
def __init__(self, num_channel=1):
super(ImageNet, self).__init__()
self.group1 = nn.Sequential(
nn.Conv2d(num_channel, 16, kernel_size=(5, 3), stride=1, padding=(1, 1)),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.MaxPool2d((2, 1)))
self.group2 = nn.Sequential(
nn.Conv2d(16, 32, kernel_size=(5, 3), stride=1, padding=(1, 1)),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.MaxPool2d((2, 2)))
self.group3 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=(5, 3), stride=1, padding=(1, 1)),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
nn.MaxPool2d((2, 1)),
)
self.group4 = nn.Sequential(
nn.Conv2d(64, 128, kernel_size=(3, 3), stride=1, padding=(1, 1)),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.MaxPool2d((2, 2)),
)
self.group5 = nn.Sequential(
nn.Conv2d(128, 128, kernel_size=(3, 3), stride=1, padding=(1, 1)),
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.MaxPool2d((1, 1)),
)
def forward(self, x):
x = self.group1(x)
x = self.group2(x)
x = self.group3(x)
x = self.group4(x)
x = self.group5(x)
return x
class ImageDualNet(nn.Module):
def __init__(self):
super(ImageDualNet, self).__init__()
self.azi_net = TimeDistributed(ImageNet())
self.ele_net = TimeDistributed(ImageNet())
def forward(self, azi, ele):
out_azi = self.azi_net(azi)
out_ele = self.ele_net(ele)
out_azi = torch.mean(out_azi, dim=1)
out_ele = torch.mean(out_ele, dim=1)
out_azi = out_azi.view((out_azi.size(0), -1))
out_ele = out_ele.view((out_ele.size(0), -1))
return out_azi, out_ele
class ImageFull(nn.Module):
def __init__(self, num_classes):
super(ImageFull, self).__init__()
self.azi_ele_net = ImageDualNet()
self.fc1 = nn.Sequential(
nn.Linear(1024 * 2, 512),
nn.ReLU(),
nn.Dropout(0.5)
)
self.fc2 = nn.Sequential(
nn.Linear(512, 128),
nn.ReLU(),
nn.Dropout(0.5)
)
self.fc3 = nn.Sequential(
nn.Linear(128, num_classes),
# nn.Softmax(dim=1)
)
def forward(self, azi, ele):
azi, ele = self.azi_ele_net(azi, ele)
out = torch.cat((azi, ele), dim=1)
out = self.fc1(out)
out = self.fc2(out)
out = self.fc3(out)
return out
class ImagePhaseNet(nn.Module):
def __init__(self, num_classes):
super(ImagePhaseNet, self).__init__()
self.azi_ele_net = ImageDualNet()
self.phase_net = PhaseNet()
self.fc1 = nn.Sequential(
nn.Linear(1024 * 2 + 1920, 512),
nn.ReLU(),
nn.Dropout(0.5)
)
self.fc2 = nn.Sequential(
nn.Linear(512, 128),
nn.ReLU(),
nn.Dropout(0.5)
)
self.fc3 = nn.Sequential(
nn.Linear(128, num_classes))
def forward(self, azi, ele, phase):
azi, ele = self.azi_ele_net(azi, ele)
phase = self.phase_net(phase)
phase = phase.view((phase.size(0), -1))
out = torch.cat((azi, ele, phase), dim=1)
out = self.fc1(out)
out = self.fc2(out)
out = self.fc3(out)
return out
if __name__ == "__main__":
# device = torch.device('cuda')
# model = PhaseNet()
# model = model.to(device)
#
# input1 = torch.randn(8, 12, 10, 100)
# input1 = input1.to(device)
# # output = model(input1)
# output = model(input1)
# print(output.view((output.size(0), -1)).size())
device = torch.device('cuda')
# model = TimeDistributed(ImageNet())
# model = ImageDualNet()
# model = ImageFull(num_classes=7)
model = ImagePhaseNet(num_classes=7)
model = model.to(device)
input1 = torch.randn(8, 100, 1, 91, 10)
input1 = input1.to(device)
input2 = torch.randn(8, 100, 1, 91, 10)
input2 = input2.to(device)
input3 = torch.randn(8, 12, 10, 100)
input3 = input3.to(device)
# output = model(input1)
# azi, ele = model(input1, input2)
out = model(input1, input2, input3)
print(out.size())
| [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.mean",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"torch.nn.Linear",
"torch.randn",
"torch.cat",
"torch.device"
] | [((4908, 4928), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (4920, 4928), False, 'import torch\n'), ((5123, 5153), 'torch.randn', 'torch.randn', (['(8)', '(100)', '(1)', '(91)', '(10)'], {}), '(8, 100, 1, 91, 10)\n', (5134, 5153), False, 'import torch\n'), ((5199, 5229), 'torch.randn', 'torch.randn', (['(8)', '(100)', '(1)', '(91)', '(10)'], {}), '(8, 100, 1, 91, 10)\n', (5210, 5229), False, 'import torch\n'), ((5275, 5302), 'torch.randn', 'torch.randn', (['(8)', '(12)', '(10)', '(100)'], {}), '(8, 12, 10, 100)\n', (5286, 5302), False, 'import torch\n'), ((2741, 2767), 'torch.mean', 'torch.mean', (['out_azi'], {'dim': '(1)'}), '(out_azi, dim=1)\n', (2751, 2767), False, 'import torch\n'), ((2786, 2812), 'torch.mean', 'torch.mean', (['out_ele'], {'dim': '(1)'}), '(out_ele, dim=1)\n', (2796, 2812), False, 'import torch\n'), ((3577, 3605), 'torch.cat', 'torch.cat', (['(azi, ele)'], {'dim': '(1)'}), '((azi, ele), dim=1)\n', (3586, 3605), False, 'import torch\n'), ((4438, 4473), 'torch.cat', 'torch.cat', (['(azi, ele, phase)'], {'dim': '(1)'}), '((azi, ele, phase), dim=1)\n', (4447, 4473), False, 'import torch\n'), ((267, 330), 'torch.nn.Conv2d', 'nn.Conv2d', (['(12)', '(24)'], {'kernel_size': '(5, 5)', 'stride': '(1)', 'padding': '(1, 1)'}), '(12, 24, kernel_size=(5, 5), stride=1, padding=(1, 1))\n', (276, 330), True, 'import torch.nn as nn\n'), ((344, 362), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(24)'], {}), '(24)\n', (358, 362), True, 'import torch.nn as nn\n'), ((376, 397), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (383, 397), True, 'import torch.nn as nn\n'), ((411, 431), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(1, 2)'], {}), '((1, 2))\n', (423, 431), True, 'import torch.nn as nn\n'), ((483, 546), 'torch.nn.Conv2d', 'nn.Conv2d', (['(24)', '(48)'], {'kernel_size': '(3, 5)', 'stride': '(1)', 'padding': '(1, 1)'}), '(24, 48, kernel_size=(3, 5), stride=1, padding=(1, 1))\n', (492, 546), True, 'import 
torch.nn as nn\n'), ((560, 578), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(48)'], {}), '(48)\n', (574, 578), True, 'import torch.nn as nn\n'), ((592, 613), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (599, 613), True, 'import torch.nn as nn\n'), ((627, 647), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2, 2)'], {}), '((2, 2))\n', (639, 647), True, 'import torch.nn as nn\n'), ((699, 762), 'torch.nn.Conv2d', 'nn.Conv2d', (['(48)', '(96)'], {'kernel_size': '(5, 5)', 'stride': '(1)', 'padding': '(1, 1)'}), '(48, 96, kernel_size=(5, 5), stride=1, padding=(1, 1))\n', (708, 762), True, 'import torch.nn as nn\n'), ((776, 794), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(96)'], {}), '(96)\n', (790, 794), True, 'import torch.nn as nn\n'), ((808, 829), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (815, 829), True, 'import torch.nn as nn\n'), ((843, 863), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(1, 2)'], {}), '((1, 2))\n', (855, 863), True, 'import torch.nn as nn\n'), ((1159, 1231), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_channel', '(16)'], {'kernel_size': '(5, 3)', 'stride': '(1)', 'padding': '(1, 1)'}), '(num_channel, 16, kernel_size=(5, 3), stride=1, padding=(1, 1))\n', (1168, 1231), True, 'import torch.nn as nn\n'), ((1245, 1263), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16)'], {}), '(16)\n', (1259, 1263), True, 'import torch.nn as nn\n'), ((1277, 1298), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1284, 1298), True, 'import torch.nn as nn\n'), ((1312, 1332), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2, 1)'], {}), '((2, 1))\n', (1324, 1332), True, 'import torch.nn as nn\n'), ((1384, 1447), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(32)'], {'kernel_size': '(5, 3)', 'stride': '(1)', 'padding': '(1, 1)'}), '(16, 32, kernel_size=(5, 3), stride=1, padding=(1, 1))\n', (1393, 1447), True, 'import torch.nn as nn\n'), ((1461, 1479), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', 
(['(32)'], {}), '(32)\n', (1475, 1479), True, 'import torch.nn as nn\n'), ((1493, 1514), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1500, 1514), True, 'import torch.nn as nn\n'), ((1528, 1548), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2, 2)'], {}), '((2, 2))\n', (1540, 1548), True, 'import torch.nn as nn\n'), ((1600, 1663), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)'], {'kernel_size': '(5, 3)', 'stride': '(1)', 'padding': '(1, 1)'}), '(32, 64, kernel_size=(5, 3), stride=1, padding=(1, 1))\n', (1609, 1663), True, 'import torch.nn as nn\n'), ((1677, 1695), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1691, 1695), True, 'import torch.nn as nn\n'), ((1709, 1730), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1716, 1730), True, 'import torch.nn as nn\n'), ((1744, 1764), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2, 1)'], {}), '((2, 1))\n', (1756, 1764), True, 'import torch.nn as nn\n'), ((1826, 1890), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)'], {'kernel_size': '(3, 3)', 'stride': '(1)', 'padding': '(1, 1)'}), '(64, 128, kernel_size=(3, 3), stride=1, padding=(1, 1))\n', (1835, 1890), True, 'import torch.nn as nn\n'), ((1904, 1923), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (1918, 1923), True, 'import torch.nn as nn\n'), ((1937, 1958), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1944, 1958), True, 'import torch.nn as nn\n'), ((1972, 1992), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2, 2)'], {}), '((2, 2))\n', (1984, 1992), True, 'import torch.nn as nn\n'), ((2054, 2119), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(128)'], {'kernel_size': '(3, 3)', 'stride': '(1)', 'padding': '(1, 1)'}), '(128, 128, kernel_size=(3, 3), stride=1, padding=(1, 1))\n', (2063, 2119), True, 'import torch.nn as nn\n'), ((2133, 2152), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (2147, 2152), True, 'import 
torch.nn as nn\n'), ((2166, 2187), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2173, 2187), True, 'import torch.nn as nn\n'), ((2201, 2221), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(1, 1)'], {}), '((1, 1))\n', (2213, 2221), True, 'import torch.nn as nn\n'), ((3151, 3175), 'torch.nn.Linear', 'nn.Linear', (['(1024 * 2)', '(512)'], {}), '(1024 * 2, 512)\n', (3160, 3175), True, 'import torch.nn as nn\n'), ((3189, 3198), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (3196, 3198), True, 'import torch.nn as nn\n'), ((3212, 3227), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (3222, 3227), True, 'import torch.nn as nn\n'), ((3284, 3303), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(128)'], {}), '(512, 128)\n', (3293, 3303), True, 'import torch.nn as nn\n'), ((3317, 3326), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (3324, 3326), True, 'import torch.nn as nn\n'), ((3340, 3355), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (3350, 3355), True, 'import torch.nn as nn\n'), ((3412, 3439), 'torch.nn.Linear', 'nn.Linear', (['(128)', 'num_classes'], {}), '(128, num_classes)\n', (3421, 3439), True, 'import torch.nn as nn\n'), ((3952, 3983), 'torch.nn.Linear', 'nn.Linear', (['(1024 * 2 + 1920)', '(512)'], {}), '(1024 * 2 + 1920, 512)\n', (3961, 3983), True, 'import torch.nn as nn\n'), ((3997, 4006), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (4004, 4006), True, 'import torch.nn as nn\n'), ((4020, 4035), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (4030, 4035), True, 'import torch.nn as nn\n'), ((4093, 4112), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(128)'], {}), '(512, 128)\n', (4102, 4112), True, 'import torch.nn as nn\n'), ((4126, 4135), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (4133, 4135), True, 'import torch.nn as nn\n'), ((4149, 4164), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (4159, 4164), True, 'import torch.nn as nn\n'), ((4222, 4249), 'torch.nn.Linear', 
'nn.Linear', (['(128)', 'num_classes'], {}), '(128, num_classes)\n', (4231, 4249), True, 'import torch.nn as nn\n')] |
"""
https://www.kaggle.com/weicongkong/feedback-prize-huggingface-baseline-training/edit
Copyright (C) <NAME>, 23/02/2022
"""
# %% [markdown]
# # HuggingFace Training Baseline
#
# I wanted to create my own baseline for this competition, and I tried to do so "without peeking" at the kernels published by others. Ideally this can be used for training on a Kaggle kernel. Let's see how good we can get.
#
# This baseline is based on the following notebook by <NAME>: https://github.com/huggingface/notebooks/blob/master/examples/token_classification.ipynb
#
# I initially started building with Roberta - thanks to <NAME> for pointing to Longformer :) The evaluation code is from <NAME>.
#
# The notebook requires a couple of hours to run, so we'll use W&B to be able to monitor it along the way and keep the record of our experiments.
# %% [markdown]
# ## Setup
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T22:59:40.43361Z","iopub.execute_input":"2021-12-23T22:59:40.434Z","iopub.status.idle":"2021-12-23T22:59:40.438896Z","shell.execute_reply.started":"2021-12-23T22:59:40.433966Z","shell.execute_reply":"2021-12-23T22:59:40.437857Z"}}
SAMPLE = True # set True for debugging
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:00.094757Z","iopub.execute_input":"2021-12-23T23:00:00.095189Z","iopub.status.idle":"2021-12-23T23:00:08.865381Z","shell.execute_reply.started":"2021-12-23T23:00:00.095139Z","shell.execute_reply":"2021-12-23T23:00:08.86421Z"}}
# setup wandb for experiment tracking
# source: https://www.kaggle.com/debarshichanda/pytorch-w-b-jigsaw-starter
import wandb
wandb.login(key='<KEY>')
wandb.init(project="feedback_prize", entity="wilsonkong")
anony = None
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:08.872471Z","iopub.execute_input":"2021-12-23T23:00:08.875384Z","iopub.status.idle":"2021-12-23T23:00:09.613866Z","shell.execute_reply.started":"2021-12-23T23:00:08.875328Z","shell.execute_reply":"2021-12-23T23:00:09.612856Z"}}
# CONFIG
EXP_NUM = 4
task = "ner"
model_checkpoint = "allenai/longformer-base-4096"
max_length = 1024
stride = 128
min_tokens = 6
model_path = f'{model_checkpoint.split("/")[-1]}-{EXP_NUM}'
# TRAINING HYPERPARAMS
BS = 1
GRAD_ACC = 8
LR = 5e-5
WD = 0.01
WARMUP = 0.1
N_EPOCHS = 5
# %% [markdown]
# ## Data Preprocessing
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:09.615125Z","iopub.execute_input":"2021-12-23T23:00:09.615508Z","iopub.status.idle":"2021-12-23T23:00:11.240349Z","shell.execute_reply.started":"2021-12-23T23:00:09.615458Z","shell.execute_reply":"2021-12-23T23:00:11.239275Z"}}
import pandas as pd
import os
pd.options.display.width = 500
pd.options.display.max_columns = 20
# read train data
DATA_ROOT = r"C:\Users\wkong\IdeaProjects\kaggle_data\feedback-prize-2021"
train = pd.read_csv(os.path.join(DATA_ROOT, "train.csv"))
train.head(1)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:11.245598Z","iopub.execute_input":"2021-12-23T23:00:11.248663Z","iopub.status.idle":"2021-12-23T23:00:12.088646Z","shell.execute_reply.started":"2021-12-23T23:00:11.248611Z","shell.execute_reply":"2021-12-23T23:00:12.087709Z"}}
# check unique classes
classes = train.discourse_type.unique().tolist()
classes
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:12.090074Z","iopub.execute_input":"2021-12-23T23:00:12.090401Z","iopub.status.idle":"2021-12-23T23:00:12.909927Z","shell.execute_reply.started":"2021-12-23T23:00:12.090357Z","shell.execute_reply":"2021-12-23T23:00:12.908979Z"}}
# setup label indices
from collections import defaultdict
tags = defaultdict()
for i, c in enumerate(classes):
tags[f'B-{c}'] = i
tags[f'I-{c}'] = i + len(classes)
tags[f'O'] = len(classes) * 2
tags[f'Special'] = -100
l2i = dict(tags)
i2l = defaultdict()
for k, v in l2i.items():
i2l[v] = k
i2l[-100] = 'Special'
i2l = dict(i2l)
N_LABELS = len(i2l) - 1 # not accounting for -100
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:12.913651Z","iopub.execute_input":"2021-12-23T23:00:12.913893Z","iopub.status.idle":"2021-12-23T23:00:13.630498Z","shell.execute_reply.started":"2021-12-23T23:00:12.913861Z","shell.execute_reply":"2021-12-23T23:00:13.629554Z"}}
# some helper functions
from pathlib import Path
path = Path(os.path.join(DATA_ROOT, 'train'))
def get_raw_text(ids):
with open(path / f'{ids}.txt', 'r') as file: data = file.read()
return data
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:13.634902Z","iopub.execute_input":"2021-12-23T23:00:13.635138Z","iopub.status.idle":"2021-12-23T23:00:24.829274Z","shell.execute_reply.started":"2021-12-23T23:00:13.635107Z","shell.execute_reply":"2021-12-23T23:00:24.828189Z"}}
# group training labels by text file
df1 = train.groupby('id')['discourse_type'].apply(list).reset_index(name='classlist')
df2 = train.groupby('id')['discourse_start'].apply(list).reset_index(name='starts')
df3 = train.groupby('id')['discourse_end'].apply(list).reset_index(name='ends')
df4 = train.groupby('id')['predictionstring'].apply(list).reset_index(name='predictionstrings')
df = pd.merge(df1, df2, how='inner', on='id')
df = pd.merge(df, df3, how='inner', on='id')
df = pd.merge(df, df4, how='inner', on='id')
df['text'] = df['id'].apply(get_raw_text)
df.head()
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:24.831063Z","iopub.execute_input":"2021-12-23T23:00:24.831421Z","iopub.status.idle":"2021-12-23T23:00:25.596595Z","shell.execute_reply.started":"2021-12-23T23:00:24.831375Z","shell.execute_reply":"2021-12-23T23:00:25.595633Z"}}
# debugging
if SAMPLE: df = df.sample(n=100).reset_index(drop=True)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:25.59961Z","iopub.execute_input":"2021-12-23T23:00:25.600322Z","iopub.status.idle":"2021-12-23T23:00:26.415085Z","shell.execute_reply.started":"2021-12-23T23:00:25.600259Z","shell.execute_reply":"2021-12-23T23:00:26.413987Z"}}
# we will use HuggingFace datasets
from datasets import Dataset, load_metric
ds = Dataset.from_pandas(df)
datasets = ds.train_test_split(test_size=0.1, shuffle=True, seed=42)
datasets
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:26.416852Z","iopub.execute_input":"2021-12-23T23:00:26.417192Z","iopub.status.idle":"2021-12-23T23:00:31.722501Z","shell.execute_reply.started":"2021-12-23T23:00:26.417127Z","shell.execute_reply":"2021-12-23T23:00:31.721572Z"}}
from transformers import AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, add_prefix_space=True)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:31.724112Z","iopub.execute_input":"2021-12-23T23:00:31.724482Z","iopub.status.idle":"2021-12-23T23:00:32.494243Z","shell.execute_reply.started":"2021-12-23T23:00:31.724438Z","shell.execute_reply":"2021-12-23T23:00:32.49297Z"}}
# Not sure if this is needed, but in case we create a span with certain class without starting token of that class,
# let's convert the first token to be the starting token.
e = [0, 7, 7, 7, 1, 1, 8, 8, 8, 9, 9, 9, 14, 4, 4, 4]
def fix_beginnings(labels):
for i in range(1, len(labels)):
curr_lab = labels[i]
prev_lab = labels[i - 1]
if curr_lab in range(7, 14):
if prev_lab != curr_lab and prev_lab != curr_lab - 7:
labels[i] = curr_lab - 7
return labels
fix_beginnings(e)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:32.495836Z","iopub.execute_input":"2021-12-23T23:00:32.496208Z","iopub.status.idle":"2021-12-23T23:00:33.263669Z","shell.execute_reply.started":"2021-12-23T23:00:32.49614Z","shell.execute_reply":"2021-12-23T23:00:33.262629Z"}}
# tokenize and add labels
def tokenize_and_align_labels(examples):
o = tokenizer(examples['text'], truncation=True, padding=True, return_offsets_mapping=True, max_length=max_length,
stride=stride, return_overflowing_tokens=True)
# Since one example might give us several features if it has a long context, we need a map from a feature to
# its corresponding example. This key gives us just that.
sample_mapping = o["overflow_to_sample_mapping"]
# The offset mappings will give us a map from token to character position in the original context. This will
# help us compute the start_positions and end_positions.
offset_mapping = o["offset_mapping"]
o["labels"] = []
for i in range(len(offset_mapping)):
sample_index = sample_mapping[i]
labels = [l2i['O'] for i in range(len(o['input_ids'][i]))]
for label_start, label_end, label in \
list(zip(examples['starts'][sample_index], examples['ends'][sample_index],
examples['classlist'][sample_index])):
for j in range(len(labels)):
token_start = offset_mapping[i][j][0]
token_end = offset_mapping[i][j][1]
if token_start == label_start:
labels[j] = l2i[f'B-{label}']
if token_start > label_start and token_end <= label_end:
labels[j] = l2i[f'I-{label}']
for k, input_id in enumerate(o['input_ids'][i]):
if input_id in [0, 1, 2]:
labels[k] = -100
labels = fix_beginnings(labels)
o["labels"].append(labels)
return o
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:33.265142Z","iopub.execute_input":"2021-12-23T23:00:33.265646Z","iopub.status.idle":"2021-12-23T23:00:35.856612Z","shell.execute_reply.started":"2021-12-23T23:00:33.265601Z","shell.execute_reply":"2021-12-23T23:00:35.855589Z"}}
tokenized_datasets = datasets.map(tokenize_and_align_labels, batched=True, \
batch_size=20000, remove_columns=datasets["train"].column_names)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:35.858326Z","iopub.execute_input":"2021-12-23T23:00:35.858635Z","iopub.status.idle":"2021-12-23T23:00:36.592654Z","shell.execute_reply.started":"2021-12-23T23:00:35.85859Z","shell.execute_reply":"2021-12-23T23:00:36.591606Z"}}
tokenized_datasets
# %% [markdown]
# ## Model and Training
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:36.59433Z","iopub.execute_input":"2021-12-23T23:00:36.594634Z","iopub.status.idle":"2021-12-23T23:00:40.685632Z","shell.execute_reply.started":"2021-12-23T23:00:36.594593Z","shell.execute_reply":"2021-12-23T23:00:40.684693Z"}}
# we will use auto model for token classification
from transformers import AutoModelForTokenClassification, TrainingArguments, Trainer
model = AutoModelForTokenClassification.from_pretrained(model_checkpoint, num_labels=N_LABELS)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:40.690854Z","iopub.execute_input":"2021-12-23T23:00:40.693718Z","iopub.status.idle":"2021-12-23T23:00:41.535273Z","shell.execute_reply.started":"2021-12-23T23:00:40.693672Z","shell.execute_reply":"2021-12-23T23:00:41.534215Z"}}
model_name = model_checkpoint.split("/")[-1]
args = TrainingArguments(
f"{model_name}-finetuned-{task}",
evaluation_strategy="epoch",
logging_strategy="epoch",
save_strategy="epoch",
learning_rate=LR,
per_device_train_batch_size=BS,
per_device_eval_batch_size=BS,
num_train_epochs=N_EPOCHS,
weight_decay=WD,
report_to='wandb',
gradient_accumulation_steps=GRAD_ACC,
warmup_ratio=WARMUP
)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:41.53676Z","iopub.execute_input":"2021-12-23T23:00:41.537608Z","iopub.status.idle":"2021-12-23T23:00:42.282789Z","shell.execute_reply.started":"2021-12-23T23:00:41.537572Z","shell.execute_reply":"2021-12-23T23:00:42.281853Z"}}
from transformers import DataCollatorForTokenClassification
data_collator = DataCollatorForTokenClassification(tokenizer)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:42.284192Z","iopub.execute_input":"2021-12-23T23:00:42.284501Z","iopub.status.idle":"2021-12-23T23:00:43.656933Z","shell.execute_reply.started":"2021-12-23T23:00:42.284458Z","shell.execute_reply":"2021-12-23T23:00:43.655937Z"}}
# this is not the competition metric, but for now this will be better than nothing...
metric = load_metric("seqeval")
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:43.658571Z","iopub.execute_input":"2021-12-23T23:00:43.658881Z","iopub.status.idle":"2021-12-23T23:00:44.386693Z","shell.execute_reply.started":"2021-12-23T23:00:43.658824Z","shell.execute_reply":"2021-12-23T23:00:44.385607Z"}}
import numpy as np
def compute_metrics(p):
predictions, labels = p
predictions = np.argmax(predictions, axis=2)
# Remove ignored index (special tokens)
true_predictions = [
[i2l[p] for (p, l) in zip(prediction, label) if l != -100]
for prediction, label in zip(predictions, labels)
]
true_labels = [
[i2l[l] for (p, l) in zip(prediction, label) if l != -100]
for prediction, label in zip(predictions, labels)
]
results = metric.compute(predictions=true_predictions, references=true_labels)
return {
"precision": results["overall_precision"],
"recall": results["overall_recall"],
"f1": results["overall_f1"],
"accuracy": results["overall_accuracy"],
}
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:44.388421Z","iopub.execute_input":"2021-12-23T23:00:44.388744Z","iopub.status.idle":"2021-12-23T23:00:45.313179Z","shell.execute_reply.started":"2021-12-23T23:00:44.38869Z","shell.execute_reply":"2021-12-23T23:00:45.312215Z"}}
trainer = Trainer(
model,
args,
train_dataset=tokenized_datasets["train"],
eval_dataset=tokenized_datasets["test"],
data_collator=data_collator,
tokenizer=tokenizer,
compute_metrics=compute_metrics,
)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:00:45.314663Z","iopub.execute_input":"2021-12-23T23:00:45.318411Z","iopub.status.idle":"2021-12-23T23:03:13.651205Z","shell.execute_reply.started":"2021-12-23T23:00:45.318345Z","shell.execute_reply":"2021-12-23T23:03:13.650259Z"}}
trainer.train()
wandb.finish()
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:13.656546Z","iopub.execute_input":"2021-12-23T23:03:13.656788Z","iopub.status.idle":"2021-12-23T23:03:15.317965Z","shell.execute_reply.started":"2021-12-23T23:03:13.656757Z","shell.execute_reply":"2021-12-23T23:03:15.316868Z"}}
trainer.save_model(model_path)
# %% [markdown]
# ## Validation
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:15.31952Z","iopub.execute_input":"2021-12-23T23:03:15.319834Z","iopub.status.idle":"2021-12-23T23:03:15.332639Z","shell.execute_reply.started":"2021-12-23T23:03:15.319782Z","shell.execute_reply":"2021-12-23T23:03:15.331235Z"}}
def tokenize_for_validation(examples):
o = tokenizer(examples['text'], truncation=True, return_offsets_mapping=True, max_length=4096)
# The offset mappings will give us a map from token to character position in the original context. This will
# help us compute the start_positions and end_positions.
offset_mapping = o["offset_mapping"]
o["labels"] = []
for i in range(len(offset_mapping)):
labels = [l2i['O'] for i in range(len(o['input_ids'][i]))]
for label_start, label_end, label in \
list(zip(examples['starts'][i], examples['ends'][i], examples['classlist'][i])):
for j in range(len(labels)):
token_start = offset_mapping[i][j][0]
token_end = offset_mapping[i][j][1]
if token_start == label_start:
labels[j] = l2i[f'B-{label}']
if token_start > label_start and token_end <= label_end:
labels[j] = l2i[f'I-{label}']
for k, input_id in enumerate(o['input_ids'][i]):
if input_id in [0, 1, 2]:
labels[k] = -100
labels = fix_beginnings(labels)
o["labels"].append(labels)
return o
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:15.334494Z","iopub.execute_input":"2021-12-23T23:03:15.335669Z","iopub.status.idle":"2021-12-23T23:03:16.652272Z","shell.execute_reply.started":"2021-12-23T23:03:15.335596Z","shell.execute_reply":"2021-12-23T23:03:16.651209Z"}}
tokenized_val = datasets.map(tokenize_for_validation, batched=True)
tokenized_val
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:16.654017Z","iopub.execute_input":"2021-12-23T23:03:16.654625Z","iopub.status.idle":"2021-12-23T23:03:16.711036Z","shell.execute_reply.started":"2021-12-23T23:03:16.654567Z","shell.execute_reply":"2021-12-23T23:03:16.710012Z"}}
# ground truth for validation
l = []
for example in tokenized_val['test']:
for c, p in list(zip(example['classlist'], example['predictionstrings'])):
l.append({
'id': example['id'],
'discourse_type': c,
'predictionstring': p,
})
gt_df = pd.DataFrame(l)
gt_df
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:16.712458Z","iopub.execute_input":"2021-12-23T23:03:16.713221Z","iopub.status.idle":"2021-12-23T23:03:16.719502Z","shell.execute_reply.started":"2021-12-23T23:03:16.713168Z","shell.execute_reply":"2021-12-23T23:03:16.718212Z"}}
# visualization with displacy
import pandas as pd
import os
from pathlib import Path
import spacy
from spacy import displacy
from pylab import cm, matplotlib
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:16.721142Z","iopub.execute_input":"2021-12-23T23:03:16.721798Z","iopub.status.idle":"2021-12-23T23:03:16.733508Z","shell.execute_reply.started":"2021-12-23T23:03:16.721753Z","shell.execute_reply":"2021-12-23T23:03:16.732443Z"}}
path = Path(os.path.join(DATA_ROOT, 'train'))
colors = {
'Lead': '#8000ff',
'Position': '#2b7ff6',
'Evidence': '#2adddd',
'Claim': '#80ffb4',
'Concluding Statement': 'd4dd80',
'Counterclaim': '#ff8042',
'Rebuttal': '#ff0000',
'Other': '#007f00',
}
def visualize(df, text):
ents = []
example = df['id'].loc[0]
for i, row in df.iterrows():
ents.append({
'start': int(row['discourse_start']),
'end': int(row['discourse_end']),
'label': row['discourse_type']
})
doc2 = {
"text": text,
"ents": ents,
"title": example
}
options = {"ents": train.discourse_type.unique().tolist() + ['Other'], "colors": colors}
displacy.render(doc2, style="ent", options=options, manual=True, jupyter=True)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:16.735115Z","iopub.execute_input":"2021-12-23T23:03:16.736247Z","iopub.status.idle":"2021-12-23T23:03:17.621012Z","shell.execute_reply.started":"2021-12-23T23:03:16.736199Z","shell.execute_reply":"2021-12-23T23:03:17.619921Z"}}
predictions, labels, _ = trainer.predict(tokenized_val['test'])
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.622787Z","iopub.execute_input":"2021-12-23T23:03:17.623357Z","iopub.status.idle":"2021-12-23T23:03:17.632659Z","shell.execute_reply.started":"2021-12-23T23:03:17.623297Z","shell.execute_reply":"2021-12-23T23:03:17.631425Z"}}
preds = np.argmax(predictions, axis=-1)
preds.shape
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.634765Z","iopub.execute_input":"2021-12-23T23:03:17.63535Z","iopub.status.idle":"2021-12-23T23:03:17.655065Z","shell.execute_reply.started":"2021-12-23T23:03:17.635228Z","shell.execute_reply":"2021-12-23T23:03:17.653955Z"}}
# code that will convert our predictions into prediction strings, and visualize it at the same time
# this most likely requires some refactoring
def get_class(c):
if c == 14:
return 'Other'
else:
return i2l[c][2:]
def pred2span(pred, example, viz=False, test=False):
    """Convert per-token label predictions into span-level rows.

    Adjacent tokens with the same label (or a B-/I- pair, which differ by 7
    in the label ids) are merged into one span; character offsets are then
    translated into whitespace-token indices to build `predictionstring`s.

    Args:
        pred: per-token label ids for one example.
        example: tokenized example dict with 'id', 'input_ids' and
            'offset_mapping'.
        viz: when True, also render the resulting spans with `visualize`.
        test: when True, read the raw essay from the test set, else train.

    Returns:
        pd.DataFrame with one row per predicted span, short spans dropped.
    """
    example_id = example['id']
    n_tokens = len(example['input_ids'])
    classes = []
    all_span = []
    for i, c in enumerate(pred.tolist()):
        # stop before the final special token
        if i == n_tokens - 1:
            break
        if i == 0:
            cur_span = example['offset_mapping'][i]
            classes.append(get_class(c))
        elif i > 0 and (c == pred[i - 1] or (c - 7) == pred[i - 1]):
            # same label (or its I- continuation): extend the open span
            cur_span[1] = example['offset_mapping'][i][1]
        else:
            all_span.append(cur_span)
            cur_span = example['offset_mapping'][i]
            classes.append(get_class(c))
    all_span.append(cur_span)
    if test:
        text = get_test_text(example_id)
    else:
        text = get_raw_text(example_id)
    # translate character offsets into whitespace-token indices
    # map token ids to word (whitespace) token ids
    predstrings = []
    for span in all_span:
        span_start = span[0]
        span_end = span[1]
        before = text[:span_start]
        token_start = len(before.split())
        if len(before) == 0:
            token_start = 0
        elif before[-1] != ' ':
            # span starts mid-word: the partial word already counted once
            token_start -= 1
        num_tkns = len(text[span_start:span_end + 1].split())
        tkns = [str(x) for x in range(token_start, token_start + num_tkns)]
        predstring = ' '.join(tkns)
        predstrings.append(predstring)
    rows = []
    for c, span, predstring in zip(classes, all_span, predstrings):
        e = {
            'id': example_id,
            'discourse_type': c,
            'predictionstring': predstring,
            'discourse_start': span[0],
            'discourse_end': span[1],
            'discourse': text[span[0]:span[1] + 1]
        }
        rows.append(e)
    df = pd.DataFrame(rows)
    df['length'] = df['discourse'].apply(lambda t: len(t.split()))
    # short spans are likely to be false positives, we can choose a min number of tokens based on validation
    df = df[df.length > min_tokens].reset_index(drop=True)
    if viz: visualize(df, text)
    return df
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.658868Z","iopub.execute_input":"2021-12-23T23:03:17.659221Z","iopub.status.idle":"2021-12-23T23:03:17.712976Z","shell.execute_reply.started":"2021-12-23T23:03:17.659184Z","shell.execute_reply":"2021-12-23T23:03:17.711747Z"}}
# Sanity-check the span reconstruction on two validation examples.
pred2span(preds[0], tokenized_val['test'][0], viz=True)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.71609Z","iopub.execute_input":"2021-12-23T23:03:17.716626Z","iopub.status.idle":"2021-12-23T23:03:17.757272Z","shell.execute_reply.started":"2021-12-23T23:03:17.716588Z","shell.execute_reply":"2021-12-23T23:03:17.756227Z"}}
pred2span(preds[1], tokenized_val['test'][1], viz=True)
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.759337Z","iopub.execute_input":"2021-12-23T23:03:17.760071Z","iopub.status.idle":"2021-12-23T23:03:17.883329Z","shell.execute_reply.started":"2021-12-23T23:03:17.760003Z","shell.execute_reply":"2021-12-23T23:03:17.8822Z"}}
# Reconstruct spans for every validation essay and stack them into one frame.
dfs = []
for i in range(len(tokenized_val['test'])):
    dfs.append(pred2span(preds[i], tokenized_val['test'][i]))
pred_df = pd.concat(dfs, axis=0)
# the competition metric code expects the label column to be named 'class'
pred_df['class'] = pred_df['discourse_type']
pred_df
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.885121Z","iopub.execute_input":"2021-12-23T23:03:17.885735Z","iopub.status.idle":"2021-12-23T23:03:17.908285Z","shell.execute_reply.started":"2021-12-23T23:03:17.88567Z","shell.execute_reply":"2021-12-23T23:03:17.907198Z"}}
# source: https://www.kaggle.com/robikscube/student-writing-competition-twitch#Competition-Metric-Code
def calc_overlap(row):
    """Return the two overlap ratios between a prediction and a ground truth.

    The first value is the fraction of ground-truth tokens covered by the
    prediction, the second the fraction of predicted tokens that lie in the
    ground truth.  Both are used downstream to decide true positives.
    """
    pred_tokens = set(row.predictionstring_pred.split(" "))
    gt_tokens = set(row.predictionstring_gt.split(" "))
    common = len(gt_tokens & pred_tokens)
    return [common / len(gt_tokens), common / len(pred_tokens)]
def score_feedback_comp_micro(pred_df, gt_df):
    """
    Micro F1 for the Kaggle Student Writing Competition, following the steps
    on the evaluation page:
    https://www.kaggle.com/c/feedback-prize-2021/overview/evaluation

    Args:
        pred_df: predictions with columns ['id', 'class', 'predictionstring'].
        gt_df: ground truth with columns
            ['id', 'discourse_type', 'predictionstring'].

    Returns:
        float: micro-averaged F1 over span matches.
    """
    gt_df = (
        gt_df[["id", "discourse_type", "predictionstring"]]
        .reset_index(drop=True)
        .copy()
    )
    pred_df = pred_df[["id", "class", "predictionstring"]].reset_index(drop=True).copy()
    pred_df["pred_id"] = pred_df.index
    gt_df["gt_id"] = gt_df.index
    # Step 1. all ground truths and predictions for a given class are compared.
    joined = pred_df.merge(
        gt_df,
        left_on=["id", "class"],
        right_on=["id", "discourse_type"],
        how="outer",
        suffixes=("_pred", "_gt"),
    )
    joined["predictionstring_gt"] = joined["predictionstring_gt"].fillna(" ")
    joined["predictionstring_pred"] = joined["predictionstring_pred"].fillna(" ")
    joined["overlaps"] = joined.apply(calc_overlap, axis=1)
    # 2. If the overlap between the ground truth and prediction is >= 0.5,
    # and the overlap between the prediction and the ground truth >= 0.5,
    # the prediction is a match and considered a true positive.
    # If multiple matches exist, the match with the highest pair of overlaps is taken.
    # BUG FIX: the 'overlaps' column already holds [overlap_1, overlap_2]
    # lists; the old code round-tripped them through eval(str(x)), which is
    # both slow and unsafe.  Index the list directly instead.
    joined["overlap1"] = joined["overlaps"].apply(lambda x: x[0])
    joined["overlap2"] = joined["overlaps"].apply(lambda x: x[1])
    joined["potential_TP"] = (joined["overlap1"] >= 0.5) & (joined["overlap2"] >= 0.5)
    joined["max_overlap"] = joined[["overlap1", "overlap2"]].max(axis=1)
    tp_pred_ids = (
        joined.query("potential_TP")
        .sort_values("max_overlap", ascending=False)
        .groupby(["id", "predictionstring_gt"])
        .first()["pred_id"]
        .values
    )
    # 3. Any unmatched ground truths are false negatives
    # and any unmatched predictions are false positives.
    fp_pred_ids = [p for p in joined["pred_id"].unique() if p not in tp_pred_ids]
    matched_gt_ids = joined.query("potential_TP")["gt_id"].unique()
    unmatched_gt_ids = [c for c in joined["gt_id"].unique() if c not in matched_gt_ids]
    # Get numbers of each type
    TP = len(tp_pred_ids)
    FP = len(fp_pred_ids)
    FN = len(unmatched_gt_ids)
    # calc microf1
    my_f1_score = TP / (TP + 0.5 * (FP + FN))
    return my_f1_score
def score_feedback_comp(pred_df, gt_df, return_class_scores=False):
    """Competition score: per-class micro F1, averaged over discourse types.

    Args:
        pred_df: predictions with ['id', 'class', 'predictionstring'] columns.
        gt_df: ground truth with a 'discourse_type' column to group on.
        return_class_scores: when True, also return the per-class dict.

    Returns:
        float, or (float, dict) when ``return_class_scores`` is True.
    """
    class_scores = {}
    pred_df = pred_df[["id", "class", "predictionstring"]].reset_index(drop=True).copy()
    for discourse_type, gt_subset in gt_df.groupby("discourse_type"):
        # score each discourse type against only the predictions of that type
        pred_subset = (
            pred_df.loc[pred_df["class"] == discourse_type]
            .reset_index(drop=True)
            .copy()
        )
        class_scores[discourse_type] = score_feedback_comp_micro(pred_subset, gt_subset)
    f1 = np.mean([v for v in class_scores.values()])
    if return_class_scores:
        return f1, class_scores
    return f1
# %% [markdown]
# ## CV Score
# %% [code] {"execution":{"iopub.status.busy":"2021-12-23T23:03:17.910018Z","iopub.execute_input":"2021-12-23T23:03:17.910701Z","iopub.status.idle":"2021-12-23T23:03:18.110011Z","shell.execute_reply.started":"2021-12-23T23:03:17.910652Z","shell.execute_reply":"2021-12-23T23:03:18.108723Z"}}
# Overall validation F1 plus the per-discourse-type breakdown.
score_feedback_comp(pred_df, gt_df, return_class_scores=True)
# %% [markdown]
# ## End
# 
# I'll appreciate every upvote or comment!
| [
"wandb.login",
"datasets.Dataset.from_pandas",
"datasets.load_metric",
"transformers.TrainingArguments",
"pandas.DataFrame",
"pandas.merge",
"os.path.join",
"numpy.argmax",
"wandb.init",
"spacy.displacy.render",
"transformers.AutoModelForTokenClassification.from_pretrained",
"wandb.finish",
... | [((1613, 1637), 'wandb.login', 'wandb.login', ([], {'key': '"""<KEY>"""'}), "(key='<KEY>')\n", (1624, 1637), False, 'import wandb\n'), ((1638, 1695), 'wandb.init', 'wandb.init', ([], {'project': '"""feedback_prize"""', 'entity': '"""wilsonkong"""'}), "(project='feedback_prize', entity='wilsonkong')\n", (1648, 1695), False, 'import wandb\n'), ((3615, 3628), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (3626, 3628), False, 'from collections import defaultdict\n'), ((3796, 3809), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (3807, 3809), False, 'from collections import defaultdict\n'), ((5115, 5155), 'pandas.merge', 'pd.merge', (['df1', 'df2'], {'how': '"""inner"""', 'on': '"""id"""'}), "(df1, df2, how='inner', on='id')\n", (5123, 5155), True, 'import pandas as pd\n'), ((5161, 5200), 'pandas.merge', 'pd.merge', (['df', 'df3'], {'how': '"""inner"""', 'on': '"""id"""'}), "(df, df3, how='inner', on='id')\n", (5169, 5200), True, 'import pandas as pd\n'), ((5206, 5245), 'pandas.merge', 'pd.merge', (['df', 'df4'], {'how': '"""inner"""', 'on': '"""id"""'}), "(df, df4, how='inner', on='id')\n", (5214, 5245), True, 'import pandas as pd\n'), ((6035, 6058), 'datasets.Dataset.from_pandas', 'Dataset.from_pandas', (['df'], {}), '(df)\n', (6054, 6058), False, 'from datasets import Dataset, load_metric\n'), ((6482, 6552), 'transformers.AutoTokenizer.from_pretrained', 'AutoTokenizer.from_pretrained', (['model_checkpoint'], {'add_prefix_space': '(True)'}), '(model_checkpoint, add_prefix_space=True)\n', (6511, 6552), False, 'from transformers import AutoTokenizer\n'), ((10349, 10439), 'transformers.AutoModelForTokenClassification.from_pretrained', 'AutoModelForTokenClassification.from_pretrained', (['model_checkpoint'], {'num_labels': 'N_LABELS'}), '(model_checkpoint,\n num_labels=N_LABELS)\n', (10396, 10439), False, 'from transformers import AutoModelForTokenClassification, TrainingArguments, Trainer\n'), ((10781, 11131), 
'transformers.TrainingArguments', 'TrainingArguments', (['f"""{model_name}-finetuned-{task}"""'], {'evaluation_strategy': '"""epoch"""', 'logging_strategy': '"""epoch"""', 'save_strategy': '"""epoch"""', 'learning_rate': 'LR', 'per_device_train_batch_size': 'BS', 'per_device_eval_batch_size': 'BS', 'num_train_epochs': 'N_EPOCHS', 'weight_decay': 'WD', 'report_to': '"""wandb"""', 'gradient_accumulation_steps': 'GRAD_ACC', 'warmup_ratio': 'WARMUP'}), "(f'{model_name}-finetuned-{task}', evaluation_strategy=\n 'epoch', logging_strategy='epoch', save_strategy='epoch', learning_rate\n =LR, per_device_train_batch_size=BS, per_device_eval_batch_size=BS,\n num_train_epochs=N_EPOCHS, weight_decay=WD, report_to='wandb',\n gradient_accumulation_steps=GRAD_ACC, warmup_ratio=WARMUP)\n", (10798, 11131), False, 'from transformers import AutoModelForTokenClassification, TrainingArguments, Trainer\n'), ((11497, 11542), 'transformers.DataCollatorForTokenClassification', 'DataCollatorForTokenClassification', (['tokenizer'], {}), '(tokenizer)\n', (11531, 11542), False, 'from transformers import DataCollatorForTokenClassification\n'), ((11932, 11954), 'datasets.load_metric', 'load_metric', (['"""seqeval"""'], {}), "('seqeval')\n", (11943, 11954), False, 'from datasets import Dataset, load_metric\n'), ((13232, 13427), 'transformers.Trainer', 'Trainer', (['model', 'args'], {'train_dataset': "tokenized_datasets['train']", 'eval_dataset': "tokenized_datasets['test']", 'data_collator': 'data_collator', 'tokenizer': 'tokenizer', 'compute_metrics': 'compute_metrics'}), "(model, args, train_dataset=tokenized_datasets['train'],\n eval_dataset=tokenized_datasets['test'], data_collator=data_collator,\n tokenizer=tokenizer, compute_metrics=compute_metrics)\n", (13239, 13427), False, 'from transformers import AutoModelForTokenClassification, TrainingArguments, Trainer\n'), ((13739, 13753), 'wandb.finish', 'wandb.finish', ([], {}), '()\n', (13751, 13753), False, 'import wandb\n'), ((16373, 16388), 
'pandas.DataFrame', 'pd.DataFrame', (['l'], {}), '(l)\n', (16385, 16388), True, 'import pandas as pd\n'), ((18521, 18552), 'numpy.argmax', 'np.argmax', (['predictions'], {'axis': '(-1)'}), '(predictions, axis=-1)\n', (18530, 18552), True, 'import numpy as np\n'), ((21937, 21959), 'pandas.concat', 'pd.concat', (['dfs'], {'axis': '(0)'}), '(dfs, axis=0)\n', (21946, 21959), True, 'import pandas as pd\n'), ((2830, 2866), 'os.path.join', 'os.path.join', (['DATA_ROOT', '"""train.csv"""'], {}), "(DATA_ROOT, 'train.csv')\n", (2842, 2866), False, 'import os\n'), ((4294, 4326), 'os.path.join', 'os.path.join', (['DATA_ROOT', '"""train"""'], {}), "(DATA_ROOT, 'train')\n", (4306, 4326), False, 'import os\n'), ((12333, 12363), 'numpy.argmax', 'np.argmax', (['predictions'], {'axis': '(2)'}), '(predictions, axis=2)\n', (12342, 12363), True, 'import numpy as np\n'), ((17152, 17184), 'os.path.join', 'os.path.join', (['DATA_ROOT', '"""train"""'], {}), "(DATA_ROOT, 'train')\n", (17164, 17184), False, 'import os\n'), ((17783, 17861), 'spacy.displacy.render', 'displacy.render', (['doc2'], {'style': '"""ent"""', 'options': 'options', 'manual': '(True)', 'jupyter': '(True)'}), "(doc2, style='ent', options=options, manual=True, jupyter=True)\n", (17798, 17861), False, 'from spacy import displacy\n'), ((20538, 20556), 'pandas.DataFrame', 'pd.DataFrame', (['rows'], {}), '(rows)\n', (20550, 20556), True, 'import pandas as pd\n')] |
from flask import render_template, url_for
from appname.mailers import Mailer
class InviteEmail(Mailer):
    """Email telling someone they have been invited to join a team."""

    TEMPLATE = 'email/teams/invite.html'

    def __init__(self, invite):
        self.recipient = None
        self.invite = invite
        # Prefer the address the invite was sent to; fall back to the
        # invited user's account address when one exists.
        self.recipient_email = invite.invite_email or (invite.user and invite.user.email)

    @property
    def subject(self):
        """Subject line, personalised with the inviter's email address."""
        return ("{0} invited you to join their team on appname"
                .format(self.invite.inviter.email))

    def send(self):
        """Render the invite template and deliver the email right away."""
        invite_link = url_for('auth.invite_page', invite_id=self.invite.id,
                              secret=self.invite.invite_secret, _external=True)
        html_body = render_template(self.TEMPLATE, link=invite_link, invite=self.invite)
        return self.deliver_now(self.recipient_email, self.subject, html_body)
| [
"flask.render_template",
"flask.url_for"
] | [((520, 628), 'flask.url_for', 'url_for', (['"""auth.invite_page"""'], {'invite_id': 'self.invite.id', 'secret': 'self.invite.invite_secret', '_external': '(True)'}), "('auth.invite_page', invite_id=self.invite.id, secret=self.invite.\n invite_secret, _external=True)\n", (527, 628), False, 'from flask import render_template, url_for\n'), ((667, 728), 'flask.render_template', 'render_template', (['self.TEMPLATE'], {'link': 'link', 'invite': 'self.invite'}), '(self.TEMPLATE, link=link, invite=self.invite)\n', (682, 728), False, 'from flask import render_template, url_for\n')] |
"""Python module name validation.

Compatible with python 2 and python 3, to ensure cookiecutter support
across various platforms
"""
import logging
import sys
import re

logging.basicConfig()
logger = logging.getLogger(__name__)

# PEP 8 naming guidance for packages and modules.
REF_URL = "https://www.python.org/dev/peps/pep-0008/#package-and-module-names"


def validate_python_package_name():
    """Check that the module name given by the user is an acceptable format.

    Logs an error and exits the process with status 1 when the rendered
    package name is not a valid, PEP 8 style module name.
    """
    package_name = "{{ cookiecutter.package_name }}"
    # The following regex will only match valid Python module names.
    # BUG FIX: the old pattern `^[a-z][_a-z0-9]+$` required at least two
    # characters, wrongly rejecting valid one-letter module names like "x";
    # `*` accepts a strict superset of what `+` accepted.
    if not re.match(r"^[a-z][_a-z0-9]*$", package_name):
        # Error out if you have an invalid module name
        err = "{} is not a valid Python module name!\n See {} for naming standards."
        logger.error(err.format(package_name, REF_URL))
        sys.exit(1)


if __name__ == "__main__":
    validate_python_package_name()
| [
"logging.basicConfig",
"re.match",
"logging.getLogger",
"sys.exit"
] | [((171, 192), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (190, 192), False, 'import logging\n'), ((202, 229), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (219, 229), False, 'import logging\n'), ((560, 603), 're.match', 're.match', (['"""^[a-z][_a-z0-9]+$"""', 'package_name'], {}), "('^[a-z][_a-z0-9]+$', package_name)\n", (568, 603), False, 'import re\n'), ((810, 821), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (818, 821), False, 'import sys\n')] |
"""This module contains all the actual logic of the project.
The main method is run when the microcontroller starts and afer each sleep cycle.
"""
import logging
import machine
import network
import ntptime
import os
import sdcard
import ujson
import urequests
import utime
from Adafruit_Thermal import Adafruit_Thermal
RELAY_PIN = 5  # GPIO number driving the printer's power relay
SECS_TO_SLEEP = 4 * 60  # deep-sleep cycle length in seconds
UNIX_TIMESTAMP_2000 = 946684800  # offset between the unix epoch and MicroPython's year-2000 epoch
HOST = "http://s3.eu-central-1.amazonaws.com"
ALARM_PATH = "/xxx"  # S3 key of the alarm configuration (redacted)
MENU_PATH = "/xxx"  # S3 key of today's canteen menu (redacted)
# The logger writes logs to the serial interface.
logger = logging.getLogger("Logger")
printer = Adafruit_Thermal(baudrate=19200)
relay_pin = machine.Pin(RELAY_PIN, machine.Pin.OUT)
def mount_sd():
    """Mount the SD card at /sd, printing an error slip on the first failure."""
    logger.info("mounting sd...")
    try:
        sd = sdcard.SDCard(machine.SPI(1), machine.Pin(15))
        os.mount(sd, "/sd")
    except Exception as e:
        logger.exc(e, "sd could not be mounted.")
        failed_mounts_count = increment_counter("failed_mounts")
        # only print on the first consecutive failure to avoid wasting paper
        if failed_mounts_count == 1:
            print_error_msg("SD-Karte konnte nicht gelesen werden! Sag besser mal Fabian bescheid!")
    else:
        reset_counter("failed_mounts")
def connect_wifi():
    """Join the WLAN configured on the SD card, retrying for ~10 seconds.

    After 50 consecutive failed wake cycles an error slip is printed.
    """
    logger.info("reading wifi config...")
    with open("/sd/wifi.json") as json_file:
        wifi_conf = ujson.load(json_file)
    logger.info("connecting to wifi...")
    sta_if = network.WLAN(network.STA_IF)
    sta_if.active(True)
    sta_if.connect(wifi_conf["ssid"], wifi_conf["password"])
    for _ in range(10):
        if sta_if.isconnected():
            logger.info("wifi is connected.")
            reset_counter("failed_connections")
            return
        utime.sleep(1)
    logger.warning("Could not connect to wifi.")
    failed_connection_count = increment_counter("failed_connections")
    if failed_connection_count == 50:
        print_error_msg("WLAN-Verbindung fehlgeschlagen!")
def increment_counter(counter_name):
    """Read the persisted counter, add one, write it back and return it.

    A failed read counts as zero; a failed write is logged and ignored, so
    the function always returns the incremented value.
    """
    logger.info("incrementing counter '{}'...".format(counter_name))
    path = "/{}".format(counter_name)
    try:
        with open(path, "r") as counter_file:
            count = int(counter_file.read())
    except Exception as e:
        logger.exc(e, "Could not read counter.")
        count = 0
    logger.info("old value: {}.".format(count))
    count = count + 1
    logger.info("new value: {}.".format(count))
    try:
        with open(path, "w") as counter_file:
            counter_file.write(str(count))
    except Exception as e:
        logger.exc(e, "Could not write counter.")
    return count
def reset_counter(counter_name):
    """Write a zero into the persisted counter file, logging any failure."""
    logger.info("resetting counter '{}'...".format(counter_name))
    path = "/{}".format(counter_name)
    try:
        with open(path, "w") as counter_file:
            counter_file.write("0")
    except Exception as e:
        logger.exc(e, "Could not reset counter.")
def print_error_msg(msg):
    """Power up the thermal printer, print *msg* with padding, power it off."""
    turn_printer_on()
    printer.feed(2)
    printer.println(msg)
    printer.feed(2)
    utime.sleep(2)  # give the printer time to finish before cutting power
    turn_printer_off()
def get_alarm_time():
    """Return the alarm time as a unix timestamp, or 0 when the alarm is off.

    Fetches the alarm config from the server and caches it on flash; falls
    back to the cached value (and finally to 0) when network or filesystem
    access fails.
    """
    try:
        logger.info("requesting alarm config from server...")
        re = urequests.get(HOST + ALARM_PATH)
        alarm_data = re.json()
        alarm_time = alarm_data["time"] if alarm_data["active"] else 0
        logger.info("caching alarm time...")
        try:
            with open("/alarm_time", "w") as alarm_file:
                alarm_file.write(str(alarm_time))
        except Exception:
            logger.warning("Could not cache alarm time.")
        return alarm_time
    except Exception:
        logger.info("using cached alarm time instead...")
        try:
            with open("/alarm_time") as alarm_file:
                alarm_time = alarm_file.read()
            return int(alarm_time)
        except Exception:
            logger.warning("Could not read cached alarm time.")
            return 0
def should_print():
    """Return True when the alarm falls inside the current wake window.

    When triggered, busy-waits until (almost) the exact alarm second before
    returning, so printing starts on time.
    """
    alarm_time = get_alarm_time()
    def time_delta():
        # For Micropython timestamp is number of seconds since Jan 1, 2000.
        local_time = utime.time() + UNIX_TIMESTAMP_2000
        logger.info("local_time={}, alarm_time={}".format(local_time, alarm_time))
        return alarm_time - local_time
    logger.info("comparing local time with alarm time...")
    # allow up to three sleep cycles of slack so a wake-up can't miss the alarm
    max_delta = 3 * SECS_TO_SLEEP
    if 0 <= time_delta() < max_delta:
        logger.info("Alarm is triggered.")
        while time_delta() > 1:
            logger.info("waiting for exact alarm time...")
            utime.sleep(1)
        return True
    else:
        logger.info("Alarm is not triggered.")
        return False
def turn_printer_on():
    """Switch the printer's power relay on and wait for the printer to boot."""
    logger.info("turning printer on...")
    relay_pin.on()
    utime.sleep(3)  # the printer needs a moment before it accepts commands
def turn_printer_off():
    """Cut power to the printer via the relay."""
    logger.info("turning printer off...")
    relay_pin.off()
def print_comic_strip():
    """Print the next comic strip from the SD card, cycling through all files.

    The id of the last printed strip is persisted in current_comic_id.json so
    each wake cycle continues where the previous one left off.
    """
    logger.info("printing comic strip...")
    with open("/sd/current_comic_id.json", "r+") as json_file:
        current_comic_id = ujson.load(json_file)
        logger.info("current_comic_id={}".format(current_comic_id))
        comic_strip_count = sum(1 for _ in os.ilistdir("/sd/comic_strips"))
        logger.info("comic_strip_count={}".format(comic_strip_count))
        # wrap around to the first strip after the last one was printed
        if current_comic_id >= comic_strip_count:
            current_comic_id = 0
        else:
            current_comic_id += 1
        json_file.seek(0)
        json_file.write(ujson.dumps(current_comic_id))
    printer.feed()
    printer.printBMPImage("/sd/comic_strips/{}.bmp".format(current_comic_id), True)
    printer.feed(4)
def print_mensa_menu():
    """Fetch today's canteen menu from the server and print it.

    In the payload, '#' switches underline on and a newline switches it off;
    every byte is forwarded to the printer verbatim otherwise.  Failures are
    logged and swallowed so the rest of the wake cycle continues.
    """
    logger.info("printing mensa menu...")
    try:
        re = urequests.get(HOST + MENU_PATH)
        if re.content == "":
            return
        printer.justify("C")
        printer.setSize("L")
        printer.println("Heute")
        printer.println("in der Mensa")
        printer.setSize("S")
        printer.justify("L")
        for char in re.content:
            if char == ord("\n"):
                printer.underlineOff()
            if char == ord("#"):
                printer.underlineOn()
                char = ord("\n")
            printer.writeBytes(char)
        for _ in range(4):
            printer.feed()
    except Exception as e:
        logger.exc(e, "Could not print mensa menu.")
def deep_sleep():
    """Enter deep sleep; the RTC alarm wakes the board after SECS_TO_SLEEP."""
    logger.info("going into deep sleep...")
    rtc = machine.RTC()
    rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
    rtc.alarm(rtc.ALARM0, SECS_TO_SLEEP * 1000)
    machine.deepsleep()
def main():
    """One wake cycle: mount SD, join wifi, sync time, maybe print, then sleep."""
    try:
        mount_sd()
        connect_wifi()
        try:
            ntptime.settime()
        except Exception:
            # NTP sync is best effort; the RTC keeps ticking without it
            pass
        if should_print():
            turn_printer_on()
            print_comic_strip()
            print_mensa_menu()
            # sleeping a full cycle here presumably prevents printing twice
            # for the same alarm window -- TODO confirm
            utime.sleep(SECS_TO_SLEEP)
            turn_printer_off()
    except Exception as e:
        logger.exc(e, "Error in main function.")
    deep_sleep()
| [
"logging.getLogger",
"utime.sleep",
"machine.RTC",
"os.mount",
"machine.SPI",
"ujson.dumps",
"machine.deepsleep",
"machine.Pin",
"os.ilistdir",
"network.WLAN",
"Adafruit_Thermal.Adafruit_Thermal",
"ntptime.settime",
"utime.time",
"urequests.get",
"ujson.load"
] | [((539, 566), 'logging.getLogger', 'logging.getLogger', (['"""Logger"""'], {}), "('Logger')\n", (556, 566), False, 'import logging\n'), ((577, 609), 'Adafruit_Thermal.Adafruit_Thermal', 'Adafruit_Thermal', ([], {'baudrate': '(19200)'}), '(baudrate=19200)\n', (593, 609), False, 'from Adafruit_Thermal import Adafruit_Thermal\n'), ((622, 661), 'machine.Pin', 'machine.Pin', (['RELAY_PIN', 'machine.Pin.OUT'], {}), '(RELAY_PIN, machine.Pin.OUT)\n', (633, 661), False, 'import machine\n'), ((1345, 1373), 'network.WLAN', 'network.WLAN', (['network.STA_IF'], {}), '(network.STA_IF)\n', (1357, 1373), False, 'import network\n'), ((2922, 2936), 'utime.sleep', 'utime.sleep', (['(2)'], {}), '(2)\n', (2933, 2936), False, 'import utime\n'), ((4635, 4649), 'utime.sleep', 'utime.sleep', (['(3)'], {}), '(3)\n', (4646, 4649), False, 'import utime\n'), ((6293, 6306), 'machine.RTC', 'machine.RTC', ([], {}), '()\n', (6304, 6306), False, 'import machine\n'), ((6415, 6434), 'machine.deepsleep', 'machine.deepsleep', ([], {}), '()\n', (6432, 6434), False, 'import machine\n'), ((791, 810), 'os.mount', 'os.mount', (['sd', '"""/sd"""'], {}), "(sd, '/sd')\n", (799, 810), False, 'import os\n'), ((1268, 1289), 'ujson.load', 'ujson.load', (['json_file'], {}), '(json_file)\n', (1278, 1289), False, 'import ujson\n'), ((1647, 1661), 'utime.sleep', 'utime.sleep', (['(1)'], {}), '(1)\n', (1658, 1661), False, 'import utime\n'), ((3067, 3099), 'urequests.get', 'urequests.get', (['(HOST + ALARM_PATH)'], {}), '(HOST + ALARM_PATH)\n', (3080, 3099), False, 'import urequests\n'), ((4896, 4917), 'ujson.load', 'ujson.load', (['json_file'], {}), '(json_file)\n', (4906, 4917), False, 'import ujson\n'), ((5558, 5589), 'urequests.get', 'urequests.get', (['(HOST + MENU_PATH)'], {}), '(HOST + MENU_PATH)\n', (5571, 5589), False, 'import urequests\n'), ((750, 764), 'machine.SPI', 'machine.SPI', (['(1)'], {}), '(1)\n', (761, 764), False, 'import machine\n'), ((766, 781), 'machine.Pin', 'machine.Pin', (['(15)'], {}), 
'(15)\n', (777, 781), False, 'import machine\n'), ((3996, 4008), 'utime.time', 'utime.time', ([], {}), '()\n', (4006, 4008), False, 'import utime\n'), ((4433, 4447), 'utime.sleep', 'utime.sleep', (['(1)'], {}), '(1)\n', (4444, 4447), False, 'import utime\n'), ((5314, 5343), 'ujson.dumps', 'ujson.dumps', (['current_comic_id'], {}), '(current_comic_id)\n', (5325, 5343), False, 'import ujson\n'), ((6525, 6542), 'ntptime.settime', 'ntptime.settime', ([], {}), '()\n', (6540, 6542), False, 'import ntptime\n'), ((6723, 6749), 'utime.sleep', 'utime.sleep', (['SECS_TO_SLEEP'], {}), '(SECS_TO_SLEEP)\n', (6734, 6749), False, 'import utime\n'), ((5029, 5060), 'os.ilistdir', 'os.ilistdir', (['"""/sd/comic_strips"""'], {}), "('/sd/comic_strips')\n", (5040, 5060), False, 'import os\n')] |
"""
Contains the views of the 'evaluate' blueprint.
"""
# pylint: disable=invalid-name
from flask import Blueprint, render_template, request
from project.evaluate.forms import EvaluateForm
from project.evaluate.helpers import evaluate_pass
# Blueprint for the password-evaluation pages, mounted under /evaluate.
evaluate_blueprint = Blueprint('evaluate', __name__, url_prefix='/evaluate')
@evaluate_blueprint.route('/', methods=['GET', 'POST'])
def index():
    """Render the evaluation page; on a valid POST, score the password.

    Returns:
        The rendered 'evaluate/index.html' template.  ``power`` is the
        strength score of the submitted password, or None when nothing
        (valid) was submitted.
    """
    power = None
    form = EvaluateForm(request.form)
    # `form.password.data` is falsy for both None and '' -- this collapses
    # the old nested None/empty check, and a single render call replaces the
    # previously duplicated template rendering.
    if form.validate_on_submit() and form.password.data:
        power = evaluate_pass(form.password.data)
    return render_template('evaluate/index.html', form=form, power=power,
                           breadcrumb=(('Home', 'main.index'), 'Evaluate'))
| [
"flask.render_template",
"project.evaluate.forms.EvaluateForm",
"flask.Blueprint",
"project.evaluate.helpers.evaluate_pass"
] | [((267, 322), 'flask.Blueprint', 'Blueprint', (['"""evaluate"""', '__name__'], {'url_prefix': '"""/evaluate"""'}), "('evaluate', __name__, url_prefix='/evaluate')\n", (276, 322), False, 'from flask import Blueprint, render_template, request\n'), ((462, 488), 'project.evaluate.forms.EvaluateForm', 'EvaluateForm', (['request.form'], {}), '(request.form)\n', (474, 488), False, 'from project.evaluate.forms import EvaluateForm\n'), ((820, 922), 'flask.render_template', 'render_template', (['"""evaluate/index.html"""'], {'form': 'form', 'breadcrumb': "(('Home', 'main.index'), 'Evaluate')"}), "('evaluate/index.html', form=form, breadcrumb=(('Home',\n 'main.index'), 'Evaluate'))\n", (835, 922), False, 'from flask import Blueprint, render_template, request\n'), ((665, 781), 'flask.render_template', 'render_template', (['"""evaluate/index.html"""'], {'form': 'form', 'power': 'power', 'breadcrumb': "(('Home', 'main.index'), 'Evaluate')"}), "('evaluate/index.html', form=form, power=power, breadcrumb=(\n ('Home', 'main.index'), 'Evaluate'))\n", (680, 781), False, 'from flask import Blueprint, render_template, request\n'), ((615, 648), 'project.evaluate.helpers.evaluate_pass', 'evaluate_pass', (['form.password.data'], {}), '(form.password.data)\n', (628, 648), False, 'from project.evaluate.helpers import evaluate_pass\n')] |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
@author: lx0hacker
@date:2018-02-07
'''
import requests
from urllib.parse import unquote,urlparse
import re
import os
import os.path
from bs4 import BeautifulSoup
requests.packages.urllib3.disable_warnings()
import time
import random
'''
@url : 漫画的入口
@return 创建的文件夹的名字
'''
def get_folder(url):
    """Return the URL-decoded chapter folder name embedded in a comic URL.

    The URL path is expected to look like /comedy/<title>/<chapter>/viewer;
    the second segment (the chapter) is used as the folder name.
    """
    parsed = urlparse(url)
    match = re.match(r'\/comedy\/(.*?)\/(.*?)\/viewer', parsed.path)
    return unquote(match.group(2))
'''
@folder 要创建的文件夹的名字
'''
def is_exists(folder):
    """Return True if *folder* already exists on disk, else False.

    os.path.exists already returns the needed boolean, so the old
    if/else returning literal True/False was redundant.
    """
    return os.path.exists(folder)
'''
@url 漫画的入口
@return 每个图片的实际地址
'''
def get_img(url):
    """Return the list of image data-urls found on a chapter viewer page.

    Args:
        url: entry URL of the comic chapter.
    """
    data_url = []
    r = requests.get(url,verify=False)
    soup = BeautifulSoup(r.text,"html.parser")
    # the viewer page lists all page images inside the '_imageList' element
    img_list = soup.find(id='_imageList').find_all('img')
    for img in img_list:
        data_url.append(img.get('data-url'))
    return data_url
'''
@referer 漫画的入口
@folder 存储的文件夹
'''
def save_img(referer,folder):
    """Download every image of a chapter into *folder* as 0.jpg, 1.jpg, ...

    Args:
        referer: the chapter entry URL; also sent as the Referer header,
            which the image host requires.
        folder: existing local directory the jpg files are written to.
    """
    headers = {
        "Referer":referer
    }
    data_url = get_img(referer)
    i=0
    for img_url in data_url:
        # short random delay between requests to avoid hammering the server
        time.sleep(random.uniform(0.1,0.8))
        r =requests.get(img_url,headers=headers,verify=False)
        if r.status_code != 200:
            # transient failure: wait five seconds and retry once
            print('估计网络出现问题。status code : {}'.format(r.status_code))
            print('先暂停5秒')
            time.sleep(5)
            r = requests.get(img_url,headers=headers,verify=False)
            if r.status_code !=200:
                print("error! status code :{}".format(r.status_code))
                return
        print('正在爬取漫画里面每个图片的地址.........')
        with open(folder+'/'+str(i)+'.jpg','wb')as f:
            f.write(r.content)
        i+=1
'''
@url 漫画的入口
'''
def start(url,folder):
    """Download one chapter into *folder*, skipping folders that already exist.

    Args:
        url: chapter entry URL.
        folder: local directory name for this chapter.

    BUG FIX: the old code tested ``is_exists(url)`` -- i.e. whether the *URL*
    exists as a local path, which is never true -- so the "already exists"
    branch was unreachable and re-runs crashed in os.mkdir.  Test the target
    folder instead.
    """
    if not os.path.exists(folder):
        print('往'+folder+'写入图片..........')
        os.mkdir(folder)
        save_img(url,folder)
    else:
        print(folder+'已经存在了!!!!!!!!!!!')
#
if __name__ == "__main__":
    # Interactive entry point: asks for the first chapter URL and a target
    # directory, then walks episode_no upwards until the server returns 404.
    print('如果你存放的目录不一致,将重新下载!!!')
    url = input('输入第一话漫画的地址: ')
    parent_folder = input('请输入存放漫画的名字: ')
    if not os.path.exists(parent_folder):
        os.mkdir(parent_folder)
    os.chdir(parent_folder)
    # resume from the number of chapter folders already downloaded
    num = len([x for x in os.listdir('.') if os.path.isdir(x)])
    while True:
        # bump the episode number in the query string and follow the server's
        # redirect (r.url) to the canonical chapter URL
        num+=1
        url = re.sub('episode_no=.*','episode_no='+str(num),url)
        r= requests.get(url)
        url =r.url
        sub_folder = get_folder(url)
        if r.status_code == 200:
            start(url,sub_folder)
        elif r.status_code ==404:
            print('网页404, 没有更多了......')
            break
        else:
            # BUG FIX: `status_code` was an undefined name (NameError on this
            # path); use the response's status code instead.
            print('网络出错了: %s'%r.status_code)
            break
| [
"os.path.exists",
"random.uniform",
"os.listdir",
"requests.packages.urllib3.disable_warnings",
"urllib.parse.urlparse",
"re.match",
"requests.get",
"time.sleep",
"bs4.BeautifulSoup",
"os.chdir",
"os.path.isdir",
"os.mkdir"
] | [((212, 256), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (254, 256), False, 'import requests\n'), ((352, 365), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (360, 365), False, 'from urllib.parse import unquote, urlparse\n'), ((374, 428), 're.match', 're.match', (['"""\\\\/comedy\\\\/(.*?)\\\\/(.*?)\\\\/viewer"""', 'o.path'], {}), "('\\\\/comedy\\\\/(.*?)\\\\/(.*?)\\\\/viewer', o.path)\n", (382, 428), False, 'import re\n'), ((702, 733), 'requests.get', 'requests.get', (['url'], {'verify': '(False)'}), '(url, verify=False)\n', (714, 733), False, 'import requests\n'), ((744, 780), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (757, 780), False, 'from bs4 import BeautifulSoup\n'), ((2190, 2213), 'os.chdir', 'os.chdir', (['parent_folder'], {}), '(parent_folder)\n', (2198, 2213), False, 'import os\n'), ((545, 567), 'os.path.exists', 'os.path.exists', (['folder'], {}), '(folder)\n', (559, 567), False, 'import os\n'), ((1169, 1221), 'requests.get', 'requests.get', (['img_url'], {'headers': 'headers', 'verify': '(False)'}), '(img_url, headers=headers, verify=False)\n', (1181, 1221), False, 'import requests\n'), ((1854, 1870), 'os.mkdir', 'os.mkdir', (['folder'], {}), '(folder)\n', (1862, 1870), False, 'import os\n'), ((2114, 2143), 'os.path.exists', 'os.path.exists', (['parent_folder'], {}), '(parent_folder)\n', (2128, 2143), False, 'import os\n'), ((2153, 2176), 'os.mkdir', 'os.mkdir', (['parent_folder'], {}), '(parent_folder)\n', (2161, 2176), False, 'import os\n'), ((2425, 2442), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2437, 2442), False, 'import requests\n'), ((1133, 1157), 'random.uniform', 'random.uniform', (['(0.1)', '(0.8)'], {}), '(0.1, 0.8)\n', (1147, 1157), False, 'import random\n'), ((1361, 1374), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1371, 1374), False, 'import time\n'), ((1391, 
1443), 'requests.get', 'requests.get', (['img_url'], {'headers': 'headers', 'verify': '(False)'}), '(img_url, headers=headers, verify=False)\n', (1403, 1443), False, 'import requests\n'), ((2240, 2255), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (2250, 2255), False, 'import os\n'), ((2259, 2275), 'os.path.isdir', 'os.path.isdir', (['x'], {}), '(x)\n', (2272, 2275), False, 'import os\n')] |
from aioredis import Redis, from_url
from core.config import settings
async def init_redis_pool() -> Redis:
    """Create and return the shared Redis client from application settings.

    NOTE(review): the sentinel branch is an empty placeholder; when
    settings.USE_REDIS_SENTINEL is true, `redis` is never assigned and the
    final `return redis` raises UnboundLocalError -- confirm this is a known
    TODO before enabling sentinel mode.
    """
    if settings.USE_REDIS_SENTINEL:
        pass
    else:
        redis = await from_url(
            settings.REDIS_URL,
            password=settings.REDIS_PASSWORD,
            encoding="utf-8",
            db=settings.REDIS_DB,
        )
    return redis
"aioredis.from_url"
] | [((192, 299), 'aioredis.from_url', 'from_url', (['settings.REDIS_URL'], {'password': 'settings.REDIS_PASSWORD', 'encoding': '"""utf-8"""', 'db': 'settings.REDIS_DB'}), "(settings.REDIS_URL, password=settings.REDIS_PASSWORD, encoding=\n 'utf-8', db=settings.REDIS_DB)\n", (200, 299), False, 'from aioredis import Redis, from_url\n')] |
# -*- coding: utf-8 -*-
"""
Get messages save them locally and delete them from the queue.
It's very fast we get thousands messages per minute.
If the queue is empty we sleep for 20 seconds.
We store the messages in yamlfiles, it will create some markers like !!omap.
Besides that we use it for readability.
"""
import boto3
from ruamel.yaml import YAML
import xmltodict
import threading
import pandas as pd
import time
from . import schemas
from mlrepricer import setup, parser
from .example_destination import SQLite # AzureSQL or your own
# YAML emitter: 'unsafe' mode permits arbitrary Python objects (e.g. !!omap),
# block style is forced for readability of the dumped messages.
yaml = YAML(typ='unsafe')
yaml.default_flow_style = False
# Just replace SQLite with your database class
tableobject = schemas.pricemonitor(SQLite)()
# Destination folder for the per-message YAML dumps, from the user config.
datafolder = f"{setup.configs['datafolder']}sub/"
region_name = setup.configs['region_name']
queuename = setup.configs['queuename']
# Both a high-level resource (queue lookup) and a low-level client (receive /
# batch delete) are needed, hence the two boto3 handles.
sqsres = boto3.resource('sqs', region_name=region_name)
sqs = boto3.client('sqs', region_name=region_name)
queue = sqsres.get_queue_by_name(QueueName=queuename)
class Listener(threading.Thread):
    """Daemon thread that drains the AWS queue and writes into SQLite."""

    def run(self):
        """Thread entry point; runs once and delegates to ``main``."""
        print(f'Starting {self.name}')
        main()
def receive_message():
    """Fetch at most one message from the configured SQS queue."""
    request_kwargs = {
        'QueueUrl': queue.url,
        'AttributeNames': ['SentTimestamp'],
        'MaxNumberOfMessages': 1,
        'MessageAttributeNames': ['All'],
        # A long visibility timeout is fine while a single thread consumes.
        'VisibilityTimeout': 600,
        'WaitTimeSeconds': 0,
    }
    return sqs.receive_message(**request_kwargs)
def dump_message_toyaml(message):
    """Persist one message as a YAML file (fine for a few thousand files)."""
    parsed_body = xmltodict.parse(message['Body'])
    target = f"{datafolder}{message['MessageId']}.yaml"
    with open(target, 'w') as f:
        yaml.dump(parsed_body, f)
def parse(message):
    """Parse one raw SQS message body into a pandas DataFrame."""
    parsed = xmltodict.parse(message['Body'])
    # Delegate flattening of the parsed XML to mlrepricer's parser module.
    return pd.DataFrame(parser.main(parsed))
def delete_message(d):
    """Delete processed messages from the queue, ten per batch."""
    batch_size = 10  # hard SQS limit for delete_message_batch
    for start in range(0, len(d), batch_size):
        entries = [
            {'Id': str(idx), 'ReceiptHandle': handle}
            for idx, handle in enumerate(d[start:start + batch_size])
        ]
        sqs.delete_message_batch(QueueUrl=queue.url, Entries=entries)
        # Failures are not retried; SQS gives no hard guarantee here anyway.
def main():
    """Poll the queue forever, flush each batch to the database, sleep 20s.

    NOTE(review): ``tableobject.createtable`` is accessed, not called —
    presumably a property on the destination class; confirm it really
    creates the table.
    """
    tableobject.createtable  # inherited from destination
    while True:
        messagedf = pd.DataFrame()
        deletelist = []
        # Re-fetch the queue handle so the message-count attribute is fresh.
        queue = sqsres.get_queue_by_name(QueueName=queuename)
        numbermessages = int(queue.attributes['ApproximateNumberOfMessages'])
        print(numbermessages)
        for _ in range(numbermessages):
            # receive_message returns a dict; 'Messages' is absent when empty.
            message = receive_message().get('Messages', None)
            if message is not None:
                message = message[0]
                messagedf = messagedf.append(parse(message))
                deletelist.append(message['ReceiptHandle'])
        messagedf.to_sql(tableobject.table, tableobject.conn,
                         dtype=tableobject.dtypes,
                         if_exists='append', index=False)
        delete_message(deletelist)
        time.sleep(20)
| [
"boto3.client",
"xmltodict.parse",
"mlrepricer.parser.main",
"ruamel.yaml.YAML",
"time.sleep",
"boto3.resource",
"pandas.DataFrame"
] | [((555, 573), 'ruamel.yaml.YAML', 'YAML', ([], {'typ': '"""unsafe"""'}), "(typ='unsafe')\n", (559, 573), False, 'from ruamel.yaml import YAML\n'), ((843, 889), 'boto3.resource', 'boto3.resource', (['"""sqs"""'], {'region_name': 'region_name'}), "('sqs', region_name=region_name)\n", (857, 889), False, 'import boto3\n'), ((896, 940), 'boto3.client', 'boto3.client', (['"""sqs"""'], {'region_name': 'region_name'}), "('sqs', region_name=region_name)\n", (908, 940), False, 'import boto3\n'), ((1721, 1753), 'xmltodict.parse', 'xmltodict.parse', (["message['Body']"], {}), "(message['Body'])\n", (1736, 1753), False, 'import xmltodict\n'), ((1925, 1957), 'xmltodict.parse', 'xmltodict.parse', (["message['Body']"], {}), "(message['Body'])\n", (1940, 1957), False, 'import xmltodict\n'), ((2024, 2038), 'mlrepricer.parser.main', 'parser.main', (['r'], {}), '(r)\n', (2035, 2038), False, 'from mlrepricer import setup, parser\n'), ((2693, 2707), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2705, 2707), True, 'import pandas as pd\n'), ((3456, 3470), 'time.sleep', 'time.sleep', (['(20)'], {}), '(20)\n', (3466, 3470), False, 'import time\n')] |
# Copyright 2019 Alibaba Cloud Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from alibabacloud.client import AlibabaCloudClient
from alibabacloud.request import APIRequest
from alibabacloud.utils.parameter_validation import verify_params
class SmartagClient(AlibabaCloudClient):
    def __init__(self, client_config, credentials_provider=None, retry_policy=None,
                 endpoint_resolver=None):
        """Configure the base client for the Smartag product (API 2018-03-13)."""
        AlibabaCloudClient.__init__(self, client_config,
                                    credentials_provider=credentials_provider,
                                    retry_policy=retry_policy,
                                    endpoint_resolver=endpoint_resolver)
        self.product_code = 'Smartag'
        self.api_version = '2018-03-13'
        self.location_service_code = 'smartag'
        self.location_endpoint_type = 'openAPI'
def describe_bindable_smart_access_gateways(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
cross_account=None,
owner_account=None,
ccn_id=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeBindableSmartAccessGateways',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"CrossAccount": cross_account,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def describe_unbind_flow_log_sags(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('DescribeUnbindFlowLogSags', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def modify_sag_remote_access(
self,
resource_owner_id=None,
serial_number=None,
resource_owner_account=None,
owner_account=None,
remote_access_ip=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('ModifySagRemoteAccess', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SerialNumber": serial_number,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"RemoteAccessIp": remote_access_ip,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def associate_qos(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
qos_id=None):
api_request = APIRequest('AssociateQos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def disassociate_qos(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
qos_id=None):
api_request = APIRequest('DisassociateQos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def modify_smart_access_gateway_up_bandwidth(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
up_bandwidth4_g=None,
smart_ag_id=None,
up_bandwidth_wan=None,
owner_id=None):
api_request = APIRequest('ModifySmartAccessGatewayUpBandwidth',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"UpBandwidth4G": up_bandwidth4_g,
"SmartAGId": smart_ag_id,
"UpBandwidthWan": up_bandwidth_wan,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def deactive_flow_log(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
flow_log_id=None):
api_request = APIRequest('DeactiveFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def associate_flow_log(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
flow_log_id=None):
api_request = APIRequest('AssociateFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def describe_flow_log_sags(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
owner_id=None,
flow_log_id=None,
page_number=None):
api_request = APIRequest('DescribeFlowLogSags', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"FlowLogId": flow_log_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def active_flow_log(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
flow_log_id=None):
api_request = APIRequest('ActiveFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def delete_flow_log(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
flow_log_id=None):
api_request = APIRequest('DeleteFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def describe_flow_logs(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
description=None,
owner_id=None,
page_number=None,
region_id=None,
page_size=None,
output_type=None,
flow_log_id=None,
flow_log_name=None,
status=None):
api_request = APIRequest('DescribeFlowLogs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"Description": description,
"OwnerId": owner_id,
"PageNumber": page_number,
"RegionId": region_id,
"PageSize": page_size,
"OutputType": output_type,
"FlowLogId": flow_log_id,
"FlowLogName": flow_log_name,
"Status": status}
return self._handle_request(api_request).result
def modify_flow_log_attribute(
self,
resource_owner_id=None,
project_name=None,
logstore_name=None,
resource_owner_account=None,
owner_account=None,
netflow_server_port=None,
netflow_version=None,
description=None,
owner_id=None,
inactive_aging=None,
netflow_server_ip=None,
region_id=None,
name=None,
sls_region_id=None,
active_aging=None,
output_type=None,
flow_log_id=None):
api_request = APIRequest('ModifyFlowLogAttribute', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ProjectName": project_name,
"LogstoreName": logstore_name,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"NetflowServerPort": netflow_server_port,
"NetflowVersion": netflow_version,
"Description": description,
"OwnerId": owner_id,
"InactiveAging": inactive_aging,
"NetflowServerIp": netflow_server_ip,
"RegionId": region_id,
"Name": name,
"SlsRegionId": sls_region_id,
"ActiveAging": active_aging,
"OutputType": output_type,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def create_flow_log(
self,
resource_owner_id=None,
project_name=None,
logstore_name=None,
resource_owner_account=None,
owner_account=None,
netflow_server_port=None,
netflow_version=None,
description=None,
owner_id=None,
inactive_aging=None,
netflow_server_ip=None,
region_id=None,
name=None,
sls_region_id=None,
active_aging=None,
output_type=None):
api_request = APIRequest('CreateFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ProjectName": project_name,
"LogstoreName": logstore_name,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"NetflowServerPort": netflow_server_port,
"NetflowVersion": netflow_version,
"Description": description,
"OwnerId": owner_id,
"InactiveAging": inactive_aging,
"NetflowServerIp": netflow_server_ip,
"RegionId": region_id,
"Name": name,
"SlsRegionId": sls_region_id,
"ActiveAging": active_aging,
"OutputType": output_type}
return self._handle_request(api_request).result
def disassociate_flow_log(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
flow_log_id=None):
api_request = APIRequest('DisassociateFlowLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"FlowLogId": flow_log_id}
return self._handle_request(api_request).result
def describe_grant_sag_rules(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
smart_ag_id=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeGrantSagRules', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def grant_sag_instance_to_ccn(
self,
resource_owner_id=None,
ccn_uid=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
ccn_instance_id=None,
owner_id=None):
api_request = APIRequest('GrantSagInstanceToCcn', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"CcnUid": ccn_uid,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"CcnInstanceId": ccn_instance_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def revoke_sag_instance_from_ccn(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
ccn_instance_id=None,
owner_id=None):
api_request = APIRequest('RevokeSagInstanceFromCcn', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"CcnInstanceId": ccn_instance_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_smart_access_gateway_attribute(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DescribeSmartAccessGatewayAttribute',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_qoses(
self,
resource_owner_id=None,
qos_name=None,
resource_owner_account=None,
region_id=None,
qos_ids=None,
owner_account=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeQoses', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"QosName": qos_name,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"QosIds": qos_ids,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def delete_qos(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
qos_id=None):
api_request = APIRequest('DeleteQos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def create_qos(
self,
resource_owner_id=None,
qos_name=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('CreateQos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"QosName": qos_name,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def modify_qos(
self,
resource_owner_id=None,
qos_name=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
qos_id=None):
api_request = APIRequest('ModifyQos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"QosName": qos_name,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def create_qos_policy(
self,
resource_owner_id=None,
source_port_range=None,
resource_owner_account=None,
ip_protocol=None,
owner_account=None,
source_cidr=None,
description=None,
end_time=None,
start_time=None,
owner_id=None,
priority=None,
dest_cidr=None,
dest_port_range=None,
region_id=None,
qos_id=None):
api_request = APIRequest('CreateQosPolicy', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SourcePortRange": source_port_range,
"ResourceOwnerAccount": resource_owner_account,
"IpProtocol": ip_protocol,
"OwnerAccount": owner_account,
"SourceCidr": source_cidr,
"Description": description,
"EndTime": end_time,
"StartTime": start_time,
"OwnerId": owner_id,
"Priority": priority,
"DestCidr": dest_cidr,
"DestPortRange": dest_port_range,
"RegionId": region_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def delete_qos_policy(
self,
resource_owner_id=None,
qos_policy_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
qos_id=None):
api_request = APIRequest('DeleteQosPolicy', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"QosPolicyId": qos_policy_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def modify_qos_policy(
self,
resource_owner_id=None,
source_port_range=None,
qos_policy_id=None,
resource_owner_account=None,
ip_protocol=None,
owner_account=None,
source_cidr=None,
end_time=None,
description=None,
start_time=None,
owner_id=None,
priority=None,
dest_cidr=None,
dest_port_range=None,
region_id=None,
qos_id=None):
api_request = APIRequest('ModifyQosPolicy', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SourcePortRange": source_port_range,
"QosPolicyId": qos_policy_id,
"ResourceOwnerAccount": resource_owner_account,
"IpProtocol": ip_protocol,
"OwnerAccount": owner_account,
"SourceCidr": source_cidr,
"EndTime": end_time,
"Description": description,
"StartTime": start_time,
"OwnerId": owner_id,
"Priority": priority,
"DestCidr": dest_cidr,
"DestPortRange": dest_port_range,
"RegionId": region_id,
"QosId": qos_id}
return self._handle_request(api_request).result
def modify_qos_car(
self,
max_bandwidth_abs=None,
resource_owner_id=None,
resource_owner_account=None,
min_bandwidth_abs=None,
max_bandwidth_percent=None,
owner_account=None,
description=None,
owner_id=None,
qos_car_id=None,
priority=None,
min_bandwidth_percent=None,
limit_type=None,
region_id=None,
percent_source_type=None,
qos_id=None):
api_request = APIRequest('ModifyQosCar', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"MaxBandwidthAbs": max_bandwidth_abs,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"MinBandwidthAbs": min_bandwidth_abs,
"MaxBandwidthPercent": max_bandwidth_percent,
"OwnerAccount": owner_account,
"Description": description,
"OwnerId": owner_id,
"QosCarId": qos_car_id,
"Priority": priority,
"MinBandwidthPercent": min_bandwidth_percent,
"LimitType": limit_type,
"RegionId": region_id,
"PercentSourceType": percent_source_type,
"QosId": qos_id}
return self._handle_request(api_request).result
def delete_qos_car(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
qos_id=None,
qos_car_id=None):
api_request = APIRequest('DeleteQosCar', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"QosId": qos_id,
"QosCarId": qos_car_id}
return self._handle_request(api_request).result
def create_qos_car(
self,
max_bandwidth_abs=None,
resource_owner_id=None,
resource_owner_account=None,
min_bandwidth_abs=None,
max_bandwidth_percent=None,
owner_account=None,
description=None,
owner_id=None,
priority=None,
min_bandwidth_percent=None,
limit_type=None,
region_id=None,
percent_source_type=None,
qos_id=None):
api_request = APIRequest('CreateQosCar', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"MaxBandwidthAbs": max_bandwidth_abs,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"MinBandwidthAbs": min_bandwidth_abs,
"MaxBandwidthPercent": max_bandwidth_percent,
"OwnerAccount": owner_account,
"Description": description,
"OwnerId": owner_id,
"Priority": priority,
"MinBandwidthPercent": min_bandwidth_percent,
"LimitType": limit_type,
"RegionId": region_id,
"PercentSourceType": percent_source_type,
"QosId": qos_id}
return self._handle_request(api_request).result
def describe_qos_policies(
self,
resource_owner_id=None,
qos_policy_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
description=None,
owner_id=None,
qos_id=None,
page_number=None,
order=None):
api_request = APIRequest('DescribeQosPolicies', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"QosPolicyId": qos_policy_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"Description": description,
"OwnerId": owner_id,
"QosId": qos_id,
"PageNumber": page_number,
"Order": order}
return self._handle_request(api_request).result
def describe_qos_cars(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
description=None,
owner_id=None,
qos_id=None,
qos_car_id=None,
page_number=None,
order=None):
api_request = APIRequest('DescribeQosCars', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"Description": description,
"OwnerId": owner_id,
"QosId": qos_id,
"QosCarId": qos_car_id,
"PageNumber": page_number,
"Order": order}
return self._handle_request(api_request).result
def describe_smart_access_gateway_routes(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
smart_ag_id=None,
owner_id=None,
page_nubmer=None):
api_request = APIRequest('DescribeSmartAccessGatewayRoutes', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"PageNubmer": page_nubmer}
return self._handle_request(api_request).result
def describe_snat_entries(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
smart_ag_id=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeSnatEntries', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def delete_snat_entry(
self,
resource_owner_id=None,
instance_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DeleteSnatEntry', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"InstanceId": instance_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_snat_entry(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
cidr_block=None,
smart_ag_id=None,
owner_id=None,
snat_ip=None):
api_request = APIRequest('AddSnatEntry', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"CidrBlock": cidr_block,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"SnatIp": snat_ip}
return self._handle_request(api_request).result
def delete_dnat_entry(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
sag_id=None,
owner_account=None,
owner_id=None,
dnat_entry_id=None):
api_request = APIRequest('DeleteDnatEntry', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"SagId": sag_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"DnatEntryId": dnat_entry_id}
return self._handle_request(api_request).result
def add_dnat_entry(
self,
resource_owner_id=None,
resource_owner_account=None,
ip_protocol=None,
owner_account=None,
owner_id=None,
type_=None,
internal_ip=None,
region_id=None,
sag_id=None,
internal_port=None,
external_ip=None,
external_port=None):
api_request = APIRequest('AddDnatEntry', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"IpProtocol": ip_protocol,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"Type": type_,
"InternalIp": internal_ip,
"RegionId": region_id,
"SagId": sag_id,
"InternalPort": internal_port,
"ExternalIp": external_ip,
"ExternalPort": external_port}
return self._handle_request(api_request).result
def describe_dnat_entries(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
sag_id=None,
owner_account=None,
page_size=None,
owner_id=None,
type_=None,
page_number=None):
api_request = APIRequest('DescribeDnatEntries', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"SagId": sag_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"Type": type_,
"PageNumber": page_number}
return self._handle_request(api_request).result
def bind_vbr(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
vbr_id=None,
vbr_region_id=None):
api_request = APIRequest('BindVbr', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"VbrId": vbr_id,
"VbrRegionId": vbr_region_id}
return self._handle_request(api_request).result
def unbind_vbr(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
vbr_id=None,
vbr_region_id=None):
api_request = APIRequest('UnbindVbr', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"VbrId": vbr_id,
"VbrRegionId": vbr_region_id}
return self._handle_request(api_request).result
def enable_smart_access_gateway_user(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest('EnableSmartAccessGatewayUser', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def disable_smart_access_gateway_user(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest('DisableSmartAccessGatewayUser', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def create_smart_access_gateway_software(
self,
resource_owner_id=None,
period=None,
auto_pay=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
user_count=None,
charge_type=None,
owner_id=None,
data_plan=None):
api_request = APIRequest('CreateSmartAccessGatewaySoftware', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"Period": period,
"AutoPay": auto_pay,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"UserCount": user_count,
"ChargeType": charge_type,
"OwnerId": owner_id,
"DataPlan": data_plan}
return self._handle_request(api_request).result
def describe_network_optimizations(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
ccn_id=None,
name=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeNetworkOptimizations', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"Name": name,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def add_network_optimization_setting(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
domain=None,
owner_id=None,
type_=None):
api_request = APIRequest('AddNetworkOptimizationSetting', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"Domain": domain,
"OwnerId": owner_id,
"Type": type_}
return self._handle_request(api_request).result
def attach_network_optimization_sags(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
list_of_smart_ag_ids=None,
owner_id=None):
api_request = APIRequest('AttachNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"SmartAGIds": list_of_smart_ag_ids,
"OwnerId": owner_id}
repeat_info = {"SmartAGIds": ('SmartAGIds', 'list', 'str', None),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def delete_network_optimization_setting(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
domain=None,
owner_id=None,
type_=None):
api_request = APIRequest('DeleteNetworkOptimizationSetting', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"Domain": domain,
"OwnerId": owner_id,
"Type": type_}
return self._handle_request(api_request).result
def describe_network_optimization_sags(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def detach_network_optimization_sags(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
list_of_smart_ag_ids=None,
owner_id=None):
api_request = APIRequest('DetachNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"SmartAGIds": list_of_smart_ag_ids,
"OwnerId": owner_id}
repeat_info = {"SmartAGIds": ('SmartAGIds', 'list', 'str', None),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def modify_network_optimization(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
name=None,
owner_id=None):
api_request = APIRequest('ModifyNetworkOptimization', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"Name": name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_network_optimization_settings(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeNetworkOptimizationSettings',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def delete_network_optimization(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
network_opt_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('DeleteNetworkOptimization', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"NetworkOptId": network_opt_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def kick_out_clients(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
username=None):
api_request = APIRequest('KickOutClients', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"Username": username}
return self._handle_request(api_request).result
def set_sag_routeable_address(
self,
access_key_id=None,
resource_owner_id=None,
region_id=None,
resource_owner_account=None,
sag_id=None,
owner_account=None,
owner_id=None,
routeable_address=None):
api_request = APIRequest('SetSagRouteableAddress', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"access_key_id": access_key_id,
"ResourceOwnerId": resource_owner_id,
"RegionId": region_id,
"ResourceOwnerAccount": resource_owner_account,
"SagId": sag_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"RouteableAddress": routeable_address}
return self._handle_request(api_request).result
def clear_sag_routeable_address(
self,
access_key_id=None,
resource_owner_id=None,
region_id=None,
resource_owner_account=None,
sag_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('ClearSagRouteableAddress', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"access_key_id": access_key_id,
"ResourceOwnerId": resource_owner_id,
"RegionId": region_id,
"ResourceOwnerAccount": resource_owner_account,
"SagId": sag_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_sag_routeable_address(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
sag_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('DescribeSagRouteableAddress', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"SagId": sag_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_network_optimization(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
name=None,
ccn_id=None,
owner_id=None):
api_request = APIRequest('CreateNetworkOptimization', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"Name": name,
"CcnId": ccn_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_smart_access_gateway_client_user(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest('DeleteSmartAccessGatewayClientUser',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def reset_smart_access_gateway_client_user_password(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest(
'ResetSmartAccessGatewayClientUserPassword',
'GET',
'http',
'RPC',
'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def describe_user_flow_statistics(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
statistics_date=None,
list_of_user_names=None,
owner_id=None):
api_request = APIRequest('DescribeUserFlowStatistics', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"StatisticsDate": statistics_date,
"UserNames": list_of_user_names,
"OwnerId": owner_id}
repeat_info = {"UserNames": ('UserNames', 'list', 'str', None),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def describe_user_online_clients(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest('DescribeUserOnlineClients', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def describe_user_online_client_statistics(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
list_of_user_names=None,
owner_id=None):
api_request = APIRequest('DescribeUserOnlineClientStatistics',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"UserNames": list_of_user_names,
"OwnerId": owner_id}
repeat_info = {"UserNames": ('UserNames', 'list', 'str', None),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def describe_sag_online_client_statistics(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
list_of_smart_ag_ids=None,
owner_id=None):
api_request = APIRequest('DescribeSagOnlineClientStatistics', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGIds": list_of_smart_ag_ids,
"OwnerId": owner_id}
repeat_info = {"SmartAGIds": ('SmartAGIds', 'list', 'str', None),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def describe_smart_access_gateway_client_users(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
smart_ag_id=None,
owner_id=None,
page_number=None,
user_name=None):
api_request = APIRequest('DescribeSmartAccessGatewayClientUsers',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"PageNumber": page_number,
"UserName": user_name}
return self._handle_request(api_request).result
def modify_smart_access_gateway_client_user(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
bandwidth=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
user_name=None):
api_request = APIRequest('ModifySmartAccessGatewayClientUser',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"Bandwidth": bandwidth,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def create_smart_access_gateway_client_user(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
bandwidth=None,
owner_account=None,
client_ip=None,
smart_ag_id=None,
user_mail=None,
owner_id=None,
user_name=None):
api_request = APIRequest('CreateSmartAccessGatewayClientUser',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"Bandwidth": bandwidth,
"OwnerAccount": owner_account,
"ClientIp": client_ip,
"SmartAGId": smart_ag_id,
"UserMail": user_mail,
"OwnerId": owner_id,
"UserName": user_name}
return self._handle_request(api_request).result
def modify_serial_number(
self,
resource_owner_id=None,
serial_number=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('ModifySerialNumber', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SerialNumber": serial_number,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_acl(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
name=None,
owner_id=None):
api_request = APIRequest('CreateACL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"Name": name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_ac_ls(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
acl_ids=None,
owner_account=None,
name=None,
page_size=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeACLs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"AclIds": acl_ids,
"OwnerAccount": owner_account,
"Name": name,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def associate_acl(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('AssociateACL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_acl_rule(
self,
acl_id=None,
resource_owner_id=None,
source_port_range=None,
resource_owner_account=None,
ip_protocol=None,
owner_account=None,
source_cidr=None,
description=None,
owner_id=None,
priority=None,
type_=None,
dest_cidr=None,
dest_port_range=None,
region_id=None,
direction=None,
policy=None):
api_request = APIRequest('AddACLRule', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"SourcePortRange": source_port_range,
"ResourceOwnerAccount": resource_owner_account,
"IpProtocol": ip_protocol,
"OwnerAccount": owner_account,
"SourceCidr": source_cidr,
"Description": description,
"OwnerId": owner_id,
"Priority": priority,
"Type": type_,
"DestCidr": dest_cidr,
"DestPortRange": dest_port_range,
"RegionId": region_id,
"Direction": direction,
"Policy": policy}
return self._handle_request(api_request).result
def disassociate_acl(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DisassociateACL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_acl_rule(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None,
acr_id=None):
api_request = APIRequest('DeleteACLRule', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"AcrId": acr_id}
return self._handle_request(api_request).result
def modify_acl_rule(
self,
acl_id=None,
resource_owner_id=None,
source_port_range=None,
resource_owner_account=None,
ip_protocol=None,
owner_account=None,
source_cidr=None,
description=None,
owner_id=None,
priority=None,
type_=None,
acr_id=None,
dest_cidr=None,
dest_port_range=None,
region_id=None,
direction=None,
policy=None):
api_request = APIRequest('ModifyACLRule', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"SourcePortRange": source_port_range,
"ResourceOwnerAccount": resource_owner_account,
"IpProtocol": ip_protocol,
"OwnerAccount": owner_account,
"SourceCidr": source_cidr,
"Description": description,
"OwnerId": owner_id,
"Priority": priority,
"Type": type_,
"AcrId": acr_id,
"DestCidr": dest_cidr,
"DestPortRange": dest_port_range,
"RegionId": region_id,
"Direction": direction,
"Policy": policy}
return self._handle_request(api_request).result
def describe_acl_attribute(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
owner_id=None,
page_number=None,
direction=None,
order=None):
api_request = APIRequest('DescribeACLAttribute', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"OwnerId": owner_id,
"PageNumber": page_number,
"Direction": direction,
"Order": order}
return self._handle_request(api_request).result
def delete_acl(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('DeleteACL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def modify_acl(
self,
acl_id=None,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
name=None,
owner_id=None):
api_request = APIRequest('ModifyACL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AclId": acl_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"Name": name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def unicom_order_confirm(
self,
tms_code=None,
resource_owner_id=None,
list_of_order_item=None,
owner_user_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
order_post_fee=None,
owner_id=None,
tms_order_code=None,
trade_id=None):
api_request = APIRequest('UnicomOrderConfirm', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"TmsCode": tms_code,
"ResourceOwnerId": resource_owner_id,
"OrderItem": list_of_order_item,
"OwnerUserId": owner_user_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OrderPostFee": order_post_fee,
"OwnerId": owner_id,
"TmsOrderCode": tms_order_code,
"TradeId": trade_id}
repeat_info = {"OrderItem": ('OrderItem',
'list',
'dict',
[('ScItemName',
'str',
None,
None),
('ItemAmount',
'str',
None,
None),
('SnList',
'list',
'str',
None),
('OrderItemId',
'str',
None,
None),
('ScItemCode',
'str',
None,
None),
('ItemQuantity',
'str',
None,
None),
('TradeId',
'str',
None,
None),
('TradeItemId',
'str',
None,
None),
]),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def unicom_sign_confirm(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
list_of_tms_order=None,
owner_id=None):
api_request = APIRequest('UnicomSignConfirm', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"TmsOrder": list_of_tms_order,
"OwnerId": owner_id}
repeat_info = {"TmsOrder": ('TmsOrder',
'list',
'dict',
[('TmsCode',
'str',
None,
None),
('SigningTime',
'str',
None,
None),
('TmsOrderCode',
'str',
None,
None),
('TradeId',
'str',
None,
None),
]),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def downgrade_smart_access_gateway(
self,
resource_owner_id=None,
auto_pay=None,
band_width_spec=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
user_count=None,
smart_ag_id=None,
owner_id=None,
data_plan=None):
api_request = APIRequest('DowngradeSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"AutoPay": auto_pay,
"BandWidthSpec": band_width_spec,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"UserCount": user_count,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"DataPlan": data_plan}
return self._handle_request(api_request).result
def upgrade_smart_access_gateway(
self,
resource_owner_id=None,
auto_pay=None,
band_width_spec=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
user_count=None,
smart_ag_id=None,
owner_id=None,
data_plan=None):
api_request = APIRequest('UpgradeSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"AutoPay": auto_pay,
"BandWidthSpec": band_width_spec,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"UserCount": user_count,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"DataPlan": data_plan}
return self._handle_request(api_request).result
def describe_grant_rules(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
page_size=None,
associated_ccn_id=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeGrantRules', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"PageSize": page_size,
"AssociatedCcnId": associated_ccn_id,
"OwnerId": owner_id,
"PageNumber": page_number}
return self._handle_request(api_request).result
def grant_instance_to_cbn(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
cen_uid=None,
cen_instance_id=None,
owner_account=None,
ccn_instance_id=None,
owner_id=None):
api_request = APIRequest('GrantInstanceToCbn', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"CenUid": cen_uid,
"CenInstanceId": cen_instance_id,
"OwnerAccount": owner_account,
"CcnInstanceId": ccn_instance_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def revoke_instance_from_cbn(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
cen_instance_id=None,
owner_account=None,
ccn_instance_id=None,
owner_id=None):
api_request = APIRequest('RevokeInstanceFromCbn', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"CenInstanceId": cen_instance_id,
"OwnerAccount": owner_account,
"CcnInstanceId": ccn_instance_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_sag_link_level_ha(
self,
resource_owner_id=None,
backup_link_id=None,
resource_owner_account=None,
ha_type=None,
owner_account=None,
main_link_region_id=None,
smart_ag_id=None,
owner_id=None,
main_link_id=None,
backup_link_region_id=None):
api_request = APIRequest('CreateSAGLinkLevelHa', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"BackupLinkId": backup_link_id,
"ResourceOwnerAccount": resource_owner_account,
"HaType": ha_type,
"OwnerAccount": owner_account,
"MainLinkRegionId": main_link_region_id,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"MainLinkId": main_link_id,
"BackupLinkRegionId": backup_link_region_id}
return self._handle_request(api_request).result
def switch_sag_ha_state(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
ha_type=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('SwitchSAGHaState', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"HaType": ha_type,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_sag_link_level_ha(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
ha_type=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DeleteSAGLinkLevelHa', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"HaType": ha_type,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_dedicated_line_backup(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DeleteDedicatedLineBackup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_dedicated_line_backup(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
vbr_id=None,
vbr_region_id=None):
api_request = APIRequest('CreateDedicatedLineBackup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"VbrId": vbr_id,
"VbrRegionId": vbr_region_id}
return self._handle_request(api_request).result
def describe_smart_access_gateway_ha(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DescribeSmartAccessGatewayHa', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def switch_cloud_box_ha_state(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('SwitchCloudBoxHaState', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def reboot_smart_access_gateway(
self,
resource_owner_id=None,
serial_number=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('RebootSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SerialNumber": serial_number,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_smart_access_gateway(
self,
max_band_width=None,
resource_owner_id=None,
description=None,
receiver_town=None,
receiver_district=None,
region_id=None,
user_count=None,
receiver_address=None,
instance_type=None,
buyer_message=None,
hard_ware_spec=None,
receiver_email=None,
receiver_state=None,
receiver_city=None,
period=None,
auto_pay=None,
receiver_mobile=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
receiver_phone=None,
receiver_name=None,
ha_type=None,
name=None,
receiver_country=None,
charge_type=None,
data_plan=None,
receiver_zip=None):
api_request = APIRequest('CreateSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"MaxBandWidth": max_band_width,
"ResourceOwnerId": resource_owner_id,
"Description": description,
"ReceiverTown": receiver_town,
"ReceiverDistrict": receiver_district,
"RegionId": region_id,
"UserCount": user_count,
"ReceiverAddress": receiver_address,
"InstanceType": instance_type,
"BuyerMessage": buyer_message,
"HardWareSpec": hard_ware_spec,
"ReceiverEmail": receiver_email,
"ReceiverState": receiver_state,
"ReceiverCity": receiver_city,
"Period": period,
"AutoPay": auto_pay,
"ReceiverMobile": receiver_mobile,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"ReceiverPhone": receiver_phone,
"ReceiverName": receiver_name,
"HaType": ha_type,
"Name": name,
"ReceiverCountry": receiver_country,
"ChargeType": charge_type,
"DataPlan": data_plan,
"ReceiverZip": receiver_zip}
return self._handle_request(api_request).result
def activate_smart_access_gateway(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('ActivateSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def unlock_smart_access_gateway(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('UnlockSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def bind_smart_access_gateway(
self,
resource_owner_id=None,
smart_ag_uid=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
ccn_id=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('BindSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SmartAGUid": smart_ag_uid,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_cloud_connect_network(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
name=None,
cidr_block=None,
description=None,
snat_cidr_block=None,
is_default=None,
owner_id=None):
api_request = APIRequest('CreateCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"Name": name,
"CidrBlock": cidr_block,
"Description": description,
"SnatCidrBlock": snat_cidr_block,
"IsDefault": is_default,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_cloud_connect_network(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
ccn_id=None,
owner_id=None):
api_request = APIRequest('DeleteCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_cloud_connect_networks(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
name=None,
ccn_id=None,
page_size=None,
list_of_tag=None,
owner_id=None,
page_number=None):
api_request = APIRequest('DescribeCloudConnectNetworks', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"Name": name,
"CcnId": ccn_id,
"PageSize": page_size,
"Tag": list_of_tag,
"OwnerId": owner_id,
"PageNumber": page_number}
repeat_info = {"Tag": ('Tag', 'list', 'dict', [('Value', 'str', None, None),
('Key', 'str', None, None),
]),
}
verify_params(api_request._params, repeat_info)
return self._handle_request(api_request).result
def describe_regions(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
accept_language=None,
owner_id=None):
api_request = APIRequest('DescribeRegions', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"AcceptLanguage": accept_language,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_smart_access_gateways(
self,
resource_owner_id=None,
serial_number=None,
resource_owner_account=None,
acl_ids=None,
owner_account=None,
associated_ccn_id=None,
owner_id=None,
unbound_acl_ids=None,
page_number=None,
region_id=None,
name=None,
page_size=None,
smart_ag_id=None,
instance_type=None,
status=None):
api_request = APIRequest('DescribeSmartAccessGateways', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SerialNumber": serial_number,
"ResourceOwnerAccount": resource_owner_account,
"AclIds": acl_ids,
"OwnerAccount": owner_account,
"AssociatedCcnId": associated_ccn_id,
"OwnerId": owner_id,
"UnboundAclIds": unbound_acl_ids,
"PageNumber": page_number,
"RegionId": region_id,
"Name": name,
"PageSize": page_size,
"SmartAGId": smart_ag_id,
"InstanceType": instance_type,
"Status": status}
return self._handle_request(api_request).result
def get_cloud_connect_network_use_limit(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('GetCloudConnectNetworkUseLimit', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_smart_access_gateway_use_limit(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('GetSmartAccessGatewayUseLimit', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def modify_cloud_connect_network(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
ccn_id=None,
name=None,
cidr_block=None,
description=None,
owner_id=None,
interworking_status=None):
api_request = APIRequest('ModifyCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"Name": name,
"CidrBlock": cidr_block,
"Description": description,
"OwnerId": owner_id,
"InterworkingStatus": interworking_status}
return self._handle_request(api_request).result
def modify_smart_access_gateway(
self,
resource_owner_id=None,
resource_owner_account=None,
city=None,
owner_account=None,
description=None,
owner_id=None,
security_lock_threshold=None,
routing_strategy=None,
region_id=None,
name=None,
cidr_block=None,
smart_ag_id=None):
api_request = APIRequest('ModifySmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"City": city,
"OwnerAccount": owner_account,
"Description": description,
"OwnerId": owner_id,
"SecurityLockThreshold": security_lock_threshold,
"RoutingStrategy": routing_strategy,
"RegionId": region_id,
"Name": name,
"CidrBlock": cidr_block,
"SmartAGId": smart_ag_id}
return self._handle_request(api_request).result
def unbind_smart_access_gateway(
self,
resource_owner_id=None,
smart_ag_uid=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
ccn_id=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('UnbindSmartAccessGateway', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SmartAGUid": smart_ag_uid,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"CcnId": ccn_id,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_smart_access_gateway_version(
self,
resource_owner_id=None,
serial_number=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None,
version_code=None):
api_request = APIRequest('UpdateSmartAccessGatewayVersion', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"SerialNumber": serial_number,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id,
"VersionCode": version_code}
return self._handle_request(api_request).result
def describe_smart_access_gateway_versions(
self,
resource_owner_id=None,
resource_owner_account=None,
region_id=None,
owner_account=None,
smart_ag_id=None,
owner_id=None):
api_request = APIRequest('DescribeSmartAccessGatewayVersions',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"RegionId": region_id,
"OwnerAccount": owner_account,
"SmartAGId": smart_ag_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
| [
"alibabacloud.request.APIRequest",
"alibabacloud.client.AlibabaCloudClient.__init__",
"alibabacloud.utils.parameter_validation.verify_params"
] | [((942, 1107), 'alibabacloud.client.AlibabaCloudClient.__init__', 'AlibabaCloudClient.__init__', (['self', 'client_config'], {'credentials_provider': 'credentials_provider', 'retry_policy': 'retry_policy', 'endpoint_resolver': 'endpoint_resolver'}), '(self, client_config, credentials_provider=\n credentials_provider, retry_policy=retry_policy, endpoint_resolver=\n endpoint_resolver)\n', (969, 1107), False, 'from alibabacloud.client import AlibabaCloudClient\n'), ((1749, 1834), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeBindableSmartAccessGateways"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeBindableSmartAccessGateways', 'GET', 'http', 'RPC', 'query'\n )\n", (1759, 1834), False, 'from alibabacloud.request import APIRequest\n'), ((2563, 2633), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeUnbindFlowLogSags"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeUnbindFlowLogSags', 'GET', 'http', 'RPC', 'query')\n", (2573, 2633), False, 'from alibabacloud.request import APIRequest\n'), ((3252, 3318), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifySagRemoteAccess"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifySagRemoteAccess', 'GET', 'http', 'RPC', 'query')\n", (3262, 3318), False, 'from alibabacloud.request import APIRequest\n'), ((4006, 4063), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AssociateQos"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AssociateQos', 'GET', 'http', 'RPC', 'query')\n", (4016, 4063), False, 'from alibabacloud.request import APIRequest\n'), ((4727, 4787), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DisassociateQos"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DisassociateQos', 'GET', 'http', 'RPC', 'query')\n", (4737, 4787), False, 'from alibabacloud.request import APIRequest\n'), ((5519, 5604), 'alibabacloud.request.APIRequest', 'APIRequest', 
(['"""ModifySmartAccessGatewayUpBandwidth"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifySmartAccessGatewayUpBandwidth', 'GET', 'http', 'RPC', 'query'\n )\n", (5529, 5604), False, 'from alibabacloud.request import APIRequest\n'), ((6337, 6397), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeactiveFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeactiveFlowLog', 'GET', 'http', 'RPC', 'query')\n", (6347, 6397), False, 'from alibabacloud.request import APIRequest\n'), ((7039, 7100), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AssociateFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AssociateFlowLog', 'GET', 'http', 'RPC', 'query')\n", (7049, 7100), False, 'from alibabacloud.request import APIRequest\n'), ((7812, 7876), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeFlowLogSags"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeFlowLogSags', 'GET', 'http', 'RPC', 'query')\n", (7822, 7876), False, 'from alibabacloud.request import APIRequest\n'), ((8559, 8617), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ActiveFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ActiveFlowLog', 'GET', 'http', 'RPC', 'query')\n", (8569, 8617), False, 'from alibabacloud.request import APIRequest\n'), ((9226, 9284), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteFlowLog', 'GET', 'http', 'RPC', 'query')\n", (9236, 9284), False, 'from alibabacloud.request import APIRequest\n'), ((10071, 10132), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeFlowLogs"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeFlowLogs', 'GET', 'http', 'RPC', 'query')\n", (10081, 10132), False, 'from alibabacloud.request import APIRequest\n'), ((11326, 11393), 'alibabacloud.request.APIRequest', 
'APIRequest', (['"""ModifyFlowLogAttribute"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyFlowLogAttribute', 'GET', 'http', 'RPC', 'query')\n", (11336, 11393), False, 'from alibabacloud.request import APIRequest\n'), ((12790, 12848), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateFlowLog', 'GET', 'http', 'RPC', 'query')\n", (12800, 12848), False, 'from alibabacloud.request import APIRequest\n'), ((13923, 13987), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DisassociateFlowLog"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DisassociateFlowLog', 'GET', 'http', 'RPC', 'query')\n", (13933, 13987), False, 'from alibabacloud.request import APIRequest\n'), ((14701, 14767), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeGrantSagRules"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeGrantSagRules', 'GET', 'http', 'RPC', 'query')\n", (14711, 14767), False, 'from alibabacloud.request import APIRequest\n'), ((15520, 15586), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""GrantSagInstanceToCcn"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('GrantSagInstanceToCcn', 'GET', 'http', 'RPC', 'query')\n", (15530, 15586), False, 'from alibabacloud.request import APIRequest\n'), ((16319, 16388), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""RevokeSagInstanceFromCcn"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('RevokeSagInstanceFromCcn', 'GET', 'http', 'RPC', 'query')\n", (16329, 16388), False, 'from alibabacloud.request import APIRequest\n'), ((17067, 17152), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGatewayAttribute"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGatewayAttribute', 'GET', 'http', 'RPC', 'query'\n )\n", (17077, 17152), False, 'from 
alibabacloud.request import APIRequest\n'), ((17869, 17927), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeQoses"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeQoses', 'GET', 'http', 'RPC', 'query')\n", (17879, 17927), False, 'from alibabacloud.request import APIRequest\n'), ((18626, 18680), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteQos"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteQos', 'GET', 'http', 'RPC', 'query')\n", (18636, 18680), False, 'from alibabacloud.request import APIRequest\n'), ((19272, 19326), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateQos"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateQos', 'GET', 'http', 'RPC', 'query')\n", (19282, 19326), False, 'from alibabacloud.request import APIRequest\n'), ((19947, 20001), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyQos"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyQos', 'GET', 'http', 'RPC', 'query')\n", (19957, 20001), False, 'from alibabacloud.request import APIRequest\n'), ((20902, 20962), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateQosPolicy"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateQosPolicy', 'GET', 'http', 'RPC', 'query')\n", (20912, 20962), False, 'from alibabacloud.request import APIRequest\n'), ((21944, 22004), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteQosPolicy"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteQosPolicy', 'GET', 'http', 'RPC', 'query')\n", (21954, 22004), False, 'from alibabacloud.request import APIRequest\n'), ((22946, 23006), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyQosPolicy"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyQosPolicy', 'GET', 'http', 'RPC', 'query')\n", (22956, 23006), False, 'from alibabacloud.request import APIRequest\n'), ((24300, 
24357), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyQosCar"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyQosCar', 'GET', 'http', 'RPC', 'query')\n", (24310, 24357), False, 'from alibabacloud.request import APIRequest\n'), ((25397, 25454), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteQosCar"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteQosCar', 'GET', 'http', 'RPC', 'query')\n", (25407, 25454), False, 'from alibabacloud.request import APIRequest\n'), ((26360, 26417), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateQosCar"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateQosCar', 'GET', 'http', 'RPC', 'query')\n", (26370, 26417), False, 'from alibabacloud.request import APIRequest\n'), ((27543, 27607), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeQosPolicies"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeQosPolicies', 'GET', 'http', 'RPC', 'query')\n", (27553, 27607), False, 'from alibabacloud.request import APIRequest\n'), ((28529, 28589), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeQosCars"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeQosCars', 'GET', 'http', 'RPC', 'query')\n", (28539, 28589), False, 'from alibabacloud.request import APIRequest\n'), ((29446, 29523), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGatewayRoutes"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGatewayRoutes', 'GET', 'http', 'RPC', 'query')\n", (29456, 29523), False, 'from alibabacloud.request import APIRequest\n'), ((30270, 30334), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSnatEntries"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSnatEntries', 'GET', 'http', 'RPC', 'query')\n", (30280, 30334), False, 'from alibabacloud.request import 
APIRequest\n'), ((31049, 31109), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteSnatEntry"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteSnatEntry', 'GET', 'http', 'RPC', 'query')\n", (31059, 31109), False, 'from alibabacloud.request import APIRequest\n'), ((31811, 31868), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AddSnatEntry"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AddSnatEntry', 'GET', 'http', 'RPC', 'query')\n", (31821, 31868), False, 'from alibabacloud.request import APIRequest\n'), ((32574, 32634), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteDnatEntry"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteDnatEntry', 'GET', 'http', 'RPC', 'query')\n", (32584, 32634), False, 'from alibabacloud.request import APIRequest\n'), ((33448, 33505), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AddDnatEntry"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AddDnatEntry', 'GET', 'http', 'RPC', 'query')\n", (33458, 33505), False, 'from alibabacloud.request import APIRequest\n'), ((34418, 34482), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeDnatEntries"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeDnatEntries', 'GET', 'http', 'RPC', 'query')\n", (34428, 34482), False, 'from alibabacloud.request import APIRequest\n'), ((35233, 35285), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""BindVbr"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('BindVbr', 'GET', 'http', 'RPC', 'query')\n", (35243, 35285), False, 'from alibabacloud.request import APIRequest\n'), ((36017, 36071), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UnbindVbr"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UnbindVbr', 'GET', 'http', 'RPC', 'query')\n", (36027, 36071), False, 'from alibabacloud.request import APIRequest\n'), ((36796, 36869), 
'alibabacloud.request.APIRequest', 'APIRequest', (['"""EnableSmartAccessGatewayUser"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('EnableSmartAccessGatewayUser', 'GET', 'http', 'RPC', 'query')\n", (36806, 36869), False, 'from alibabacloud.request import APIRequest\n'), ((37559, 37633), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DisableSmartAccessGatewayUser"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DisableSmartAccessGatewayUser', 'GET', 'http', 'RPC', 'query')\n", (37569, 37633), False, 'from alibabacloud.request import APIRequest\n'), ((38407, 38484), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateSmartAccessGatewaySoftware"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateSmartAccessGatewaySoftware', 'GET', 'http', 'RPC', 'query')\n", (38417, 38484), False, 'from alibabacloud.request import APIRequest\n'), ((39353, 39426), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeNetworkOptimizations"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeNetworkOptimizations', 'GET', 'http', 'RPC', 'query')\n", (39363, 39426), False, 'from alibabacloud.request import APIRequest\n'), ((40239, 40313), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AddNetworkOptimizationSetting"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AddNetworkOptimizationSetting', 'GET', 'http', 'RPC', 'query')\n", (40249, 40313), False, 'from alibabacloud.request import APIRequest\n'), ((41044, 41118), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AttachNetworkOptimizationSags"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AttachNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')\n", (41054, 41118), False, 'from alibabacloud.request import APIRequest\n'), ((41571, 41618), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), 
'(api_request._params, repeat_info)\n', (41584, 41618), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((42008, 42085), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteNetworkOptimizationSetting"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteNetworkOptimizationSetting', 'GET', 'http', 'RPC', 'query')\n", (42018, 42085), False, 'from alibabacloud.request import APIRequest\n'), ((42837, 42913), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeNetworkOptimizationSags"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')\n", (42847, 42913), False, 'from alibabacloud.request import APIRequest\n'), ((43661, 43735), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DetachNetworkOptimizationSags"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DetachNetworkOptimizationSags', 'GET', 'http', 'RPC', 'query')\n", (43671, 43735), False, 'from alibabacloud.request import APIRequest\n'), ((44188, 44235), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (44201, 44235), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((44591, 44661), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyNetworkOptimization"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyNetworkOptimization', 'GET', 'http', 'RPC', 'query')\n", (44601, 44661), False, 'from alibabacloud.request import APIRequest\n'), ((45386, 45471), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeNetworkOptimizationSettings"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeNetworkOptimizationSettings', 'GET', 'http', 'RPC', 'query'\n )\n", (45396, 45471), False, 'from alibabacloud.request import APIRequest\n'), ((46203, 46273), 
'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteNetworkOptimization"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteNetworkOptimization', 'GET', 'http', 'RPC', 'query')\n", (46213, 46273), False, 'from alibabacloud.request import APIRequest\n'), ((46916, 46975), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""KickOutClients"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('KickOutClients', 'GET', 'http', 'RPC', 'query')\n", (46926, 46975), False, 'from alibabacloud.request import APIRequest\n'), ((47691, 47758), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""SetSagRouteableAddress"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('SetSagRouteableAddress', 'GET', 'http', 'RPC', 'query')\n", (47701, 47758), False, 'from alibabacloud.request import APIRequest\n'), ((48492, 48561), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ClearSagRouteableAddress"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ClearSagRouteableAddress', 'GET', 'http', 'RPC', 'query')\n", (48502, 48561), False, 'from alibabacloud.request import APIRequest\n'), ((49215, 49287), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSagRouteableAddress"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSagRouteableAddress', 'GET', 'http', 'RPC', 'query')\n", (49225, 49287), False, 'from alibabacloud.request import APIRequest\n'), ((49917, 49987), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateNetworkOptimization"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateNetworkOptimization', 'GET', 'http', 'RPC', 'query')\n", (49927, 49987), False, 'from alibabacloud.request import APIRequest\n'), ((50665, 50744), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteSmartAccessGatewayClientUser"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteSmartAccessGatewayClientUser', 'GET', 
'http', 'RPC', 'query')\n", (50675, 50744), False, 'from alibabacloud.request import APIRequest\n'), ((51481, 51571), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ResetSmartAccessGatewayClientUserPassword"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ResetSmartAccessGatewayClientUserPassword', 'GET', 'http',\n 'RPC', 'query')\n", (51491, 51571), False, 'from alibabacloud.request import APIRequest\n'), ((52357, 52428), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeUserFlowStatistics"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeUserFlowStatistics', 'GET', 'http', 'RPC', 'query')\n", (52367, 52428), False, 'from alibabacloud.request import APIRequest\n'), ((52917, 52964), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (52930, 52964), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((53323, 53393), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeUserOnlineClients"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeUserOnlineClients', 'GET', 'http', 'RPC', 'query')\n", (53333, 53393), False, 'from alibabacloud.request import APIRequest\n'), ((54097, 54176), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeUserOnlineClientStatistics"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeUserOnlineClientStatistics', 'GET', 'http', 'RPC', 'query')\n", (54107, 54176), False, 'from alibabacloud.request import APIRequest\n'), ((54651, 54698), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (54664, 54698), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((55047, 55125), 'alibabacloud.request.APIRequest', 'APIRequest', 
(['"""DescribeSagOnlineClientStatistics"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSagOnlineClientStatistics', 'GET', 'http', 'RPC', 'query')\n", (55057, 55125), False, 'from alibabacloud.request import APIRequest\n'), ((55534, 55581), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (55547, 55581), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((56012, 56098), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGatewayClientUsers"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGatewayClientUsers', 'GET', 'http', 'RPC',\n 'query')\n", (56022, 56098), False, 'from alibabacloud.request import APIRequest\n'), ((56925, 57004), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifySmartAccessGatewayClientUser"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifySmartAccessGatewayClientUser', 'GET', 'http', 'RPC', 'query')\n", (56935, 57004), False, 'from alibabacloud.request import APIRequest\n'), ((57853, 57932), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateSmartAccessGatewayClientUser"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateSmartAccessGatewayClientUser', 'GET', 'http', 'RPC', 'query')\n", (57863, 57932), False, 'from alibabacloud.request import APIRequest\n'), ((58752, 58815), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifySerialNumber"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifySerialNumber', 'GET', 'http', 'RPC', 'query')\n", (58762, 58815), False, 'from alibabacloud.request import APIRequest\n'), ((59455, 59509), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateACL"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateACL', 'GET', 'http', 'RPC', 'query')\n", (59465, 59509), 
False, 'from alibabacloud.request import APIRequest\n'), ((60182, 60239), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeACLs"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeACLs', 'GET', 'http', 'RPC', 'query')\n", (60192, 60239), False, 'from alibabacloud.request import APIRequest\n'), ((60964, 61021), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AssociateACL"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AssociateACL', 'GET', 'http', 'RPC', 'query')\n", (60974, 61021), False, 'from alibabacloud.request import APIRequest\n'), ((61943, 61998), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""AddACLRule"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('AddACLRule', 'GET', 'http', 'RPC', 'query')\n", (61953, 61998), False, 'from alibabacloud.request import APIRequest\n'), ((63000, 63060), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DisassociateACL"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DisassociateACL', 'GET', 'http', 'RPC', 'query')\n", (63010, 63060), False, 'from alibabacloud.request import APIRequest\n'), ((63718, 63776), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteACLRule"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteACLRule', 'GET', 'http', 'RPC', 'query')\n", (63728, 63776), False, 'from alibabacloud.request import APIRequest\n'), ((64717, 64775), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyACLRule"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyACLRule', 'GET', 'http', 'RPC', 'query')\n", (64727, 64775), False, 'from alibabacloud.request import APIRequest\n'), ((65892, 65957), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeACLAttribute"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeACLAttribute', 'GET', 'http', 'RPC', 'query')\n", (65902, 65957), False, 'from alibabacloud.request 
import APIRequest\n'), ((66685, 66739), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteACL"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteACL', 'GET', 'http', 'RPC', 'query')\n", (66695, 66739), False, 'from alibabacloud.request import APIRequest\n'), ((67352, 67406), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyACL"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyACL', 'GET', 'http', 'RPC', 'query')\n", (67362, 67406), False, 'from alibabacloud.request import APIRequest\n'), ((68196, 68259), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UnicomOrderConfirm"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UnicomOrderConfirm', 'GET', 'http', 'RPC', 'query')\n", (68206, 68259), False, 'from alibabacloud.request import APIRequest\n'), ((70560, 70607), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (70573, 70607), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((70907, 70969), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UnicomSignConfirm"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UnicomSignConfirm', 'GET', 'http', 'RPC', 'query')\n", (70917, 70969), False, 'from alibabacloud.request import APIRequest\n'), ((72206, 72253), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (72219, 72253), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((72704, 72776), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DowngradeSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DowngradeSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (72714, 72776), False, 'from alibabacloud.request import APIRequest\n'), 
((73667, 73737), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UpgradeSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UpgradeSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (73677, 73737), False, 'from alibabacloud.request import APIRequest\n'), ((74566, 74629), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeGrantRules"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeGrantRules', 'GET', 'http', 'RPC', 'query')\n", (74576, 74629), False, 'from alibabacloud.request import APIRequest\n'), ((75394, 75457), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""GrantInstanceToCbn"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('GrantInstanceToCbn', 'GET', 'http', 'RPC', 'query')\n", (75404, 75457), False, 'from alibabacloud.request import APIRequest\n'), ((76198, 76264), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""RevokeInstanceFromCbn"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('RevokeInstanceFromCbn', 'GET', 'http', 'RPC', 'query')\n", (76208, 76264), False, 'from alibabacloud.request import APIRequest\n'), ((77076, 77141), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateSAGLinkLevelHa"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateSAGLinkLevelHa', 'GET', 'http', 'RPC', 'query')\n", (77086, 77141), False, 'from alibabacloud.request import APIRequest\n'), ((77970, 78031), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""SwitchSAGHaState"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('SwitchSAGHaState', 'GET', 'http', 'RPC', 'query')\n", (77980, 78031), False, 'from alibabacloud.request import APIRequest\n'), ((78706, 78771), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteSAGLinkLevelHa"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteSAGLinkLevelHa', 'GET', 'http', 'RPC', 'query')\n", (78716, 78771), False, 
'from alibabacloud.request import APIRequest\n'), ((79424, 79494), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteDedicatedLineBackup"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteDedicatedLineBackup', 'GET', 'http', 'RPC', 'query')\n", (79434, 79494), False, 'from alibabacloud.request import APIRequest\n'), ((80173, 80243), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateDedicatedLineBackup"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateDedicatedLineBackup', 'GET', 'http', 'RPC', 'query')\n", (80183, 80243), False, 'from alibabacloud.request import APIRequest\n'), ((80940, 81013), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGatewayHa"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGatewayHa', 'GET', 'http', 'RPC', 'query')\n", (80950, 81013), False, 'from alibabacloud.request import APIRequest\n'), ((81632, 81698), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""SwitchCloudBoxHaState"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('SwitchCloudBoxHaState', 'GET', 'http', 'RPC', 'query')\n", (81642, 81698), False, 'from alibabacloud.request import APIRequest\n'), ((82351, 82420), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""RebootSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('RebootSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (82361, 82420), False, 'from alibabacloud.request import APIRequest\n'), ((83768, 83837), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (83778, 83837), False, 'from alibabacloud.request import APIRequest\n'), ((85368, 85439), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ActivateSmartAccessGateway"""', '"""GET"""', '"""http"""', 
'"""RPC"""', '"""query"""'], {}), "('ActivateSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (85378, 85439), False, 'from alibabacloud.request import APIRequest\n'), ((86060, 86129), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UnlockSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UnlockSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (86070, 86129), False, 'from alibabacloud.request import APIRequest\n'), ((86804, 86871), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""BindSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('BindSmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (86814, 86871), False, 'from alibabacloud.request import APIRequest\n'), ((87677, 87747), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""CreateCloudConnectNetwork"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('CreateCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')\n", (87687, 87747), False, 'from alibabacloud.request import APIRequest\n'), ((88512, 88582), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DeleteCloudConnectNetwork"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DeleteCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')\n", (88522, 88582), False, 'from alibabacloud.request import APIRequest\n'), ((89304, 89377), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeCloudConnectNetworks"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeCloudConnectNetworks', 'GET', 'http', 'RPC', 'query')\n", (89314, 89377), False, 'from alibabacloud.request import APIRequest\n'), ((90052, 90099), 'alibabacloud.utils.parameter_validation.verify_params', 'verify_params', (['api_request._params', 'repeat_info'], {}), '(api_request._params, repeat_info)\n', (90065, 90099), False, 'from alibabacloud.utils.parameter_validation import verify_params\n'), ((90394, 90454), 
'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeRegions"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeRegions', 'GET', 'http', 'RPC', 'query')\n", (90404, 90454), False, 'from alibabacloud.request import APIRequest\n'), ((91318, 91390), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGateways"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGateways', 'GET', 'http', 'RPC', 'query')\n", (91328, 91390), False, 'from alibabacloud.request import APIRequest\n'), ((92332, 92407), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""GetCloudConnectNetworkUseLimit"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('GetCloudConnectNetworkUseLimit', 'GET', 'http', 'RPC', 'query')\n", (92342, 92407), False, 'from alibabacloud.request import APIRequest\n'), ((92967, 93041), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""GetSmartAccessGatewayUseLimit"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('GetSmartAccessGatewayUseLimit', 'GET', 'http', 'RPC', 'query')\n", (92977, 93041), False, 'from alibabacloud.request import APIRequest\n'), ((93740, 93810), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifyCloudConnectNetwork"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifyCloudConnectNetwork', 'GET', 'http', 'RPC', 'query')\n", (93750, 93810), False, 'from alibabacloud.request import APIRequest\n'), ((94762, 94831), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""ModifySmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('ModifySmartAccessGateway', 'GET', 'http', 'RPC', 'query')\n", (94772, 94831), False, 'from alibabacloud.request import APIRequest\n'), ((95748, 95817), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UnbindSmartAccessGateway"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UnbindSmartAccessGateway', 
'GET', 'http', 'RPC', 'query')\n", (95758, 95817), False, 'from alibabacloud.request import APIRequest\n'), ((96578, 96654), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""UpdateSmartAccessGatewayVersion"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('UpdateSmartAccessGatewayVersion', 'GET', 'http', 'RPC', 'query')\n", (96588, 96654), False, 'from alibabacloud.request import APIRequest\n'), ((97370, 97449), 'alibabacloud.request.APIRequest', 'APIRequest', (['"""DescribeSmartAccessGatewayVersions"""', '"""GET"""', '"""http"""', '"""RPC"""', '"""query"""'], {}), "('DescribeSmartAccessGatewayVersions', 'GET', 'http', 'RPC', 'query')\n", (97380, 97449), False, 'from alibabacloud.request import APIRequest\n')] |
import json
from . import Protocol, Activity, Item
def load_schema(filepath):
with open(filepath) as fp:
data = json.load(fp)
if "@type" not in data:
raise ValueError("Missing @type key")
schema_type = data["@type"]
if schema_type == "reproschema:Protocol":
return Protocol.from_data(data)
if schema_type == "reproschema:Activity":
return Activity.from_data(data)
if schema_type == "reproschema:Item":
return Item.from_data(data)
| [
"json.load"
] | [((126, 139), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (135, 139), False, 'import json\n')] |
from django.db import transaction
from api.management.data_script import OperationalDataScript
from api.models.notification import Notification
class UpdateNotifications(OperationalDataScript):
"""
Update notifications name
"""
is_revertable = False
comment = 'Update notifications name'
def check_run_preconditions(self):
return True
def update_notifications(self):
notification = Notification.objects.get(notification_code="CREDIT_APPLICATION_ISSUED")
notification.name = "Credit Application Processed by the Government of B.C"
notification.save()
notification = Notification.objects.get(notification_code="CREDIT_TRANSFER_RECORDED")
notification.name = "Credit Transfer Recorded by the Government of B.C."
notification.save()
notification = Notification.objects.get(notification_code="CREDIT_TRANSFER_REJECTED")
notification.name = "Credit Transfer Rejected by the Government of B.C."
notification.save()
@transaction.atomic
def run(self):
self.update_notifications()
script_class = UpdateNotifications
| [
"api.models.notification.Notification.objects.get"
] | [((431, 502), 'api.models.notification.Notification.objects.get', 'Notification.objects.get', ([], {'notification_code': '"""CREDIT_APPLICATION_ISSUED"""'}), "(notification_code='CREDIT_APPLICATION_ISSUED')\n", (455, 502), False, 'from api.models.notification import Notification\n'), ((638, 708), 'api.models.notification.Notification.objects.get', 'Notification.objects.get', ([], {'notification_code': '"""CREDIT_TRANSFER_RECORDED"""'}), "(notification_code='CREDIT_TRANSFER_RECORDED')\n", (662, 708), False, 'from api.models.notification import Notification\n'), ((841, 911), 'api.models.notification.Notification.objects.get', 'Notification.objects.get', ([], {'notification_code': '"""CREDIT_TRANSFER_REJECTED"""'}), "(notification_code='CREDIT_TRANSFER_REJECTED')\n", (865, 911), False, 'from api.models.notification import Notification\n')] |
"""
Functions for retrieving strings from files
"""
import os
def string_from_file(string, strip=True):
"""
Return an unaltered string or the contents of a file if the string
begins with @ and the rest of it points at a path.
If 'strip' is True, remove leading and trailing whitespace
(default behavior).
"""
if not isinstance(string, str):
raise ValueError("Argument must be a string")
if not string:
# Easy case. No need to strip, either.
return string
if (string[0] != "@"):
if string.startswith("\\@"):
value = string[1:]
else:
value = string
else:
path = os.path.expanduser(string[1:])
with open(path, 'r') as content:
value = content.read()
return value.strip() if strip else value
| [
"os.path.expanduser"
] | [((682, 712), 'os.path.expanduser', 'os.path.expanduser', (['string[1:]'], {}), '(string[1:])\n', (700, 712), False, 'import os\n')] |
import unittest
import numpy as np
from bert2tf import Executor, ElectraDiscriminator, BertTokenizer
from tests import Bert2TFTestCase
class MyTestCase(Bert2TFTestCase):
@unittest.skip('just run on local machine')
def test_create_electra_model(self):
model = Executor.load_config('ElectraDiscriminator', use_with={
'pretrained_weights_path': '../../resources/pre_models/electra-chinese-small/electra_small',
'config': '../../resources/pre_models/electra-chinese-small/electra_small_config.json'})
self.assertEqual(isinstance(model, ElectraDiscriminator), True)
model = Executor.load_config('yaml/electra.yml')
self.assertEqual(isinstance(model, ElectraDiscriminator), True)
model = ElectraDiscriminator(
config='../../resources/pre_models/electra-chinese-small/electra_small_config.json',
pretrained_weights_path='../../resources/pre_models/electra-chinese-small/electra_small')
self.assertEqual(isinstance(model, ElectraDiscriminator), True)
@unittest.skip('just run on local machine')
def test_electra_encode(self):
model = ElectraDiscriminator(
config='../../resources/pre_models/electra-chinese-small/electra_small_config.json',
pretrained_weights_path='../../resources/pre_models/electra-chinese-small/electra_small')
self.assertEqual(isinstance(model, ElectraDiscriminator), True)
tokenizer = BertTokenizer('../../resources/pre_models/electra-chinese-small/vocab.txt')
input_ids, input_mask, segment_ids = tokenizer.encode('今天天气不好')
result = model([np.array([input_ids]), np.array([input_mask]), np.array([segment_ids])]).numpy()
self.assertEqual(result.size, 2048)
| [
"bert2tf.BertTokenizer",
"bert2tf.Executor.load_config",
"numpy.array",
"bert2tf.ElectraDiscriminator",
"unittest.skip"
] | [((179, 221), 'unittest.skip', 'unittest.skip', (['"""just run on local machine"""'], {}), "('just run on local machine')\n", (192, 221), False, 'import unittest\n'), ((1059, 1101), 'unittest.skip', 'unittest.skip', (['"""just run on local machine"""'], {}), "('just run on local machine')\n", (1072, 1101), False, 'import unittest\n'), ((279, 537), 'bert2tf.Executor.load_config', 'Executor.load_config', (['"""ElectraDiscriminator"""'], {'use_with': "{'pretrained_weights_path':\n '../../resources/pre_models/electra-chinese-small/electra_small',\n 'config':\n '../../resources/pre_models/electra-chinese-small/electra_small_config.json'\n }"}), "('ElectraDiscriminator', use_with={\n 'pretrained_weights_path':\n '../../resources/pre_models/electra-chinese-small/electra_small',\n 'config':\n '../../resources/pre_models/electra-chinese-small/electra_small_config.json'\n })\n", (299, 537), False, 'from bert2tf import Executor, ElectraDiscriminator, BertTokenizer\n'), ((630, 670), 'bert2tf.Executor.load_config', 'Executor.load_config', (['"""yaml/electra.yml"""'], {}), "('yaml/electra.yml')\n", (650, 670), False, 'from bert2tf import Executor, ElectraDiscriminator, BertTokenizer\n'), ((760, 970), 'bert2tf.ElectraDiscriminator', 'ElectraDiscriminator', ([], {'config': '"""../../resources/pre_models/electra-chinese-small/electra_small_config.json"""', 'pretrained_weights_path': '"""../../resources/pre_models/electra-chinese-small/electra_small"""'}), "(config=\n '../../resources/pre_models/electra-chinese-small/electra_small_config.json'\n , pretrained_weights_path=\n '../../resources/pre_models/electra-chinese-small/electra_small')\n", (780, 970), False, 'from bert2tf import Executor, ElectraDiscriminator, BertTokenizer\n'), ((1153, 1363), 'bert2tf.ElectraDiscriminator', 'ElectraDiscriminator', ([], {'config': '"""../../resources/pre_models/electra-chinese-small/electra_small_config.json"""', 'pretrained_weights_path': 
'"""../../resources/pre_models/electra-chinese-small/electra_small"""'}), "(config=\n '../../resources/pre_models/electra-chinese-small/electra_small_config.json'\n , pretrained_weights_path=\n '../../resources/pre_models/electra-chinese-small/electra_small')\n", (1173, 1363), False, 'from bert2tf import Executor, ElectraDiscriminator, BertTokenizer\n'), ((1467, 1542), 'bert2tf.BertTokenizer', 'BertTokenizer', (['"""../../resources/pre_models/electra-chinese-small/vocab.txt"""'], {}), "('../../resources/pre_models/electra-chinese-small/vocab.txt')\n", (1480, 1542), False, 'from bert2tf import Executor, ElectraDiscriminator, BertTokenizer\n'), ((1639, 1660), 'numpy.array', 'np.array', (['[input_ids]'], {}), '([input_ids])\n', (1647, 1660), True, 'import numpy as np\n'), ((1662, 1684), 'numpy.array', 'np.array', (['[input_mask]'], {}), '([input_mask])\n', (1670, 1684), True, 'import numpy as np\n'), ((1686, 1709), 'numpy.array', 'np.array', (['[segment_ids]'], {}), '([segment_ids])\n', (1694, 1709), True, 'import numpy as np\n')] |
import numpy as np
import unittest
from deepblast.dataset.alphabet import UniprotTokenizer
import numpy.testing as npt
class TestAlphabet(unittest.TestCase):
def test_tokenizer(self):
tokenizer = UniprotTokenizer(pad_ends=True)
res = tokenizer(b'ARNDCQEGHILKMFPSTWYVXOUBZ')
# Need to account for padding and offset
exp = np.array([20] + list(range(0, 21)) + [11, 4, 20, 20] + [20])
npt.assert_allclose(res, exp)
def test_tokenizer_encode(self):
tokenizer = UniprotTokenizer(pad_ends=True)
x = 'ARNDCQEGHILKMFPSTWYVXOUBZ'
x = str.encode(x)
res = tokenizer(x)
exp = np.array(
[20, 0, 1, 2, 3, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 11, 4, 20, 20, 20])
npt.assert_allclose(exp, res)
def test_tokenizer_encode_no_padding(self):
tokenizer = UniprotTokenizer(pad_ends=False)
x = 'ARNDCQEGHILKMFPSTWYVXOUBZ'
x = str.encode(x)
res = tokenizer(x)
exp = np.array(
[0, 1, 2, 3, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 11, 4, 20, 20])
npt.assert_allclose(exp, res)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"numpy.array",
"deepblast.dataset.alphabet.UniprotTokenizer",
"numpy.testing.assert_allclose"
] | [((1260, 1275), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1273, 1275), False, 'import unittest\n'), ((211, 242), 'deepblast.dataset.alphabet.UniprotTokenizer', 'UniprotTokenizer', ([], {'pad_ends': '(True)'}), '(pad_ends=True)\n', (227, 242), False, 'from deepblast.dataset.alphabet import UniprotTokenizer\n'), ((429, 458), 'numpy.testing.assert_allclose', 'npt.assert_allclose', (['res', 'exp'], {}), '(res, exp)\n', (448, 458), True, 'import numpy.testing as npt\n'), ((517, 548), 'deepblast.dataset.alphabet.UniprotTokenizer', 'UniprotTokenizer', ([], {'pad_ends': '(True)'}), '(pad_ends=True)\n', (533, 548), False, 'from deepblast.dataset.alphabet import UniprotTokenizer\n'), ((656, 767), 'numpy.array', 'np.array', (['[20, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, \n 20, 11, 4, 20, 20, 20]'], {}), '([20, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,\n 18, 19, 20, 11, 4, 20, 20, 20])\n', (664, 767), True, 'import numpy as np\n'), ((811, 840), 'numpy.testing.assert_allclose', 'npt.assert_allclose', (['exp', 'res'], {}), '(exp, res)\n', (830, 840), True, 'import numpy.testing as npt\n'), ((910, 942), 'deepblast.dataset.alphabet.UniprotTokenizer', 'UniprotTokenizer', ([], {'pad_ends': '(False)'}), '(pad_ends=False)\n', (926, 942), False, 'from deepblast.dataset.alphabet import UniprotTokenizer\n'), ((1050, 1153), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, \n 11, 4, 20, 20]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,\n 19, 20, 11, 4, 20, 20])\n', (1058, 1153), True, 'import numpy as np\n'), ((1197, 1226), 'numpy.testing.assert_allclose', 'npt.assert_allclose', (['exp', 'res'], {}), '(exp, res)\n', (1216, 1226), True, 'import numpy.testing as npt\n')] |
"""Tests for drg module"""
import sys
import os
import pkg_resources
import pytest
import numpy as np
import networkx as nx
import cantera as ct
from ..sampling import data_files, InputIgnition
from ..drg import graph_search, create_drg_matrix, run_drg, trim_drg, reduce_drg
# Taken from http://stackoverflow.com/a/22726782/1569494
# Python 2 compatibility shim: ``tempfile.TemporaryDirectory`` exists only on
# Python 3, so on older interpreters fall back to a minimal hand-rolled
# context manager with equivalent create/cleanup behavior.
try:
    from tempfile import TemporaryDirectory
except ImportError:
    from contextlib import contextmanager
    import shutil
    import tempfile
    import errno

    @contextmanager
    def TemporaryDirectory():
        """Create a temporary directory, yield its path, and remove it on exit."""
        name = tempfile.mkdtemp()
        try:
            yield name
        finally:
            try:
                shutil.rmtree(name)
            except OSError as e:
                # Reraise unless ENOENT: No such file or directory
                # (ok if directory has already been deleted)
                if e.errno != errno.ENOENT:
                    raise
def relative_location(file):
    """Return the filesystem path of a resource bundled with this test package.

    Parameters
    ----------
    file : str
        Path of the resource, relative to this module's directory.

    Returns
    -------
    str
        Absolute filesystem path to the resource.
    """
    # The original code called os.path.join(file), which with a single
    # argument is a no-op; pass the relative path straight through.
    return pkg_resources.resource_filename(__name__, file)
def check_equal(list1, list2):
    """Check whether two lists have the same contents (regardless of order).

    Taken from https://stackoverflow.com/a/12813909

    Parameters
    ----------
    list1 : list
        First list, containing all of a particular type
    list2 : list
        Second list, containing all of a particular type

    Returns
    -------
    bool
        ``True`` if lists are equal
    """
    # Cheap length guard first, then an order-insensitive comparison.
    if len(list1) != len(list2):
        return False
    return sorted(list1) == sorted(list2)
class TestCreateDRGMatrix:
    """Tests for the ``create_drg_matrix`` direct-interaction-coefficient builder."""

    def test_qss_artificial(self):
        """Test using four species artificial model with QSS species from 2006 DRG paper.

        At the chosen state the quasi-steady-state species R satisfies R ~ F / 1e3.
        """
        R1 = ct.Reaction.fromCti('''reaction('F => R', [1.0, 0.0, 0.0])''')
        R2 = ct.Reaction.fromCti('''reaction('R => P', [1.0e3, 0.0, 0.0])''')
        R3 = ct.Reaction.fromCti('''reaction('R => Pp', [1.0, 0.0, 0.0])''')
        F = ct.Species('F', 'H:1')
        R = ct.Species('R', 'H:1')
        P = ct.Species('P', 'H:1')
        Pp = ct.Species('Pp', 'H:1')
        for sp in [F, R, P, Pp]:
            # Thermo data is irrelevant to the interaction coefficients;
            # a constant-cp fit keeps the model well-formed.
            sp.thermo = ct.ConstantCp(
                300, 1000, 101325, (300, 1.0, 1.0, 1.0)
                )
        model = ct.Solution(
            thermo='IdealGas', kinetics='GasKinetics',
            species=[F, R, P, Pp], reactions=[R1, R2, R3]
            )
        state = 1000, ct.one_atm, [1., 1./1.e3, 0., 0.]
        matrix = create_drg_matrix(state, model)

        # Expected pairwise interaction coefficients at this state.
        correct = np.array([
            [0, 1.0, 0, 0],
            [0.5, 0, 0.5, 0.5*1e-3],
            [0, 1.0, 0, 0],
            [0, 1, 0, 0]
            ])
        assert np.allclose(correct, matrix, rtol=1e-3)

    def test_pe_artificial(self):
        """Test using three species artificial model with PE reactions from 2006 DRG paper.
        """
        R1 = ct.Reaction.fromCti('''reaction('F <=> R', [1.0e3, 0.0, 0.0])''')
        R2 = ct.Reaction.fromCti('''reaction('R <=> P', [1.0, 0.0, 0.0])''')
        F = ct.Species('F', 'H:1')
        R = ct.Species('R', 'H:1')
        P = ct.Species('P', 'H:1')
        for sp in [F, R, P]:
            sp.thermo = ct.ConstantCp(
                300, 1000, 101325, (300, 1.0, 1.0, 1.0)
                )
        model = ct.Solution(
            thermo='IdealGas', kinetics='GasKinetics',
            species=[F, R, P], reactions=[R1, R2]
            )
        # Concentrations chosen so the fast F <=> R pair sits near
        # partial equilibrium.
        conc_R = 0.1
        conc_F = ((1 + 1e-3)*conc_R - (1/2e3))/(1 - (1/2e3))
        conc_P = 1.0 - (conc_R + conc_F)
        state = 1000, ct.one_atm, [conc_F, conc_R, conc_P]
        matrix = create_drg_matrix(state, model)

        correct = np.array([
            [0, 1.0, 0],
            [1./3., 0, 2./3.],
            [0, 1.0, 0],
            ])
        assert np.allclose(correct, matrix, rtol=1e-3)

    def test_dormant_modes(self):
        """Test using three species artificial model with dormant modes from 2006 DRG paper.
        """
        R1 = ct.Reaction.fromCti('''reaction('A <=> B', [1.0, 0.0, 0.0])''')
        R2 = ct.Reaction.fromCti('''reaction('B <=> C', [1.0e-3, 0.0, 0.0])''')
        A = ct.Species('A', 'H:1')
        B = ct.Species('B', 'H:1')
        C = ct.Species('C', 'H:1')
        for sp in [A, B, C]:
            sp.thermo = ct.ConstantCp(
                300, 1000, 101325, (300, 1.0, 1.0, 1.0)
                )
        model = ct.Solution(
            thermo='IdealGas', kinetics='GasKinetics',
            species=[A, B, C], reactions=[R1, R2]
            )
        state = 1000, ct.one_atm, [1.0, 2.0, 1.0]
        matrix = create_drg_matrix(state, model)

        correct = np.array([
            [0, 1.0, 0],
            [1/(1+1e-3), 0, 1e-3/(1+1e-3)],
            [0, 1.0, 0],
            ])
        assert np.allclose(correct, matrix, rtol=1e-3)

        # Second state: nearly-equilibrated concentrations, where the dormant
        # B <=> C mode contributes a comparable share of the coefficients.
        conc_A = 1.370536
        conc_B = 1.370480
        conc_C = 1.258985
        state = 1000, ct.one_atm, [conc_A, conc_B, conc_C]
        matrix = create_drg_matrix(state, model)

        correct = np.array([
            [0, 1.0, 0],
            [abs(conc_A-conc_B)/(abs(conc_A-conc_B)+1e-3*abs(conc_B-conc_C)), 0,
             1e-3*abs(conc_B-conc_C)/(abs(conc_A-conc_B)+1e-3*abs(conc_B-conc_C))
             ],
            [0, 1.0, 0],
            ])
        assert np.allclose(correct, matrix, rtol=1e-3)

    @pytest.mark.skip
    def testArtificial(self):
        """Uses artificial mechanism to test"""
        # Load model
        path_to_original = relative_location("artificial-mechanism.cti")
        solution_object = ct.Solution(path_to_original)

        # NOTE(review): ``rate_edge_data`` is never defined in this module, so
        # this test would raise NameError if the skip marker were removed;
        # presumably a helper that builds the edge data from
        # ``solution_object`` is missing.  TODO: restore it before unskipping.
        # Pull out timestep one denominator and numerator dicts
        ic_one = rate_edge_data[list(rate_edge_data.keys())[0]]
        tstep_one = ic_one[list(ic_one.keys())[0]]
        denoms = tstep_one[0]
        numers = tstep_one[1]

        # Expected values for denominators
        expected_denoms = {}
        expected_denoms["H2O"] = 1.9573216e-13
        expected_denoms["H2"] = .00025854374
        expected_denoms["O2"] = 9.7866081e-14
        expected_denoms["H"] = .00051708749

        # np.isclose takes ``atol``/``rtol`` keywords; the original code passed
        # ``abs_tol`` (the math.isclose spelling), which raises TypeError.
        assert np.isclose(expected_denoms["H2O"], denoms["H2O"], atol=1.0e-17)
        assert np.isclose(expected_denoms["H2"], denoms["H2"], atol=1.0e-10)
        assert np.isclose(expected_denoms["O2"], denoms["O2"], atol=1.0e-18)
        assert np.isclose(expected_denoms["H"], denoms["H"], atol=1.0e-10)

        expected_numers = {}
        expected_numers["H2O_H2"] = 1.9573216e-13
        expected_numers["H2O_O2"] = 1.9573216e-13
        expected_numers["H2_O2"] = 1.9573216e-13
        expected_numers["H2_H2O"] = 1.9573216e-13
        expected_numers["O2_H2"] = 9.7866081e-14
        expected_numers["O2_H2O"] = 9.7866081e-14
        expected_numers["H2_H"] = .00025854374
        expected_numers["H_H2"] = .00051708749

        assert np.isclose(expected_numers["H2O_H2"], numers["H2O_H2"], atol=1.0e-17)
        assert np.isclose(expected_numers["H2O_O2"], numers["H2O_O2"], atol=1.0e-17)
        assert np.isclose(expected_numers["H2_O2"], numers["H2_O2"], atol=1.0e-17)
        assert np.isclose(expected_numers["H2_H2O"], numers["H2_H2O"], atol=1.0e-17)
        assert np.isclose(expected_numers["O2_H2"], numers["O2_H2"], atol=1.0e-18)
        assert np.isclose(expected_numers["O2_H2O"], numers["O2_H2O"], atol=1.0e-18)
        assert np.isclose(expected_numers["H2_H"], numers["H2_H"], atol=1.0e-18)
        assert np.isclose(expected_numers["H_H2"], numers["H_H2"], atol=1.0e-18)
class TestTrimDRG:
    """Tests for the ``trim_drg`` threshold-and-search routine."""

    def test_simple(self):
        """Thresholding a three-species graph keeps the expected species."""
        adjacency = np.array([[0, 1, 0.1], [0.5, 0, 0.5], [0.5, 0.5, 0]])
        species = ['A', 'B', 'C']
        # A loose cutoff keeps every species reachable from A...
        kept = trim_drg(adjacency, species, ['A'], 0.2)
        assert check_equal(kept, species)
        # ...while a tighter one severs the weak edges out of A.
        kept = trim_drg(adjacency, species, ['A'], 0.6)
        assert check_equal(kept, ['A', 'B'])

    def test_uncoupled_group(self):
        """Test of simple five-component graph from DRG papers.
        """
        adjacency = np.array([
            [0, 0.5, 0, 0, 0, 0],
            [0, 0, 0, 0.9, 0, 0],
            [0, 0.5, 0, 0.5, 0, 0],
            [0, 0.9, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1.0],
            [0, 0, 0, 0, 1.0, 0]
            ])
        species = ['A', 'B', 'C', 'D', 'E', 'F']
        # Starting from A, only the A-B-D component is retained.
        kept = trim_drg(adjacency, species, ['A'], 0.1)
        assert check_equal(kept, ['A', 'B', 'D'])

        adjacency = np.array([
            [0, 0.5, 0, 0, 0, 0],
            [0, 0, 0, 0.9, 0, 0],
            [0, 0.5, 0, 0.5, 0, 0],
            [0, 0.9, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1.0],
            [0, 0, 0, 0, 1.0, 0]
            ])
        species = ['A', 'B', 'C', 'D', 'E', 'F']
        # Starting from E, only the uncoupled E-F pair is retained.
        kept = trim_drg(adjacency, species, ['E'], 0.1)
        assert check_equal(kept, ['E', 'F'])

    def test_uncoupled_group2(self):
        """Test of simple five-component graph from DRG papers.
        """
        adjacency = np.array([
            [0, 0.5, 0, 0, 0, 0],
            [0, 0, 0.15, 0.9, 0, 0],
            [0, 0.5, 0, 0.5, 0, 0],
            [0, 0.9, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1.0],
            [0, 0, 0, 0, 1.0, 0]
            ])
        species = ['A', 'B', 'C', 'D', 'E', 'F']
        # The weak B->C edge (0.15) survives a 0.1 cutoff...
        kept = trim_drg(adjacency, species, ['A'], 0.1)
        assert check_equal(kept, ['A', 'B', 'C', 'D'])
        # ...but is severed at 0.2.
        kept = trim_drg(adjacency, species, ['A'], 0.2)
        assert check_equal(kept, ['A', 'B', 'D'])

    def test_csp_mech5(self):
        """Test of simple mech 5 from 2006 DRG paper.
        """
        reactions = [
            ct.Reaction.fromCti('''reaction('F => P', [1.0, 0.0, 0.0])'''),
            ct.Reaction.fromCti('''reaction('F => R', [1.0e-2, 0.0, 0.0])'''),
            ct.Reaction.fromCti('''reaction('R => P', [1.0e2, 0.0, 0.0])'''),
            ]
        species = [ct.Species(name, 'H:1') for name in ('F', 'P', 'R')]
        for sp in species:
            # Thermo data is irrelevant here; a constant-cp fit suffices.
            sp.thermo = ct.ConstantCp(
                300, 1000, 101325, (300, 1.0, 1.0, 1.0)
                )
        model = ct.Solution(
            thermo='IdealGas', kinetics='GasKinetics',
            species=species, reactions=reactions
            )
        state = 1000, ct.one_atm, [1.0, 1.0, 1.0e-4]
        adjacency = create_drg_matrix(state, model)

        kept = trim_drg(adjacency, ['F', 'P', 'R'], ['F'], 0.1)
        assert check_equal(kept, ['F', 'P'])
class TestGraphSearch:
"""Tests for graph_search method"""
#generate test graph
#starting from A, nodes A,E,C,F,D,I,H,O should be the only nodes found
    def testGraphSearchOneInput(self):
        # Build the 15-node example graph; the commented-out tuples below are
        # the zero-weight edges of the original example, kept for reference.
        graph = nx.DiGraph()
        graph.add_nodes_from(
            ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
        )
        graph.add_weighted_edges_from([
            #('A','F', 0), ('A','N',0),
            # ('C','F',1.0), ('A','C',1.0),
            ('N','C',1.0), ('C','D',1.0),
            ('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
            #('E','G',0), ('G','I',0), ('G','M',0),
            ('G','L',1.0), ('E','H',1.0),
            #('H','J',0)
            ])
        # Keep only the positive-weight edges before searching.
        subgraph = nx.DiGraph([(u,v,d) for u,v,d in graph.edges(data=True) if d['weight'] > 0])
        #temporary solution
        essential_nodes = graph_search(subgraph, 'A')
        assert 'A' in essential_nodes
        # NOTE(review): the two assertions below are vacuous -- a non-empty list
        # is always truthy, so they pass regardless of membership.  With the
        # A->C and C->F edges commented out above, only A, E and H are actually
        # reachable from A, so rewriting these with all()/any() as-is would make
        # them fail; the expected sets and the edge list need to be reconciled
        # before the assertions can be made meaningful.
        assert [n in essential_nodes for n in ['A', 'C', 'D', 'I', 'O', 'F', 'E', 'H']]
        assert [n not in essential_nodes for n in ['B', 'G', 'J', 'K', 'L', 'M', 'N']]
#generate test graph
#starting from A, nodes A,E,C,F,D,I,H,O should be the only nodes found
def testGraphSearchOneInput2(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F', 0), ('A','N',0),
# ('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
subgraph = nx.DiGraph([(u,v,d) for u,v,d in graph.edges(data=True) if d['weight'] > 0])
#temporary solution
essential_nodes = graph_search(subgraph, 'G')
assert 'G' in essential_nodes
for n in ['A','B', 'C', 'D', 'J', 'K', 'I', 'O', 'F', 'E', 'H', 'M', 'N']:
assert n not in essential_nodes
assert [n in essential_nodes for n in [ 'G', 'L']]
def testGraphSearch3Inputs(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from(
[ ('C','F', 1), ('A','C', 1),
#('A','F', 0), ('A','N', 0),
('N','C', 1), ('C','D', 1),
('D','I', 1), ('I','O', 1), ('A','E', 1),
#('E','G', 0), ('G','I', 0), ('G','M', 0),
('G','L', 1), ('E','H', 1),
#('H','J', 0)
])
target_species= ['A', 'C', 'D']
essential_nodes = graph_search(graph, target_species)
assert 'A' in essential_nodes
assert 'C' in essential_nodes
assert 'D' in essential_nodes
for n in ['A', 'C', 'D', 'I', 'O', 'F', 'E', 'H']:
assert n in essential_nodes
for n in ['B', 'G', 'J', 'K', 'L', 'M', 'N']:
assert n not in essential_nodes
def testgraphsearch_no_targets (self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F', 0), ('A','N',0),
('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
essential_nodes = graph_search(graph, [])
assert not essential_nodes
@pytest.mark.xfail
def testGraphshearchwithatargetThatsnotinGraph(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F', 0), ('A','N',0),
('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
essential_nodes = graph_search(graph, 'Z')
assert 'Z' in essential_nodes
def testGraphsearchforinfinteloops(self):
graph = nx.DiGraph()
graph.add_nodes_from(['A', 'B', 'C', 'D', 'E'])
graph.add_weighted_edges_from(
[('A', 'B', 1), ('B', 'C', 1), ('C', 'D', 1), ('D', 'E',1), ('E', 'A', 1)]
)
essential_nodes= graph_search(graph, 'A')
assert 'A' in essential_nodes
assert [n in essential_nodes for n in ['A', 'C', 'D', 'B', 'E']]
@pytest.mark.xfail
def testGraphShearchWithATargetThatsNotInGraphAndOneThatIs(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F', 0), ('A','N',0),
('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
essential_nodes = graph_search(graph, ['B', 'Z'])
assert 'B' in essential_nodes
def testGraphsearchwithListofLength1(self):
graph = nx.DiGraph()
graph.add_node('A')
essential_nodes = graph_search(graph, 'A')
assert 'A' in essential_nodes
assert len(essential_nodes) == 1
def testGraphSearchWithTwoOfTheSameItemInTheGraph(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F',0), ('A','N',0),
('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
essential_nodes = graph_search(graph, 'A')
assert 'A' in essential_nodes
assert [n in essential_nodes for n in ['A', 'C', 'D', 'I', 'O', 'F', 'E', 'H']]
assert [n not in essential_nodes for n in ['B', 'G', 'J', 'K', 'L', 'M', 'N']]
def testGraphSearchWithTwoOfTheSameItemInTheTargetList(self):
graph = nx.DiGraph()
graph.add_nodes_from(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O']
)
graph.add_weighted_edges_from([
#('A','F', 0), ('A','N',0),
('C','F',1.0), ('A','C',1.0),
('N','C',1.0), ('C','D',1.0),
('D','I',1.0), ('I','O',1.0), ('A','E',1.0),
#('E','G',0), ('G','I',0), ('G','M',0),
('G','L',1.0), ('E','H',1.0),
#('H','J',0)
])
essential_nodes = graph_search(graph, ['A','A'])
assert 'A' in essential_nodes
assert [n in essential_nodes for n in ['A', 'C', 'D', 'I', 'O', 'F', 'E', 'H']]
assert [n not in essential_nodes for n in ['B', 'G', 'J', 'K', 'L', 'M', 'N']]
class TestReduceDRG:
    """Tests for reduce_drg (a single DRG reduction pass)."""
    def test_gri_reduction_multiple_cases(self):
        """Tests reduce_drg method with multiple cases"""
        model_file = 'gri30.cti'
        # Conditions for reduction
        conditions = [
            InputIgnition(
                kind='constant volume', pressure=1.0, temperature=1000.0, equivalence_ratio=1.0,
                fuel={'CH4': 1.0}, oxidizer={'O2': 1.0, 'N2': 3.76}
            ),
            InputIgnition(
                kind='constant volume', pressure=1.0, temperature=1200.0, equivalence_ratio=1.0,
                fuel={'CH4': 1.0}, oxidizer={'O2': 1.0, 'N2': 3.76}
            ),
        ]
        # precomputed sampled states; rows appear to be (T, P, composition...)
        # given how state[0]/state[1]/state[2:] are unpacked below
        data = np.genfromtxt(
            relative_location(os.path.join('assets', 'example_ignition_data.dat')),
            delimiter=','
        )
        model = ct.Solution(model_file)
        matrices = []
        for state in data:
            matrices.append(create_drg_matrix((state[0], state[1], state[2:]), model))
        with TemporaryDirectory() as temp_dir:
            reduced_model = reduce_drg(
                model_file, ['CH4', 'O2'], ['N2'], 0.14, matrices,
                conditions, np.array([1.066766136745876281e+00, 4.334773545084597696e-02]),
                previous_model=None, threshold_upper=None, num_threads=1, path=temp_dir
            )
        expected_species = [
            'H2', 'H', 'O', 'O2', 'OH', 'H2O', 'HO2', 'H2O2', 'C', 'CH', 'CH2', 'CH2(S)',
            'CH3', 'CH4', 'CO', 'CO2', 'HCO', 'CH2O', 'CH2OH', 'CH3O', 'C2H2', 'C2H3',
            'C2H4', 'C2H5', 'C2H6', 'HCCO', 'CH2CO', 'N', 'NH', 'NNH', 'NO', 'N2O',
            'HNO', 'CN', 'HCN', 'H2CN', 'HCNN', 'NCO', 'N2', 'CH2CHO'
        ]
        assert check_equal(reduced_model.model.species_names, expected_species)
        assert reduced_model.model.n_reactions == 245
        assert round(reduced_model.error, 2) == 3.64
    def test_gri_reduction_limbo(self):
        """Tests reduce_drg method with limbo species"""
        model_file = 'gri30.cti'
        # Conditions for reduction
        conditions = [
            InputIgnition(
                kind='constant volume', pressure=1.0, temperature=1000.0, equivalence_ratio=1.0,
                fuel={'CH4': 1.0}, oxidizer={'O2': 1.0, 'N2': 3.76}
            ),
        ]
        data = np.genfromtxt(
            relative_location(os.path.join('assets', 'example_ignition_data.dat')),
            delimiter=','
        )
        model = ct.Solution(model_file)
        matrices = []
        for state in data:
            matrices.append(create_drg_matrix((state[0], state[1], state[2:]), model))
        # threshold_upper=0.6 activates the "limbo species" path
        with TemporaryDirectory() as temp_dir:
            reduced_model = reduce_drg(
                model_file, ['CH4', 'O2'], ['N2'], 0.14, matrices,
                conditions, np.array([1.066766136745876281e+00]),
                previous_model=None, threshold_upper=0.6, num_threads=1, path=temp_dir
            )
        expected_species = [
            'H2', 'H', 'O', 'O2', 'OH', 'H2O', 'HO2', 'H2O2', 'C', 'CH', 'CH2', 'CH2(S)',
            'CH3', 'CH4', 'CO', 'CO2', 'HCO', 'CH2O', 'CH2OH', 'CH3O', 'C2H2', 'C2H3',
            'C2H4', 'C2H5', 'C2H6', 'HCCO', 'CH2CO', 'N', 'NH', 'NNH', 'NO', 'N2O',
            'HNO', 'CN', 'HCN', 'H2CN', 'HCNN', 'NCO', 'N2', 'CH2CHO'
        ]
        expected_limbo_species = ['H', 'CH3', 'CH4', 'OH', 'HO2', 'O', 'H2O', 'O2']
        assert check_equal(reduced_model.model.species_names, expected_species)
        assert check_equal(reduced_model.limbo_species, expected_limbo_species)
class TestRunDRG:
    """Tests for the top-level run_drg driver."""
    def test_gri_reduction(self):
        """Tests driver run_drg method"""
        model_file = 'gri30.cti'
        # Conditions for reduction
        conditions = [
            InputIgnition(
                kind='constant volume', pressure=1.0, temperature=1000.0, equivalence_ratio=1.0,
                fuel={'CH4': 1.0}, oxidizer={'O2': 1.0, 'N2': 3.76}
            ),
            InputIgnition(
                kind='constant volume', pressure=1.0, temperature=1200.0, equivalence_ratio=1.0,
                fuel={'CH4': 1.0}, oxidizer={'O2': 1.0, 'N2': 3.76}
            ),
        ]
        # point the shared data_files registry at the bundled example assets
        data_files['output_ignition'] = relative_location(
            os.path.join('assets', 'example_ignition_output.txt')
        )
        data_files['data_ignition'] = relative_location(
            os.path.join('assets', 'example_ignition_data.dat')
        )
        error = 5.0
        # Run DRG
        with TemporaryDirectory() as temp_dir:
            reduced_model = run_drg(
                model_file, conditions, [], [], error, ['CH4', 'O2'], ['N2'],
                num_threads=1, path=temp_dir
            )
        # Expected answer
        expected_model = ct.Solution(relative_location(os.path.join('assets', 'drg_gri30.cti')))
        # Make sure models are the same
        assert check_equal(reduced_model.model.species_names, expected_model.species_names)
        assert reduced_model.model.n_reactions == expected_model.n_reactions
        assert round(reduced_model.error, 2) == 3.64
| [
"tempfile.TemporaryDirectory",
"numpy.allclose",
"numpy.isclose",
"cantera.ConstantCp",
"networkx.DiGraph",
"os.path.join",
"pkg_resources.resource_filename",
"numpy.array",
"tempfile.mkdtemp",
"shutil.rmtree",
"cantera.Reaction.fromCti",
"cantera.Solution",
"cantera.Species"
] | [((971, 989), 'os.path.join', 'os.path.join', (['file'], {}), '(file)\n', (983, 989), False, 'import os\n'), ((1001, 1053), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', 'file_path'], {}), '(__name__, file_path)\n', (1032, 1053), False, 'import pkg_resources\n'), ((1798, 1856), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'F => R\', [1.0, 0.0, 0.0])"""'], {}), '("reaction(\'F => R\', [1.0, 0.0, 0.0])")\n', (1817, 1856), True, 'import cantera as ct\n'), ((1874, 1934), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'R => P\', [1.0e3, 0.0, 0.0])"""'], {}), '("reaction(\'R => P\', [1.0e3, 0.0, 0.0])")\n', (1893, 1934), True, 'import cantera as ct\n'), ((1952, 2011), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'R => Pp\', [1.0, 0.0, 0.0])"""'], {}), '("reaction(\'R => Pp\', [1.0, 0.0, 0.0])")\n', (1971, 2011), True, 'import cantera as ct\n'), ((2029, 2051), 'cantera.Species', 'ct.Species', (['"""F"""', '"""H:1"""'], {}), "('F', 'H:1')\n", (2039, 2051), True, 'import cantera as ct\n'), ((2064, 2086), 'cantera.Species', 'ct.Species', (['"""R"""', '"""H:1"""'], {}), "('R', 'H:1')\n", (2074, 2086), True, 'import cantera as ct\n'), ((2099, 2121), 'cantera.Species', 'ct.Species', (['"""P"""', '"""H:1"""'], {}), "('P', 'H:1')\n", (2109, 2121), True, 'import cantera as ct\n'), ((2135, 2158), 'cantera.Species', 'ct.Species', (['"""Pp"""', '"""H:1"""'], {}), "('Pp', 'H:1')\n", (2145, 2158), True, 'import cantera as ct\n'), ((2321, 2427), 'cantera.Solution', 'ct.Solution', ([], {'thermo': '"""IdealGas"""', 'kinetics': '"""GasKinetics"""', 'species': '[F, R, P, Pp]', 'reactions': '[R1, R2, R3]'}), "(thermo='IdealGas', kinetics='GasKinetics', species=[F, R, P, Pp\n ], reactions=[R1, R2, R3])\n", (2332, 2427), True, 'import cantera as ct\n'), ((2585, 2673), 'numpy.array', 'np.array', (['[[0, 1.0, 0, 0], [0.5, 0, 0.5, 0.5 * 0.001], [0, 1.0, 0, 0], [0, 1, 0, 0]]'], {}), '([[0, 
1.0, 0, 0], [0.5, 0, 0.5, 0.5 * 0.001], [0, 1.0, 0, 0], [0, 1,\n 0, 0]])\n', (2593, 2673), True, 'import numpy as np\n'), ((2744, 2784), 'numpy.allclose', 'np.allclose', (['correct', 'matrix'], {'rtol': '(0.001)'}), '(correct, matrix, rtol=0.001)\n', (2755, 2784), True, 'import numpy as np\n'), ((2936, 2997), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'F <=> R\', [1.0e3, 0.0, 0.0])"""'], {}), '("reaction(\'F <=> R\', [1.0e3, 0.0, 0.0])")\n', (2955, 2997), True, 'import cantera as ct\n'), ((3015, 3074), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'R <=> P\', [1.0, 0.0, 0.0])"""'], {}), '("reaction(\'R <=> P\', [1.0, 0.0, 0.0])")\n', (3034, 3074), True, 'import cantera as ct\n'), ((3092, 3114), 'cantera.Species', 'ct.Species', (['"""F"""', '"""H:1"""'], {}), "('F', 'H:1')\n", (3102, 3114), True, 'import cantera as ct\n'), ((3127, 3149), 'cantera.Species', 'ct.Species', (['"""R"""', '"""H:1"""'], {}), "('R', 'H:1')\n", (3137, 3149), True, 'import cantera as ct\n'), ((3162, 3184), 'cantera.Species', 'ct.Species', (['"""P"""', '"""H:1"""'], {}), "('P', 'H:1')\n", (3172, 3184), True, 'import cantera as ct\n'), ((3344, 3441), 'cantera.Solution', 'ct.Solution', ([], {'thermo': '"""IdealGas"""', 'kinetics': '"""GasKinetics"""', 'species': '[F, R, P]', 'reactions': '[R1, R2]'}), "(thermo='IdealGas', kinetics='GasKinetics', species=[F, R, P],\n reactions=[R1, R2])\n", (3355, 3441), True, 'import cantera as ct\n'), ((3726, 3789), 'numpy.array', 'np.array', (['[[0, 1.0, 0], [1.0 / 3.0, 0, 2.0 / 3.0], [0, 1.0, 0]]'], {}), '([[0, 1.0, 0], [1.0 / 3.0, 0, 2.0 / 3.0], [0, 1.0, 0]])\n', (3734, 3789), True, 'import numpy as np\n'), ((3848, 3888), 'numpy.allclose', 'np.allclose', (['correct', 'matrix'], {'rtol': '(0.001)'}), '(correct, matrix, rtol=0.001)\n', (3859, 3888), True, 'import numpy as np\n'), ((4045, 4104), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'A <=> B\', [1.0, 0.0, 0.0])"""'], {}), '("reaction(\'A 
<=> B\', [1.0, 0.0, 0.0])")\n', (4064, 4104), True, 'import cantera as ct\n'), ((4122, 4184), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'B <=> C\', [1.0e-3, 0.0, 0.0])"""'], {}), '("reaction(\'B <=> C\', [1.0e-3, 0.0, 0.0])")\n', (4141, 4184), True, 'import cantera as ct\n'), ((4202, 4224), 'cantera.Species', 'ct.Species', (['"""A"""', '"""H:1"""'], {}), "('A', 'H:1')\n", (4212, 4224), True, 'import cantera as ct\n'), ((4237, 4259), 'cantera.Species', 'ct.Species', (['"""B"""', '"""H:1"""'], {}), "('B', 'H:1')\n", (4247, 4259), True, 'import cantera as ct\n'), ((4272, 4294), 'cantera.Species', 'ct.Species', (['"""C"""', '"""H:1"""'], {}), "('C', 'H:1')\n", (4282, 4294), True, 'import cantera as ct\n'), ((4454, 4551), 'cantera.Solution', 'ct.Solution', ([], {'thermo': '"""IdealGas"""', 'kinetics': '"""GasKinetics"""', 'species': '[A, B, C]', 'reactions': '[R1, R2]'}), "(thermo='IdealGas', kinetics='GasKinetics', species=[A, B, C],\n reactions=[R1, R2])\n", (4465, 4551), True, 'import cantera as ct\n'), ((4704, 4783), 'numpy.array', 'np.array', (['[[0, 1.0, 0], [1 / (1 + 0.001), 0, 0.001 / (1 + 0.001)], [0, 1.0, 0]]'], {}), '([[0, 1.0, 0], [1 / (1 + 0.001), 0, 0.001 / (1 + 0.001)], [0, 1.0, 0]])\n', (4712, 4783), True, 'import numpy as np\n'), ((4839, 4879), 'numpy.allclose', 'np.allclose', (['correct', 'matrix'], {'rtol': '(0.001)'}), '(correct, matrix, rtol=0.001)\n', (4850, 4879), True, 'import numpy as np\n'), ((5361, 5401), 'numpy.allclose', 'np.allclose', (['correct', 'matrix'], {'rtol': '(0.001)'}), '(correct, matrix, rtol=0.001)\n', (5372, 5401), True, 'import numpy as np\n'), ((5626, 5655), 'cantera.Solution', 'ct.Solution', (['path_to_original'], {}), '(path_to_original)\n', (5637, 5655), True, 'import cantera as ct\n'), ((6168, 6232), 'numpy.isclose', 'np.isclose', (["expected_denoms['H2O']", "denoms['H2O']"], {'abs_tol': '(1e-17)'}), "(expected_denoms['H2O'], denoms['H2O'], abs_tol=1e-17)\n", (6178, 6232), True, 'import numpy as 
np\n'), ((6249, 6311), 'numpy.isclose', 'np.isclose', (["expected_denoms['H2']", "denoms['H2']"], {'abs_tol': '(1e-10)'}), "(expected_denoms['H2'], denoms['H2'], abs_tol=1e-10)\n", (6259, 6311), True, 'import numpy as np\n'), ((6328, 6390), 'numpy.isclose', 'np.isclose', (["expected_denoms['O2']", "denoms['O2']"], {'abs_tol': '(1e-18)'}), "(expected_denoms['O2'], denoms['O2'], abs_tol=1e-18)\n", (6338, 6390), True, 'import numpy as np\n'), ((6407, 6467), 'numpy.isclose', 'np.isclose', (["expected_denoms['H']", "denoms['H']"], {'abs_tol': '(1e-10)'}), "(expected_denoms['H'], denoms['H'], abs_tol=1e-10)\n", (6417, 6467), True, 'import numpy as np\n'), ((6915, 6985), 'numpy.isclose', 'np.isclose', (["expected_numers['H2O_H2']", "numers['H2O_H2']"], {'abs_tol': '(1e-17)'}), "(expected_numers['H2O_H2'], numers['H2O_H2'], abs_tol=1e-17)\n", (6925, 6985), True, 'import numpy as np\n'), ((7001, 7071), 'numpy.isclose', 'np.isclose', (["expected_numers['H2O_O2']", "numers['H2O_O2']"], {'abs_tol': '(1e-17)'}), "(expected_numers['H2O_O2'], numers['H2O_O2'], abs_tol=1e-17)\n", (7011, 7071), True, 'import numpy as np\n'), ((7087, 7155), 'numpy.isclose', 'np.isclose', (["expected_numers['H2_O2']", "numers['H2_O2']"], {'abs_tol': '(1e-17)'}), "(expected_numers['H2_O2'], numers['H2_O2'], abs_tol=1e-17)\n", (7097, 7155), True, 'import numpy as np\n'), ((7171, 7241), 'numpy.isclose', 'np.isclose', (["expected_numers['H2_H2O']", "numers['H2_H2O']"], {'abs_tol': '(1e-17)'}), "(expected_numers['H2_H2O'], numers['H2_H2O'], abs_tol=1e-17)\n", (7181, 7241), True, 'import numpy as np\n'), ((7257, 7325), 'numpy.isclose', 'np.isclose', (["expected_numers['O2_H2']", "numers['O2_H2']"], {'abs_tol': '(1e-18)'}), "(expected_numers['O2_H2'], numers['O2_H2'], abs_tol=1e-18)\n", (7267, 7325), True, 'import numpy as np\n'), ((7341, 7411), 'numpy.isclose', 'np.isclose', (["expected_numers['O2_H2O']", "numers['O2_H2O']"], {'abs_tol': '(1e-18)'}), "(expected_numers['O2_H2O'], numers['O2_H2O'], 
abs_tol=1e-18)\n", (7351, 7411), True, 'import numpy as np\n'), ((7427, 7493), 'numpy.isclose', 'np.isclose', (["expected_numers['H2_H']", "numers['H2_H']"], {'abs_tol': '(1e-18)'}), "(expected_numers['H2_H'], numers['H2_H'], abs_tol=1e-18)\n", (7437, 7493), True, 'import numpy as np\n'), ((7509, 7575), 'numpy.isclose', 'np.isclose', (["expected_numers['H_H2']", "numers['H_H2']"], {'abs_tol': '(1e-18)'}), "(expected_numers['H_H2'], numers['H_H2'], abs_tol=1e-18)\n", (7519, 7575), True, 'import numpy as np\n'), ((7677, 7730), 'numpy.array', 'np.array', (['[[0, 1, 0.1], [0.5, 0, 0.5], [0.5, 0.5, 0]]'], {}), '([[0, 1, 0.1], [0.5, 0, 0.5], [0.5, 0.5, 0]])\n', (7685, 7730), True, 'import numpy as np\n'), ((8097, 8246), 'numpy.array', 'np.array', (['[[0, 0.5, 0, 0, 0, 0], [0, 0, 0, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0, 0], [0, \n 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]]'], {}), '([[0, 0.5, 0, 0, 0, 0], [0, 0, 0, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0, 0\n ], [0, 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]])\n', (8105, 8246), True, 'import numpy as np\n'), ((8526, 8675), 'numpy.array', 'np.array', (['[[0, 0.5, 0, 0, 0, 0], [0, 0, 0, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0, 0], [0, \n 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]]'], {}), '([[0, 0.5, 0, 0, 0, 0], [0, 0, 0, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0, 0\n ], [0, 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]])\n', (8534, 8675), True, 'import numpy as np\n'), ((9063, 9214), 'numpy.array', 'np.array', (['[[0, 0.5, 0, 0, 0, 0], [0, 0, 0.15, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0, 0], [0,\n 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]]'], {}), '([[0, 0.5, 0, 0, 0, 0], [0, 0, 0.15, 0.9, 0, 0], [0, 0.5, 0, 0.5, 0,\n 0], [0, 0.9, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1.0], [0, 0, 0, 0, 1.0, 0]])\n', (9071, 9214), True, 'import numpy as np\n'), ((9704, 9762), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'F => P\', [1.0, 0.0, 0.0])"""'], {}), '("reaction(\'F => 
P\', [1.0, 0.0, 0.0])")\n', (9723, 9762), True, 'import cantera as ct\n'), ((9780, 9841), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'F => R\', [1.0e-2, 0.0, 0.0])"""'], {}), '("reaction(\'F => R\', [1.0e-2, 0.0, 0.0])")\n', (9799, 9841), True, 'import cantera as ct\n'), ((9859, 9919), 'cantera.Reaction.fromCti', 'ct.Reaction.fromCti', (['"""reaction(\'R => P\', [1.0e2, 0.0, 0.0])"""'], {}), '("reaction(\'R => P\', [1.0e2, 0.0, 0.0])")\n', (9878, 9919), True, 'import cantera as ct\n'), ((9937, 9959), 'cantera.Species', 'ct.Species', (['"""F"""', '"""H:1"""'], {}), "('F', 'H:1')\n", (9947, 9959), True, 'import cantera as ct\n'), ((9972, 9994), 'cantera.Species', 'ct.Species', (['"""P"""', '"""H:1"""'], {}), "('P', 'H:1')\n", (9982, 9994), True, 'import cantera as ct\n'), ((10007, 10029), 'cantera.Species', 'ct.Species', (['"""R"""', '"""H:1"""'], {}), "('R', 'H:1')\n", (10017, 10029), True, 'import cantera as ct\n'), ((10189, 10290), 'cantera.Solution', 'ct.Solution', ([], {'thermo': '"""IdealGas"""', 'kinetics': '"""GasKinetics"""', 'species': '[F, P, R]', 'reactions': '[R1, R2, R3]'}), "(thermo='IdealGas', kinetics='GasKinetics', species=[F, P, R],\n reactions=[R1, R2, R3])\n", (10200, 10290), True, 'import cantera as ct\n'), ((10760, 10772), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (10770, 10772), True, 'import networkx as nx\n'), ((11821, 11833), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (11831, 11833), True, 'import networkx as nx\n'), ((12797, 12809), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (12807, 12809), True, 'import networkx as nx\n'), ((13787, 13799), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (13797, 13799), True, 'import networkx as nx\n'), ((14474, 14486), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (14484, 14486), True, 'import networkx as nx\n'), ((15129, 15141), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (15139, 15141), True, 'import networkx as nx\n'), ((15636, 
15648), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (15646, 15648), True, 'import networkx as nx\n'), ((16300, 16312), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (16310, 16312), True, 'import networkx as nx\n'), ((16551, 16563), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (16561, 16563), True, 'import networkx as nx\n'), ((17410, 17422), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (17420, 17422), True, 'import networkx as nx\n'), ((19022, 19045), 'cantera.Solution', 'ct.Solution', (['model_file'], {}), '(model_file)\n', (19033, 19045), True, 'import cantera as ct\n'), ((20707, 20730), 'cantera.Solution', 'ct.Solution', (['model_file'], {}), '(model_file)\n', (20718, 20730), True, 'import cantera as ct\n'), ((568, 586), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (584, 586), False, 'import tempfile\n'), ((2216, 2270), 'cantera.ConstantCp', 'ct.ConstantCp', (['(300)', '(1000)', '(101325)', '(300, 1.0, 1.0, 1.0)'], {}), '(300, 1000, 101325, (300, 1.0, 1.0, 1.0))\n', (2229, 2270), True, 'import cantera as ct\n'), ((3239, 3293), 'cantera.ConstantCp', 'ct.ConstantCp', (['(300)', '(1000)', '(101325)', '(300, 1.0, 1.0, 1.0)'], {}), '(300, 1000, 101325, (300, 1.0, 1.0, 1.0))\n', (3252, 3293), True, 'import cantera as ct\n'), ((4349, 4403), 'cantera.ConstantCp', 'ct.ConstantCp', (['(300)', '(1000)', '(101325)', '(300, 1.0, 1.0, 1.0)'], {}), '(300, 1000, 101325, (300, 1.0, 1.0, 1.0))\n', (4362, 4403), True, 'import cantera as ct\n'), ((10084, 10138), 'cantera.ConstantCp', 'ct.ConstantCp', (['(300)', '(1000)', '(101325)', '(300, 1.0, 1.0, 1.0)'], {}), '(300, 1000, 101325, (300, 1.0, 1.0, 1.0))\n', (10097, 10138), True, 'import cantera as ct\n'), ((19204, 19224), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (19222, 19224), False, 'from tempfile import TemporaryDirectory\n'), ((20889, 20909), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (20907, 20909), False, 'from tempfile import 
TemporaryDirectory\n'), ((22521, 22574), 'os.path.join', 'os.path.join', (['"""assets"""', '"""example_ignition_output.txt"""'], {}), "('assets', 'example_ignition_output.txt')\n", (22533, 22574), False, 'import os\n'), ((22658, 22709), 'os.path.join', 'os.path.join', (['"""assets"""', '"""example_ignition_data.dat"""'], {}), "('assets', 'example_ignition_data.dat')\n", (22670, 22709), False, 'import os\n'), ((22776, 22796), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (22794, 22796), False, 'from tempfile import TemporaryDirectory\n'), ((18910, 18961), 'os.path.join', 'os.path.join', (['"""assets"""', '"""example_ignition_data.dat"""'], {}), "('assets', 'example_ignition_data.dat')\n", (18922, 18961), False, 'import os\n'), ((19374, 19425), 'numpy.array', 'np.array', (['[1.0667661367458763, 0.04334773545084598]'], {}), '([1.0667661367458763, 0.04334773545084598])\n', (19382, 19425), True, 'import numpy as np\n'), ((20595, 20646), 'os.path.join', 'os.path.join', (['"""assets"""', '"""example_ignition_data.dat"""'], {}), "('assets', 'example_ignition_data.dat')\n", (20607, 20646), False, 'import os\n'), ((21059, 21089), 'numpy.array', 'np.array', (['[1.0667661367458763]'], {}), '([1.0667661367458763])\n', (21067, 21089), True, 'import numpy as np\n'), ((23071, 23110), 'os.path.join', 'os.path.join', (['"""assets"""', '"""drg_gri30.cti"""'], {}), "('assets', 'drg_gri30.cti')\n", (23083, 23110), False, 'import os\n'), ((673, 692), 'shutil.rmtree', 'shutil.rmtree', (['name'], {}), '(name)\n', (686, 692), False, 'import shutil\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 23 12:59:35 2019
Binomial test
@author: tadahaya
"""
import pandas as pd
import numpy as np
import scipy.stats as stats
import statsmodels.stats.multitest as multitest
from scipy.stats import rankdata
class Calculator():
    """Stateful wrapper around do_binom: runs the binomial enrichment test
    and caches the most recent result table."""

    def __init__(self):
        # result of the last calc() call; empty until calc() has been run
        self.res = pd.DataFrame()

    def calc(self, obj, ref, whole, focus=None, **kwargs):
        """Run the binomial enrichment test, cache and return the result.

        All arguments are forwarded to do_binom(); see its docstring.

        BUG FIX: `focus` was previously hard-coded to None in the call,
        silently ignoring the caller's value -- it is now forwarded.
        """
        self.res = do_binom(obj, ref, whole, focus=focus, **kwargs)
        return self.res

    def get_details(self):
        """Return the cached result of the most recent calc() call."""
        return self.res
def do_binom(obj,ref,whole,focus=None,correction="fdr_bh",mode="greater"):
    """
    conduct Binomial test and obtain p value corrected for multiple tests
    all elements should be given as ID, except for term

    Parameters
    ----------
    obj: set
        a set of variables in signature of interest

    ref: dict
        a dict mapping each term to its members; the values are intersected
        with `obj` via `&`, so they are expected to be sets
        (TODO confirm with callers -- the original docstring said "list")

    whole: set
        a set of whole variables adjusted between datasets and interest

    correction: str
        method for correcting multiple tests, passed to
        "statsmodels.stats.multitest.multipletests"
        (the original docstring documented this parameter as "method")

    focus: int
        export only the `focus` results with the lowest p values

    mode: str
        alternative hypothesis of the test:
        "greater", "two-sided", or "less"
    """
    pval = []
    overlap = []
    total = []
    hit = []
    # bind the append methods once to avoid attribute lookups in the loop
    ap = pval.append
    ap2 = overlap.append
    ap3 = total.append
    ap4 = hit.append
    n_whole = len(whole)
    keys = list(ref.keys())
    values = list(ref.values())
    for m in values:
        n_set = len(m)
        # null hit probability: term size relative to the universe size
        pc = n_set/n_whole
        inter = obj & m
        n_inter = len(inter)
        # one binomial test per term: n_inter successes in n_set trials
        ap(stats.binom_test(n_inter,n_set,pc,alternative=mode))
        ap2(inter)
        ap3(len(m))
        ap4(n_inter)
if len(pval)==0:
res = pd.DataFrame(columns=["p value","adjusted p value","overlap","hit No.",
"total No."])
else:
res = pd.DataFrame({"p value":pval,"overlap":overlap,
"hit No.":hit,"total No.":total},index=keys).sort_values(by="p value")
fxn = lambda x: len(x) > 0
res = res[res["overlap"].map(fxn)]
if res.shape[0]!=0:
res["adjusted p value"] = multitest.multipletests(res["p value"],alpha=0.05,method=correction)[1]
res = res.sort_values(by="p value")
res = res.loc[:,["p value","adjusted p value","overlap","hit No.","total No."]] # sort
if (focus is None) or (res.shape[0]==0):
pass
else:
res = res.iloc[:focus,:]
return res | [
"pandas.DataFrame",
"scipy.stats.binom_test",
"statsmodels.stats.multitest.multipletests"
] | [((332, 346), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (344, 346), True, 'import pandas as pd\n'), ((1926, 2018), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['p value', 'adjusted p value', 'overlap', 'hit No.', 'total No.']"}), "(columns=['p value', 'adjusted p value', 'overlap', 'hit No.',\n 'total No.'])\n", (1938, 2018), True, 'import pandas as pd\n'), ((1773, 1827), 'scipy.stats.binom_test', 'stats.binom_test', (['n_inter', 'n_set', 'pc'], {'alternative': 'mode'}), '(n_inter, n_set, pc, alternative=mode)\n', (1789, 1827), True, 'import scipy.stats as stats\n'), ((2075, 2178), 'pandas.DataFrame', 'pd.DataFrame', (["{'p value': pval, 'overlap': overlap, 'hit No.': hit, 'total No.': total}"], {'index': 'keys'}), "({'p value': pval, 'overlap': overlap, 'hit No.': hit,\n 'total No.': total}, index=keys)\n", (2087, 2178), True, 'import pandas as pd\n'), ((2371, 2441), 'statsmodels.stats.multitest.multipletests', 'multitest.multipletests', (["res['p value']"], {'alpha': '(0.05)', 'method': 'correction'}), "(res['p value'], alpha=0.05, method=correction)\n", (2394, 2441), True, 'import statsmodels.stats.multitest as multitest\n')] |
import os
from argparse import SUPPRESS
import numpy as np
from pysam import Samfile, Fastafile
from scipy.stats import scoreatpercentile
# Internal
from rgt.Util import GenomeData, HmmData, ErrorHandler
from rgt.GenomicRegionSet import GenomicRegionSet
from rgt.HINT.biasTable import BiasTable
from rgt.HINT.signalProcessing import GenomicSignal
def tracks_args(parser):
    """Register every command-line option of the 'tracks' subcommand.

    The option set, defaults, help texts and registration order are exactly
    those of the original tool; the calls are routed through a local alias
    of parser.add_argument.
    """
    add = parser.add_argument

    # Parameters Options
    add("--organism", type=str, metavar="STRING", default="hg19",
        help="Organism considered on the analysis. Must have been setup in the RGTDATA folder. "
             "Common choices are hg19, hg38. mm9, and mm10. DEFAULT: hg19")
    add("--bias-table", type=str, metavar="FILE1_F,FILE1_R", default=None,
        help="Bias table files used to generate bias corrected tracks. DEFAULT: None")

    # Hidden Options (help deliberately suppressed)
    add("--initial-clip", type=int, metavar="INT", default=50, help=SUPPRESS)
    add("--downstream-ext", type=int, metavar="INT", default=1, help=SUPPRESS)
    add("--upstream-ext", type=int, metavar="INT", default=0, help=SUPPRESS)
    add("--forward-shift", type=int, metavar="INT", default=5, help=SUPPRESS)
    add("--reverse-shift", type=int, metavar="INT", default=-4, help=SUPPRESS)
    add("--k-nb", type=int, metavar="INT", default=6, help=SUPPRESS)

    # Output Options
    add("--raw", action="store_true", default=False,
        help="If set, the raw signals from DNase-seq or ATAC-seq data will be generated. DEFAULT: False")
    add("--bc", action="store_true", default=False,
        help="If set, the bias corrected signals from DNase-seq or ATAC-seq data will be generated. "
             "DEFAULT: False")
    add("--norm", action="store_true", default=False,
        help="If set, the normalised signals from DNase-seq or ATAC-seq data will be generated. "
             "DEFAULT: False")
    add("--bigWig", action="store_true", default=False,
        help="If set, all .wig files will be converted to .bw files. DEFAULT: False")
    add("--strand-specific", action="store_true", default=False,
        help="If set, the tracks will be splitted into two files, one for forward and another for "
             "reverse strand. DEFAULT: False")

    # Output location and naming
    add("--output-location", type=str, metavar="PATH", default=os.getcwd(),
        help="Path where the output bias table files will be written. DEFAULT: current directory")
    add("--output-prefix", type=str, metavar="STRING", default="tracks",
        help="The prefix for results files. DEFAULT: tracks")

    # Positional arguments
    add('input_files', metavar='reads.bam regions.bed', type=str, nargs='*',
        help='BAM file of reads and BED files of interesting regions')
def tracks_run(args):
    """Dispatch to the requested track generators.

    --raw and --bc are independent flags: either, both, or neither
    generator may run for a single invocation.
    """
    dispatch = ((args.raw, get_raw_tracks), (args.bc, get_bc_tracks))
    for requested, make_track in dispatch:
        if requested:
            make_track(args)
def get_raw_tracks(args):
    """Write a raw cut-site signal track (.wig) for the regions of interest.

    Expects args.input_files == [reads.bam, regions.bed]. For every merged
    region, counts strand-shifted 5' cut sites per base, optionally
    normalises the profile (--norm), writes one fixedStep wig block per
    region, and converts the track to bigWig when --bigWig is set.
    """
    # Initializing Error Handler
    err = ErrorHandler()

    if len(args.input_files) != 2:
        err.throw_error("ME_FEW_ARG", add_msg="You must specify reads and regions file.")

    output_fname = os.path.join(args.output_location, "{}.wig".format(args.output_prefix))

    bam = Samfile(args.input_files[0], "rb")
    regions = GenomicRegionSet("Interested regions")
    regions.read(args.input_files[1])
    regions.merge()
    reads_file = GenomicSignal()

    # NOTE(review): the file is opened in append mode, so re-running with the
    # same prefix extends an existing track -- presumably intentional; confirm.
    with open(output_fname, "a") as output_f:
        for region in regions:
            # Raw counts: one slot per base pair of the region
            signal = [0.0] * (region.final - region.initial)
            for read in bam.fetch(region.chrom, region.initial, region.final):
                # Shift each read to its strand-specific cut site
                if not read.is_reverse:
                    cut_site = read.pos + args.forward_shift
                    if region.initial <= cut_site < region.final:
                        signal[cut_site - region.initial] += 1.0
                else:
                    cut_site = read.aend + args.reverse_shift - 1
                    if region.initial <= cut_site < region.final:
                        signal[cut_site - region.initial] += 1.0

            if args.norm:
                signal = reads_file.boyle_norm(signal)
                perc = scoreatpercentile(signal, 98)
                std = np.std(signal)
                signal = reads_file.hon_norm_atac(signal, perc, std)

            output_f.write("fixedStep chrom=" + region.chrom + " start=" + str(region.initial + 1) + " step=1\n" +
                           "\n".join([str(e) for e in np.nan_to_num(signal)]) + "\n")
    # FIX: removed the redundant output_f.close() that followed this block;
    # the with-statement already closes the file.

    if args.bigWig:
        genome_data = GenomeData(args.organism)
        chrom_sizes_file = genome_data.get_chromosome_sizes()
        bw_filename = os.path.join(args.output_location, "{}.bw".format(args.output_prefix))
        # wigToBigWig must be on PATH; the intermediate .wig is removed afterwards
        os.system(" ".join(["wigToBigWig", output_fname, chrom_sizes_file, bw_filename, "-verbose=0"]))
        os.remove(output_fname)
def get_bc_tracks(args):
    """Write bias-corrected signal tracks for the given regions.

    Expects two positional input files: a BAM of aligned reads and a BED of
    regions of interest.  With ``args.strand_specific`` a forward/reverse pair
    of wiggle tracks is produced; otherwise a single combined track.  With
    ``args.bigWig`` each wiggle file is converted to bigWig and the wiggle
    intermediate removed.

    Args:
        args: Parsed argument namespace from the ``tracks`` subcommand.
    """
    # Initializing Error Handler
    err = ErrorHandler()

    if len(args.input_files) != 2:
        err.throw_error("ME_FEW_ARG", add_msg="You must specify reads and regions file.")

    regions = GenomicRegionSet("Interested regions")
    regions.read(args.input_files[1])
    regions.merge()

    reads_file = GenomicSignal()

    bam = Samfile(args.input_files[0], "rb")
    genome_data = GenomeData(args.organism)
    fasta = Fastafile(genome_data.get_genome())

    hmm_data = HmmData()
    if args.bias_table:
        # User-supplied forward/reverse bias tables, comma separated.
        bias_table_list = args.bias_table.split(",")
        bias_table = BiasTable().load_table(table_file_name_F=bias_table_list[0],
                                               table_file_name_R=bias_table_list[1])
    else:
        # Packaged default ATAC-seq bias tables.
        table_F = hmm_data.get_default_bias_table_F_ATAC()
        table_R = hmm_data.get_default_bias_table_R_ATAC()
        bias_table = BiasTable().load_table(table_file_name_F=table_F,
                                               table_file_name_R=table_R)

    if args.strand_specific:
        fname_forward = os.path.join(args.output_location, "{}_forward.wig".format(args.output_prefix))
        fname_reverse = os.path.join(args.output_location, "{}_reverse.wig".format(args.output_prefix))

        # Context managers guarantee both wiggle files are closed even if a
        # region fails mid-loop (the original closed them manually).
        with open(fname_forward, "a") as f_forward, open(fname_reverse, "a") as f_reverse:
            for region in regions:
                signal_f, signal_r = reads_file.get_bc_signal_by_fragment_length(
                    ref=region.chrom, start=region.initial, end=region.final, bam=bam, fasta=fasta,
                    bias_table=bias_table, forward_shift=args.forward_shift, reverse_shift=args.reverse_shift,
                    min_length=None, max_length=None, strand=True)

                if args.norm:
                    # Percentile/std-based normalization, per strand.
                    signal_f = reads_file.boyle_norm(signal_f)
                    perc = scoreatpercentile(signal_f, 98)
                    std = np.std(signal_f)
                    signal_f = reads_file.hon_norm_atac(signal_f, perc, std)

                    signal_r = reads_file.boyle_norm(signal_r)
                    perc = scoreatpercentile(signal_r, 98)
                    std = np.std(signal_r)
                    signal_r = reads_file.hon_norm_atac(signal_r, perc, std)

                # Reverse-strand values are negated so both strands can be
                # shown on one axis.
                f_forward.write("fixedStep chrom=" + region.chrom + " start=" + str(region.initial + 1) + " step=1\n" +
                                "\n".join([str(e) for e in np.nan_to_num(signal_f)]) + "\n")
                f_reverse.write("fixedStep chrom=" + region.chrom + " start=" + str(region.initial + 1) + " step=1\n" +
                                "\n".join([str(-e) for e in np.nan_to_num(signal_r)]) + "\n")

        if args.bigWig:
            genome_data = GenomeData(args.organism)
            chrom_sizes_file = genome_data.get_chromosome_sizes()

            bw_filename = os.path.join(args.output_location, "{}_forward.bw".format(args.output_prefix))
            os.system(" ".join(["wigToBigWig", fname_forward, chrom_sizes_file, bw_filename, "-verbose=0"]))
            os.remove(fname_forward)

            bw_filename = os.path.join(args.output_location, "{}_reverse.bw".format(args.output_prefix))
            os.system(" ".join(["wigToBigWig", fname_reverse, chrom_sizes_file, bw_filename, "-verbose=0"]))
            os.remove(fname_reverse)
    else:
        output_fname = os.path.join(args.output_location, "{}.wig".format(args.output_prefix))

        with open(output_fname, "a") as output_f:
            for region in regions:
                signal = reads_file.get_bc_signal_by_fragment_length(ref=region.chrom, start=region.initial,
                                                                     end=region.final,
                                                                     bam=bam, fasta=fasta, bias_table=bias_table,
                                                                     forward_shift=args.forward_shift,
                                                                     reverse_shift=args.reverse_shift,
                                                                     min_length=None, max_length=None, strand=False)
                if args.norm:
                    signal = reads_file.boyle_norm(signal)
                    perc = scoreatpercentile(signal, 98)
                    std = np.std(signal)
                    signal = reads_file.hon_norm_atac(signal, perc, std)

                output_f.write("fixedStep chrom=" + region.chrom + " start=" + str(region.initial + 1) + " step=1\n" +
                               "\n".join([str(e) for e in np.nan_to_num(signal)]) + "\n")
        # NOTE: the redundant output_f.close() after the with-block was removed;
        # the context manager already closes the file.

        if args.bigWig:
            genome_data = GenomeData(args.organism)
            chrom_sizes_file = genome_data.get_chromosome_sizes()

            bw_filename = os.path.join(args.output_location, "{}.bw".format(args.output_prefix))
            os.system(" ".join(["wigToBigWig", output_fname, chrom_sizes_file, bw_filename, "-verbose=0"]))
            os.remove(output_fname)
| [
"rgt.Util.GenomeData",
"scipy.stats.scoreatpercentile",
"rgt.Util.ErrorHandler",
"rgt.HINT.signalProcessing.GenomicSignal",
"os.getcwd",
"rgt.HINT.biasTable.BiasTable",
"numpy.std",
"pysam.Samfile",
"rgt.Util.HmmData",
"rgt.GenomicRegionSet.GenomicRegionSet",
"numpy.nan_to_num",
"os.remove"
] | [((3332, 3346), 'rgt.Util.ErrorHandler', 'ErrorHandler', ([], {}), '()\n', (3344, 3346), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((3576, 3610), 'pysam.Samfile', 'Samfile', (['args.input_files[0]', '"""rb"""'], {}), "(args.input_files[0], 'rb')\n", (3583, 3610), False, 'from pysam import Samfile, Fastafile\n'), ((3625, 3663), 'rgt.GenomicRegionSet.GenomicRegionSet', 'GenomicRegionSet', (['"""Interested regions"""'], {}), "('Interested regions')\n", (3641, 3663), False, 'from rgt.GenomicRegionSet import GenomicRegionSet\n'), ((3739, 3754), 'rgt.HINT.signalProcessing.GenomicSignal', 'GenomicSignal', ([], {}), '()\n', (3752, 3754), False, 'from rgt.HINT.signalProcessing import GenomicSignal\n'), ((5343, 5357), 'rgt.Util.ErrorHandler', 'ErrorHandler', ([], {}), '()\n', (5355, 5357), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((5499, 5537), 'rgt.GenomicRegionSet.GenomicRegionSet', 'GenomicRegionSet', (['"""Interested regions"""'], {}), "('Interested regions')\n", (5515, 5537), False, 'from rgt.GenomicRegionSet import GenomicRegionSet\n'), ((5614, 5629), 'rgt.HINT.signalProcessing.GenomicSignal', 'GenomicSignal', ([], {}), '()\n', (5627, 5629), False, 'from rgt.HINT.signalProcessing import GenomicSignal\n'), ((5641, 5675), 'pysam.Samfile', 'Samfile', (['args.input_files[0]', '"""rb"""'], {}), "(args.input_files[0], 'rb')\n", (5648, 5675), False, 'from pysam import Samfile, Fastafile\n'), ((5694, 5719), 'rgt.Util.GenomeData', 'GenomeData', (['args.organism'], {}), '(args.organism)\n', (5704, 5719), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((5784, 5793), 'rgt.Util.HmmData', 'HmmData', ([], {}), '()\n', (5791, 5793), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((4956, 4981), 'rgt.Util.GenomeData', 'GenomeData', (['args.organism'], {}), '(args.organism)\n', (4966, 4981), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((5249, 5272), 'os.remove', 
'os.remove', (['output_fname'], {}), '(output_fname)\n', (5258, 5272), False, 'import os\n'), ((2671, 2682), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2680, 2682), False, 'import os\n'), ((8006, 8031), 'rgt.Util.GenomeData', 'GenomeData', (['args.organism'], {}), '(args.organism)\n', (8016, 8031), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((8325, 8349), 'os.remove', 'os.remove', (['fname_forward'], {}), '(fname_forward)\n', (8334, 8349), False, 'import os\n'), ((8577, 8601), 'os.remove', 'os.remove', (['fname_reverse'], {}), '(fname_reverse)\n', (8586, 8601), False, 'import os\n'), ((9973, 9998), 'rgt.Util.GenomeData', 'GenomeData', (['args.organism'], {}), '(args.organism)\n', (9983, 9998), False, 'from rgt.Util import GenomeData, HmmData, ErrorHandler\n'), ((10282, 10305), 'os.remove', 'os.remove', (['output_fname'], {}), '(output_fname)\n', (10291, 10305), False, 'import os\n'), ((4554, 4583), 'scipy.stats.scoreatpercentile', 'scoreatpercentile', (['signal', '(98)'], {}), '(signal, 98)\n', (4571, 4583), False, 'from scipy.stats import scoreatpercentile\n'), ((4606, 4620), 'numpy.std', 'np.std', (['signal'], {}), '(signal)\n', (4612, 4620), True, 'import numpy as np\n'), ((5892, 5903), 'rgt.HINT.biasTable.BiasTable', 'BiasTable', ([], {}), '()\n', (5901, 5903), False, 'from rgt.HINT.biasTable import BiasTable\n'), ((6184, 6195), 'rgt.HINT.biasTable.BiasTable', 'BiasTable', ([], {}), '()\n', (6193, 6195), False, 'from rgt.HINT.biasTable import BiasTable\n'), ((7118, 7149), 'scipy.stats.scoreatpercentile', 'scoreatpercentile', (['signal_f', '(98)'], {}), '(signal_f, 98)\n', (7135, 7149), False, 'from scipy.stats import scoreatpercentile\n'), ((7172, 7188), 'numpy.std', 'np.std', (['signal_f'], {}), '(signal_f)\n', (7178, 7188), True, 'import numpy as np\n'), ((7345, 7376), 'scipy.stats.scoreatpercentile', 'scoreatpercentile', (['signal_r', '(98)'], {}), '(signal_r, 98)\n', (7362, 7376), False, 'from scipy.stats import 
scoreatpercentile\n'), ((7399, 7415), 'numpy.std', 'np.std', (['signal_r'], {}), '(signal_r)\n', (7405, 7415), True, 'import numpy as np\n'), ((9543, 9572), 'scipy.stats.scoreatpercentile', 'scoreatpercentile', (['signal', '(98)'], {}), '(signal, 98)\n', (9560, 9572), False, 'from scipy.stats import scoreatpercentile\n'), ((9599, 9613), 'numpy.std', 'np.std', (['signal'], {}), '(signal)\n', (9605, 9613), True, 'import numpy as np\n'), ((4860, 4881), 'numpy.nan_to_num', 'np.nan_to_num', (['signal'], {}), '(signal)\n', (4873, 4881), True, 'import numpy as np\n'), ((7661, 7684), 'numpy.nan_to_num', 'np.nan_to_num', (['signal_f'], {}), '(signal_f)\n', (7674, 7684), True, 'import numpy as np\n'), ((7868, 7891), 'numpy.nan_to_num', 'np.nan_to_num', (['signal_r'], {}), '(signal_r)\n', (7881, 7891), True, 'import numpy as np\n'), ((9865, 9886), 'numpy.nan_to_num', 'np.nan_to_num', (['signal'], {}), '(signal)\n', (9878, 9886), True, 'import numpy as np\n')] |
import matplotlib.pyplot as plt
import random, math
import simpy
class Model:
    """Discrete-event model of a restaurant with bounded seating and queue.

    Customers arrive with exponential(1) interarrival times, wait in a queue
    of at most ``ub`` customers, are seated by the server process, eat for a
    normally distributed time with mean ``mt`` and variance ``vt``, and leave.
    Arrivals that find the queue full are counted in ``loss``.
    """
    def __init__(self, env, cap, ub, mt, vt):
        self.env = env
        self.cap = cap # number of seats
        self.ub = ub # maximum queue length
        self.mt = mt # mean of eating time
        self.vt = vt # variance of eating time
        self.in_queue = 0 # number of customers waiting
        self.in_seats = 0 # number of customers eating
        self.loss = 0 # opportunity loss
    def say_goodbye(self): # finish eating and leave
        self.in_seats -= 1
    def is_seatable(self): # at least an empty seat and a waiting customer
        return self.in_seats < self.cap and self.in_queue > 0
    def seat_customer(self): # move a customer from queue to a seat
        self.in_queue -= 1
        self.in_seats += 1
    def print_state(self):
        # Snapshot of current counts, timestamped with the rounded sim clock.
        print('[{}] in queue: {}, in seats: {}, lost sales: {} '.format(round(self.env.now), self.in_queue, self.in_seats, self.loss))
    def reception(self): # deal with arriving potential customers
        """Process: admit each arrival to the queue, or count it as lost."""
        while True:
            yield self.env.timeout(random.expovariate(1))
            if self.in_queue < self.ub:
                self.in_queue += 1 # join the queue
                self.env.activateServer.succeed() # signal for activating the server
            else:
                self.loss += 1 # give up and go home
    def server(self): # the server of the restaurant
        """Process: on each signal, seat a waiting customer if possible and log state."""
        while True:
            self.env.activateServer = self.env.event() # create the signal
            yield self.env.activateServer
            if self.is_seatable():
                self.seat_customer()
                self.env.process(self.customer()) # activate a customer
            self.print_state()
            self.env.log.extend()
    def customer(self): # a customer having lunch
        """Process: eat for a strictly positive normal-distributed time, then leave."""
        eating_time = 0
        while eating_time <= 0: # eating time must be > 0
            eating_time = random.normalvariate(self.mt, math.sqrt(self.vt))
        yield self.env.timeout(eating_time)
        self.say_goodbye()
        self.env.activateServer.succeed() # signal for activating the server
class Log:
    """Time series of queue/seat/loss counts sampled from the simulation.

    One sample is appended per call to :meth:`extend`; the constructor
    records an initial sample immediately.
    """
    def __init__(self, env):
        self.env = env
        self.time, self.in_queue, self.in_seats, self.loss = [], [], [], []
        self.extend()
    def extend(self):
        """Append the current simulation time and model counters."""
        model = self.env.model
        self.time.append(self.env.now)
        self.in_queue.append(model.in_queue)
        self.in_seats.append(model.in_seats)
        self.loss.append(model.loss)
    def plot_log(self):
        """Render the queue-length series as a step plot."""
        plt.plot(self.time, self.in_queue, drawstyle = "steps-post")
        plt.xlabel("time (minute)")
        plt.ylabel("queue length")
        plt.show()
def main():
    """Assemble the restaurant model, run it for 200 time units, plot the queue."""
    environment = simpy.Environment()
    environment.model = Model(environment, 30, 10, 25, 25) # cap, ub, mt, vt
    environment.log = Log(environment)
    for generator in (environment.model.reception(), environment.model.server()):
        environment.process(generator)
    environment.run(until=200)
    environment.log.plot_log()
if __name__ == '__main__':
main()
| [
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"simpy.Environment",
"math.sqrt",
"random.expovariate",
"matplotlib.pyplot.show"
] | [((2759, 2778), 'simpy.Environment', 'simpy.Environment', ([], {}), '()\n', (2776, 2778), False, 'import simpy\n'), ((2585, 2643), 'matplotlib.pyplot.plot', 'plt.plot', (['self.time', 'self.in_queue'], {'drawstyle': '"""steps-post"""'}), "(self.time, self.in_queue, drawstyle='steps-post')\n", (2593, 2643), True, 'import matplotlib.pyplot as plt\n'), ((2654, 2681), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time (minute)"""'], {}), "('time (minute)')\n", (2664, 2681), True, 'import matplotlib.pyplot as plt\n'), ((2690, 2716), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""queue length"""'], {}), "('queue length')\n", (2700, 2716), True, 'import matplotlib.pyplot as plt\n'), ((2725, 2735), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2733, 2735), True, 'import matplotlib.pyplot as plt\n'), ((1981, 1999), 'math.sqrt', 'math.sqrt', (['self.vt'], {}), '(self.vt)\n', (1990, 1999), False, 'import random, math\n'), ((1113, 1134), 'random.expovariate', 'random.expovariate', (['(1)'], {}), '(1)\n', (1131, 1134), False, 'import random, math\n')] |
from utils import osUtils as ou
import random
from tqdm import tqdm
from data_set import filepaths as fp
import pandas as pd
def readRecData(path, test_ratio=0.1):
    """Load a tab-separated ratings file, sort it by columns 0 and 3
    (presumably user id and timestamp — confirm against the data), write the
    sorted frame to ``a.csv`` in the working directory, and print it.

    Note: ``test_ratio`` is currently unused and the function returns None.
    """
    ratings = pd.read_csv(path, sep='\t', header=None)
    ordered = ratings.sort_values(by=[0, 3], axis=0)
    ordered.to_csv('a.csv')
    print(ordered)
    return
if __name__ == '__main__':
readRecData(fp.Ml_latest_small.RATING_TS) | [
"pandas.read_csv"
] | [((175, 215), 'pandas.read_csv', 'pd.read_csv', (['path'], {'sep': '"""\t"""', 'header': 'None'}), "(path, sep='\\t', header=None)\n", (186, 215), True, 'import pandas as pd\n')] |
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Sequence, Type, TypeVar
from eth._utils.datatypes import Configurable
from eth.constants import ZERO_HASH32
from eth_typing import BLSSignature, Hash32
from eth_utils import humanize_hash
from ssz.hashable_container import HashableContainer, SignedHashableContainer
from ssz.sedes import List, bytes32, bytes96, uint64
from eth2.beacon.constants import (
EMPTY_SIGNATURE,
GENESIS_PARENT_ROOT,
ZERO_SIGNING_ROOT,
)
from eth2.beacon.typing import FromBlockParams, SigningRoot, Slot
from .attestations import Attestation
from .attester_slashings import AttesterSlashing
from .block_headers import BeaconBlockHeader
from .defaults import default_slot, default_tuple
from .deposits import Deposit
from .eth1_data import Eth1Data, default_eth1_data
from .proposer_slashings import ProposerSlashing
from .voluntary_exits import VoluntaryExit
if TYPE_CHECKING:
from eth2.beacon.db.chain import BaseBeaconChainDB # noqa: F401
TBeaconBlockBody = TypeVar("TBeaconBlockBody", bound="BeaconBlockBody")
class BeaconBlockBody(HashableContainer):
    """SSZ container holding the operations carried inside a beacon block."""

    fields = [
        ("randao_reveal", bytes96),
        ("eth1_data", Eth1Data),
        ("graffiti", bytes32),
        ("proposer_slashings", List(ProposerSlashing, 16)),
        ("attester_slashings", List(AttesterSlashing, 1)),
        ("attestations", List(Attestation, 128)),
        ("deposits", List(Deposit, 16)),
        ("voluntary_exits", List(VoluntaryExit, 16)),
    ]

    @classmethod
    def create(
        cls: Type[TBeaconBlockBody],
        *,
        randao_reveal: bytes96 = EMPTY_SIGNATURE,
        eth1_data: Eth1Data = default_eth1_data,
        graffiti: Hash32 = ZERO_HASH32,
        proposer_slashings: Sequence[ProposerSlashing] = default_tuple,
        attester_slashings: Sequence[AttesterSlashing] = default_tuple,
        attestations: Sequence[Attestation] = default_tuple,
        deposits: Sequence[Deposit] = default_tuple,
        voluntary_exits: Sequence[VoluntaryExit] = default_tuple,
    ) -> TBeaconBlockBody:
        """Build a body; every field defaults to its empty/zero value."""
        return super().create(
            randao_reveal=randao_reveal,
            eth1_data=eth1_data,
            graffiti=graffiti,
            proposer_slashings=proposer_slashings,
            attester_slashings=attester_slashings,
            attestations=attestations,
            deposits=deposits,
            voluntary_exits=voluntary_exits,
        )

    @property
    def is_empty(self) -> bool:
        # True when every field still equals its default (a freshly created body).
        return self == BeaconBlockBody.create()

    def __str__(self) -> str:
        return (
            f"randao_reveal={humanize_hash(self.randao_reveal)},"
            f" graffiti={humanize_hash(self.graffiti)},"
            f" proposer_slashings={self.proposer_slashings},"
            f" attester_slashings={self.attester_slashings},"
            f" attestations={self.attestations},"
            f" deposits={self.deposits},"
            f" voluntary_exits={self.voluntary_exits},"
        )

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {str(self)}>"
# Shared default: a body whose fields all carry their empty/zero values.
default_beacon_block_body = BeaconBlockBody.create()
# Bound type variable so ``create`` on a subclass returns the subclass type.
TBaseBeaconBlock = TypeVar("TBaseBeaconBlock", bound="BaseBeaconBlock")
class BaseBeaconBlock(SignedHashableContainer, Configurable, ABC):
    """Abstract SSZ beacon block: slot, parent/state roots, body, signature."""

    fields = [
        ("slot", uint64),
        ("parent_root", bytes32),
        ("state_root", bytes32),
        ("body", BeaconBlockBody),
        ("signature", bytes96),
    ]

    @classmethod
    def create(
        cls: Type[TBaseBeaconBlock],
        *,
        slot: Slot = default_slot,
        parent_root: SigningRoot = ZERO_SIGNING_ROOT,
        state_root: Hash32 = ZERO_HASH32,
        body: BeaconBlockBody = default_beacon_block_body,
        signature: BLSSignature = EMPTY_SIGNATURE,
    ) -> TBaseBeaconBlock:
        """Build a block; every field defaults to its zero/empty value."""
        return super().create(
            slot=slot,
            parent_root=parent_root,
            state_root=state_root,
            body=body,
            signature=signature,
        )

    def __str__(self) -> str:
        return (
            f"[signing_root]={humanize_hash(self.signing_root)},"
            f" [hash_tree_root]={humanize_hash(self.hash_tree_root)},"
            f" slot={self.slot},"
            f" parent_root={humanize_hash(self.parent_root)},"
            f" state_root={humanize_hash(self.state_root)},"
            f" body=({self.body}),"
            f" signature={humanize_hash(self.signature)}"
        )

    @property
    def is_genesis(self) -> bool:
        # The genesis block is the only one whose parent root is the sentinel.
        return self.parent_root == GENESIS_PARENT_ROOT

    @property
    def header(self) -> BeaconBlockHeader:
        """Return the header form of this block (body replaced by its root)."""
        return BeaconBlockHeader.create(
            slot=self.slot,
            parent_root=self.parent_root,
            state_root=self.state_root,
            body_root=self.body.hash_tree_root,
            signature=self.signature,
        )

    @classmethod
    @abstractmethod
    def from_root(
        cls, root: SigningRoot, chaindb: "BaseBeaconChainDB"
    ) -> "BaseBeaconBlock":
        """
        Return the block denoted by the given block root.
        """
        ...

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {str(self)}>"
TBeaconBlock = TypeVar("TBeaconBlock", bound="BeaconBlock")
class BeaconBlock(BaseBeaconBlock):
    """Concrete beacon block with database-, parent-, and header-based constructors."""

    block_body_class = BeaconBlockBody

    @classmethod
    def from_root(
        cls, root: SigningRoot, chaindb: "BaseBeaconChainDB"
    ) -> "BeaconBlock":
        """
        Return the block denoted by the given block ``root``.
        """
        stored = chaindb.get_block_by_root(root, cls)
        stored_body = stored.body
        rebuilt_body = cls.block_body_class.create(
            randao_reveal=stored_body.randao_reveal,
            eth1_data=stored_body.eth1_data,
            graffiti=stored_body.graffiti,
            proposer_slashings=stored_body.proposer_slashings,
            attester_slashings=stored_body.attester_slashings,
            attestations=stored_body.attestations,
            deposits=stored_body.deposits,
            voluntary_exits=stored_body.voluntary_exits,
        )
        return cls.create(
            slot=stored.slot,
            parent_root=stored.parent_root,
            state_root=stored.state_root,
            body=rebuilt_body,
            signature=stored.signature,
        )

    @classmethod
    def from_parent(
        cls: Type[TBaseBeaconBlock],
        parent_block: "BaseBeaconBlock",
        block_params: FromBlockParams,
    ) -> TBaseBeaconBlock:
        """
        Initialize a new block with the ``parent_block`` as the block's
        previous block root.
        """
        next_slot = (
            parent_block.slot + 1 if block_params.slot is None else block_params.slot
        )
        return cls.create(
            slot=next_slot,
            parent_root=parent_block.signing_root,
            state_root=parent_block.state_root,
            body=cls.block_body_class.create(),
        )

    @classmethod
    def convert_block(
        cls: Type[TBaseBeaconBlock], block: "BaseBeaconBlock"
    ) -> TBaseBeaconBlock:
        """Re-wrap ``block``'s fields in this class."""
        return cls.create(
            slot=block.slot,
            parent_root=block.parent_root,
            state_root=block.state_root,
            body=block.body,
            signature=block.signature,
        )

    @classmethod
    def from_header(
        cls: Type[TBaseBeaconBlock], header: BeaconBlockHeader
    ) -> TBeaconBlock:
        """Build a block from ``header``, attaching an empty body."""
        return cls.create(
            slot=header.slot,
            parent_root=header.parent_root,
            state_root=header.state_root,
            signature=header.signature,
            body=BeaconBlockBody(),
        )
| [
"eth_utils.humanize_hash",
"ssz.sedes.List",
"typing.TypeVar"
] | [((1029, 1081), 'typing.TypeVar', 'TypeVar', (['"""TBeaconBlockBody"""'], {'bound': '"""BeaconBlockBody"""'}), "('TBeaconBlockBody', bound='BeaconBlockBody')\n", (1036, 1081), False, 'from typing import TYPE_CHECKING, Sequence, Type, TypeVar\n'), ((3162, 3214), 'typing.TypeVar', 'TypeVar', (['"""TBaseBeaconBlock"""'], {'bound': '"""BaseBeaconBlock"""'}), "('TBaseBeaconBlock', bound='BaseBeaconBlock')\n", (3169, 3214), False, 'from typing import TYPE_CHECKING, Sequence, Type, TypeVar\n'), ((5211, 5255), 'typing.TypeVar', 'TypeVar', (['"""TBeaconBlock"""'], {'bound': '"""BeaconBlock"""'}), "('TBeaconBlock', bound='BeaconBlock')\n", (5218, 5255), False, 'from typing import TYPE_CHECKING, Sequence, Type, TypeVar\n'), ((1273, 1299), 'ssz.sedes.List', 'List', (['ProposerSlashing', '(16)'], {}), '(ProposerSlashing, 16)\n', (1277, 1299), False, 'from ssz.sedes import List, bytes32, bytes96, uint64\n'), ((1333, 1358), 'ssz.sedes.List', 'List', (['AttesterSlashing', '(1)'], {}), '(AttesterSlashing, 1)\n', (1337, 1358), False, 'from ssz.sedes import List, bytes32, bytes96, uint64\n'), ((1386, 1408), 'ssz.sedes.List', 'List', (['Attestation', '(128)'], {}), '(Attestation, 128)\n', (1390, 1408), False, 'from ssz.sedes import List, bytes32, bytes96, uint64\n'), ((1432, 1449), 'ssz.sedes.List', 'List', (['Deposit', '(16)'], {}), '(Deposit, 16)\n', (1436, 1449), False, 'from ssz.sedes import List, bytes32, bytes96, uint64\n'), ((1480, 1503), 'ssz.sedes.List', 'List', (['VoluntaryExit', '(16)'], {}), '(VoluntaryExit, 16)\n', (1484, 1503), False, 'from ssz.sedes import List, bytes32, bytes96, uint64\n'), ((2619, 2652), 'eth_utils.humanize_hash', 'humanize_hash', (['self.randao_reveal'], {}), '(self.randao_reveal)\n', (2632, 2652), False, 'from eth_utils import humanize_hash\n'), ((2681, 2709), 'eth_utils.humanize_hash', 'humanize_hash', (['self.graffiti'], {}), '(self.graffiti)\n', (2694, 2709), False, 'from eth_utils import humanize_hash\n'), ((4085, 4117), 
'eth_utils.humanize_hash', 'humanize_hash', (['self.signing_root'], {}), '(self.signing_root)\n', (4098, 4117), False, 'from eth_utils import humanize_hash\n'), ((4154, 4188), 'eth_utils.humanize_hash', 'humanize_hash', (['self.hash_tree_root'], {}), '(self.hash_tree_root)\n', (4167, 4188), False, 'from eth_utils import humanize_hash\n'), ((4254, 4285), 'eth_utils.humanize_hash', 'humanize_hash', (['self.parent_root'], {}), '(self.parent_root)\n', (4267, 4285), False, 'from eth_utils import humanize_hash\n'), ((4316, 4346), 'eth_utils.humanize_hash', 'humanize_hash', (['self.state_root'], {}), '(self.state_root)\n', (4329, 4346), False, 'from eth_utils import humanize_hash\n'), ((4412, 4441), 'eth_utils.humanize_hash', 'humanize_hash', (['self.signature'], {}), '(self.signature)\n', (4425, 4441), False, 'from eth_utils import humanize_hash\n')] |
import numpy as np
from scipy import signal
from .. import MaskSeparationBase
from ...core import utils
from ...core import constants
class Duet(MaskSeparationBase):
"""
The DUET algorithm was originally proposed by S.Rickard and F.Dietrich for DOA
estimation and further developed for BSS and demixing by <NAME>, S.Rickard,
and <NAME>.
DUET extracts sources using the symmetric attenuation and relative delay between
two channels. The symmetric attenuation is calculated from the ratio of the two
channels' stft amplitudes, and the delay is the arrival delay between the two
sensors used to record the audio signal. These two values are clustered as peaks on
a histogram to determine where each source occurs. This implementation of DUET
creates and returns Mask objects after the run() function, which can then be
applied to the original audio signal to extract each individual source.
References:
[1] Rickard, Scott. "The DUET blind source separation algorithm."
Blind Speech Separation. Springer Netherlands, 2007. 217-241.
[2] Yilmaz, Ozgur, and <NAME>. "Blind separation of speech mixtures
via time-frequency masking."
Signal Processing, IEEE transactions on 52.7 (2004): 1830-1847.
Args:
input_audio_signal (np.array): a 2-row Numpy matrix containing samples of the
two-channel mixture.
num_sources (int): Number of sources to find.
attenuation_min (int): Minimum distance in utils.find_peak_indices, change if
not enough peaks are identified.
attenuation_max (int): Used for creating a histogram without outliers.
num_attenuation_bins (int): Number of bins for attenuation.
delay_min (int): Lower bound on delay, used as minimum distance in
utils.find_peak_indices.
delay_max (int): Upper bound on delay, used for creating a histogram without
outliers.
num_delay_bins (int): Number of bins for delay.
peak_threshold (float): Value in [0, 1] for peak picking.
attenuation_min_distance (int): Minimum distance between peaks wrt attenuation.
delay_min_distance (int): Minimum distance between peaks wrt delay.
p (int): Weight the histogram with the symmetric attenuation estimator.
q (int): Weight the histogram with the delay estimato
Notes:
On page 8 of his paper, Rickard recommends p=1 and q=0 as a default starting
point and p=.5, q=0 if one source is more dominant.
Attributes:
stft_ch0 (np.array): A Numpy matrix containing the stft data of channel 0.
stft_ch1 (np.array): A Numpy matrix containing the stft data of channel 1.
frequency_matrix (np.array): A Numpy matrix containing the frequencies of
analysis.
symmetric_atn (np.array): A Numpy matrix containing the symmetric attenuation
between the two channels.
delay (np.array): A Numpy matrix containing the delay between the two channels.
num_time_bins (np.array): The number of time bins for the frequency matrix and
mask arrays.
num_frequency_bins (int): The number of frequency bins for the mask arrays.
attenuation_bins (int): A Numpy array containing the attenuation bins for the
histogram.
delay_bins (np.array): A Numpy array containing the delay bins for the histogram.
normalized_attenuation_delay_histogram (np.array): A normalized Numpy matrix
containing the attenuation delay histogram, which has peaks for each source.
attenuation_delay_histogram (np.array): A non-normalized Numpy matrix containing
the attenuation delay histogram, which has peaks for each source.
peak_indices (np.array): A Numpy array containing the indices of the peaks for
the histogram.
separated_sources (np.array): A Numpy array of arrays containing each
separated source.
"""
    def __init__(self, input_audio_signal, num_sources,
                 attenuation_min=-3, attenuation_max=3, num_attenuation_bins=50,
                 delay_min=-3, delay_max=3, num_delay_bins=50,
                 peak_threshold=0.0, attenuation_min_distance=5, delay_min_distance=5,
                 p=1, q=0, mask_type='binary'):
        """Store the DUET configuration; see the class docstring for argument
        semantics.  Heavy computation is deferred to :meth:`run`."""
        # Base class handles the input mixture and mask-type bookkeeping.
        super().__init__(
            input_audio_signal=input_audio_signal,
            mask_type=mask_type)
        # Histogram / peak-picking configuration.
        self.num_sources = num_sources
        self.attenuation_min = attenuation_min
        self.attenuation_max = attenuation_max
        self.num_attenuation_bins = num_attenuation_bins
        self.delay_min = delay_min
        self.delay_max = delay_max
        self.num_delay_bins = num_delay_bins
        self.peak_threshold = peak_threshold
        self.attenuation_min_distance = attenuation_min_distance
        self.delay_min_distance = delay_min_distance
        self.p = p
        self.q = q
        # Intermediate results populated by run(); None until then.
        self.stft_ch0 = None
        self.stft_ch1 = None
        self.frequency_matrix = None
        self.symmetric_atn = None
        self.delay = None
        self.num_time_bins = None
        self.num_frequency_bins = None
        self.attenuation_bins = None
        self.delay_bins = None
        self.normalized_attenuation_delay_histogram = None
        self.attenuation_delay_histogram = None
        self.peak_indices = None
        self.delay_peak = None
        self.atn_peak = None
        self.separated_sources = None
    def run(self):
        """ Extracts N sources from a given stereo audio mixture (N sources captured via 2 sensors)

        Returns:
            computed_masks (np.array): A list of binary mask objects that can be used to extract the sources

        Example:
        .. code-block:: python
            :linenos:

            # Import input audio signal
            input_file_name = '../Input/dev1_female3_inst_mix.wav'
            signal = AudioSignal(path_to_input_file=input_file_name)

            # Set up and run Duet
            duet = Duet(signal, num_sources=3, attenuation_min=-3, attenuation_max=3,
                        num_attenuation_bins=50, delay_min=-3, delay_max=3, num_delay_bins=50,
                        peak_threshold=0.2, attenuation_min_distance=5, delay_min_distance=5)
            duet.run()

            # Create output file for each source found
            output_name_stem = os.path.join('..', 'Output', 'duet_source')
            i = 1
            for s in duet.make_audio_signals():
                output_file_name = f"{output_name_stem}{i}.wav"
                s.write_audio_to_file(output_file_name)
                i += 1
        """
        self.result_masks = []
        # Calculate the stft of both channels and create the frequency matrix (the matrix containing the
        # frequencies of analysis of the Fourier transform)
        self.stft_ch0, self.stft_ch1, self.frequency_matrix = self._compute_spectrogram(
            self.sample_rate)
        # Calculate the symmetric attenuation (alpha) and delay (delta) for each
        # time-freq. point and return a matrix for each
        self.symmetric_atn, self.delay = self._compute_atn_delay(
            self.stft_ch0, self.stft_ch1, self.frequency_matrix)
        # Make histogram of attenuation-delay values and get the center values for the bins in this histogram
        self.normalized_attenuation_delay_histogram, self.attenuation_bins, self.delay_bins = (
            self._make_histogram()
        )
        # Find the location of peaks in the attenuation-delay plane
        self.peak_indices = utils.find_peak_indices(
            self.normalized_attenuation_delay_histogram, self.num_sources,
            threshold=self.peak_threshold,
            min_dist=[self.attenuation_min_distance, self.delay_min_distance])
        # compute delay_peak, attenuation peak, and attenuation/delay estimates
        self.delay_peak, atn_delay_est, self.atn_peak = self._convert_peaks(
            self.peak_indices)
        # compute masks for separation (one Mask object per source)
        computed_masks = self._compute_masks()
        return computed_masks
def _compute_spectrogram(self, sample_rate):
""" Creates the STFT matrices for channel 0 and 1, and computes the frequency matrix.
Parameter:
sample_rate (integer): sample rate
Returns:
stft_ch0 (np.matrix): a 2D Numpy matrix containing the stft of channel 0
stft_ch1 (np.matrix): a 2D Numpy matrix containing the stft of channel 1
wmat (np.matrix): a 2D Numpy matrix containing the frequencies of analysis of the Fourier transform
"""
# Compute the stft of the two channel mixtures
self.audio_signal.stft_params = self.stft_params
self.audio_signal.stft()
stft_ch0 = self.audio_signal.get_stft_channel(0)
stft_ch1 = self.audio_signal.get_stft_channel(1)
# Compute the freq. matrix for later use in phase calculations
n_time_bins = len(self.audio_signal.time_bins_vector)
wmat = np.array(np.tile(np.mat(
self.audio_signal.freq_vector).T, (1, n_time_bins))) * (
2 * np.pi / sample_rate)
wmat += constants.EPSILON
return stft_ch0, stft_ch1, wmat
@staticmethod
def _compute_atn_delay(stft_ch0, stft_ch1, frequency_matrix):
# Calculate the symmetric attenuation (alpha) and delay (delta) for each
# time-freq. point
inter_channel_ratio = (stft_ch1 + constants.EPSILON) / (stft_ch0 + constants.EPSILON)
attenuation = np.abs(inter_channel_ratio) # relative attenuation between the two channels
symmetric_attenuation = attenuation - 1 / attenuation # symmetric attenuation
relative_delay = -np.imag(np.log(inter_channel_ratio)) / (2 * np.pi * frequency_matrix) # relative delay
return symmetric_attenuation, relative_delay
    def _make_histogram(self):
        """Build a smoothed, normalized 2D histogram of symmetric-attenuation /
        delay estimates over all usable time-frequency points.

        Reads the attributes populated by run(): stft_ch0, stft_ch1,
        symmetric_atn, delay and frequency_matrix, plus the bin/bound
        configuration from __init__.

        Returns:
            histogram (np.array): a smooth and normalized histogram
            atn_bins (np.array): The range of attenuation values distributed into bins
            delay_bins (np.array): The range of delay values distributed into bins
        """
        # calculate the weighted histogram: each TF point contributes
        # |X0|^p * |X1|^p weighted by frequency^q (p, q per Rickard's paper)
        time_frequency_weights = (np.abs(self.stft_ch0) * np.abs(self.stft_ch1)) ** self.p * \
                                 (np.abs(self.frequency_matrix)) ** self.q
        # only consider time-freq. points yielding estimates in bounds
        attenuation_premask = np.logical_and(self.attenuation_min < self.symmetric_atn,
                                            self.symmetric_atn < self.attenuation_max)
        delay_premask = np.logical_and(self.delay_min < self.delay, self.delay < self.delay_max)
        attenuation_delay_premask = np.logical_and(attenuation_premask, delay_premask)
        # flatten the in-bounds points into 1-D vectors for histogram2d
        nonzero_premask = np.nonzero(attenuation_delay_premask)
        symmetric_attenuation_vector = self.symmetric_atn[nonzero_premask]
        delay_vector = self.delay[nonzero_premask]
        time_frequency_weights_vector = time_frequency_weights[nonzero_premask]
        bins_array = np.array([self.num_attenuation_bins, self.num_delay_bins])
        range_array = np.array([[self.attenuation_min, self.attenuation_max], [self.delay_min, self.delay_max]])
        # compute the histogram
        histogram, atn_bins, delay_bins = np.histogram2d(symmetric_attenuation_vector, delay_vector,
                                                        bins=bins_array, range=range_array,
                                                        weights=time_frequency_weights_vector)
        # Save non-normalized as an option for plotting later
        self.attenuation_delay_histogram = histogram
        # Scale histogram from 0 to 1
        histogram /= histogram.max()
        # smooth the normalized histogram - local average 3-by-3 neighboring bins
        histogram = self._smooth_matrix(histogram, np.array([3]))
        return histogram, atn_bins, delay_bins
def _convert_peaks(self, peak_indices):
"""Receives the attenuation and delay bins and computes the delay/attenuation
peaks based on the peak finder indices.
Returns:
delay_peak(np.array): The delay peaks determined from the histogram
atn_delay_est (np.array): The estimated symmetric attenuation and delay values
atn_peak (np.array): Attenuation converted from symmetric attenuation
"""
atn_indices = [x[0] for x in peak_indices]
delay_indices = [x[1] for x in peak_indices]
symmetric_atn_peak = self.attenuation_bins[atn_indices]
delay_peak = self.delay_bins[delay_indices]
atn_delay_est = np.column_stack((symmetric_atn_peak, delay_peak))
# convert symmetric_atn to atn_peak using formula from Rickard
atn_peak = (symmetric_atn_peak + np.sqrt(symmetric_atn_peak ** 2 + 4)) / 2
return delay_peak, atn_delay_est, atn_peak
    def _compute_masks(self):
        """Receives the attenuation and delay peaks and computes a mask to be applied to the signal for source
        separation.
        """
        # compute masks for separation
        # Track, per time-frequency cell, the best (lowest) score seen so far
        # across sources; initialized to +inf so the first source always claims
        # every cell.
        best_so_far = np.inf * np.ones_like(self.stft_ch0, dtype=float)
        for i in range(0, self.num_sources):
            mask_array = np.zeros_like(self.stft_ch0, dtype=bool)
            # Per-frequency phase term for this source's estimated delay.
            phase = np.exp(-1j * self.frequency_matrix * self.delay_peak[i])
            # Squared residual of the two-channel mixing model for source i,
            # normalized by (1 + a_i^2); a lower score means a better fit.
            score = np.abs(self.atn_peak[i] * phase * self.stft_ch0 - self.stft_ch1) ** 2 / (1 + self.atn_peak[i] ** 2)
            # Claim the cells where this source fits better than any earlier one.
            mask = (score < best_so_far)
            mask_array[mask] = True
            # self.mask_type appears to be a mask constructor wrapping the
            # boolean array into an object exposing a .mask attribute —
            # TODO confirm against the class definition.
            background_mask = self.mask_type(np.array(mask_array))
            self.result_masks.append(background_mask)
            # XOR-accumulate each source mask into mask 0. Note that on i == 0
            # this XORs the mask with itself (zeroing it), so mask 0 is rebuilt
            # from the remaining sources and inverted below.
            self.result_masks[0].mask = np.logical_xor(self.result_masks[i].mask, self.result_masks[0].mask)
            best_so_far[mask] = score[mask]
        # Compute first mask based on what the other masks left remaining
        self.result_masks[0].mask = np.logical_not(self.result_masks[0].mask)
        return self.result_masks
@staticmethod
def _smooth_matrix(matrix, kernel):
"""Performs two-dimensional convolution in order to smooth the values of matrix elements.
(similar to low-pass filtering)
Parameters:
matrix (np.array): a 2D Numpy matrix to be smoothed
kernel (np.array): a 2D Numpy matrix containing kernel values
Note:
if Kernel is of size 1 by 1 (scalar), a Kernel by Kernel matrix of 1/Kernel**2 will be used as the matrix
averaging kernel
Output:
smoothed_matrix (np.array): a 2D Numpy matrix containing a smoothed version of Mat (same size as Mat)
"""
# check the dimensions of the Kernel matrix and set the values of the averaging
# matrix, kernel_matrix
kernel_matrix = np.ones((kernel[0], kernel[0])) / kernel[0] ** 2
krow, kcol = np.shape(kernel_matrix)
# adjust the matrix dimension for convolution
copy_row = int(np.floor(krow / 2)) # number of rows to copy on top and bottom
copy_col = int(np.floor(kcol / 2)) # number of columns to copy on either side
# TODO: This is very ugly. Make this readable.
# form the augmented matrix (rows and columns added to top, bottom, and sides)
matrix = np.mat(matrix) # make sure Mat is a Numpy matrix
augmented_matrix = np.vstack(
[
np.hstack(
[matrix[0, 0] * np.ones((copy_row, copy_col)),
np.ones((copy_row, 1)) * matrix[0, :],
matrix[0, -1] * np.ones((copy_row, copy_col))
]),
np.hstack(
[matrix[:, 0] * np.ones((1, copy_col)),
matrix,
matrix[:, -1] * np.ones((1, copy_col))]),
np.hstack(
[matrix[-1, 1] * np.ones((copy_row, copy_col)),
np.ones((copy_row, 1)) * matrix[-1, :],
matrix[-1, -1] * np.ones((copy_row, copy_col))
]
)
]
)
# perform two-dimensional convolution between the input matrix and the kernel
smooted_matrix = signal.convolve2d(augmented_matrix, kernel_matrix[::-1, ::-1], mode='valid')
return smooted_matrix
| [
"numpy.abs",
"numpy.mat",
"scipy.signal.convolve2d",
"numpy.ones_like",
"numpy.ones",
"numpy.logical_and",
"numpy.sqrt",
"numpy.logical_not",
"numpy.floor",
"numpy.column_stack",
"numpy.logical_xor",
"numpy.log",
"numpy.exp",
"numpy.array",
"numpy.nonzero",
"numpy.histogram2d",
"nump... | [((9712, 9739), 'numpy.abs', 'np.abs', (['inter_channel_ratio'], {}), '(inter_channel_ratio)\n', (9718, 9739), True, 'import numpy as np\n'), ((11221, 11326), 'numpy.logical_and', 'np.logical_and', (['(self.attenuation_min < self.symmetric_atn)', '(self.symmetric_atn < self.attenuation_max)'], {}), '(self.attenuation_min < self.symmetric_atn, self.\n symmetric_atn < self.attenuation_max)\n', (11235, 11326), True, 'import numpy as np\n'), ((11392, 11464), 'numpy.logical_and', 'np.logical_and', (['(self.delay_min < self.delay)', '(self.delay < self.delay_max)'], {}), '(self.delay_min < self.delay, self.delay < self.delay_max)\n', (11406, 11464), True, 'import numpy as np\n'), ((11501, 11551), 'numpy.logical_and', 'np.logical_and', (['attenuation_premask', 'delay_premask'], {}), '(attenuation_premask, delay_premask)\n', (11515, 11551), True, 'import numpy as np\n'), ((11579, 11616), 'numpy.nonzero', 'np.nonzero', (['attenuation_delay_premask'], {}), '(attenuation_delay_premask)\n', (11589, 11616), True, 'import numpy as np\n'), ((11845, 11903), 'numpy.array', 'np.array', (['[self.num_attenuation_bins, self.num_delay_bins]'], {}), '([self.num_attenuation_bins, self.num_delay_bins])\n', (11853, 11903), True, 'import numpy as np\n'), ((11926, 12020), 'numpy.array', 'np.array', (['[[self.attenuation_min, self.attenuation_max], [self.delay_min, self.delay_max]\n ]'], {}), '([[self.attenuation_min, self.attenuation_max], [self.delay_min,\n self.delay_max]])\n', (11934, 12020), True, 'import numpy as np\n'), ((12092, 12229), 'numpy.histogram2d', 'np.histogram2d', (['symmetric_attenuation_vector', 'delay_vector'], {'bins': 'bins_array', 'range': 'range_array', 'weights': 'time_frequency_weights_vector'}), '(symmetric_attenuation_vector, delay_vector, bins=bins_array,\n range=range_array, weights=time_frequency_weights_vector)\n', (12106, 12229), True, 'import numpy as np\n'), ((13436, 13485), 'numpy.column_stack', 'np.column_stack', (['(symmetric_atn_peak, 
delay_peak)'], {}), '((symmetric_atn_peak, delay_peak))\n', (13451, 13485), True, 'import numpy as np\n'), ((14749, 14790), 'numpy.logical_not', 'np.logical_not', (['self.result_masks[0].mask'], {}), '(self.result_masks[0].mask)\n', (14763, 14790), True, 'import numpy as np\n'), ((15699, 15722), 'numpy.shape', 'np.shape', (['kernel_matrix'], {}), '(kernel_matrix)\n', (15707, 15722), True, 'import numpy as np\n'), ((16112, 16126), 'numpy.mat', 'np.mat', (['matrix'], {}), '(matrix)\n', (16118, 16126), True, 'import numpy as np\n'), ((17040, 17116), 'scipy.signal.convolve2d', 'signal.convolve2d', (['augmented_matrix', 'kernel_matrix[::-1, ::-1]'], {'mode': '"""valid"""'}), "(augmented_matrix, kernel_matrix[::-1, ::-1], mode='valid')\n", (17057, 17116), False, 'from scipy import signal\n'), ((12666, 12679), 'numpy.array', 'np.array', (['[3]'], {}), '([3])\n', (12674, 12679), True, 'import numpy as np\n'), ((13937, 13977), 'numpy.ones_like', 'np.ones_like', (['self.stft_ch0'], {'dtype': 'float'}), '(self.stft_ch0, dtype=float)\n', (13949, 13977), True, 'import numpy as np\n'), ((14049, 14089), 'numpy.zeros_like', 'np.zeros_like', (['self.stft_ch0'], {'dtype': 'bool'}), '(self.stft_ch0, dtype=bool)\n', (14062, 14089), True, 'import numpy as np\n'), ((14110, 14168), 'numpy.exp', 'np.exp', (['(-1.0j * self.frequency_matrix * self.delay_peak[i])'], {}), '(-1.0j * self.frequency_matrix * self.delay_peak[i])\n', (14116, 14168), True, 'import numpy as np\n'), ((14525, 14593), 'numpy.logical_xor', 'np.logical_xor', (['self.result_masks[i].mask', 'self.result_masks[0].mask'], {}), '(self.result_masks[i].mask, self.result_masks[0].mask)\n', (14539, 14593), True, 'import numpy as np\n'), ((15629, 15660), 'numpy.ones', 'np.ones', (['(kernel[0], kernel[0])'], {}), '((kernel[0], kernel[0]))\n', (15636, 15660), True, 'import numpy as np\n'), ((15801, 15819), 'numpy.floor', 'np.floor', (['(krow / 2)'], {}), '(krow / 2)\n', (15809, 15819), True, 'import numpy as np\n'), ((15888, 15906), 
'numpy.floor', 'np.floor', (['(kcol / 2)'], {}), '(kcol / 2)\n', (15896, 15906), True, 'import numpy as np\n'), ((11078, 11107), 'numpy.abs', 'np.abs', (['self.frequency_matrix'], {}), '(self.frequency_matrix)\n', (11084, 11107), True, 'import numpy as np\n'), ((13599, 13635), 'numpy.sqrt', 'np.sqrt', (['(symmetric_atn_peak ** 2 + 4)'], {}), '(symmetric_atn_peak ** 2 + 4)\n', (13606, 13635), True, 'import numpy as np\n'), ((14409, 14429), 'numpy.array', 'np.array', (['mask_array'], {}), '(mask_array)\n', (14417, 14429), True, 'import numpy as np\n'), ((9910, 9937), 'numpy.log', 'np.log', (['inter_channel_ratio'], {}), '(inter_channel_ratio)\n', (9916, 9937), True, 'import numpy as np\n'), ((10983, 11004), 'numpy.abs', 'np.abs', (['self.stft_ch0'], {}), '(self.stft_ch0)\n', (10989, 11004), True, 'import numpy as np\n'), ((11007, 11028), 'numpy.abs', 'np.abs', (['self.stft_ch1'], {}), '(self.stft_ch1)\n', (11013, 11028), True, 'import numpy as np\n'), ((14187, 14251), 'numpy.abs', 'np.abs', (['(self.atn_peak[i] * phase * self.stft_ch0 - self.stft_ch1)'], {}), '(self.atn_peak[i] * phase * self.stft_ch0 - self.stft_ch1)\n', (14193, 14251), True, 'import numpy as np\n'), ((9215, 9252), 'numpy.mat', 'np.mat', (['self.audio_signal.freq_vector'], {}), '(self.audio_signal.freq_vector)\n', (9221, 9252), True, 'import numpy as np\n'), ((16277, 16306), 'numpy.ones', 'np.ones', (['(copy_row, copy_col)'], {}), '((copy_row, copy_col))\n', (16284, 16306), True, 'import numpy as np\n'), ((16329, 16351), 'numpy.ones', 'np.ones', (['(copy_row, 1)'], {}), '((copy_row, 1))\n', (16336, 16351), True, 'import numpy as np\n'), ((16405, 16434), 'numpy.ones', 'np.ones', (['(copy_row, copy_col)'], {}), '((copy_row, copy_col))\n', (16412, 16434), True, 'import numpy as np\n'), ((16523, 16545), 'numpy.ones', 'np.ones', (['(1, copy_col)'], {}), '((1, copy_col))\n', (16530, 16545), True, 'import numpy as np\n'), ((16613, 16635), 'numpy.ones', 'np.ones', (['(1, copy_col)'], {}), '((1, 
copy_col))\n', (16620, 16635), True, 'import numpy as np\n'), ((16703, 16732), 'numpy.ones', 'np.ones', (['(copy_row, copy_col)'], {}), '((copy_row, copy_col))\n', (16710, 16732), True, 'import numpy as np\n'), ((16755, 16777), 'numpy.ones', 'np.ones', (['(copy_row, 1)'], {}), '((copy_row, 1))\n', (16762, 16777), True, 'import numpy as np\n'), ((16833, 16862), 'numpy.ones', 'np.ones', (['(copy_row, copy_col)'], {}), '((copy_row, copy_col))\n', (16840, 16862), True, 'import numpy as np\n')] |
from __future__ import unicode_literals
import os
import django
from django.test import TestCase
from mock import call, patch
from storage.brokers.host_broker import HostBroker
from storage.delete_files_job import delete_files
from storage.test import utils as storage_test_utils
class TestDeleteFiles(TestCase):
    """Unit tests for the delete_files job backed by a host broker."""

    def setUp(self):
        django.setup()
        # Broker rooted at a host path; the configuration type comes from the
        # broker class itself.
        self.broker = HostBroker()
        self.broker.load_configuration({'type': HostBroker().broker_type, 'host_path': '/host/path'})

    @patch('storage.brokers.host_broker.os.path.exists')
    @patch('storage.brokers.host_broker.os.remove')
    def test_delete_file(self, mock_remove, mock_exists):
        """Tests removing a file"""
        # Pretend every path exists so the broker attempts the removal.
        mock_exists.side_effect = lambda path: True
        volume_path = os.path.join('the', 'volume', 'path')
        rel_paths = [os.path.join('my_dir', 'my_file.txt'),
                     os.path.join('my_dir', 'my_file.json')]
        abs_paths = [os.path.join(volume_path, rel) for rel in rel_paths]
        scale_files = [storage_test_utils.create_file(file_path=rel) for rel in rel_paths]
        # Delete one file per call; delete_files returns None on success.
        for scale_file in scale_files:
            result = delete_files([scale_file], volume_path, self.broker)
            self.assertEqual(result, None)
        # The broker must have removed both absolute paths, in order.
        mock_remove.assert_has_calls([call(path) for path in abs_paths])
| [
"storage.brokers.host_broker.HostBroker",
"django.setup",
"mock.patch",
"os.path.join",
"storage.delete_files_job.delete_files",
"mock.call",
"storage.test.utils.create_file"
] | [((507, 558), 'mock.patch', 'patch', (['"""storage.brokers.host_broker.os.path.exists"""'], {}), "('storage.brokers.host_broker.os.path.exists')\n", (512, 558), False, 'from mock import call, patch\n'), ((564, 610), 'mock.patch', 'patch', (['"""storage.brokers.host_broker.os.remove"""'], {}), "('storage.brokers.host_broker.os.remove')\n", (569, 610), False, 'from mock import call, patch\n'), ((348, 362), 'django.setup', 'django.setup', ([], {}), '()\n', (360, 362), False, 'import django\n'), ((386, 398), 'storage.brokers.host_broker.HostBroker', 'HostBroker', ([], {}), '()\n', (396, 398), False, 'from storage.brokers.host_broker import HostBroker\n'), ((828, 865), 'os.path.join', 'os.path.join', (['"""the"""', '"""volume"""', '"""path"""'], {}), "('the', 'volume', 'path')\n", (840, 865), False, 'import os\n'), ((888, 925), 'os.path.join', 'os.path.join', (['"""my_dir"""', '"""my_file.txt"""'], {}), "('my_dir', 'my_file.txt')\n", (900, 925), False, 'import os\n'), ((948, 986), 'os.path.join', 'os.path.join', (['"""my_dir"""', '"""my_file.json"""'], {}), "('my_dir', 'my_file.json')\n", (960, 986), False, 'import os\n'), ((1014, 1052), 'os.path.join', 'os.path.join', (['volume_path', 'file_path_1'], {}), '(volume_path, file_path_1)\n', (1026, 1052), False, 'import os\n'), ((1080, 1118), 'os.path.join', 'os.path.join', (['volume_path', 'file_path_2'], {}), '(volume_path, file_path_2)\n', (1092, 1118), False, 'import os\n'), ((1137, 1190), 'storage.test.utils.create_file', 'storage_test_utils.create_file', ([], {'file_path': 'file_path_1'}), '(file_path=file_path_1)\n', (1167, 1190), True, 'from storage.test import utils as storage_test_utils\n'), ((1208, 1261), 'storage.test.utils.create_file', 'storage_test_utils.create_file', ([], {'file_path': 'file_path_2'}), '(file_path=file_path_2)\n', (1238, 1261), True, 'from storage.test import utils as storage_test_utils\n'), ((1304, 1352), 'storage.delete_files_job.delete_files', 'delete_files', (['[file_1]', 
'volume_path', 'self.broker'], {}), '([file_1], volume_path, self.broker)\n', (1316, 1352), False, 'from storage.delete_files_job import delete_files\n'), ((1410, 1458), 'storage.delete_files_job.delete_files', 'delete_files', (['[file_2]', 'volume_path', 'self.broker'], {}), '([file_2], volume_path, self.broker)\n', (1422, 1458), False, 'from storage.delete_files_job import delete_files\n'), ((1544, 1566), 'mock.call', 'call', (['full_path_file_1'], {}), '(full_path_file_1)\n', (1548, 1566), False, 'from mock import call, patch\n'), ((1568, 1590), 'mock.call', 'call', (['full_path_file_2'], {}), '(full_path_file_2)\n', (1572, 1590), False, 'from mock import call, patch\n'), ((447, 459), 'storage.brokers.host_broker.HostBroker', 'HostBroker', ([], {}), '()\n', (457, 459), False, 'from storage.brokers.host_broker import HostBroker\n')] |
from django.db import models
# Helper functions
def project_cover(instance, filename):
    """Upload path for a project's cover image: Project_<id>/cover_<filename>."""
    return f"Project_{instance.id}/cover_{filename}"
def project_image(instance, filename):
    """Upload path for an extra project image: Project_<project id>/image_<filename>."""
    return f"Project_{instance.project.id}/image_{filename}"
def client_logo(instance, filename):
    """Upload path for a client logo; the instance argument is unused."""
    return f"ClientLogos/{filename}"
def license_images(instance, filename):
    """Upload path for a license image; the instance argument is unused."""
    return f"Licences/{filename}"
# Create your models here.
class Project(models.Model):
    """A portfolio project with a cover image and markdown description."""

    title = models.CharField(
        verbose_name="Title",
        max_length=64,
        null=False,
        blank=False,
        help_text="Title of the project",
    )
    cover_image = models.ImageField(
        verbose_name="Cover Image",
        upload_to=project_cover,
        null=False,
        blank=False,
        help_text="Project cover image",
    )
    short_description = models.CharField(
        verbose_name="Short Description",
        max_length=32,
        null=False,
        blank=True,
        default="",
        help_text="Example: Renovations and upgrades",
    )
    description = models.TextField(
        verbose_name="Project Description",
        null=False,
        blank=False,
        help_text="Overview of the project, written in markdown",
    )
    details = models.TextField(
        verbose_name="Project Details",
        null=False,
        blank=False,
        help_text="Details about the project, including specifics",
    )

    def __str__(self) -> str:
        return self.title

    def __repr__(self) -> str:
        return self.title
class ProjectImage(models.Model):
    """An additional image attached to a Project."""

    project = models.ForeignKey(
        Project,
        on_delete=models.CASCADE,
        related_name="images",
        null=False,
        blank=False,
    )
    text = models.CharField(
        verbose_name="Project Image Text",
        max_length=32,
        default="Project Image",
        help_text="A title for the image",
    )
    image = models.ImageField(verbose_name="Project Image", upload_to=project_image)
class Client(models.Model):
    """A client company and its logo."""

    client_image = models.ImageField(verbose_name="Client Logo", upload_to=client_logo)
    company = models.CharField(
        verbose_name="Company Name",
        max_length=64,
        null=False,
        blank=False,
        default="",
        help_text="Name of company",
    )

    def __str__(self) -> str:
        return str(self.company)

    def __repr__(self) -> str:
        return self.company
class License(models.Model):
    """A license or certification displayed on the site."""

    license_image = models.ImageField(verbose_name="License Photo", upload_to=license_images)
    name = models.CharField(
        verbose_name="License Title",
        max_length=64,
        null=False,
        blank=False,
        default="Solutions License",
        help_text="Title of license to be displayed",
    )

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return self.name
class Message(models.Model):
first_name = models.CharField("First Name", null=False, blank=False, max_length=32)
last_name = models.CharField("Last Name", null=False, blank=False, max_length=32)
company = models.CharField("Company", null=False, blank=False, max_length=32)
email = models.EmailField("Email", blank=False, null=False)
message = models.TextField("Message", max_length=200)
def __str__(self) -> str:
return "Message from{0} {1}".format(self.last_name, self.first_name)
def __repr__(self) -> str:
return "Message from{0} {1}".format(self.last_name, self.first_name) | [
"django.db.models.EmailField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((507, 610), 'django.db.models.CharField', 'models.CharField', (['"""Title"""'], {'help_text': '"""Title of the project"""', 'max_length': '(64)', 'null': '(False)', 'blank': '(False)'}), "('Title', help_text='Title of the project', max_length=64,\n null=False, blank=False)\n", (523, 610), False, 'from django.db import models\n'), ((672, 792), 'django.db.models.ImageField', 'models.ImageField', (['"""Cover Image"""'], {'upload_to': 'project_cover', 'blank': '(False)', 'null': '(False)', 'help_text': '"""Project cover image"""'}), "('Cover Image', upload_to=project_cover, blank=False, null\n =False, help_text='Project cover image')\n", (689, 792), False, 'from django.db import models\n'), ((859, 1004), 'django.db.models.CharField', 'models.CharField', (['"""Short Description"""'], {'help_text': '"""Example: Renovations and upgrades"""', 'max_length': '(32)', 'blank': '(True)', 'null': '(False)', 'default': '""""""'}), "('Short Description', help_text=\n 'Example: Renovations and upgrades', max_length=32, blank=True, null=\n False, default='')\n", (875, 1004), False, 'from django.db import models\n'), ((1068, 1195), 'django.db.models.TextField', 'models.TextField', (['"""Project Description"""'], {'help_text': '"""Overview of the project, written in markdown"""', 'blank': '(False)', 'null': '(False)'}), "('Project Description', help_text=\n 'Overview of the project, written in markdown', blank=False, null=False)\n", (1084, 1195), False, 'from django.db import models\n'), ((1244, 1369), 'django.db.models.TextField', 'models.TextField', (['"""Project Details"""'], {'help_text': '"""Details about the project, including specifics"""', 'null': '(False)', 'blank': '(False)'}), "('Project Details', help_text=\n 'Details about the project, including specifics', null=False, blank=False)\n", (1260, 1369), False, 'from django.db import models\n'), ((1569, 1677), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'Project', 'on_delete': 'models.CASCADE', 
'blank': '(False)', 'null': '(False)', 'related_name': '"""images"""'}), "(to=Project, on_delete=models.CASCADE, blank=False, null=\n False, related_name='images')\n", (1586, 1677), False, 'from django.db import models\n'), ((1731, 1848), 'django.db.models.CharField', 'models.CharField', (['"""Project Image Text"""'], {'help_text': '"""A title for the image"""', 'default': '"""Project Image"""', 'max_length': '(32)'}), "('Project Image Text', help_text='A title for the image',\n default='Project Image', max_length=32)\n", (1747, 1848), False, 'from django.db import models\n'), ((1896, 1955), 'django.db.models.ImageField', 'models.ImageField', (['"""Project Image"""'], {'upload_to': 'project_image'}), "('Project Image', upload_to=project_image)\n", (1913, 1955), False, 'from django.db import models\n'), ((2005, 2060), 'django.db.models.ImageField', 'models.ImageField', (['"""Client Logo"""'], {'upload_to': 'client_logo'}), "('Client Logo', upload_to=client_logo)\n", (2022, 2060), False, 'from django.db import models\n'), ((2075, 2192), 'django.db.models.CharField', 'models.CharField', (['"""Company Name"""'], {'max_length': '(64)', 'null': '(False)', 'blank': '(False)', 'default': '""""""', 'help_text': '"""Name of company"""'}), "('Company Name', max_length=64, null=False, blank=False,\n default='', help_text='Name of company')\n", (2091, 2192), False, 'from django.db import models\n'), ((2419, 2479), 'django.db.models.ImageField', 'models.ImageField', (['"""License Photo"""'], {'upload_to': 'license_images'}), "('License Photo', upload_to=license_images)\n", (2436, 2479), False, 'from django.db import models\n'), ((2491, 2643), 'django.db.models.CharField', 'models.CharField', (['"""License Title"""'], {'max_length': '(64)', 'null': '(False)', 'blank': '(False)', 'default': '"""Solutions License"""', 'help_text': '"""Title of license to be displayed"""'}), "('License Title', max_length=64, null=False, blank=False,\n default='Solutions License', help_text='Title of 
license to be displayed')\n", (2507, 2643), False, 'from django.db import models\n'), ((2856, 2926), 'django.db.models.CharField', 'models.CharField', (['"""First Name"""'], {'null': '(False)', 'blank': '(False)', 'max_length': '(32)'}), "('First Name', null=False, blank=False, max_length=32)\n", (2872, 2926), False, 'from django.db import models\n'), ((2943, 3012), 'django.db.models.CharField', 'models.CharField', (['"""Last Name"""'], {'null': '(False)', 'blank': '(False)', 'max_length': '(32)'}), "('Last Name', null=False, blank=False, max_length=32)\n", (2959, 3012), False, 'from django.db import models\n'), ((3027, 3094), 'django.db.models.CharField', 'models.CharField', (['"""Company"""'], {'null': '(False)', 'blank': '(False)', 'max_length': '(32)'}), "('Company', null=False, blank=False, max_length=32)\n", (3043, 3094), False, 'from django.db import models\n'), ((3107, 3158), 'django.db.models.EmailField', 'models.EmailField', (['"""Email"""'], {'blank': '(False)', 'null': '(False)'}), "('Email', blank=False, null=False)\n", (3124, 3158), False, 'from django.db import models\n'), ((3173, 3216), 'django.db.models.TextField', 'models.TextField', (['"""Message"""'], {'max_length': '(200)'}), "('Message', max_length=200)\n", (3189, 3216), False, 'from django.db import models\n')] |
from django.http import HttpResponse
from django.shortcuts import render
def home(request):
    """Render the landing page (index.html)."""
    return render(request,'index.html')
def table(request):
    """Render the basic table demo page (basic-table.html)."""
    return render(request,"basic-table.html") | [
"django.shortcuts.render"
] | [((103, 132), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (109, 132), False, 'from django.shortcuts import render\n'), ((163, 198), 'django.shortcuts.render', 'render', (['request', '"""basic-table.html"""'], {}), "(request, 'basic-table.html')\n", (169, 198), False, 'from django.shortcuts import render\n')] |
#!/usr/bin/env python
import sys
import os
import plotGlobalEndemicBehaviour as analysisFile
thisDir = os.path.dirname(os.path.abspath(__file__))  # NOTE(review): unused in this script — confirm before removing
filenames = []  # NOTE(review): unused in this script — confirm before removing
thepath = "/Users/pascal/Coding/uni/Masterarbeit/Tests/results/finalcont/"
scen = "scen"
thisScen = scen
listfile = "/comp3.txt" #"/testing_list.txt"
scenariotext = " of scenario "

# Plot scenarios 5 and 6: first the base variants ("scen5", "scen6"), then the
# "b" variants ("scen5b", "scen6b"), preserving the original execution order.
# The two near-identical loops from the original are folded into one.
for suffix in ("", "b"):
    for i in range(5, 7):
        thisScen = scen + str(i) + suffix
        filename = thepath + thisScen + listfile
        analysisFile.doThePlot(filename, scenariotext + str(i) + suffix)
| [
"os.path.abspath"
] | [((120, 145), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (135, 145), False, 'import os\n')] |
from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["v3HL7ContextConductionStyle"]
# Load the canonical CodeSystem definition from the sibling JSON file
# (same filename with a ".json" suffix).
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class v3HL7ContextConductionStyle:
    """
    v3 Code System HL7ContextConductionStyle
    The styles of context conduction usable by relationships within a static
    model derived from the HL7 Reference Information Model.
    Status: active - Version: 2018-08-12
    Copyright None
    http://terminology.hl7.org/CodeSystem/v3-HL7ContextConductionStyle
    """
    # Concept "C": conduction-indicator-based (deprecated as of March 2010).
    c = CodeSystemConcept(
        {
            "code": "C",
            "definition": "Definition: Context conduction is defined using the contextConductionCode and contextConductionInd attributes on ActRelationship and Participation.\r\n\n \n UsageNotes: This approach is deprecated as of March, 2010.",
            "display": "conduction-indicator-based",
        }
    )
    """
    conduction-indicator-based
    Definition: Context conduction is defined using the contextConductionCode and contextConductionInd attributes on ActRelationship and Participation.
    UsageNotes: Because this approach is deprecated as of March, 2010.
    """
    # Concept "I": inferred conduction (discouraged; interpretation may vary).
    i = CodeSystemConcept(
        {
            "code": "I",
            "definition": 'Definition: Context conduction is not explicitly defined. The recipient of an instance must infer conduction based on the semantics of the model and what is deemed "reasonable".\r\n\n \n UsageNotes: Because this approach can lead to variation in instance interpretation, its use is discouraged.',
            "display": "inferred",
        }
    )
    """
    inferred
    Definition: Context conduction is not explicitly defined. The recipient of an instance must infer conduction based on the semantics of the model and what is deemed "reasonable".
    UsageNotes: Because this approach can lead to variation in instance interpretation, its use is discouraged.
    """
    # Concept "V": vocabulary-based conduction.
    v = CodeSystemConcept(
        {
            "code": "V",
            "definition": 'Definition: Context conduction is defined using the ActRelationship.blockedContextActRelationshipType and blockedContextParticipationType attributes and the "conductible" property on the ActRelationshipType and ParticipationType code systems.',
            "display": "vocabulary-based",
        }
    )
    """
    vocabulary-based
    Definition: Context conduction is defined using the ActRelationship.blockedContextActRelationshipType and blockedContextParticipationType attributes and the "conductible" property on the ActRelationshipType and ParticipationType code systems.
    """
    class Meta:
        # Attach the parsed CodeSystem resource (module-level _resource).
        resource = _resource
| [
"oops_fhir.utils.CodeSystemConcept",
"pathlib.Path"
] | [((614, 978), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'C', 'definition':\n 'Definition: Context conduction is defined using the contextConductionCode and contextConductionInd attributes on ActRelationship and Participation.\\r\\n\\n \\n UsageNotes: This approach is deprecated as of March, 2010.'\n , 'display': 'conduction-indicator-based'}"], {}), "({'code': 'C', 'definition':\n 'Definition: Context conduction is defined using the contextConductionCode and contextConductionInd attributes on ActRelationship and Participation.\\r\\n\\n \\n UsageNotes: This approach is deprecated as of March, 2010.'\n , 'display': 'conduction-indicator-based'})\n", (631, 978), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((1352, 1778), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (['{\'code\': \'I\', \'definition\':\n \'Definition: Context conduction is not explicitly defined. The recipient of an instance must infer conduction based on the semantics of the model and what is deemed "reasonable".\\r\\n\\n \\n UsageNotes: Because this approach can lead to variation in instance interpretation, its use is discouraged.\'\n , \'display\': \'inferred\'}'], {}), '({\'code\': \'I\', \'definition\':\n \'Definition: Context conduction is not explicitly defined. 
The recipient of an instance must infer conduction based on the semantics of the model and what is deemed "reasonable".\\r\\n\\n \\n UsageNotes: Because this approach can lead to variation in instance interpretation, its use is discouraged.\'\n , \'display\': \'inferred\'})\n', (1369, 1778), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2214, 2546), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (['{\'code\': \'V\', \'definition\':\n \'Definition: Context conduction is defined using the ActRelationship.blockedContextActRelationshipType and blockedContextParticipationType attributes and the "conductible" property on the ActRelationshipType and ParticipationType code systems.\'\n , \'display\': \'vocabulary-based\'}'], {}), '({\'code\': \'V\', \'definition\':\n \'Definition: Context conduction is defined using the ActRelationship.blockedContextActRelationshipType and blockedContextParticipationType attributes and the "conductible" property on the ActRelationshipType and ParticipationType code systems.\'\n , \'display\': \'vocabulary-based\'})\n', (2231, 2546), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((201, 215), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (205, 215), False, 'from pathlib import Path\n')] |
"""
Authors: <<NAME>, <NAME>>
Copyright: (C) 2019-2020 <http://www.dei.unipd.it/
Department of Information Engineering> (DEI), <http://www.unipd.it/ University of Padua>, Italy
License: <http://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0>
"""
import os
import math
import string
import subprocess
import itertools
import pickle
import numpy as np
import xml.etree.ElementTree as ET
from collections import Counter
from functools import reduce
from textwrap import wrap
from whoosh.analysis import SimpleAnalyzer
from sklearn.metrics.pairwise import cosine_similarity
from tqdm import tqdm
class Utils(object):
    """Utility functions for neural vector space models (NVSM).

    Wraps a whoosh-style index (``index.reader()`` API) to build term
    dictionaries, encode corpora and queries as term-index sequences,
    generate training batches, run retrieval, and evaluate rankings with
    an external ``trec_eval`` binary.
    """
    def __init__(self, seed):
        """set random seed, initialize index variables"""
        np.random.seed(seed)
        # maps term -> integer index; filled by build_term_dictionary()
        self.term_dict = {}
    def build_term_dictionary(self, index, dict_size=65536, oov=False, remove_digits=True, min_doc_freq=2,
                              max_doc_freq=0.5):
        """create term dictionary (keeps the dict_size most frequent terms)"""
        reader = index.reader()
        # get corpus size
        corpus_size = reader.doc_count()
        # get unique terms statistics: (term, doc_freq, term_freq)
        terms = self.terms_statistics(index)
        # initialize count list
        count = []
        # add terms to count
        for term, doc_freq, term_freq in terms:
            # check if term does not exceed max_doc_freq (in %)
            if doc_freq / corpus_size <= max_doc_freq:
                # check if term is not inferior to min_doc_freq (not in %)
                if doc_freq >= min_doc_freq:
                    # check if term does not contain digits
                    if remove_digits:
                        if self.has_digit(term):  # skip term
                            continue
                        else:  # keep term
                            count.extend([(term, term_freq)])
                    else:  # keep terms containing digits
                        count.extend([(term, term_freq)])
                else:  # minimum doc freq not reached
                    # skip term
                    continue
            else:  # maximum doc freq exceeded
                # skip term
                continue
        # convert count into Counter object and keep dict_size most frequent terms
        count = Counter(dict(count)).most_common(dict_size)
        if oov:
            # include out of vocabulary token
            count.extend([("__UNK__", 1)])  # last index: dict_size
        # for each term - that we want in the dictionary - add it and make it the value of the prior dictionary length
        for term, term_freq in count:
            self.term_dict[term] = len(self.term_dict)
        return True
    def has_digit(self, term):
        """check whether input term contains digits"""
        return any(char.isdigit() for char in term)
    def only_digits(self, term):
        """check whether input term contains only digits and/or punctuation"""
        return all(char.isdigit() or char in string.punctuation for char in term)
    def get_term_dictionary(self):
        """get term dictionary"""
        return self.term_dict
    def update_term_dictionary(self, term):
        """update term dictionary"""
        if term in self.term_dict:  # term already in term_dict
            return True
        else:  # update term_dict
            self.term_dict[term] = len(self.term_dict)
            return True
    def find_pos(self, line):
        """split text into terms and return dict {pos: [term, ["__NULL__"]]}"""
        pos_terms = {}
        terms = line.split()
        # define sentence index
        index = line.index
        running_offset = 0
        # loop over terms
        for term in terms:
            # get term offset
            term_offset = index(term, running_offset)
            term_len = len(term)
            # update running offset
            running_offset = term_offset + term_len
            # append to term_offset each term + ["__NULL__"] for later use
            pos_terms[term_offset] = [term, ["__NULL__"]]
        return pos_terms
    def terms_statistics(self, index):
        """get unique terms statistics: list of (term, doc_freq, term_freq)"""
        reader = index.reader()
        # unique terms
        terms = list(reader.field_terms('text'))
        # terms statistics
        terms_stats = list()
        # loop over unique terms
        for term in terms:
            # term info
            term_info = reader.term_info('text', term)
            # doc frequency
            doc_freq = term_info.doc_frequency()
            # term frequency
            term_freq = term_info.weight()
            # append info to terms statistics
            terms_stats.append((term, doc_freq, term_freq))
        return terms_stats
    def index_statistics(self, index):
        """compute and print index statistics"""
        reader = index.reader()
        # doc indexes in whoosh
        doc_ids = list(reader.all_doc_ids())
        # corpus size
        corpus_size = reader.doc_count()
        # maximum length of given field across all documents
        max_length = reader.max_field_length('text')
        # minimum length of given field across all documents
        min_length = reader.min_field_length('text')
        # total number of terms in given field
        corpus_length = reader.field_length('text')
        # total number of unique terms
        terms = list(reader.field_terms('text'))
        # number of terms in given field in given document
        docs_length = list()
        for doc_id in doc_ids:
            doc_length = reader.doc_field_length(doc_id, 'text')
            if doc_length:
                docs_length.append(doc_length)
            else:
                docs_length.append(0)
        # average length of given field across all documents in corpus
        avg_length = reduce((lambda x, y: x + y), docs_length) / corpus_size
        # print statistics
        print('corpus size: {}'.format(corpus_size))
        print('maximum length: {}'.format(max_length))
        print('minimum length: {}'.format(min_length))
        print('average length: {}'.format(avg_length))
        print('all terms: {}'.format(corpus_length))
        print('unique terms: {}'.format(len(terms)))
        return True
    def corpus_statistics(self, corpus):
        """compute and print corpus statistics"""
        corpus_size = len(corpus)
        # compute documents lengths
        docs_length = np.array([len(doc) for doc in corpus])
        # compute corpus length
        corpus_length = [term for doc in corpus for term in doc]
        # print statistics
        print('corpus size: {}'.format(corpus_size))
        print('maximum length: {}'.format(np.max(docs_length)))
        print('minimum length: {}'.format(np.min(docs_length)))
        print('average length: {}'.format(np.mean(docs_length)))
        print('median length: {}'.format(np.median(docs_length)))
        print('std length: {}'.format(np.std(docs_length)))
        print('all terms: {}'.format(len(corpus_length)))
        return True
    def compute_num_batches(self, corpus, batch_size, ngram_size):
        """compute number of batch iterations per epoch"""
        docs_length = [len(doc) for doc in corpus]
        # compute number of batches
        num_batches = math.ceil(sum([max(doc_length - ngram_size + 1, 0) for doc_length in docs_length]) / batch_size)
        return num_batches
    def store_doc_labels(self, index, out_dir):
        """store document labels dictionary (pickled to out_dir/ix2label.pkl)"""
        reader = index.reader()
        doc_ids = list(reader.all_doc_ids())
        # define doc labels list
        doc_labels = list()
        for doc_id in doc_ids:
            label = reader.stored_fields(doc_id)['docno']
            doc_labels.append(label)
        # convert doc labels list into dicts
        ix2label = {ix: docid for ix, docid in enumerate(doc_labels)}
        # store doc labels dict
        with open(out_dir + '/ix2label.pkl', 'wb') as out:
            pickle.dump(ix2label, out)
        return ix2label
    def get_doc_labels(self, data_path):
        """read dict of doc labels (e.g. TREC <DOCNO> values)"""
        with open(data_path + '/ix2label.pkl', 'rb') as dfile:
            ix2label = pickle.load(dfile)
        return ix2label
    """
    def get_doc_labels(self, index):
        # return list of document labels (e.g. TREC <DOCNO> values)
        reader = index.reader()
        doc_ids = list(reader.all_doc_ids())
        # define doc labels list
        doc_labels = list()
        for doc_id in doc_ids:
            label = reader.stored_fields(doc_id)['docno']
            doc_labels.append(label)
        return doc_labels
    """
    def corpus2idx(self, index, oov=False):
        """convert documents into list of indices"""
        reader = index.reader()
        # define corpus as a list of lists
        corpus = []
        # get doc ids (whoosh' index ids)
        doc_ids = list(reader.all_doc_ids())
        # encode corpus
        for doc_id in doc_ids:
            # read doc and return its contents as an ordered seq of terms
            terms = self.pos2terms(reader, doc_id)
            # store doc as ordered list of index terms
            doc = list()
            for term in terms:
                if term in self.term_dict:
                    doc.append(self.term_dict[term])
                else:
                    if oov:  # store oov index
                        doc.append(self.term_dict['__UNK__'])
                    else:  # skip term
                        continue
            # store processed doc in corpus
            corpus.append(doc)
        return corpus
    def pos2terms(self, reader, doc_id):
        """return list of ordered doc terms given doc id"""
        if reader.has_vector(doc_id, 'text'):
            doc_data = reader.vector(doc_id, 'text').items_as('positions')
            # get term-positions dict: {term: [pos1, pos2, ...], ...}
            term_pos = dict(doc_data)
            # create position-term dict: {pos1: term, pos2: term, ...}
            pos_term = dict()
            for term, positions in term_pos.items():
                for pos in positions:
                    pos_term[pos] = term
            # return ordered list of doc terms
            return [pos_term.get(i) for i in range(min(pos_term), max(pos_term) + 1)]
        else:  # target doc does not contain terms
            return []
    def generate_batch_data(self, corpus, allowed_docs, batch_size, ngram_size, neg_samples):
        """generate a batch of data for given corpus (optimized)"""
        corpus_size = len(corpus)
        # select random documents from allowed documents (i.e. documents with len(doc) >= ngram_size)
        rand_docs_idx = np.random.choice(allowed_docs, size=batch_size)
        # compute documents length
        docs_length = [len(corpus[rand_doc_idx]) for rand_doc_idx in rand_docs_idx]
        # store position of last prefixes + 1 (one above the highest prefix available)
        last_prefixes = [doc_length - ngram_size + 1 for doc_length in docs_length]
        # sample random prefixes lower than or equal to last_prefixes
        prefixes = [np.random.randint(last_prefix) for last_prefix in last_prefixes]
        # slices = prefixes + ngram_size
        ngrams = [corpus[rand_doc_idx][prefix:prefix + ngram_size] for rand_doc_idx, prefix in
                  zip(rand_docs_idx, prefixes)]
        # generate negative labels - discrete uniform distribution
        negative_labels = np.random.randint(corpus_size, size=[batch_size, neg_samples])
        # convert batch data to numpy array
        ngrams = np.array(ngrams)
        # return batch data in the form: (ngrams, true labels, negative labels)
        return ngrams, rand_docs_idx, negative_labels
    def get_allowed_docs(self, corpus, ngram_size):
        """return list of allowed documents (as whoosh's indexes) for the given ngram size"""
        allowed_docs = list()
        del_docs = list()
        # loop over documents and store doc indexes when len(doc) >= ngram_size
        for idx, doc in enumerate(corpus):
            if len(doc) >= ngram_size:
                allowed_docs.append(idx)
            else:
                del_docs.append(idx)
        print('deleted {} docs'.format(len(del_docs)))
        return np.array(allowed_docs)
    def read_ohsu_queries(self, query_path):
        """read query file and return a dict[id] = {title: <string>, desc: <string>}"""
        with open(query_path, 'r') as qf:
            q = qf.read()
        q = [query.split('\n') for query in q.split('\n\n') if query]
        # loop through each query and fill dict
        qdict = dict()
        for query in q:
            qid = query[1].split()[-1]
            qdict[qid] = dict()
            qdict[qid]['title'] = query[2].split('<title>')[1].strip()
            qdict[qid]['desc'] = query[4]
        return qdict
    def read_trec_queries(self, query_path):
        """read query file and return a dict[id] = query"""
        with open(query_path, 'r') as qf:
            xml = qf.readlines()
        # convert into true xml
        true_xml = []
        # properly close tags
        for line in xml:
            if '<title>' in line:
                line = '</num>\n' + line
            if '<desc>' in line:
                line = '</title>\n' + line
            if '<narr>' in line:
                line = '</desc>\n' + line
            if '</top>' in line:
                line = '</narr>\n' + line
            # remove noisy information
            line = line.replace('Number:', '')
            line = line.replace('Topic:', '')
            line = line.replace('Description:', '')
            # convert non-valid xml chars
            line = line.replace('&', '&amp;')
            # strip string
            line = line.strip()
            true_xml.append(line)
        # reconvert list to single string
        true_xml = ''.join(true_xml)
        # add root
        true_xml = '<ROOT>' + true_xml + '</ROOT>'
        root = ET.fromstring(true_xml)
        # define query dict: {qid: {title:, desc:}, ...}
        qdict = dict()
        # loop through each query
        for q in root:
            qid = q.find('num').text.strip()
            qdict[qid] = {}
            qdict[qid]['title'] = q.find('title').text.strip()
            qdict[qid]['desc'] = q.find('desc').text.strip()
        return qdict
    def read_clef_queries(self, query_path):  # TODO: add description field
        """read query file and return a dict[id] = query"""
        qdict = dict()
        with open(query_path, 'r') as qf:
            xml = qf.read()
        root = ET.fromstring(xml)
        # loop through each query
        for q in root:
            qid = q.find('identifier').text.strip()
            qdict[qid] = {}
            qdict[qid]['title'] = q.find('title').text.strip()
            qdict[qid]['desc'] = q.find('description').text.strip()
        return qdict
    def tokenize_query(self, q):
        """lowerize and tokenize query"""
        analyzer = SimpleAnalyzer()
        return [token.text for token in analyzer(q)]
    def query2idx(self, q, qid, oov=False):
        """convert query terms to indices"""
        query_idx = list()
        for term in q:
            if term in self.term_dict:
                query_idx.append(self.term_dict[term])
            else:
                if oov:  # keep term as __UNK__ token
                    query_idx.append(self.term_dict['__UNK__'])
                else:  # skip term
                    continue
        if not query_idx:
            print('query {} does not contain terms'.format(qid))
            return None
        else:
            return np.array(query_idx)
    def query_projection(self, query_idx, word_embs, proj_weights):
        """convert list of indices into dense vector of size [1, doc_embs]"""
        if query_idx is None:
            return None
        else:
            return np.matmul(proj_weights, np.mean(word_embs[query_idx], axis=0))
    def prepare_query(self, qid, qtext, word_embs, proj_weights, oov=False):
        """transform query into dense vector of size [1, doc_embs]"""
        query_tokens = self.tokenize_query(qtext)
        query_idx = self.query2idx(query_tokens, qid, oov)
        query_proj = self.query_projection(query_idx, word_embs, proj_weights)
        return query_proj
    def perform_search(self, doc_labels, docs, query_ids, queries, ranking_path):
        """perform search over docs given queries and write a TREC-format run file"""
        #doc_labels = np.array(doc_labels)
        # compute similarities
        print('compute similarities between docs and queries')
        similarities = cosine_similarity(docs, queries)
        # open file to write results
        ranking_name = 'nvsm'  # os.path.basename(ranking_path)
        # rf = open(ranking_folder + '/' + ranking_name + '.run', 'w')
        rf = open(ranking_path, 'w')
        # write results in ranking file
        for i in tqdm(range(similarities.shape[1])):
            # keep top-1000 docs per query, best first
            rank = np.argsort(-similarities[:, i])[:1000]
            #docs_rank = doc_labels[rank]
            docs_rank = [doc_labels[r] for r in rank]
            qid = query_ids[i]
            # verify whether qid is an integer
            if qid.isdigit():  # cast to integer - this operation avoids storing topic ids as '059' instead of '59'
                qid = str(int(qid))  # convert to int and then back to str
            for j in range(len(docs_rank)):
                # write into .run file
                rf.write('%s\t%d\t%s\t%d\t%f\t%s\n' % (qid, 0, docs_rank[j], j, similarities[rank[j]][i], ranking_name))
        rf.close()
        return True
    def get_averaged_measure_score(self, run_path, qrel_path, measure):
        """return averaged measure score over topics (shells out to ./trec_eval)"""
        if "P_" in measure:
            cmd = "./trec_eval/trec_eval -m " + measure.split('_')[0] + " " + qrel_path + " " + run_path
        elif "ndcg_cut" in measure:
            cmd = "./trec_eval/trec_eval -m " + measure.split('_')[0] + '_' + measure.split('_')[
                1] + " " + qrel_path + " " + run_path
        else:
            cmd = "./trec_eval/trec_eval -m " + measure + " " + qrel_path + " " + run_path
        process = subprocess.run(cmd.split(), stdout=subprocess.PIPE)
        result = process.stdout.decode('utf-8').split('\n')
        qscore = np.array([score.split('\t')[-1] for score in result
                           if score.split('\t')[0].strip() == measure])
        # NOTE(review): np.float was removed in NumPy >= 1.24; this should be plain float
        qscore = qscore.astype(np.float)[0]
        return qscore
    def evaluate_rankings(self, ranking_path, qrels_folder, qrels_name):
        """evaluate rankings performed by neural models"""
        qrels_file_path = qrels_folder + '/' + qrels_name + '.qrel'
        print('qrels file: ' + qrels_file_path)
        if not os.path.isfile(qrels_file_path):
            print('QRELS file NOT FOUND!')
        if not os.path.isfile(ranking_path):
            print('RANKING file NOT FOUND!')
        print('evaluate model ranking')
        MAP = self.get_averaged_measure_score(ranking_path, qrels_file_path, 'map')
        NDCG = self.get_averaged_measure_score(ranking_path, qrels_file_path, 'ndcg_cut_100')
        P_10 = self.get_averaged_measure_score(ranking_path, qrels_file_path, 'P_10')
        print('MAP: ' + str(MAP), 'NDCG: ' + str(NDCG), 'P@10: ' + str(P_10))
        return MAP
| [
"numpy.mean",
"numpy.median",
"pickle.dump",
"sklearn.metrics.pairwise.cosine_similarity",
"numpy.random.choice",
"functools.reduce",
"numpy.std",
"pickle.load",
"numpy.max",
"os.path.isfile",
"numpy.array",
"numpy.random.randint",
"numpy.argsort",
"numpy.random.seed",
"numpy.min",
"xm... | [((834, 854), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (848, 854), True, 'import numpy as np\n'), ((11079, 11126), 'numpy.random.choice', 'np.random.choice', (['allowed_docs'], {'size': 'batch_size'}), '(allowed_docs, size=batch_size)\n', (11095, 11126), True, 'import numpy as np\n'), ((11860, 11922), 'numpy.random.randint', 'np.random.randint', (['corpus_size'], {'size': '[batch_size, neg_samples]'}), '(corpus_size, size=[batch_size, neg_samples])\n', (11877, 11922), True, 'import numpy as np\n'), ((11986, 12002), 'numpy.array', 'np.array', (['ngrams'], {}), '(ngrams)\n', (11994, 12002), True, 'import numpy as np\n'), ((12683, 12705), 'numpy.array', 'np.array', (['allowed_docs'], {}), '(allowed_docs)\n', (12691, 12705), True, 'import numpy as np\n'), ((14443, 14466), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['true_xml'], {}), '(true_xml)\n', (14456, 14466), True, 'import xml.etree.ElementTree as ET\n'), ((15084, 15102), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['xml'], {}), '(xml)\n', (15097, 15102), True, 'import xml.etree.ElementTree as ET\n'), ((15498, 15514), 'whoosh.analysis.SimpleAnalyzer', 'SimpleAnalyzer', ([], {}), '()\n', (15512, 15514), False, 'from whoosh.analysis import SimpleAnalyzer\n'), ((17164, 17196), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['docs', 'queries'], {}), '(docs, queries)\n', (17181, 17196), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((6044, 6083), 'functools.reduce', 'reduce', (['(lambda x, y: x + y)', 'docs_length'], {}), '(lambda x, y: x + y, docs_length)\n', (6050, 6083), False, 'from functools import reduce\n'), ((8253, 8279), 'pickle.dump', 'pickle.dump', (['ix2label', 'out'], {}), '(ix2label, out)\n', (8264, 8279), False, 'import pickle\n'), ((8503, 8521), 'pickle.load', 'pickle.load', (['dfile'], {}), '(dfile)\n', (8514, 8521), False, 'import pickle\n'), ((11513, 11543), 'numpy.random.randint', 'np.random.randint', 
(['last_prefix'], {}), '(last_prefix)\n', (11530, 11543), True, 'import numpy as np\n'), ((16168, 16187), 'numpy.array', 'np.array', (['query_idx'], {}), '(query_idx)\n', (16176, 16187), True, 'import numpy as np\n'), ((19361, 19392), 'os.path.isfile', 'os.path.isfile', (['qrels_file_path'], {}), '(qrels_file_path)\n', (19375, 19392), False, 'import os\n'), ((19454, 19482), 'os.path.isfile', 'os.path.isfile', (['ranking_path'], {}), '(ranking_path)\n', (19468, 19482), False, 'import os\n'), ((6932, 6951), 'numpy.max', 'np.max', (['docs_length'], {}), '(docs_length)\n', (6938, 6951), True, 'import numpy as np\n'), ((6997, 7016), 'numpy.min', 'np.min', (['docs_length'], {}), '(docs_length)\n', (7003, 7016), True, 'import numpy as np\n'), ((7062, 7082), 'numpy.mean', 'np.mean', (['docs_length'], {}), '(docs_length)\n', (7069, 7082), True, 'import numpy as np\n'), ((7127, 7149), 'numpy.median', 'np.median', (['docs_length'], {}), '(docs_length)\n', (7136, 7149), True, 'import numpy as np\n'), ((7191, 7210), 'numpy.std', 'np.std', (['docs_length'], {}), '(docs_length)\n', (7197, 7210), True, 'import numpy as np\n'), ((16453, 16490), 'numpy.mean', 'np.mean', (['word_embs[query_idx]'], {'axis': '(0)'}), '(word_embs[query_idx], axis=0)\n', (16460, 16490), True, 'import numpy as np\n'), ((17525, 17556), 'numpy.argsort', 'np.argsort', (['(-similarities[:, i])'], {}), '(-similarities[:, i])\n', (17535, 17556), True, 'import numpy as np\n')] |
# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn.functional as F
from paddle.nn import LSTM, Embedding, Dropout, Linear
import numpy as np
class SentimentClassifier(paddle.nn.Layer):
    """LSTM-based sentence sentiment classifier.

    Embeds each token, encodes the sentence with a (multi-layer) LSTM, and
    maps the final hidden state to `class_num` sentiment classes.
    """
    def __init__(self, hidden_size, vocab_size, class_num=2, num_steps=128, num_layers=1, init_scale=0.1, dropout=None):
        """
        Args:
            hidden_size: embedding size and LSTM hidden/cell dimension.
            vocab_size: size of the vocabulary the model can represent.
            class_num: number of sentiment classes (binary by default).
            num_steps: maximum sentence length the model can handle.
            num_layers: number of stacked LSTM layers.
            init_scale: uniform initialization range for parameters. LSTMs
                rely heavily on Tanh/Sigmoid activations, which are
                numerically sensitive, so a small range is used.
            dropout: dropout probability applied to the embeddings, or
                None/0.0 to disable dropout.
        """
        super(SentimentClassifier, self).__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.class_num = class_num
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        # LSTM that encodes each sentence into a vector
        self.simple_lstm_rnn = LSTM(input_size=hidden_size, hidden_size=hidden_size, num_layers=num_layers)
        # Embedding layer mapping word ids to dense vectors
        self.embedding = Embedding(num_embeddings=vocab_size, embedding_dim=hidden_size, sparse=False,
                                   weight_attr=paddle.ParamAttr(initializer=paddle.nn.initializer.Uniform(low=-init_scale, high=init_scale)))
        # After obtaining the sentence vector, project it onto the class
        # logits: multiply by W of shape [hidden_size, class_num] and add a
        # bias of shape [class_num].
        self.cls_fc = Linear(in_features=self.hidden_size, out_features=self.class_num,
                             weight_attr=None, bias_attr=None)
        # FIX: only build a dropout layer when dropout is enabled.
        # paddle.nn.Dropout expects a float probability, so constructing it
        # with p=None (the default) is invalid/fragile.
        if self.dropout is not None and self.dropout > 0.0:
            self.dropout_layer = Dropout(p=self.dropout, mode='upscale_in_train')
        else:
            self.dropout_layer = None
    def forward(self, input, label):
        """Return (pred, loss) for a mini-batch of token-id sentences and labels."""
        batch_size = len(input)
        # Zero-initialise the LSTM hidden and cell state for this batch
        init_hidden_data = np.zeros(
            (self.num_layers, batch_size, self.hidden_size), dtype='float32')
        init_cell_data = np.zeros(
            (self.num_layers, batch_size, self.hidden_size), dtype='float32')
        # Wrap the initial state as tensors; stop_gradient=True keeps them
        # from being updated, which would hurt training
        init_hidden = paddle.to_tensor(init_hidden_data)
        init_hidden.stop_gradient = True
        init_cell = paddle.to_tensor(init_cell_data)
        init_cell.stop_gradient = True
        init_h = paddle.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size])
        init_c = paddle.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size])
        # Look up word embeddings for the mini-batch of sentences
        x_emb = self.embedding(input)
        x_emb = paddle.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size])
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = self.dropout_layer(x_emb)
        # Encode each sentence into a vector with the LSTM
        rnn_out, (last_hidden, last_cell) = self.simple_lstm_rnn(x_emb, (init_h, init_c))
        last_hidden = paddle.reshape(
            last_hidden[-1], shape=[-1, self.hidden_size])
        # Map each sentence vector to sentiment classes
        projection = self.cls_fc(last_hidden)
        pred = F.softmax(projection, axis=-1)
        # Cross-entropy loss against the given labels, as is standard for
        # classification tasks
        loss = F.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False)
        loss = paddle.mean(loss)
        # Return the prediction and the network loss
        return pred, loss
| [
"paddle.nn.Dropout",
"paddle.nn.functional.softmax_with_cross_entropy",
"paddle.nn.LSTM",
"paddle.mean",
"numpy.zeros",
"paddle.to_tensor",
"paddle.nn.Linear",
"paddle.reshape",
"paddle.nn.functional.softmax",
"paddle.nn.initializer.Uniform"
] | [((1628, 1704), 'paddle.nn.LSTM', 'LSTM', ([], {'input_size': 'hidden_size', 'hidden_size': 'hidden_size', 'num_layers': 'num_layers'}), '(input_size=hidden_size, hidden_size=hidden_size, num_layers=num_layers)\n', (1632, 1704), False, 'from paddle.nn import LSTM, Embedding, Dropout, Linear\n'), ((2294, 2397), 'paddle.nn.Linear', 'Linear', ([], {'in_features': 'self.hidden_size', 'out_features': 'self.class_num', 'weight_attr': 'None', 'bias_attr': 'None'}), '(in_features=self.hidden_size, out_features=self.class_num,\n weight_attr=None, bias_attr=None)\n', (2300, 2397), False, 'from paddle.nn import LSTM, Embedding, Dropout, Linear\n'), ((2452, 2500), 'paddle.nn.Dropout', 'Dropout', ([], {'p': 'self.dropout', 'mode': '"""upscale_in_train"""'}), "(p=self.dropout, mode='upscale_in_train')\n", (2459, 2500), False, 'from paddle.nn import LSTM, Embedding, Dropout, Linear\n'), ((2655, 2729), 'numpy.zeros', 'np.zeros', (['(self.num_layers, batch_size, self.hidden_size)'], {'dtype': '"""float32"""'}), "((self.num_layers, batch_size, self.hidden_size), dtype='float32')\n", (2663, 2729), True, 'import numpy as np\n'), ((2768, 2842), 'numpy.zeros', 'np.zeros', (['(self.num_layers, batch_size, self.hidden_size)'], {'dtype': '"""float32"""'}), "((self.num_layers, batch_size, self.hidden_size), dtype='float32')\n", (2776, 2842), True, 'import numpy as np\n'), ((2958, 2992), 'paddle.to_tensor', 'paddle.to_tensor', (['init_hidden_data'], {}), '(init_hidden_data)\n', (2974, 2992), False, 'import paddle\n'), ((3054, 3086), 'paddle.to_tensor', 'paddle.to_tensor', (['init_cell_data'], {}), '(init_cell_data)\n', (3070, 3086), False, 'import paddle\n'), ((3144, 3218), 'paddle.reshape', 'paddle.reshape', (['init_hidden'], {'shape': '[self.num_layers, -1, self.hidden_size]'}), '(init_hidden, shape=[self.num_layers, -1, self.hidden_size])\n', (3158, 3218), False, 'import paddle\n'), ((3249, 3321), 'paddle.reshape', 'paddle.reshape', (['init_cell'], {'shape': '[self.num_layers, -1, 
self.hidden_size]'}), '(init_cell, shape=[self.num_layers, -1, self.hidden_size])\n', (3263, 3321), False, 'import paddle\n'), ((3426, 3493), 'paddle.reshape', 'paddle.reshape', (['x_emb'], {'shape': '[-1, self.num_steps, self.hidden_size]'}), '(x_emb, shape=[-1, self.num_steps, self.hidden_size])\n', (3440, 3493), False, 'import paddle\n'), ((3758, 3819), 'paddle.reshape', 'paddle.reshape', (['last_hidden[-1]'], {'shape': '[-1, self.hidden_size]'}), '(last_hidden[-1], shape=[-1, self.hidden_size])\n', (3772, 3819), False, 'import paddle\n'), ((3927, 3957), 'paddle.nn.functional.softmax', 'F.softmax', (['projection'], {'axis': '(-1)'}), '(projection, axis=-1)\n', (3936, 3957), True, 'import paddle.nn.functional as F\n'), ((4034, 4112), 'paddle.nn.functional.softmax_with_cross_entropy', 'F.softmax_with_cross_entropy', ([], {'logits': 'projection', 'label': 'label', 'soft_label': '(False)'}), '(logits=projection, label=label, soft_label=False)\n', (4062, 4112), True, 'import paddle.nn.functional as F\n'), ((4141, 4158), 'paddle.mean', 'paddle.mean', (['loss'], {}), '(loss)\n', (4152, 4158), False, 'import paddle\n'), ((1926, 1989), 'paddle.nn.initializer.Uniform', 'paddle.nn.initializer.Uniform', ([], {'low': '(-init_scale)', 'high': 'init_scale'}), '(low=-init_scale, high=init_scale)\n', (1955, 1989), False, 'import paddle\n')] |
import pytest
from demo_project.main import app
from fastapi.testclient import TestClient
# Expected OpenAPI document for the demo app; asserted verbatim in test_openapi_schema.
openapi_schema = {
    'openapi': '3.0.2',
    'info': {
        'title': 'My Project',
        'description': '## Welcome to my API! \n This is my description, written in `markdown`',
        'version': '1.0.0',
    },
    'paths': {
        '/api/v1/hello': {
            'get': {
                'tags': ['hello'],
                'summary': 'Say hello',
                'description': 'Wonder who we say hello to?',
                'operationId': 'helloWorld',
                'responses': {
                    '200': {
                        'description': 'Successful Response',
                        'content': {
                            'application/json': {'schema': {'$ref': '#/components/schemas/HelloWorldResponse'}}
                        },
                    }
                },
                'security': [{'Azure AD - PKCE, Single-tenant': []}],
            }
        },
        '/api/v1/hello-multi-auth': {
            'get': {
                'tags': ['hello'],
                'summary': 'Say hello with an API key',
                'description': 'Wonder how this auth is done?',
                'operationId': 'helloWorldApiKey',
                'responses': {
                    '200': {
                        'description': 'Successful Response',
                        'content': {'application/json': {'schema': {'$ref': '#/components/schemas/TokenType'}}},
                    }
                },
                'security': [{'Azure AD - PKCE, Multi-tenant': []}, {'APIKeyHeader': []}],
            }
        },
    },
    'components': {
        'schemas': {
            'HelloWorldResponse': {
                'title': 'HelloWorldResponse',
                'required': ['hello', 'user'],
                'type': 'object',
                'properties': {
                    'hello': {'title': 'Hello', 'type': 'string', 'description': 'What we\'re saying hello to'},
                    'user': {
                        'title': 'User',
                        'allOf': [{'$ref': '#/components/schemas/User'}],
                        'description': 'The user object',
                    },
                },
            },
            'TokenType': {
                'title': 'TokenType',
                'required': ['api_key', 'azure_auth'],
                'type': 'object',
                'properties': {
                    'api_key': {'title': 'Api Key', 'type': 'boolean', 'description': 'API key was used'},
                    'azure_auth': {'title': 'Azure Auth', 'type': 'boolean', 'description': 'Azure auth was used'},
                },
            },
            'User': {
                'title': 'User',
                'required': ['aud', 'tid', 'claims', 'access_token'],
                'type': 'object',
                'properties': {
                    'aud': {'title': 'Aud', 'type': 'string', 'description': 'Audience'},
                    'tid': {'title': 'Tid', 'type': 'string', 'description': 'Tenant ID'},
                    'roles': {
                        'title': 'Roles',
                        'type': 'array',
                        'items': {'type': 'string'},
                        'description': 'Roles (Groups) the user has for this app',
                        'default': [],
                    },
                    'claims': {'title': 'Claims', 'type': 'object', 'description': 'The entire decoded token'},
                    'scp': {'title': 'Scp', 'type': 'string', 'description': 'Scope'},
                    'name': {'title': 'Name', 'type': 'string', 'description': 'Name'},
                    'access_token': {
                        'title': 'Access Token',
                        'type': 'string',
                        'description': 'The access_token. Can be used for fetching the Graph API',
                    },
                },
            },
        },
        'securitySchemes': {
            'Azure AD - PKCE, Single-tenant': {
                'type': 'oauth2',
                'description': '`Leave client_secret blank`',
                'flows': {
                    'authorizationCode': {
                        'scopes': {
                            'api://oauth299-9999-9999-abcd-efghijkl1234567890/user_impersonation': '**No client secret needed, leave blank**'
                        },
                        'authorizationUrl': 'https://login.microsoftonline.com/intility_tenant_id/oauth2/v2.0/authorize',
                        'tokenUrl': 'https://login.microsoftonline.com/intility_tenant_id/oauth2/v2.0/token',
                    }
                },
            },
            'Azure AD - PKCE, Multi-tenant': {
                'description': '`Leave ' 'client_secret ' 'blank`',
                'flows': {
                    'authorizationCode': {
                        'authorizationUrl': 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize',
                        'scopes': {
                            'api://oauth299-9999-9999-abcd-efghijkl1234567890/user_impersonation': 'User '
                                                                                                    'impersonation'
                        },
                        'tokenUrl': 'https://login.microsoftonline.com/common/oauth2/v2.0/token',
                    }
                },
                'type': 'oauth2',
            },
            'APIKeyHeader': {'type': 'apiKey', 'in': 'header', 'name': 'TEST-API-KEY'},
        },
    },
}
@pytest.fixture
def test_client():
    """Yield a TestClient for the app without running startup events.

    All these tests would otherwise fail before loading the OpenID Connect
    configuration.
    """
    client = TestClient(app=app)
    yield client
def test_openapi_schema(test_client):
    """The generated OpenAPI document must match the expected schema exactly."""
    resp = test_client.get('api/v1/openapi.json')
    assert resp.status_code == 200, resp.text
    assert resp.json() == openapi_schema
def test_no_token(test_client):
    """A request without an Authorization header is rejected with 401."""
    resp = test_client.get('/api/v1/hello')
    assert resp.status_code == 401, resp.text
    assert resp.json() == {'detail': 'Not authenticated'}
def test_incorrect_token(test_client):
    """A non-Bearer Authorization header is treated as unauthenticated."""
    resp = test_client.get('/api/v1/hello', headers={'Authorization': 'Non-existent testtoken'})
    assert resp.status_code == 401, resp.text
    assert resp.json() == {'detail': 'Not authenticated'}
def test_token(test_client):
    """A Bearer header with an empty token yields an invalid-format 401."""
    resp = test_client.get('/api/v1/hello', headers={'Authorization': 'Bearer '})
    assert resp.status_code == 401, resp.text
    assert resp.json() == {'detail': 'Invalid token format'}
| [
"fastapi.testclient.TestClient"
] | [((5712, 5731), 'fastapi.testclient.TestClient', 'TestClient', ([], {'app': 'app'}), '(app=app)\n', (5722, 5731), False, 'from fastapi.testclient import TestClient\n')] |
import display
import pytest
import msgflo
import gevent
import os.path
BROKER = os.environ.get('MSGFLO_BROKER', 'mqtt://localhost')
# Helper for running one iteration of next_state()
def run_next(state, inputs):
    """Run one next_state() iteration from keyword dicts.

    Args:
        state: keyword arguments for display.State.
        inputs: keyword arguments for display.Inputs.
    Returns:
        The new state produced by display.next_state().
    """
    current = display.State(**state)
    # avoid rebinding the 'inputs' parameter and shadowing the builtin 'next'
    current_inputs = display.Inputs(**inputs)
    return display.next_state(current, current_inputs)
# State calculation tests
def test_no_wind_no_fan():
    """With zero wind speed the fan must stay off."""
    result = run_next({}, {'time': 0.0, 'windspeed': 0.0})
    assert result.fan_speed == 0.0
def create_mqtt_client(broker_url):
    """Build a paho MQTT client from a broker URL.

    Returns (client, host, port); connecting is left to the caller.
    """
    import paho.mqtt.client as mqtt
    from urllib.parse import urlparse
    info = urlparse(broker_url)
    client = mqtt.Client()
    if info.username:
        client.username_pw_set(info.username, info.password)
    client.reconnect_delay_set(min_delay=0.1, max_delay=2*60)
    secure = info.scheme == 'mqtts'
    if secure:
        client.tls_set()
    port = info.port or (8883 if secure else 1883)
    # XXX loop() does not handle reconnects, have to use loop_start() or loop_forever()
    client.loop_start()
    return client, info.hostname, port
@pytest.fixture()
def mqtt_client():
    """Yield an MQTT client connected to BROKER; disconnect on teardown."""
    client, host, port = create_mqtt_client(BROKER)
    connect_timeout = 2
    client.connect(host, port, connect_timeout)
    gevent.sleep(0.5)
    yield client
    client.disconnect()
@pytest.fixture()
def participant():
    """Yield a connected msgflo Participant with role testdisplay/0."""
    p = display.Participant('testdisplay/0')
    engine = msgflo.run([p], broker=BROKER)
    gevent.sleep(0.2)
    assert p._engine.connected
    yield p
    engine._client = None  # disconnect
# MQTT / tests
def test_mqtt_windspeed_update(participant, mqtt_client):
    """Publishing a windspeed over MQTT updates the participant and persists it."""
    if os.path.exists(participant.windspeed_file):
        os.remove(participant.windspeed_file)
    topic = participant.definition['role'] + '/windspeed'
    print('sending to', topic)
    assert 'testdisplay/0' in topic
    participant.recalculate_state()
    assert participant.inputs.windspeed == 0.0
    # Publish a new windspeed reading
    mqtt_client.publish(topic, '13.44')
    gevent.sleep(0.5)
    # FIXME: check get response
    # the value should now be picked up and written to disk
    assert participant.inputs.windspeed == 13.44
    assert os.path.exists(participant.windspeed_file)
def test_windspeed_load_disk():
    """A windspeed value persisted on disk is loaded by recalculate_state()."""
    p = display.Participant('testdisplay/1')
    if os.path.exists(p.windspeed_file):
        os.remove(p.windspeed_file)
    p.recalculate_state()
    assert p.inputs.windspeed == 0.0
    with open(p.windspeed_file, 'w') as f:
        f.write('12.12')
    p.recalculate_state()
    assert p.inputs.windspeed == 12.12
| [
"display.State",
"urllib.parse.urlparse",
"gevent.sleep",
"paho.mqtt.client.Client",
"display.Inputs",
"display.Participant",
"msgflo.run",
"pytest.fixture",
"display.next_state"
] | [((1187, 1203), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1201, 1203), False, 'import pytest\n'), ((1421, 1437), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1435, 1437), False, 'import pytest\n'), ((232, 254), 'display.State', 'display.State', ([], {}), '(**state)\n', (245, 254), False, 'import display\n'), ((268, 292), 'display.Inputs', 'display.Inputs', ([], {}), '(**inputs)\n', (282, 292), False, 'import display\n'), ((304, 339), 'display.next_state', 'display.next_state', (['current', 'inputs'], {}), '(current, inputs)\n', (322, 339), False, 'import display\n'), ((632, 652), 'urllib.parse.urlparse', 'urlparse', (['broker_url'], {}), '(broker_url)\n', (640, 652), False, 'from urllib.parse import urlparse\n'), ((667, 680), 'paho.mqtt.client.Client', 'mqtt.Client', ([], {}), '()\n', (678, 680), True, 'import paho.mqtt.client as mqtt\n'), ((1350, 1367), 'gevent.sleep', 'gevent.sleep', (['(0.5)'], {}), '(0.5)\n', (1362, 1367), False, 'import gevent\n'), ((1502, 1527), 'display.Participant', 'display.Participant', (['role'], {}), '(role)\n', (1521, 1527), False, 'import display\n'), ((1541, 1581), 'msgflo.run', 'msgflo.run', (['[participant]'], {'broker': 'BROKER'}), '([participant], broker=BROKER)\n', (1551, 1581), False, 'import msgflo\n'), ((1586, 1603), 'gevent.sleep', 'gevent.sleep', (['(0.2)'], {}), '(0.2)\n', (1598, 1603), False, 'import gevent\n'), ((2172, 2189), 'gevent.sleep', 'gevent.sleep', (['(0.5)'], {}), '(0.5)\n', (2184, 2189), False, 'import gevent\n'), ((2410, 2446), 'display.Participant', 'display.Participant', (['"""testdisplay/1"""'], {}), "('testdisplay/1')\n", (2429, 2446), False, 'import display\n')] |
import random
import sys
sys.path.append("../../")
from gfxlcd.driver.ssd1306.spi import SPI
from gfxlcd.driver.ssd1306.ssd1306 import SSD1306
def hole(x, y):
o.draw_pixel(x+1, y)
o.draw_pixel(x+2, y)
o.draw_pixel(x+3, y)
o.draw_pixel(x+1, y + 4)
o.draw_pixel(x+2, y + 4)
o.draw_pixel(x+3, y + 4)
o.draw_pixel(x, y + 1)
o.draw_pixel(x+4, y + 1)
o.draw_pixel(x, y + 2)
o.draw_pixel(x+4, y + 2)
o.draw_pixel(x, y + 3)
o.draw_pixel(x+4, y + 3)
drv = SPI()
o = SSD1306(128, 64, drv)
o.init()
o.auto_flush = False
for _ in range(0, 50):
hole(random.randint(2, 120), random.randint(2, 56))
hole(10, 10)
hole(15, 13)
hole(18, 23)
hole(40, 10)
o.flush(True)
# o.fill(0)
#
# o.fill(random.randint(0, 255))
#
# o.draw_pixels(2, 0, 128)
# o.draw_pixels(3, 0, 128)
# o.draw_pixels(7, 0, 128)
# o.draw_pixels(8, 0, 128)
# o.draw_pixels(1, 9, 7)
# o.draw_pixels(9, 9, 7)
# o.draw_pixels(2, 9, 8)
# o.draw_pixels(3, 9, 16)
# o.draw_pixels(4, 9, 33)
# o.draw_pixels(5, 9, 66)
# o.draw_pixels(6, 9, 33)
# o.draw_pixels(7, 9, 16)
# o.draw_pixels(8, 9, 8)
#
# o.draw_pixels(15, 9, 127)
# o.draw_pixels(16, 9, 65)
# o.draw_pixels(17, 9, 65)
# o.draw_pixels(18, 9, 62)
#
# o.draw_pixels(20, 9, 38)
# o.draw_pixels(21, 9, 73)
# o.draw_pixels(22, 9, 73)
# o.draw_pixels(23, 9, 50)
#
# o.draw_pixels(25, 9, 127)
# o.draw_pixels(26, 9, 9)
# o.draw_pixels(27, 9, 9)
# o.draw_pixels(28, 9, 6)
#
# o.draw_pixels(30, 9, 98)
# o.draw_pixels(31, 9, 81)
# o.draw_pixels(32, 9, 73)
# o.draw_pixels(33, 9, 70)
#
# o.draw_pixels(35, 9, 62)
# o.draw_pixels(36, 9, 65)
# o.draw_pixels(37, 9, 65)
# o.draw_pixels(38, 9, 62)
#
# o.draw_pixels(40, 9, 4)
# o.draw_pixels(41, 9, 2+64)
# o.draw_pixels(42, 9, 127)
# o.draw_pixels(43, 9, 64)
#
# o.draw_pixels(40, 9, 4)
# o.draw_pixels(41, 9, 2+64)
# o.draw_pixels(42, 9, 127)
# o.draw_pixels(43, 9, 64)
#
# o.draw_pixels(45, 9, 97)
# o.draw_pixels(46, 9, 25)
# o.draw_pixels(47, 9, 5)
# o.draw_pixels(48, 9, 3)
| [
"gfxlcd.driver.ssd1306.spi.SPI",
"sys.path.append",
"random.randint",
"gfxlcd.driver.ssd1306.ssd1306.SSD1306"
] | [((25, 50), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (40, 50), False, 'import sys\n'), ((499, 504), 'gfxlcd.driver.ssd1306.spi.SPI', 'SPI', ([], {}), '()\n', (502, 504), False, 'from gfxlcd.driver.ssd1306.spi import SPI\n'), ((509, 530), 'gfxlcd.driver.ssd1306.ssd1306.SSD1306', 'SSD1306', (['(128)', '(64)', 'drv'], {}), '(128, 64, drv)\n', (516, 530), False, 'from gfxlcd.driver.ssd1306.ssd1306 import SSD1306\n'), ((594, 616), 'random.randint', 'random.randint', (['(2)', '(120)'], {}), '(2, 120)\n', (608, 616), False, 'import random\n'), ((618, 639), 'random.randint', 'random.randint', (['(2)', '(56)'], {}), '(2, 56)\n', (632, 639), False, 'import random\n')] |
def corpus_file_transform(src_file,dst_file):
import os
assert os.path.isfile(src_file),'Src File Not Exists.'
with open(src_file,'r',encoding = 'utf-8') as text_corpus_src:
with open(dst_file,'w',encoding = 'utf-8') as text_corpus_dst:
from tqdm.notebook import tqdm
text_corpus_dst.write(''.join([(text_word + "\tS\n" if len(text_word) == 1 else (text_word[0] + "\tB\n" + ''.join([(w + "\tM\n") for w in text_word[1 : -1]]) + text_word[-1] + "\tE\n")) for text_line in tqdm_notebook(text_corpus_src.readlines()) for text_word in text_line.strip().split()]))
def IOForFeature(file,feature = None,mode = 'rb',featureList = ['A','B','C']):
assert (mode == 'rb') or (mode == 'wb'),'The third parameter must be \'r\' or \'w\''
assert not((mode == 'wb') and not feature),'The second parameter feature must not be empty.'
try:
import pickle
with open(file,mode) as f:
if mode == 'rb':
feature = pickle.load(f)
elif mode == 'wb':
pickle.dump(feature,f)
except:
feature = {label : {} for label in featureList}
return feature
def TrainingFeatureA(corpus,featureA,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(y_i|x_i)
if not featureA:
featureA = {}
for word in tqdm_notebook(corpus):
if not featureA.get(word[0]):
featureA[word[0]] = [0,0,0,0]
featureA[word[0]][wordLabel[word[2]]] += 1
return featureA
def TrainingFeatureB(corpus,featureB,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(y_(i+1)|x_i,y_i)
if not featureB:
featureB = {}
for word,nextword in tqdm_notebook(zip(corpus[:-1],corpus[1:])):
if not featureB.get(word[0]):
featureB[word[0]] = [[0,0,0,0] for i in range(4)]
featureB[word[0]][wordLabel[word[2]]][wordLabel[nextword[2]]] += 1
return featureB
def TrainingFeatureC(corpus,featureC,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(x_(i-1)|x_i,y_i),p(x_(i+1)|x_i,y_i)
if not featureC:
featureC = {}
for lastWord,word,nextWord in tqdm_notebook(zip(corpus[:-2],corpus[1:-1],corpus[2:])):
if not featureC.get(word[0]):
featureC[word[0]] = {label : {} for label in wordLabel}
if not featureC[word[0]][word[2]].get(lastWord[0]):
featureC[word[0]][word[2]][lastWord[0]] = [0,0]
featureC[word[0]][word[2]][lastWord[0]][0] += 1
if not featureC[word[0]][word[2]].get(nextWord[0]):
featureC[word[0]][word[2]][nextWord[0]] = [0,0]
featureC[word[0]][word[2]][nextWord[0]][1] += 1
return featureC4
def featureTraining(feature,train_corpus,
featureList = ['A','B','C'],
featureFunction = {'A' : TrainingFeatureA, 'B' : TrainingFeatureB,'C' : TrainingFeatureC},
wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
for featureLabel in featureList:
feature[featureLabel] = featureFunction[featureLabel](train_corpus,feature[featureLabel],wordLabel)
def getTestFeatureABC(test_str,feature,wordLabel):
import numpy as np
test_featureA = {word : (-np.log(np.array(feature['A'][word]) / sum(feature['A'][word]))).tolist()
if feature['A'].get(word) else [0,0,0,0] for word in test_str}
test_featureB = {word : (-np.log(np.array(feature['B'][word]).T / np.array(feature['B'][word]).sum(axis = 1)).T).tolist()
if feature['B'].get(word) else [[0,0,0,0] for label in wordLabel.keys()] for word in test_str}
test_featureC = {word :{d1_key : {d2_key : d2_value for d2_key,d2_value in
zip(d1_value.keys(),(np.array(list(d1_value.values())) / np.array(list(d1_value.values())).sum(axis = 0)).tolist())}
for d1_key,d1_value in feature['C'][word].items()} if feature['C'].get(word) else {label : {} for label in wordLabel.keys()} for word in test_str}
return test_featureA,test_featureB,test_featureC
def getDividedResult(wordLabel,relationDict,test_str):
wordLabelk = list(wordLabel.keys())
thisIndex = relationDict[-1][0].index(min(relationDict[-1][0]))
dividedResult, lastIndex = [[test_str[-1],wordLabelk[thisIndex]]],relationDict[-1][1][thisIndex]
for w_id in range(len(test_str) - 2,-1,-1):
dividedResult.append([test_str[w_id],wordLabelk[lastIndex]])
lastIndex = relationDict[w_id][1][lastIndex]
dividedResult.reverse()
resultString = ''.join([(' ' if d_R[1] == 'S' or d_R[1] == 'B' else '') + d_R[0] + (' ' if d_R[1] == 'S' or d_R[1] == 'E' else '') for d_R in dividedResult])
return dividedResult,resultString
def CRFWordSeperate(test_str,feature,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3} ):
import numpy as np
test_featureA,test_featureB,test_featureC = getTestFeatureABC(test_str,feature,wordLabel)
relationDict = [[[test_featureA[test_str[w_id]][wordLabel[l_id]] *
(1 - (0 if w_id == 0 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id - 1], [0,0])[0])) *
(1 - (0 if w_id == len(test_str) - 1 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id + 1], [0,0])[1]))
for l_id in wordLabel],[0 for l_id in wordLabel]] for w_id in range(len(test_str))]
relationDict[0][0][wordLabel['E']] = relationDict[0][0][wordLabel['M']] = float('inf')
for w_id in range(1,len(test_str)):
for l_id in wordLabel:
candidateList = [test_featureB[test_str[w_id - 1]][wordLabel[l]][wordLabel[l_id]]
* (1 - (0 if w_id == 0 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id - 1], [0,0])[0]))
* (1 - (0 if w_id == len(test_str) - 1 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id + 1], [0,0])[1]))
+ relationDict[w_id - 1][0][wordLabel[l]] for l in wordLabel]
candidateList = [float('inf') if np.isnan(c_l) else c_l for c_l in candidateList]
relationDict[w_id][0][wordLabel[l_id]] += min(candidateList)
relationDict[w_id][1][wordLabel[l_id]] = candidateList.index(min(candidateList))
relationDict[-1][0][wordLabel['B']] = relationDict[-1][0][wordLabel['M']] = float('inf')
return getDividedResult(wordLabel,relationDict,test_str)
if __name__=="__main__":
train_corpus_src = 'msr_training.utf8'
train_corpus_dst = 'msr_training.utf8.pr'
corpus_file_transform(train_corpus_src,train_corpus_dst)
with open(train_corpus_dst,'r',encoding = 'utf-8') as f:
train_corpus = f.readlines()
print(train_corpus[:10])
featureFile = 'feature.pkl'
wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}
feature = IOForFeature(featureFile,mode='rb')
featureTraining(feature,train_corpus)
feature = IOForFeature(featureFile,feature,mode='wb')
t_str = '最近内存在涨价,不能用以前等价值的物品交换了'
dividedResult,resultString = CRFWordSeperate(t_str,feature,wordLabel)
dividedSequences = ''.join([result[1] for result in dividedResult])
print(resultString)
print(dividedSequences)
print(dividedResult)
test_corpus_src = 'pku_training.utf8'
test_corpus_dst = 'pku_training.utf8.pr'
corpus_file_transform(test_corpus_src,test_corpus_dst)
#将已分词的训练文件转换为未分词的测试文件
with open(test_corpus_src,'r',encoding = 'utf-8') as f:
test_sentences = f.readlines()
test_sentences = [sentence.replace(' ','') for sentence in test_sentences]
test_sentences = [sentence.replace('\n','') for sentence in test_sentences]
#将获得测试文件的正确标注
with open(test_corpus_dst,'r',encoding = 'utf-8') as f:
test_corpus = f.readlines()
test_label = ''.join([result[2] for result in test_corpus])
print(test_sentences[0])
print(test_corpus[:len(test_sentences[0])])
print(test_label[:len(test_sentences[0])])
dividedSequences = ''
dividedResults = []
resultStrings = []
for sentences in tqdm_notebook(test_sentences[:500]):
dividedResult,resultString = CRFWordSeperate(sentences,feature,wordLabel)
dividedResults.append(dividedResult)
resultStrings.append(resultString)
dividedSequences += ''.join([result[1] for result in dividedResult])
for d_R,r_S in zip(dividedResults[:10],resultStrings[:10]):
print(r_S)
print(d_R)
count = [0,0,0,0]
for d_S in dividedSequences:
count[wordLabel[d_S]] += 1
print(list(zip(wordLabel.keys(),count)))
accurate = [0,0]
for d_S in range(len(dividedSequences)):
accurate[test_label[d_S] == dividedSequences[d_S]] += 1
print('Wrong : %.2f%%, Right : %.2f%%' % (accurate[0] / sum(accurate) * 100,accurate[1] / sum(accurate) * 100)) | [
"pickle.dump",
"pickle.load",
"os.path.isfile",
"numpy.array",
"numpy.isnan"
] | [((71, 95), 'os.path.isfile', 'os.path.isfile', (['src_file'], {}), '(src_file)\n', (85, 95), False, 'import os\n'), ((1002, 1016), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1013, 1016), False, 'import pickle\n'), ((1064, 1087), 'pickle.dump', 'pickle.dump', (['feature', 'f'], {}), '(feature, f)\n', (1075, 1087), False, 'import pickle\n'), ((6157, 6170), 'numpy.isnan', 'np.isnan', (['c_l'], {}), '(c_l)\n', (6165, 6170), True, 'import numpy as np\n'), ((3249, 3277), 'numpy.array', 'np.array', (["feature['A'][word]"], {}), "(feature['A'][word])\n", (3257, 3277), True, 'import numpy as np\n'), ((3436, 3464), 'numpy.array', 'np.array', (["feature['B'][word]"], {}), "(feature['B'][word])\n", (3444, 3464), True, 'import numpy as np\n'), ((3469, 3497), 'numpy.array', 'np.array', (["feature['B'][word]"], {}), "(feature['B'][word])\n", (3477, 3497), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
import unittest
from tern.load import docker_api
from tern.utils import rootfs
from test_fixtures import create_working_dir
from test_fixtures import remove_working_dir
class TestLoadDockerAPI(unittest.TestCase):
"""This test case requires a temporary folder to be set up and the Docker
daemon to be up and running properly"""
def setUp(self):
self.client = docker_api.check_docker_setup()
create_working_dir()
rootfs.set_working_dir()
def tearDown(self):
# should not do anything if the client is already closed
docker_api.close_client(self.client)
# clean up working directory
remove_working_dir()
def testBuildAndRemoveImage(self):
# working dockerfile
dockerfile_path = 'tests/dockerfiles/debian_buster_apt'
image_obj = docker_api.build_image(dockerfile_path, self.client)
self.assertTrue(image_obj)
# successful remove
self.assertTrue(docker_api.remove_image(image_obj, self.client))
# remove an image that is not there
self.assertFalse(docker_api.remove_image(image_obj, self.client))
# no dockerfile
image_obj = docker_api.build_image(
'dockerfiles/not_there', self.client)
self.assertFalse(image_obj)
# failed build
image_obj = docker_api.build_image(
'tests/dockerfiles/fail_build', self.client)
self.assertFalse(image_obj)
def testExtractImage(self):
# successful save
dockerfile_path = 'tests/dockerfiles/debian_buster_apt'
image_obj = docker_api.build_image(dockerfile_path, self.client)
self.assertTrue(docker_api.extract_image(image_obj))
docker_api.remove_image(image_obj, self.client)
if __name__ == '__main__':
unittest.main()
| [
"tern.load.docker_api.check_docker_setup",
"tern.load.docker_api.build_image",
"test_fixtures.create_working_dir",
"tern.utils.rootfs.set_working_dir",
"tern.load.docker_api.extract_image",
"unittest.main",
"tern.load.docker_api.remove_image",
"test_fixtures.remove_working_dir",
"tern.load.docker_ap... | [((1919, 1934), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1932, 1934), False, 'import unittest\n'), ((504, 535), 'tern.load.docker_api.check_docker_setup', 'docker_api.check_docker_setup', ([], {}), '()\n', (533, 535), False, 'from tern.load import docker_api\n'), ((544, 564), 'test_fixtures.create_working_dir', 'create_working_dir', ([], {}), '()\n', (562, 564), False, 'from test_fixtures import create_working_dir\n'), ((573, 597), 'tern.utils.rootfs.set_working_dir', 'rootfs.set_working_dir', ([], {}), '()\n', (595, 597), False, 'from tern.utils import rootfs\n'), ((696, 732), 'tern.load.docker_api.close_client', 'docker_api.close_client', (['self.client'], {}), '(self.client)\n', (719, 732), False, 'from tern.load import docker_api\n'), ((778, 798), 'test_fixtures.remove_working_dir', 'remove_working_dir', ([], {}), '()\n', (796, 798), False, 'from test_fixtures import remove_working_dir\n'), ((952, 1004), 'tern.load.docker_api.build_image', 'docker_api.build_image', (['dockerfile_path', 'self.client'], {}), '(dockerfile_path, self.client)\n', (974, 1004), False, 'from tern.load import docker_api\n'), ((1303, 1363), 'tern.load.docker_api.build_image', 'docker_api.build_image', (['"""dockerfiles/not_there"""', 'self.client'], {}), "('dockerfiles/not_there', self.client)\n", (1325, 1363), False, 'from tern.load import docker_api\n'), ((1456, 1523), 'tern.load.docker_api.build_image', 'docker_api.build_image', (['"""tests/dockerfiles/fail_build"""', 'self.client'], {}), "('tests/dockerfiles/fail_build', self.client)\n", (1478, 1523), False, 'from tern.load import docker_api\n'), ((1716, 1768), 'tern.load.docker_api.build_image', 'docker_api.build_image', (['dockerfile_path', 'self.client'], {}), '(dockerfile_path, self.client)\n', (1738, 1768), False, 'from tern.load import docker_api\n'), ((1838, 1885), 'tern.load.docker_api.remove_image', 'docker_api.remove_image', (['image_obj', 'self.client'], {}), '(image_obj, 
self.client)\n', (1861, 1885), False, 'from tern.load import docker_api\n'), ((1092, 1139), 'tern.load.docker_api.remove_image', 'docker_api.remove_image', (['image_obj', 'self.client'], {}), '(image_obj, self.client)\n', (1115, 1139), False, 'from tern.load import docker_api\n'), ((1210, 1257), 'tern.load.docker_api.remove_image', 'docker_api.remove_image', (['image_obj', 'self.client'], {}), '(image_obj, self.client)\n', (1233, 1257), False, 'from tern.load import docker_api\n'), ((1793, 1828), 'tern.load.docker_api.extract_image', 'docker_api.extract_image', (['image_obj'], {}), '(image_obj)\n', (1817, 1828), False, 'from tern.load import docker_api\n')] |
"""
Expected to be run from repo root
"""
import shutil
import os
def copy_golds(dir_path):
for f in os.listdir(os.path.join(dir_path, "gold")):
try:
shutil.copy(
os.path.join(dir_path, "build", f),
os.path.join(dir_path, "gold", f)
)
except FileNotFoundError as e:
# corresponding build has different name or extra file
pass
copy_golds("tests")
for name in os.listdir("tests"):
if not os.path.isdir(os.path.join("tests", name)):
continue
if "gold" in os.listdir(os.path.join("tests", name)):
copy_golds(os.path.join("tests", name))
| [
"os.listdir",
"os.path.join"
] | [((464, 483), 'os.listdir', 'os.listdir', (['"""tests"""'], {}), "('tests')\n", (474, 483), False, 'import os\n'), ((118, 148), 'os.path.join', 'os.path.join', (['dir_path', '"""gold"""'], {}), "(dir_path, 'gold')\n", (130, 148), False, 'import os\n'), ((510, 537), 'os.path.join', 'os.path.join', (['"""tests"""', 'name'], {}), "('tests', name)\n", (522, 537), False, 'import os\n'), ((585, 612), 'os.path.join', 'os.path.join', (['"""tests"""', 'name'], {}), "('tests', name)\n", (597, 612), False, 'import os\n'), ((634, 661), 'os.path.join', 'os.path.join', (['"""tests"""', 'name'], {}), "('tests', name)\n", (646, 661), False, 'import os\n'), ((205, 239), 'os.path.join', 'os.path.join', (['dir_path', '"""build"""', 'f'], {}), "(dir_path, 'build', f)\n", (217, 239), False, 'import os\n'), ((257, 290), 'os.path.join', 'os.path.join', (['dir_path', '"""gold"""', 'f'], {}), "(dir_path, 'gold', f)\n", (269, 290), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import logging
import os
from django.utils import six
from .base import * # noqa
SECRET_KEY = 'dev-<KEY>'
ALLOWED_HOSTS = []
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INTERNAL_IPS = ('127.0.0.1',) # Used by app debug_toolbar
# Add the Python core NullHandler to be available when needed
LOGGING['handlers']['null'] = { # noqa
'level': logging.NOTSET,
'class': 'logging.NullHandler',
}
# Force every loggers to use console handler only. Note that using 'root'
# logger is not enough if children don't propage.
for logger in six.itervalues(LOGGING['loggers']): # noqa
logger['handlers'] = ['console']
# Log every level.
LOGGING['handlers']['console']['level'] = logging.NOTSET # noqa
CORS_ORIGIN_ALLOW_ALL = True
try:
from .local import * # noqa
except ImportError:
pass
| [
"django.utils.six.itervalues"
] | [((602, 636), 'django.utils.six.itervalues', 'six.itervalues', (["LOGGING['loggers']"], {}), "(LOGGING['loggers'])\n", (616, 636), False, 'from django.utils import six\n')] |
import click
import logging
from django.core.management.base import BaseCommand
from sendcloud.core.members import MemberAPI
logger = logging.getLogger('sendcloud')
class Command(BaseCommand):
help = __doc__
table_width = 120
def add_arguments(self, parser):
parser.add_argument(
'-L', '--address-list',
# action='store_true',
# nargs='+',
dest='mail_list',
type=str,
help='Send Cloud Mail Address List',
)
parser.add_argument(
'-m', '--member',
dest='member',
type=str,
help='Send Cloud Mail Address List Member'
)
parser.add_argument(
'-n', '--new-member',
dest='new_member',
type=str,
help="Send Cloud Mail Address List update Old Member to New Member",
)
parser.add_argument(
'-a', '--add',
dest='add',
action='store_true',
help='Add member into Address list',
)
parser.add_argument(
'-d', '--delete',
dest='delete',
action='store_true',
help="Delete member from Address list",
)
parser.add_argument(
'-u', '--update',
dest='update',
action='store_true',
help='Update member from Address list'
)
parser.add_argument(
'-l', '--list',
dest="list",
action="store_true",
help="List member from Address list",
)
def _print_separator(self):
try:
click.echo(self._separator)
except AttributeError:
self._separator = "-" * self.table_width
click.echo(self._separator)
def _print_stats_dashboard(self, statistics):
click.echo()
click.echo("Django Send Cloud CLI Dashboard")
click.echo()
# Header
click.echo(
"""| %-30s|%15s |%20s |%20s |%20s |""" %
("member", "name", "vars", "gmtCreated", "gmtUpdated",)
)
self._print_separator()
for row in statistics:
click.echo(
"""| %-30s|%15s |%20s |%20s |%20s |""" %
(row['member'], row['name'], row['vars'], row['gmtCreated'], row['gmtUpdated'])
)
self._print_separator()
def handle(self, *args, **options):
_mail_list = options.get('mail_list')
_list = options.get('list')
_add = options.get('add')
_delete = options.get('delete')
_update = options.get('update')
_member = options.get('member')
_new_member = options.get('new_member')
if _list:
r = MemberAPI().list(address=_mail_list)
self._print_stats_dashboard(statistics=r)
if _add:
r = MemberAPI().add(address=_mail_list, members=[_member])
self.stdout.write(
self.style.SUCCESS(
"add member ({member}) Success".format(member=_member)
)
)
if _delete:
r = MemberAPI().delete(address=_mail_list, members=[_member])
self.stdout.write(
self.style.SUCCESS(
"delete member ({member}) Success".format(member=_member)
)
)
if _update:
r = MemberAPI().update(
address=_mail_list,
members=[_member],
new_members=[_new_member],
)
if r['addressNotExistCount']:
self.stdout.write(
self.style.ERROR(
"member ({member}) Not Exist!".format(member=_member)
)
)
else:
self.stdout.write(
self.style.SUCCESS(
"update member {member} to ({new_member}) Success".format(
member=_member,
new_member=_new_member
)
)
)
return
| [
"logging.getLogger",
"click.echo",
"sendcloud.core.members.MemberAPI"
] | [((136, 166), 'logging.getLogger', 'logging.getLogger', (['"""sendcloud"""'], {}), "('sendcloud')\n", (153, 166), False, 'import logging\n'), ((1877, 1889), 'click.echo', 'click.echo', ([], {}), '()\n', (1887, 1889), False, 'import click\n'), ((1898, 1943), 'click.echo', 'click.echo', (['"""Django Send Cloud CLI Dashboard"""'], {}), "('Django Send Cloud CLI Dashboard')\n", (1908, 1943), False, 'import click\n'), ((1952, 1964), 'click.echo', 'click.echo', ([], {}), '()\n', (1962, 1964), False, 'import click\n'), ((1991, 2098), 'click.echo', 'click.echo', (["('| %-30s|%15s |%20s |%20s |%20s |' % ('member', 'name', 'vars',\n 'gmtCreated', 'gmtUpdated'))"], {}), "('| %-30s|%15s |%20s |%20s |%20s |' % ('member', 'name', 'vars',\n 'gmtCreated', 'gmtUpdated'))\n", (2001, 2098), False, 'import click\n'), ((1665, 1692), 'click.echo', 'click.echo', (['self._separator'], {}), '(self._separator)\n', (1675, 1692), False, 'import click\n'), ((2211, 2343), 'click.echo', 'click.echo', (["('| %-30s|%15s |%20s |%20s |%20s |' % (row['member'], row['name'], row[\n 'vars'], row['gmtCreated'], row['gmtUpdated']))"], {}), "('| %-30s|%15s |%20s |%20s |%20s |' % (row['member'], row['name'],\n row['vars'], row['gmtCreated'], row['gmtUpdated']))\n", (2221, 2343), False, 'import click\n'), ((1789, 1816), 'click.echo', 'click.echo', (['self._separator'], {}), '(self._separator)\n', (1799, 1816), False, 'import click\n'), ((2785, 2796), 'sendcloud.core.members.MemberAPI', 'MemberAPI', ([], {}), '()\n', (2794, 2796), False, 'from sendcloud.core.members import MemberAPI\n'), ((2910, 2921), 'sendcloud.core.members.MemberAPI', 'MemberAPI', ([], {}), '()\n', (2919, 2921), False, 'from sendcloud.core.members import MemberAPI\n'), ((3176, 3187), 'sendcloud.core.members.MemberAPI', 'MemberAPI', ([], {}), '()\n', (3185, 3187), False, 'from sendcloud.core.members import MemberAPI\n'), ((3448, 3459), 'sendcloud.core.members.MemberAPI', 'MemberAPI', ([], {}), '()\n', (3457, 3459), False, 'from 
sendcloud.core.members import MemberAPI\n')] |
"""
Attention is all you need!
"""
from typing import Dict, List, Any
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.parallel
from allennlp.data.vocabulary import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import TextFieldEmbedder, Seq2SeqEncoder, FeedForward, \
InputVariationalDropout, TimeDistributed
from allennlp.training.metrics import CategoricalAccuracy
from allennlp.modules.matrix_attention import BilinearMatrixAttention
from utils.detector import SimpleDetector
from allennlp.nn.util import masked_softmax, weighted_sum, \
add_positional_features, replace_masked_values, \
add_sentence_boundary_token_ids
from allennlp.nn import InitializerApplicator
@Model.register("MultiModalAttentionQA")
class MultiModalAttentionQA(Model):
def __init__(self,
vocab: Vocabulary,
fusion_encoder: Seq2SeqEncoder,
type_vocab_size: int = 3,
feature_dim: int = 768,
final_mlp_hidden_dim: int = 1024,
input_dropout: float = 0.3,
class_embs: bool=True,
reasoning_use_obj: bool=True,
reasoning_use_answer: bool=True,
reasoning_use_question: bool=True,
initializer: InitializerApplicator = InitializerApplicator(),
):
super(MultiModalAttentionQA, self).__init__(vocab)
self.detector = SimpleDetector(pretrained=True,
average_pool=True,
semantic=class_embs,
final_dim=feature_dim)
######################################################################
self.token_type_embeddings = nn.Embedding(type_vocab_size, feature_dim)
self.bos_token = torch.randn(feature_dim)
self.eos_token = torch.randn(feature_dim)
self.encoder_input_dropout = TimeDistributed(InputVariationalDropout(input_dropout)) if input_dropout > 0 else None
self.feature_dim = feature_dim
self.fusion_encoder = TimeDistributed(fusion_encoder)
self.reasoning_use_obj = reasoning_use_obj
self.reasoning_use_answer = reasoning_use_answer
self.reasoning_use_question = reasoning_use_question
final_mlp_dim = fusion_encoder.get_output_dim()
self.final_mlp = torch.nn.Sequential(
torch.nn.Dropout(input_dropout, inplace=False),
torch.nn.Linear(final_mlp_dim, final_mlp_hidden_dim),
torch.nn.ReLU(inplace=True),
torch.nn.Dropout(input_dropout, inplace=False),
torch.nn.Linear(final_mlp_hidden_dim, 1),
)
self._accuracy = CategoricalAccuracy()
self._loss = torch.nn.CrossEntropyLoss()
initializer(self)
def forward(self,
images: torch.Tensor,
objects: torch.LongTensor,
segms: torch.Tensor,
boxes: torch.Tensor,
box_mask: torch.LongTensor,
question: Dict[str, torch.Tensor],
question_tags: torch.LongTensor,
question_mask: torch.LongTensor,
answers: Dict[str, torch.Tensor],
answer_tags: torch.LongTensor,
answer_mask: torch.LongTensor,
metadata: List[Dict[str, Any]] = None,
label: torch.LongTensor = None) -> Dict[str, torch.Tensor]:
"""
:param images: [batch_size, 3, im_height, im_width]
:param objects: [batch_size, max_num_objects] Padded objects
:param boxes: [batch_size, max_num_objects, 4] Padded boxes
:param box_mask: [batch_size, max_num_objects] Mask for whether or not each box is OK
:param question: AllenNLP representation of the question. [batch_size, num_answers, seq_length]
:param question_tags: A detection label for each item in the Q [batch_size, num_answers, seq_length]
:param question_mask: Mask for the Q [batch_size, num_answers, seq_length]
:param answers: AllenNLP representation of the answer. [batch_size, num_answers, seq_length]
:param answer_tags: A detection label for each item in the A [batch_size, num_answers, seq_length]
:param answer_mask: Mask for the As [batch_size, num_answers, seq_length]
:param metadata: Ignore, this is about which dataset item we're on
:param label: Optional, which item is valid
:return: shit
"""
# Trim off boxes that are too long. this is an issue b/c dataparallel, it'll pad more zeros that are
# not needed
max_len = int(box_mask.sum(1).max().item())
objects = objects[:, :max_len]
box_mask = box_mask[:, :max_len]
boxes = boxes[:, :max_len]
segms = segms[:, :max_len]
for tag_type, the_tags in (('question', question_tags), ('answer', answer_tags)):
if int(the_tags.max()) > max_len:
raise ValueError("Oh no! {}_tags has maximum of {} but objects is of dim {}. Values are\n{}".format(
tag_type, int(the_tags.max()), objects.shape, the_tags
))
obj_reps = self.detector(images=images, boxes=boxes, box_mask=box_mask, classes=objects, segms=segms)
##################################################
# Concatenate words features and object features #
# at the dim of sequence #
##################################################
obj_features = obj_reps['obj_reps']
obj_bs, obj_len, obj_dim = obj_features.shape
que_bs, a_num, que_len, que_dim = question['bert'].shape
ans_bs, a_num, ans_len, ans_dim = answers['bert'].shape
# Add [SEP] and [CLS]. What is really done here is wrap question,
# answers, and images obejcts with <S> </S> then remove the last
# two <S> and view the first one as [CLS]
question_bert, question_mask = add_sentence_boundary_token_ids(
question['bert'].view(-1, que_len, que_dim),
question_mask,
self.bos_token.to(question_mask.device),
self.eos_token.to(question_mask.device))
question_bert = question_bert.view(que_bs, a_num, que_len+2, que_dim)
question_mask = question_mask.view(que_bs, a_num, que_len+2)
answers_bert, answer_mask = add_sentence_boundary_token_ids(
answers['bert'].view(-1, ans_len, ans_dim),
answer_mask,
self.bos_token.to(answer_mask.device),
self.eos_token.to(answer_mask.device))
answers_bert = answers_bert.view(ans_bs, a_num, ans_len+2, ans_dim)[:, :, 1:, :]
answer_mask = answer_mask.view(ans_bs, a_num, ans_len+2)[:, :, 1:]
obj_features, obj_mask = add_sentence_boundary_token_ids(
obj_features,
box_mask,
self.bos_token.to(box_mask.device),
self.eos_token.to(box_mask.device))
obj_features = obj_features.view(obj_bs, obj_len+2, obj_dim)[:, 1:, :]
obj_mask = obj_mask.view(obj_bs, obj_len+2)[:, 1:]
obj_features = torch.stack([obj_features for _ in range(a_num)], dim=1)
obj_mask = torch.stack([obj_mask for _ in range(a_num)], dim=1)
# The shape for the input of transformer is
# batch_size * num_answers * new_seq_length * dim
# where new_seq_length = question_seq_length + 2 +
# answer_seq_lenght + 1 +
# max_num_objects + 1
que_ans_obj = torch.cat((question_bert,
answers_bert,
obj_features), dim=2)
que_ans_obj_mask = torch.cat((question_mask,
answer_mask,
obj_mask), dim=2)
# Add positional features
total_bs, a_num, total_len, total_dim = que_ans_obj.shape
que_ans_obj = add_positional_features(que_ans_obj.view(-1,
total_len,
total_dim)).view(total_bs,
a_num,
total_len,
total_dim)
# Add type information, which is used to distinguished between
# Qution, Answer, and Images
target_device = que_ans_obj.device
question_type_ids = torch.zeros(que_bs, a_num, que_len+2, dtype=torch.long, device=target_device)
answers_type_ids = 1 - torch.zeros(ans_bs, a_num, ans_len+1, dtype=torch.long, device=target_device)
objs_type_ids = 2 - torch.zeros(obj_bs, a_num, obj_len+1, dtype=torch.long, device=target_device)
token_type_ids = torch.cat((question_type_ids,
answers_type_ids,
objs_type_ids), dim=2)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
que_ans_obj = que_ans_obj + token_type_embeddings
##########################################
# Self attetion
outputs = self.fusion_encoder(que_ans_obj, que_ans_obj_mask)
bs, a_num, seq_len, output_dim = outputs.shape
cls_reps = outputs[:, :, 1, :].squeeze(2)
###########################################
logits = self.final_mlp(cls_reps.view(-1, output_dim)).view(bs, a_num)
###########################################
class_probabilities = F.softmax(logits, dim=-1)
output_dict = {"label_logits": logits, "label_probs": class_probabilities,
'cnn_regularization_loss': obj_reps['cnn_regularization_loss'],
# Uncomment to visualize attention, if you want
# 'qa_attention_weights': qa_attention_weights,
# 'atoo_attention_weights': atoo_attention_weights,
}
if label is not None:
loss = self._loss(logits, label.long().view(-1))
self._accuracy(logits, label)
output_dict["loss"] = loss[None]
return output_dict
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
return {'accuracy': self._accuracy.get_metric(reset)}
| [
"torch.nn.functional.softmax",
"utils.detector.SimpleDetector",
"torch.nn.Dropout",
"allennlp.modules.TimeDistributed",
"allennlp.nn.InitializerApplicator",
"torch.nn.CrossEntropyLoss",
"allennlp.training.metrics.CategoricalAccuracy",
"torch.nn.ReLU",
"allennlp.models.model.Model.register",
"torch... | [((820, 859), 'allennlp.models.model.Model.register', 'Model.register', (['"""MultiModalAttentionQA"""'], {}), "('MultiModalAttentionQA')\n", (834, 859), False, 'from allennlp.models.model import Model\n'), ((1427, 1450), 'allennlp.nn.InitializerApplicator', 'InitializerApplicator', ([], {}), '()\n', (1448, 1450), False, 'from allennlp.nn import InitializerApplicator\n'), ((1556, 1654), 'utils.detector.SimpleDetector', 'SimpleDetector', ([], {'pretrained': '(True)', 'average_pool': '(True)', 'semantic': 'class_embs', 'final_dim': 'feature_dim'}), '(pretrained=True, average_pool=True, semantic=class_embs,\n final_dim=feature_dim)\n', (1570, 1654), False, 'from utils.detector import SimpleDetector\n'), ((1885, 1927), 'torch.nn.Embedding', 'nn.Embedding', (['type_vocab_size', 'feature_dim'], {}), '(type_vocab_size, feature_dim)\n', (1897, 1927), True, 'import torch.nn as nn\n'), ((1953, 1977), 'torch.randn', 'torch.randn', (['feature_dim'], {}), '(feature_dim)\n', (1964, 1977), False, 'import torch\n'), ((2003, 2027), 'torch.randn', 'torch.randn', (['feature_dim'], {}), '(feature_dim)\n', (2014, 2027), False, 'import torch\n'), ((2223, 2254), 'allennlp.modules.TimeDistributed', 'TimeDistributed', (['fusion_encoder'], {}), '(fusion_encoder)\n', (2238, 2254), False, 'from allennlp.modules import TextFieldEmbedder, Seq2SeqEncoder, FeedForward, InputVariationalDropout, TimeDistributed\n'), ((2844, 2865), 'allennlp.training.metrics.CategoricalAccuracy', 'CategoricalAccuracy', ([], {}), '()\n', (2863, 2865), False, 'from allennlp.training.metrics import CategoricalAccuracy\n'), ((2887, 2914), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (2912, 2914), False, 'import torch\n'), ((8054, 8115), 'torch.cat', 'torch.cat', (['(question_bert, answers_bert, obj_features)'], {'dim': '(2)'}), '((question_bert, answers_bert, obj_features), dim=2)\n', (8063, 8115), False, 'import torch\n'), ((8211, 8267), 'torch.cat', 'torch.cat', 
(['(question_mask, answer_mask, obj_mask)'], {'dim': '(2)'}), '((question_mask, answer_mask, obj_mask), dim=2)\n', (8220, 8267), False, 'import torch\n'), ((9125, 9204), 'torch.zeros', 'torch.zeros', (['que_bs', 'a_num', '(que_len + 2)'], {'dtype': 'torch.long', 'device': 'target_device'}), '(que_bs, a_num, que_len + 2, dtype=torch.long, device=target_device)\n', (9136, 9204), False, 'import torch\n'), ((9443, 9513), 'torch.cat', 'torch.cat', (['(question_type_ids, answers_type_ids, objs_type_ids)'], {'dim': '(2)'}), '((question_type_ids, answers_type_ids, objs_type_ids), dim=2)\n', (9452, 9513), False, 'import torch\n'), ((10186, 10211), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (10195, 10211), True, 'import torch.nn.functional as F\n'), ((2540, 2586), 'torch.nn.Dropout', 'torch.nn.Dropout', (['input_dropout'], {'inplace': '(False)'}), '(input_dropout, inplace=False)\n', (2556, 2586), False, 'import torch\n'), ((2600, 2652), 'torch.nn.Linear', 'torch.nn.Linear', (['final_mlp_dim', 'final_mlp_hidden_dim'], {}), '(final_mlp_dim, final_mlp_hidden_dim)\n', (2615, 2652), False, 'import torch\n'), ((2666, 2693), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2679, 2693), False, 'import torch\n'), ((2707, 2753), 'torch.nn.Dropout', 'torch.nn.Dropout', (['input_dropout'], {'inplace': '(False)'}), '(input_dropout, inplace=False)\n', (2723, 2753), False, 'import torch\n'), ((2767, 2807), 'torch.nn.Linear', 'torch.nn.Linear', (['final_mlp_hidden_dim', '(1)'], {}), '(final_mlp_hidden_dim, 1)\n', (2782, 2807), False, 'import torch\n'), ((9234, 9313), 'torch.zeros', 'torch.zeros', (['ans_bs', 'a_num', '(ans_len + 1)'], {'dtype': 'torch.long', 'device': 'target_device'}), '(ans_bs, a_num, ans_len + 1, dtype=torch.long, device=target_device)\n', (9245, 9313), False, 'import torch\n'), ((9340, 9419), 'torch.zeros', 'torch.zeros', (['obj_bs', 'a_num', '(obj_len + 1)'], {'dtype': 
'torch.long', 'device': 'target_device'}), '(obj_bs, a_num, obj_len + 1, dtype=torch.long, device=target_device)\n', (9351, 9419), False, 'import torch\n'), ((2082, 2120), 'allennlp.modules.InputVariationalDropout', 'InputVariationalDropout', (['input_dropout'], {}), '(input_dropout)\n', (2105, 2120), False, 'from allennlp.modules import TextFieldEmbedder, Seq2SeqEncoder, FeedForward, InputVariationalDropout, TimeDistributed\n')] |
import sys
import io
import time
import argparse
import re
from pyctest import *
def getState(test):
return test.c_test_machine.current_state
'''
@see PycTestCase
'''
class TestStartAndTransition(PycTestCase):
def __init__(self):
super(PycTestCase, self).__init__()
def runTest(self):
self.c_test_machine_Init()
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE1)
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT_1_2, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE2)
'''
@see PycTestCase
'''
class TestConditionalTransition(PycTestCase):
def __init__(self):
super(PycTestCase, self).__init__()
def runTest(self):
self.c_test_machine_Init()
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT_1_2, \
self.NULL())
self.c_cond1[0] = 0
self.c_cond2[0] = 0
self.c_cond3[0] = 0
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT2, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE2)
self.c_cond1[0] = 1
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT2, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE1)
self.c_cond1[0] = 0
self.c_cond2[0] = 1
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT1, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE2)
self.c_cond2[0] = 0
self.c_cond3[0] = 1
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT2, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE3)
'''
@see PycTestCase
'''
class TestCurrentStateMemoryFailure(PycTestCase):
def __init__(self):
super(PycTestCase, self).__init__()
def runTest(self):
self.c_test_machine_Init()
self.c_test_machine.current_state = self.c_test_machine_state_eSTATE2
self.c_corruption[0] = 0
self.assertEqual(self.c_corruption[0], 0)
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT_1_2, \
self.NULL())
self.assertEqual(self.c_corruption[0], 2)
'''
@see PycTestCase
'''
class TestNewStateMemoryFailure(PycTestCase):
def __init__(self):
super(PycTestCase, self).__init__()
def runTest(self):
self.c_test_machine_Init()
self.c_test_machine.compl_new_state = self.c_test_machine_state_eSTATE3
self.c_corruption[0] = 0
self.assertEqual(self.c_corruption[0], 0)
self.c_test_machine_Compute( \
self.c_test_machine_event_eEVENT2, \
self.NULL())
self.assertEqual(getState(self), \
self.c_test_machine_state_eSTATE1)
self.assertEqual(self.c_corruption[0], 1)
class TestMachine:
MODULE_FILE="statemachine"
MACHINE_FILE="test_machine"
def __init__(self):
self.__loader = PycTester()
'''
@brief build library from c file
'''
def build(self):
self.__loader.load_source("""
int corrupt = 0;
#define statemachineASSERT_CORRUPT(cond) if(!(cond)){corrupt++;}
int * corruption = &corrupt;
""");
self.__loader.load_module(TestMachine.MODULE_FILE)
self.__loader.load_source("""
int condition1 = 0;
int condition2 = 0;
int condition3 = 0;
int * cond1 = &condition1;
int * cond2 = &condition2;
int * cond3 = &condition3;
""");
self.__loader.load_module(TestMachine.MACHINE_FILE)
self.__loader.load_header("""
extern int * cond1;
extern int * cond2;
extern int * cond3;
extern int * corruption;
extern statemachine_t test_machine;
""");
self.__loader.build("_testmachine")
'''
@brief unitary test for C library
'''
def unitest(self):
print("================Unitary Test==============")
print("Generate test cases")
self.__loader.appendTest(TestStartAndTransition())
self.__loader.appendTest(TestConditionalTransition())
self.__loader.appendTest(TestCurrentStateMemoryFailure())
self.__loader.appendTest(TestNewStateMemoryFailure())
self.__loader.run()
parser = argparse.ArgumentParser(description='Statemachine tester')
parser.add_argument("-u", default=False, action="store_true")
parser.add_argument("-b", default=False, action="store_true")
args = parser.parse_args()
tester = TestMachine()
if args.u or args.b:
tester.build()
if args.u:
tester.unitest() | [
"argparse.ArgumentParser"
] | [((5339, 5397), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Statemachine tester"""'}), "(description='Statemachine tester')\n", (5362, 5397), False, 'import argparse\n')] |
import sys
import datetime
import pytest
import pandas as pd
try:
import unittest.mock as mock
except ImportError:
import mock
from dagster_pandas import DataFrame
from dagster import (
DependencyDefinition,
InputDefinition,
List,
ModeDefinition,
Nothing,
OutputDefinition,
Path,
PipelineDefinition,
execute_pipeline,
solid,
)
from dagster.core.definitions import create_environment_type
from dagster.core.types.evaluator import evaluate_config_value
from dagster_gcp import (
bigquery_resource,
BigQueryError,
BigQuerySolidDefinition,
BigQueryCreateDatasetSolidDefinition,
BigQueryDeleteDatasetSolidDefinition,
BigQueryLoadSolidDefinition,
BigQueryLoadSource,
)
def bq_modes():
return [ModeDefinition(resources={'bq': bigquery_resource})]
def test_simple_queries():
solid_inst = BigQuerySolidDefinition(
'test',
[
# Toy example query
'SELECT 1 AS field1, 2 AS field2;',
# Test access of public BQ historical dataset (only processes ~2MB here)
# pylint: disable=line-too-long
'''SELECT *
FROM `weathersource-com.pub_weather_data_samples.sample_weather_history_anomaly_us_zipcode_daily`
ORDER BY postal_code ASC, date_valid_std ASC
LIMIT 1''',
],
)
pipeline = PipelineDefinition(solids=[solid_inst], mode_definitions=bq_modes())
pipeline_result = execute_pipeline(pipeline)
res = pipeline_result.result_for_solid(solid_inst.name)
assert res.success
values = res.transformed_value()
for df in values:
assert isinstance(df, pd.DataFrame)
assert values[0].to_dict('list') == {'field1': [1], 'field2': [2]}
assert values[1].to_dict('list') == {
'postal_code': ['02101'],
'country': ['US'],
'date_valid_std': [datetime.date(2014, 1, 1)],
'doy_std': [1],
'avg_temperature_air_2m_f': [25.05],
'avg_temperature_anomaly_air_2m_f': [-7.81],
'tot_precipitation_in': [0.0],
'tot_precipitation_anomaly_in': [-0.28],
'tot_snowfall_in': [0.0],
'tot_snowfall_anomaly_in': [-1.36],
'avg_wind_speed_10m_mph': [7.91],
'avg_wind_speed_10m_anomaly_mph': [-1.85],
}
# pylint: disable=line-too-long
def test_bad_config():
configs_and_expected_errors = [
(
# Create disposition must match enum values
{'create_disposition': 'this is not a valid create disposition'},
'Value not in enum type BQCreateDisposition',
),
(
# Dataset must be of form project_name.dataset_name
{'default_dataset': 'this is not a valid dataset'},
'Value at path root:solids:test:config:query_job_config:default_dataset is not valid. Expected "Dataset"',
),
(
# Table must be of form project_name.dataset_name.table_name
{'destination': 'this is not a valid table'},
'Value at path root:solids:test:config:query_job_config:destination is not valid. Expected "Table"',
),
(
# Priority must match enum values
{'priority': 'this is not a valid priority'},
'Value not in enum type BQPriority',
),
(
# Schema update options must be a list
{'schema_update_options': 'this is not valid schema update options'},
'Value at path root:solids:test:config:query_job_config:schema_update_options must be list. Expected: [BQSchemaUpdateOption]',
),
(
{'schema_update_options': ['this is not valid schema update options']},
'Value not in enum type BQSchemaUpdateOption',
),
(
{'write_disposition': 'this is not a valid write disposition'},
'Value not in enum type BQWriteDisposition',
),
]
pipeline_def = PipelineDefinition(
name='test_config_pipeline',
solids=[BigQuerySolidDefinition('test', ['SELECT 1'])],
mode_definitions=bq_modes(),
)
env_type = create_environment_type(pipeline_def)
for config_fragment, error_message in configs_and_expected_errors:
config = {'solids': {'test': {'config': {'query_job_config': config_fragment}}}}
result = evaluate_config_value(env_type, config)
assert result.errors[0].message == error_message
def test_create_delete_dataset():
create_solid = BigQueryCreateDatasetSolidDefinition('test')
create_pipeline = PipelineDefinition(solids=[create_solid], mode_definitions=bq_modes())
config = {'solids': {'test': {'config': {'dataset': 'foo', 'exists_ok': True}}}}
assert execute_pipeline(create_pipeline, config).result_for_solid(create_solid.name).success
config = {'solids': {'test': {'config': {'dataset': 'foo', 'exists_ok': False}}}}
with pytest.raises(BigQueryError) as exc_info:
execute_pipeline(create_pipeline, config)
assert 'Dataset "foo" already exists and exists_ok is false' in str(exc_info.value)
delete_solid = BigQueryDeleteDatasetSolidDefinition('test')
delete_pipeline = PipelineDefinition(solids=[delete_solid], mode_definitions=bq_modes())
config = {'solids': {'test': {'config': {'dataset': 'foo'}}}}
# Delete should succeed
assert execute_pipeline(delete_pipeline, config).result_for_solid(delete_solid.name).success
# Delete non-existent with "not_found_ok" should succeed
config = {'solids': {'test': {'config': {'dataset': 'foo', 'not_found_ok': True}}}}
assert execute_pipeline(delete_pipeline, config).result_for_solid(delete_solid.name).success
# Delete non-existent with "not_found_ok" False should fail
config = {'solids': {'test': {'config': {'dataset': 'foo', 'not_found_ok': False}}}}
with pytest.raises(BigQueryError) as exc_info:
execute_pipeline(delete_pipeline, config)
assert 'Dataset "foo" does not exist and not_found_ok is false' in str(exc_info.value)
def test_pd_df_load():
test_df = pd.DataFrame({'num1': [1, 3], 'num2': [2, 4]})
create_solid = BigQueryCreateDatasetSolidDefinition('create_solid')
load_solid = BigQueryLoadSolidDefinition('load_solid', BigQueryLoadSource.DataFrame)
query_solid = BigQuerySolidDefinition('query_solid', ['SELECT num1, num2 FROM foo.df'])
delete_solid = BigQueryDeleteDatasetSolidDefinition('delete_solid')
@solid(inputs=[InputDefinition('success', Nothing)], outputs=[OutputDefinition(DataFrame)])
def return_df(_context): # pylint: disable=unused-argument
return test_df
config = {
'solids': {
'create_solid': {'config': {'dataset': 'foo', 'exists_ok': True}},
'load_solid': {'config': {'destination': 'foo.df'}},
'delete_solid': {'config': {'dataset': 'foo', 'delete_contents': True}},
}
}
pipeline = PipelineDefinition(
solids=[return_df, create_solid, load_solid, query_solid, delete_solid],
dependencies={
'return_df': {'success': DependencyDefinition('create_solid')},
'load_solid': {'df': DependencyDefinition('return_df')},
'query_solid': {'start': DependencyDefinition('load_solid')},
'delete_solid': {'start': DependencyDefinition('query_solid')},
},
mode_definitions=bq_modes(),
)
result = execute_pipeline(pipeline, config)
assert result.success
values = result.result_for_solid(query_solid.name).transformed_value()
assert values[0].to_dict() == test_df.to_dict()
# BQ loads should throw an exception if pyarrow and fastparquet aren't available
with mock.patch.dict(sys.modules, {'pyarrow': None, 'fastparquet': None}):
with pytest.raises(BigQueryError) as exc_info:
result = execute_pipeline(pipeline, config)
assert (
'loading data to BigQuery from pandas DataFrames requires either pyarrow or fastparquet'
' to be installed' in str(exc_info.value)
)
def test_gcs_load():
create_solid = BigQueryCreateDatasetSolidDefinition('create_solid')
load_solid = BigQueryLoadSolidDefinition('load_solid', BigQueryLoadSource.Gcs)
query_solid = BigQuerySolidDefinition(
'query_solid',
['SELECT string_field_0, string_field_1 FROM foo.df ORDER BY string_field_0 ASC LIMIT 1'],
)
delete_solid = BigQueryDeleteDatasetSolidDefinition('delete_solid')
@solid(inputs=[InputDefinition('success', Nothing)], outputs=[OutputDefinition(List(Path))])
def return_gcs_uri(_context): # pylint: disable=unused-argument
return ["gs://cloud-samples-data/bigquery/us-states/us-states.csv"]
config = {
'solids': {
'create_solid': {'config': {'dataset': 'foo', 'exists_ok': True}},
'load_solid': {
'config': {
'destination': 'foo.df',
'load_job_config': {
'autodetect': True,
'skip_leading_rows': 1,
'source_format': 'CSV',
'write_disposition': 'WRITE_TRUNCATE',
},
}
},
'delete_solid': {'config': {'dataset': 'foo', 'delete_contents': True}},
}
}
pipeline = PipelineDefinition(
solids=[create_solid, return_gcs_uri, load_solid, query_solid, delete_solid],
dependencies={
'return_gcs_uri': {'success': DependencyDefinition('create_solid')},
'load_solid': {'source_uris': DependencyDefinition('return_gcs_uri')},
'query_solid': {'start': DependencyDefinition('load_solid')},
'delete_solid': {'start': DependencyDefinition('query_solid')},
},
mode_definitions=bq_modes(),
)
result = execute_pipeline(pipeline, config)
assert result.success
values = result.result_for_solid(query_solid.name).transformed_value()
assert values[0].to_dict() == {'string_field_0': {0: 'Alabama'}, 'string_field_1': {0: 'AL'}}
| [
"dagster_gcp.BigQueryDeleteDatasetSolidDefinition",
"mock.patch.dict",
"dagster.List",
"dagster.ModeDefinition",
"dagster.execute_pipeline",
"dagster_gcp.BigQueryCreateDatasetSolidDefinition",
"dagster.InputDefinition",
"dagster.core.definitions.create_environment_type",
"dagster.core.types.evaluato... | [((873, 1154), 'dagster_gcp.BigQuerySolidDefinition', 'BigQuerySolidDefinition', (['"""test"""', '[\'SELECT 1 AS field1, 2 AS field2;\',\n """SELECT *\n FROM `weathersource-com.pub_weather_data_samples.sample_weather_history_anomaly_us_zipcode_daily`\n ORDER BY postal_code ASC, date_valid_std ASC\n LIMIT 1"""\n ]'], {}), '(\'test\', [\'SELECT 1 AS field1, 2 AS field2;\',\n """SELECT *\n FROM `weathersource-com.pub_weather_data_samples.sample_weather_history_anomaly_us_zipcode_daily`\n ORDER BY postal_code ASC, date_valid_std ASC\n LIMIT 1"""\n ])\n', (896, 1154), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((1472, 1498), 'dagster.execute_pipeline', 'execute_pipeline', (['pipeline'], {}), '(pipeline)\n', (1488, 1498), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((4132, 4169), 'dagster.core.definitions.create_environment_type', 'create_environment_type', (['pipeline_def'], {}), '(pipeline_def)\n', (4155, 4169), False, 'from dagster.core.definitions import create_environment_type\n'), ((4499, 4543), 'dagster_gcp.BigQueryCreateDatasetSolidDefinition', 'BigQueryCreateDatasetSolidDefinition', (['"""test"""'], {}), "('test')\n", (4535, 4543), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((5116, 5160), 'dagster_gcp.BigQueryDeleteDatasetSolidDefinition', 'BigQueryDeleteDatasetSolidDefinition', (['"""test"""'], {}), "('test')\n", (5152, 5160), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, 
BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((6078, 6124), 'pandas.DataFrame', 'pd.DataFrame', (["{'num1': [1, 3], 'num2': [2, 4]}"], {}), "({'num1': [1, 3], 'num2': [2, 4]})\n", (6090, 6124), True, 'import pandas as pd\n'), ((6145, 6197), 'dagster_gcp.BigQueryCreateDatasetSolidDefinition', 'BigQueryCreateDatasetSolidDefinition', (['"""create_solid"""'], {}), "('create_solid')\n", (6181, 6197), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((6215, 6286), 'dagster_gcp.BigQueryLoadSolidDefinition', 'BigQueryLoadSolidDefinition', (['"""load_solid"""', 'BigQueryLoadSource.DataFrame'], {}), "('load_solid', BigQueryLoadSource.DataFrame)\n", (6242, 6286), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((6305, 6378), 'dagster_gcp.BigQuerySolidDefinition', 'BigQuerySolidDefinition', (['"""query_solid"""', "['SELECT num1, num2 FROM foo.df']"], {}), "('query_solid', ['SELECT num1, num2 FROM foo.df'])\n", (6328, 6378), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((6398, 6450), 'dagster_gcp.BigQueryDeleteDatasetSolidDefinition', 'BigQueryDeleteDatasetSolidDefinition', (['"""delete_solid"""'], {}), "('delete_solid')\n", (6434, 6450), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((7417, 7451), 'dagster.execute_pipeline', 'execute_pipeline', 
(['pipeline', 'config'], {}), '(pipeline, config)\n', (7433, 7451), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((8106, 8158), 'dagster_gcp.BigQueryCreateDatasetSolidDefinition', 'BigQueryCreateDatasetSolidDefinition', (['"""create_solid"""'], {}), "('create_solid')\n", (8142, 8158), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((8176, 8241), 'dagster_gcp.BigQueryLoadSolidDefinition', 'BigQueryLoadSolidDefinition', (['"""load_solid"""', 'BigQueryLoadSource.Gcs'], {}), "('load_solid', BigQueryLoadSource.Gcs)\n", (8203, 8241), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((8260, 8399), 'dagster_gcp.BigQuerySolidDefinition', 'BigQuerySolidDefinition', (['"""query_solid"""', "['SELECT string_field_0, string_field_1 FROM foo.df ORDER BY string_field_0 ASC LIMIT 1'\n ]"], {}), "('query_solid', [\n 'SELECT string_field_0, string_field_1 FROM foo.df ORDER BY string_field_0 ASC LIMIT 1'\n ])\n", (8283, 8399), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((8432, 8484), 'dagster_gcp.BigQueryDeleteDatasetSolidDefinition', 'BigQueryDeleteDatasetSolidDefinition', (['"""delete_solid"""'], {}), "('delete_solid')\n", (8468, 8484), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, 
BigQueryLoadSource\n'), ((9870, 9904), 'dagster.execute_pipeline', 'execute_pipeline', (['pipeline', 'config'], {}), '(pipeline, config)\n', (9886, 9904), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((774, 825), 'dagster.ModeDefinition', 'ModeDefinition', ([], {'resources': "{'bq': bigquery_resource}"}), "(resources={'bq': bigquery_resource})\n", (788, 825), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((4347, 4386), 'dagster.core.types.evaluator.evaluate_config_value', 'evaluate_config_value', (['env_type', 'config'], {}), '(env_type, config)\n', (4368, 4386), False, 'from dagster.core.types.evaluator import evaluate_config_value\n'), ((4916, 4944), 'pytest.raises', 'pytest.raises', (['BigQueryError'], {}), '(BigQueryError)\n', (4929, 4944), False, 'import pytest\n'), ((4966, 5007), 'dagster.execute_pipeline', 'execute_pipeline', (['create_pipeline', 'config'], {}), '(create_pipeline, config)\n', (4982, 5007), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((5856, 5884), 'pytest.raises', 'pytest.raises', (['BigQueryError'], {}), '(BigQueryError)\n', (5869, 5884), False, 'import pytest\n'), ((5906, 5947), 'dagster.execute_pipeline', 'execute_pipeline', (['delete_pipeline', 'config'], {}), '(delete_pipeline, config)\n', (5922, 5947), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((7701, 7769), 'mock.patch.dict', 'mock.patch.dict', (['sys.modules', "{'pyarrow': None, 'fastparquet': None}"], {}), "(sys.modules, {'pyarrow': None, 'fastparquet': None})\n", (7716, 7769), False, 
'import mock\n'), ((7784, 7812), 'pytest.raises', 'pytest.raises', (['BigQueryError'], {}), '(BigQueryError)\n', (7797, 7812), False, 'import pytest\n'), ((7847, 7881), 'dagster.execute_pipeline', 'execute_pipeline', (['pipeline', 'config'], {}), '(pipeline, config)\n', (7863, 7881), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((1887, 1912), 'datetime.date', 'datetime.date', (['(2014)', '(1)', '(1)'], {}), '(2014, 1, 1)\n', (1900, 1912), False, 'import datetime\n'), ((4025, 4070), 'dagster_gcp.BigQuerySolidDefinition', 'BigQuerySolidDefinition', (['"""test"""', "['SELECT 1']"], {}), "('test', ['SELECT 1'])\n", (4048, 4070), False, 'from dagster_gcp import bigquery_resource, BigQueryError, BigQuerySolidDefinition, BigQueryCreateDatasetSolidDefinition, BigQueryDeleteDatasetSolidDefinition, BigQueryLoadSolidDefinition, BigQueryLoadSource\n'), ((4734, 4775), 'dagster.execute_pipeline', 'execute_pipeline', (['create_pipeline', 'config'], {}), '(create_pipeline, config)\n', (4750, 4775), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((5360, 5401), 'dagster.execute_pipeline', 'execute_pipeline', (['delete_pipeline', 'config'], {}), '(delete_pipeline, config)\n', (5376, 5401), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((5607, 5648), 'dagster.execute_pipeline', 'execute_pipeline', (['delete_pipeline', 'config'], {}), '(delete_pipeline, config)\n', (5623, 5648), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((6471, 6506), 'dagster.InputDefinition', 'InputDefinition', 
(['"""success"""', 'Nothing'], {}), "('success', Nothing)\n", (6486, 6506), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((6518, 6545), 'dagster.OutputDefinition', 'OutputDefinition', (['DataFrame'], {}), '(DataFrame)\n', (6534, 6545), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((8505, 8540), 'dagster.InputDefinition', 'InputDefinition', (['"""success"""', 'Nothing'], {}), "('success', Nothing)\n", (8520, 8540), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((7092, 7128), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""create_solid"""'], {}), "('create_solid')\n", (7112, 7128), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((7164, 7197), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""return_df"""'], {}), "('return_df')\n", (7184, 7197), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((7237, 7271), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""load_solid"""'], {}), "('load_solid')\n", (7257, 7271), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((7312, 7347), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""query_solid"""'], {}), "('query_solid')\n", (7332, 7347), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, 
OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((8569, 8579), 'dagster.List', 'List', (['Path'], {}), '(Path)\n', (8573, 8579), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((9531, 9567), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""create_solid"""'], {}), "('create_solid')\n", (9551, 9567), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((9612, 9650), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""return_gcs_uri"""'], {}), "('return_gcs_uri')\n", (9632, 9650), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((9690, 9724), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""load_solid"""'], {}), "('load_solid')\n", (9710, 9724), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n'), ((9765, 9800), 'dagster.DependencyDefinition', 'DependencyDefinition', (['"""query_solid"""'], {}), "('query_solid')\n", (9785, 9800), False, 'from dagster import DependencyDefinition, InputDefinition, List, ModeDefinition, Nothing, OutputDefinition, Path, PipelineDefinition, execute_pipeline, solid\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 1 14:52:43 2018
@author: user
"""
import pandas as pd
from keras import preprocessing
import os
import datetime
from multiclass.AnalizeRunner import AnalizeRunner
##################################################
prefix = "dataset"
data_path = "C:\\Users\\afy\\PycharmProjects\\AnalizeProject\\deep-learning\Data\\result\\2018-09-19 23_05_12.089157\\filtered\\"
model_path = "C:\\Users\\afy\\PycharmProjects\\AnalizeProject\\multiclass\\result\\"
main_folder_name = model_path + str(datetime.datetime.now()).replace(":", "_") + "\\"
runner = AnalizeRunner()
def read_type_data():
df = pd.read_csv(data_path + prefix + "_types.zip", delimiter=' ', header=None, compression="zip")
df[0] = df[0].astype('category')
cat = df[0].cat
df[0] = df[0].cat.codes
y = df[0].values
return y
def read_call_data():
df = pd.read_csv(data_path + prefix + "_calls.zip", delimiter=' ', header=None, compression="zip")
D = df.values
ds_tmp = D[:, 0].tolist()
ds = []
for v in ds_tmp:
ds.append(v.split(','))
X = preprocessing.sequence.pad_sequences(ds, maxlen=342)
print(X.shape)
return X
os.makedirs(main_folder_name)
print("-------------------basliyor------------")
X = read_call_data()
y = read_type_data()
runner.startAnalize(X, y, main_folder_name) | [
"os.makedirs",
"pandas.read_csv",
"multiclass.AnalizeRunner.AnalizeRunner",
"datetime.datetime.now",
"keras.preprocessing.sequence.pad_sequences"
] | [((618, 633), 'multiclass.AnalizeRunner.AnalizeRunner', 'AnalizeRunner', ([], {}), '()\n', (631, 633), False, 'from multiclass.AnalizeRunner import AnalizeRunner\n'), ((1239, 1268), 'os.makedirs', 'os.makedirs', (['main_folder_name'], {}), '(main_folder_name)\n', (1250, 1268), False, 'import os\n'), ((669, 766), 'pandas.read_csv', 'pd.read_csv', (["(data_path + prefix + '_types.zip')"], {'delimiter': '""" """', 'header': 'None', 'compression': '"""zip"""'}), "(data_path + prefix + '_types.zip', delimiter=' ', header=None,\n compression='zip')\n", (680, 766), True, 'import pandas as pd\n'), ((922, 1019), 'pandas.read_csv', 'pd.read_csv', (["(data_path + prefix + '_calls.zip')"], {'delimiter': '""" """', 'header': 'None', 'compression': '"""zip"""'}), "(data_path + prefix + '_calls.zip', delimiter=' ', header=None,\n compression='zip')\n", (933, 1019), True, 'import pandas as pd\n'), ((1145, 1197), 'keras.preprocessing.sequence.pad_sequences', 'preprocessing.sequence.pad_sequences', (['ds'], {'maxlen': '(342)'}), '(ds, maxlen=342)\n', (1181, 1197), False, 'from keras import preprocessing\n'), ((556, 579), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (577, 579), False, 'import datetime\n')] |
import json
import urllib.request
import subprocess
from time import sleep
from constants import ANKI_USER, DECK_NAME
# Name of the Anki note model (note type) this script creates and fills.
NOTE_TYPE = 'vomBuch-insAnki Note'
# Handle of the spawned Anki subprocess; None until runanki() launches it.
ANKI_APP = None
def request(action, **params):
    """Build an AnkiConnect request payload for *action* with *params*.

    Returns the dict expected by AnkiConnect protocol version 6.
    """
    payload = dict(action=action, params=params, version=6)
    return payload
def invoke(action, **params):  # from AnkiConnect's page
    """Call an AnkiConnect *action* on the local server and return its result.

    Sends the payload built by :func:`request` to http://localhost:8765 and
    validates the two-field response envelope.

    Raises:
        Exception: if the envelope is malformed or carries a non-null error.
    """
    requestJson = json.dumps(request(action, **params)).encode('utf-8')
    # Fix: close the HTTP response deterministically instead of leaking it to the GC.
    with urllib.request.urlopen(urllib.request.Request('http://localhost:8765', requestJson)) as reply:
        response = json.load(reply)
    if len(response) != 2:
        raise Exception('response has an unexpected number of fields')
    if 'error' not in response:
        raise Exception('response is missing required error field')
    if 'result' not in response:
        raise Exception('response is missing required result field')
    if response['error'] is not None:
        raise Exception(response['error'])
    return response['result']
def createnotetype():  # Creates the note type of the generated notes
    """Register the custom note model in Anki, unless it already exists."""
    if NOTE_TYPE in invoke('modelNames'):
        return  # model already present; nothing to do
    css = '.card{\nfont-family:arial;\nfont-size:20px;\ntext-align:center;\ncolor:black;\nbackground-color:white;}'
    css += '\n\n.case{\nvertical-align:super;\nfont-style:italic;\nfont-size:80%;}\n\n'
    # One italic rule per semantic span class used in the note fields.
    css += ''.join('\n.%s{font-style: italic;}' % cl
                   for cl in ('topic', 'genus', 'info', 'style', 'rhetoric', 'region', 'collocator'))
    # Card 1: German prompt -> sense/translation; Card 2: the reverse direction.
    recognition_card = {
        'Front': '{{DE}}<i>{{DE Info}}</i>',
        'Back': '\n'.join(['{{FrontSide}}', '<hr id=answer>', '{{Sense}}<br>',
                           '{{EN}}', '<br><br>', '<small>{{Examples}}</small>']),
    }
    production_card = {
        'Front': '{{Sense}}<br>{{EN}}',
        'Back': '\n'.join(['{{FrontSide}}', '<hr id=answer>', '{{DE}}<i>{{DE Info}}</i>',
                           '<br><br>', '<small>{{Examples}}</small>']),
    }
    invoke('createModel',
           modelName=NOTE_TYPE,
           inOrderFields=['DE', 'DE Info', 'Sense', 'EN', 'Examples', 'Marking'],
           css=css,
           cardTemplates=[recognition_card, production_card])
def runanki():
    """Ensure the Anki desktop app is running for ANKI_USER, then trigger a sync."""
    global ANKI_APP
    app_missing = ANKI_APP is None or ANKI_APP.poll() is not None
    if app_missing:
        # Launch Anki and give it time to boot and open the AnkiConnect port.
        ANKI_APP = subprocess.Popen(['anki', '-p', ANKI_USER])
        sleep(5)
    invoke('sync')
def closeanki():
    """Terminate the Anki app spawned by runanki(), if it is still alive."""
    global ANKI_APP
    still_running = ANKI_APP is not None and ANKI_APP.poll() is None
    if still_running:
        ANKI_APP.terminate()
def formatnote(n):
    """Wrap the field dict *n* into a full AnkiConnect note specification."""
    return {
        'deckName': DECK_NAME,
        'modelName': NOTE_TYPE,
        'fields': n,
        'options': {'allowDuplicate': False, 'duplicateScope': 'collection'},
        'tags': [],
    }
def addnotes(maindict):
    """Push every generated note in *maindict* to Anki, then record which were added.

    maindict maps a marking string to a dict whose 'ankinotes' entry is a list of
    field dicts.  Each field dict is mutated in place: it gains a 'Marking' field
    before upload and a 'couldadd' flag afterwards.  Returns the mutated maindict.
    """
    runanki()
    createnotetype()
    notesarray = []
    invoke('createDeck', deck=DECK_NAME)
    for marking in maindict:
        for n in maindict[marking]['ankinotes']:
            n.update({'Marking': marking})
            notesarray.append(formatnote(n))
    # Keep only the notes Anki's duplicate check is willing to accept.
    canaddnote = invoke('canAddNotes', notes=notesarray)
    added = [item for i, item in enumerate(notesarray) if canaddnote[i]]
    resadded = invoke('addNotes', notes=added)
    invoke('sync')
    # addNotes returns None per note on failure; keep the ones actually created.
    added = [item for i, item in enumerate(added) if resadded[i] is not None]
    sleep(5)
    for marking in maindict:
        for n in maindict[marking]['ankinotes']:
            # Dict-equality membership test: n was mutated above, so its
            # formatted form matches the entry that went into `added`.
            n.update({'couldadd': formatnote(n) in added})
    closeanki()
    return maindict
| [
"subprocess.Popen",
"time.sleep"
] | [((3429, 3437), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (3434, 3437), False, 'from time import sleep\n'), ((2466, 2509), 'subprocess.Popen', 'subprocess.Popen', (["['anki', '-p', ANKI_USER]"], {}), "(['anki', '-p', ANKI_USER])\n", (2482, 2509), False, 'import subprocess\n'), ((2518, 2526), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2523, 2526), False, 'from time import sleep\n')] |
import subprocess
from datetime import datetime
from pathlib import Path
from typing import Any
from typing import List
from typing import Optional
from typing import Union
from pyspark.sql import DataFrame
from pyspark.sql import functions as F
from cishouseholds.edit import assign_from_map
from cishouseholds.edit import rename_column_names
from cishouseholds.edit import update_column_values_from_map
from cishouseholds.extract import list_contents
from cishouseholds.pipeline.category_map import category_map
from cishouseholds.pipeline.config import get_config
from cishouseholds.pipeline.load import extract_from_table
from cishouseholds.pipeline.output_variable_name_map import output_name_map
from cishouseholds.pipeline.pipeline_stages import register_pipeline_stage
@register_pipeline_stage("tables_to_csv")
def tables_to_csv(table_file_pairs):
    """
    Write existing HIVE tables out as CSV, applying the standard column-name
    and category-value mappings on the way.

    table_file_pairs:
        iterable of 2-item tuples/lists: [HIVE_table_name, output_csv_file_name]
    """
    run_stamp = datetime.today().strftime("%Y%m%d-%H%M%S")
    output_directory = Path(get_config()["output_directory"]) / run_stamp
    for table_name, output_file_name in table_file_pairs:
        table_df = extract_from_table(table_name)
        mapped_df = map_output_values_and_column_names(table_df, output_name_map, category_map)
        write_csv_rename(mapped_df, output_directory / f"{output_file_name}_{run_stamp}")
@register_pipeline_stage("generate_outputs")
def generate_outputs():
    """Write the all-visits and completed-visits CSV outputs for the linked dataset."""
    config = get_config()
    output_datetime = datetime.today().strftime("%Y%m%d-%H%M%S")
    output_directory = Path(config["output_directory"]) / output_datetime
    # TODO: Check that output dir exists
    linked_df = extract_from_table(config["table_names"]["input"]["merged_antibody_swab"])
    # all_visits_df = extract_from_table("response_level_records")
    # participant_df = extract_from_table("participant_level_with_vaccination_data")
    # linked_df = all_visits_df.join(participant_df, on="participant_id", how="left")
    # A visit counts as "completed" when its status says so OR either sample
    # barcode (blood or swab) is present.
    linked_df = linked_df.withColumn(
        "completed_visits_subset",
        F.when(
            (F.col("participant_visit_status") == "Completed")
            | (F.col("blood_sample_barcode").isNotNull() | (F.col("swab_sample_barcode").isNotNull())),
            True,
        ).otherwise(False),
    )
    all_visits_output_df = map_output_values_and_column_names(linked_df, output_name_map, category_map)
    complete_visits_output_df = all_visits_output_df.where(F.col("completed_visits_subset"))
    # The helper flag column is dropped before writing; it is internal only.
    write_csv_rename(
        all_visits_output_df.drop("completed_visits_subset"),
        output_directory / f"cishouseholds_all_visits_{output_datetime}",
    )
    write_csv_rename(
        complete_visits_output_df.drop("completed_visits_subset"),
        output_directory / f"cishouseholds_completed_visits_{output_datetime}",
    )
def map_output_values_and_column_names(df: DataFrame, column_name_map: dict, value_map_by_column: dict):
    """
    Recode column values and rename columns using the supplied maps.

    A map entry is applied only when its column exists in both the map and *df*.
    """
    applicable_value_maps = {
        column: mapping
        for column, mapping in value_map_by_column.items()
        if column in df.columns
    }
    # Fall back to the current name for columns absent from the rename map.
    renaming = {column: column_name_map.get(column, column) for column in df.columns}
    for column, mapping in applicable_value_maps.items():
        df = assign_from_map(df, column, column, mapping)
    return rename_column_names(df, renaming)
def check_columns(col_args, selection_columns, error):
    """Validate that every column named in *col_args* is present in *selection_columns*.

    Parameters
    ----------
    col_args
        Sequence of column-name collections in the order ``configure_outputs``
        appends them: group-by columns, value-map keys, name-map keys.
        ``None`` entries are skipped.
    selection_columns
        Collection the checked columns must be contained in.
    error
        1 to raise ``IndexError`` (column missing from the user's selection);
        any other value raises ``AttributeError`` (column absent from the dataframe).
    """
    # Fix: labels now follow the order configure_outputs builds col_args in
    # (group_by, value_map, name_map) — the old list swapped the last two, so
    # error messages blamed the wrong argument.  Note the labelling is still
    # positional: when an earlier argument is absent, later ones shift.
    arguments = ["group by columns", "value map", "name map"]
    for argument, check in zip(arguments, col_args):
        if check is None:
            continue
        for column in check:
            if column not in selection_columns:
                if error == 1:
                    raise IndexError(
                        f"column:{column} is required for {argument}, therefore they must be selected in arguments"
                    )
                raise AttributeError(f"column: {column} does not exist in dataframe")
def configure_outputs(
    df: DataFrame,
    selection_columns: Optional[Union[List[str], str]] = None,
    group_by_columns: Optional[Union[List[str], str]] = None,
    aggregate_function: Optional[Any] = None,
    aggregate_column_name: Optional[str] = None,
    name_map: Optional[dict] = None,
    value_map: Optional[dict] = None,
    complete_map: Optional[bool] = False,
):
    """
    Customise the output of the pipeline using user inputs.

    Parameters
    ----------
    df
        Input dataframe.
    selection_columns
        Columns that must cover the other column arguments.
        NOTE(review): these are only validated against; the dataframe is not
        actually subset to them — confirm that is intended.
    group_by_columns
        Column(s) to group by; requires ``aggregate_function``.
    aggregate_function
        Aggregate function name applied across all columns when grouping.
    aggregate_column_name
        Optional name for the single new column produced by the aggregation.
    name_map
        Dictionary of {current_column_name: new_column_name} renames.
    value_map
        Dictionary of {column: value-mapping dict} used to recode values.
    complete_map
        When True, raise if any value in a mapped column is left unmapped.
    """
    def _as_list(columns):
        # Convenience: a bare column name is treated as a one-element list.
        if columns is not None and not isinstance(columns, list):
            return [str(columns)]
        return columns

    group_by_columns = _as_list(group_by_columns)
    selection_columns = _as_list(selection_columns)

    # Build the collections to validate, in a fixed order (see check_columns).
    col_args: list = []
    if group_by_columns is not None:
        col_args.append(group_by_columns)
    if value_map is not None:
        col_args.append(value_map.keys())
    if name_map is not None:
        col_args.append(name_map.keys())
    if selection_columns is not None:
        check_columns(col_args, selection_columns, 1)
    check_columns([*col_args, selection_columns], df.columns, 0)

    if group_by_columns is not None:
        if aggregate_function is None:
            raise Exception("Aggregate function required: rows can only be grouped using an aggregation function")
        prev_cols = set(df.columns)
        df = df.groupBy(*group_by_columns).agg({"*": aggregate_function})
        if aggregate_column_name is not None:
            # agg adds exactly one new column (e.g. "count(1)"); rename it.
            new_col = list(set(df.columns) - prev_cols)[0]
            df = df.withColumnRenamed(new_col, aggregate_column_name)
    if name_map is not None:
        for current_name, to_be_name in name_map.items():
            df = df.withColumnRenamed(current_name, to_be_name)
    if value_map is not None:
        for column_name_to_assign, mapping in value_map.items():
            df = update_column_values_from_map(df, column_name_to_assign, mapping, complete_map)
    return df
def write_csv_rename(df: DataFrame, file_path: Path):
    """
    Writes a df to file_path as a single partition and moves it to a single CSV
    with the same name.

    Process first writes into file_path/_tmp and then moves the part file
    to rename it.

    Parameters
    ----------
    df
        Dataframe to write.
    file_path
        Path to outgoing file, without filename extension.
    """
    temp_path = file_path / "_tmp"
    # Coalesce to one partition so Spark emits a single part file.
    (df.coalesce(1).write.mode("overwrite").csv(temp_path.as_posix(), header=True))
    # Locate the single part-*.csv Spark wrote under the temp directory.
    partitions = list_contents(temp_path.as_posix())["filename"].dropna().tolist()
    partitions = [part for part in partitions if part.endswith(".csv")]
    # move temp file to target location and rename
    subprocess.check_call(["hadoop", "fs", "-mv", (temp_path / partitions[0]), file_path.as_posix() + ".csv"])
    # remove original subfolder inc tmp
    subprocess.call(["hadoop", "fs", "-rm", "-r", file_path], stdout=subprocess.DEVNULL)
| [
"cishouseholds.edit.update_column_values_from_map",
"cishouseholds.pipeline.load.extract_from_table",
"pathlib.Path",
"cishouseholds.pipeline.pipeline_stages.register_pipeline_stage",
"cishouseholds.pipeline.config.get_config",
"pyspark.sql.functions.col",
"cishouseholds.edit.rename_column_names",
"su... | [((782, 822), 'cishouseholds.pipeline.pipeline_stages.register_pipeline_stage', 'register_pipeline_stage', (['"""tables_to_csv"""'], {}), "('tables_to_csv')\n", (805, 822), False, 'from cishouseholds.pipeline.pipeline_stages import register_pipeline_stage\n'), ((1549, 1592), 'cishouseholds.pipeline.pipeline_stages.register_pipeline_stage', 'register_pipeline_stage', (['"""generate_outputs"""'], {}), "('generate_outputs')\n", (1572, 1592), False, 'from cishouseholds.pipeline.pipeline_stages import register_pipeline_stage\n'), ((1119, 1131), 'cishouseholds.pipeline.config.get_config', 'get_config', ([], {}), '()\n', (1129, 1131), False, 'from cishouseholds.pipeline.config import get_config\n'), ((1630, 1642), 'cishouseholds.pipeline.config.get_config', 'get_config', ([], {}), '()\n', (1640, 1642), False, 'from cishouseholds.pipeline.config import get_config\n'), ((1840, 1914), 'cishouseholds.pipeline.load.extract_from_table', 'extract_from_table', (["config['table_names']['input']['merged_antibody_swab']"], {}), "(config['table_names']['input']['merged_antibody_swab'])\n", (1858, 1914), False, 'from cishouseholds.pipeline.load import extract_from_table\n'), ((3575, 3615), 'cishouseholds.edit.rename_column_names', 'rename_column_names', (['df', 'column_name_map'], {}), '(df, column_name_map)\n', (3594, 3615), False, 'from cishouseholds.edit import rename_column_names\n'), ((7652, 7741), 'subprocess.call', 'subprocess.call', (["['hadoop', 'fs', '-rm', '-r', file_path]"], {'stdout': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-rm', '-r', file_path], stdout=subprocess\n .DEVNULL)\n", (7667, 7741), False, 'import subprocess\n'), ((1220, 1252), 'pathlib.Path', 'Path', (["config['output_directory']"], {}), "(config['output_directory'])\n", (1224, 1252), False, 'from pathlib import Path\n'), ((1343, 1373), 'cishouseholds.pipeline.load.extract_from_table', 'extract_from_table', (['table_name'], {}), '(table_name)\n', (1361, 1373), False, 'from 
cishouseholds.pipeline.load import extract_from_table\n'), ((1731, 1763), 'pathlib.Path', 'Path', (["config['output_directory']"], {}), "(config['output_directory'])\n", (1735, 1763), False, 'from pathlib import Path\n'), ((2637, 2669), 'pyspark.sql.functions.col', 'F.col', (['"""completed_visits_subset"""'], {}), "('completed_visits_subset')\n", (2642, 2669), True, 'from pyspark.sql import functions as F\n'), ((3519, 3565), 'cishouseholds.edit.assign_from_map', 'assign_from_map', (['df', 'column', 'column', 'value_map'], {}), '(df, column, column, value_map)\n', (3534, 3565), False, 'from cishouseholds.edit import assign_from_map\n'), ((1154, 1170), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1168, 1170), False, 'from datetime import datetime\n'), ((1665, 1681), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1679, 1681), False, 'from datetime import datetime\n'), ((6712, 6787), 'cishouseholds.edit.update_column_values_from_map', 'update_column_values_from_map', (['df', 'column_name_to_assign', 'map', 'complete_map'], {}), '(df, column_name_to_assign, map, complete_map)\n', (6741, 6787), False, 'from cishouseholds.edit import update_column_values_from_map\n'), ((2266, 2299), 'pyspark.sql.functions.col', 'F.col', (['"""participant_visit_status"""'], {}), "('participant_visit_status')\n", (2271, 2299), True, 'from pyspark.sql import functions as F\n'), ((2331, 2360), 'pyspark.sql.functions.col', 'F.col', (['"""blood_sample_barcode"""'], {}), "('blood_sample_barcode')\n", (2336, 2360), True, 'from pyspark.sql import functions as F\n'), ((2376, 2404), 'pyspark.sql.functions.col', 'F.col', (['"""swab_sample_barcode"""'], {}), "('swab_sample_barcode')\n", (2381, 2404), True, 'from pyspark.sql import functions as F\n')] |
import os
from flask import Flask
from flask_restful import Resource, Api
# Flask application plus the flask-restful wrapper used to register resources.
app = Flask(__name__)
api = Api(app)
class EnvironmentVariablesEndpoint(Resource):
    """REST resource that returns every process environment variable.

    WARNING: this exposes whatever secrets (tokens, credentials) live in the
    environment to any caller — only expose it on trusted networks.
    """

    def get(self):
        # (name, value) pairs for the whole current environment.
        return [(name, value) for name, value in os.environ.items()]
# Serve the environment dump at the application root.
api.add_resource(EnvironmentVariablesEndpoint, '/')
if __name__ == '__main__':
    # Listens on all interfaces; debug mode should be disabled in production.
    app.run(debug=True, host='0.0.0.0', port=8000)
| [
"os.environ.keys",
"flask_restful.Api",
"flask.Flask"
] | [((82, 97), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (87, 97), False, 'from flask import Flask\n'), ((104, 112), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (107, 112), False, 'from flask_restful import Resource, Api\n'), ((230, 247), 'os.environ.keys', 'os.environ.keys', ([], {}), '()\n', (245, 247), False, 'import os\n')] |
from __future__ import annotations
import os
import signal
import subprocess
import sys
import time
from multiprocessing import cpu_count
from typing import List, Union
import click
from .__version__ import __version__
from .routing.commands import display_urls
from .utils import F, import_from_string, import_module
def execute(command: Union[List[str], str]) -> int:
    """Echo *command*, run it in a subprocess, and block until it exits.

    A SIGTERM received while waiting terminates the child first.
    Returns the child's exit code.
    """
    argv = command.split(" ") if isinstance(command, str) else command

    click.echo("Execute command: ", nl=False)
    click.secho(" ".join(argv), fg="green")

    child = subprocess.Popen(argv, shell=False)

    def _terminate_child(signo, frame):
        child.terminate()
        child.wait()

    signal.signal(signal.SIGTERM, _terminate_child)

    # Poll once per second until the child exits.
    while child.poll() is None:
        time.sleep(1)
    return child.returncode
@click.group(help=f"Index.py {__version__}")
def index_cli():
    # Root command group; server subcommands (hypercorn / uvicorn / gunicorn)
    # are attached below only when the corresponding package is installed.
    pass
# Optional hypercorn server integration: only registered when hypercorn is installed.
try:
    import hypercorn
except ImportError:
    pass
else:
    @click.command(help="use hypercorn to run Index.py application")
    @click.option(
        "--bind",
        default="127.0.0.1:4190",
        show_default=True,
        help="A string of the form: HOST:PORT, unix:PATH, fd://FD.",
    )
    @click.option(
        "--log-level",
        type=click.Choice(["critical", "error", "warning", "info", "debug"]),
        default="info",
        show_default=True,
    )
    @click.option(
        "--worker-class",
        "-k",
        default="asyncio",
        type=click.Choice(["asyncio", "uvloop", "trio"]),
        show_choices=True,
        show_default=True,
    )
    @click.option(
        "--configuration",
        "-c",
        type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
    )
    @click.argument("application")
    def hypercorn_cli(
        worker_class: str,
        configuration: str,
        application: str,
        bind: str,
        log_level: str,
    ):
        # Make the working directory importable so "module:attr" strings resolve.
        sys.path.insert(0, os.getcwd())
        asgi_app = import_from_string(application)
        config = hypercorn.Config()
        # Hypercorn configuration may come from a .py or .toml file.
        if configuration is not None:
            if configuration.endswith(".py"):
                config.from_pyfile(configuration)
            elif configuration.endswith(".toml"):
                config.from_toml(configuration)
            else:
                click.secho(
                    "Please use configuration file path endswith `.py` or `.toml`.",
                    fg="red",
                )
                raise SystemExit(1)
        config.bind = [bind]
        config.loglevel = log_level.upper()
        config.worker_class = worker_class
        # Factory producing a signal handler that flags the app to exit and
        # wakes the shutdown event so serve() can stop gracefully.
        create_signal_handle = lambda shutdown_event: lambda sig, frame: (
            setattr(asgi_app, "should_exit", True),  # type: ignore
            shutdown_event.set(),
        )
        if worker_class == "uvloop":
            import uvloop
            uvloop.install()
        if worker_class in ("asyncio", "uvloop"):
            import asyncio
            from hypercorn.asyncio import serve
            loop = asyncio.get_event_loop()
            shutdown_event = asyncio.Event(loop=loop)
            for sig in {signal.SIGINT, signal.SIGTERM}:
                signal.signal(sig, create_signal_handle(shutdown_event))
            loop.run_until_complete(
                serve(asgi_app, config, shutdown_trigger=shutdown_event.wait)  # type: ignore
            )
        else:
            # Trio worker: same shutdown wiring, trio-flavoured primitives.
            import trio
            from hypercorn.trio import serve  # type: ignore
            shutdown_event = trio.Event()
            for sig in {signal.SIGINT, signal.SIGTERM}:
                signal.signal(sig, create_signal_handle(shutdown_event))
            trio.run(serve(asgi_app, config, shutdown_trigger=shutdown_event.wait))  # type: ignore
    index_cli.add_command(hypercorn_cli, name="hypercorn")
# Optional uvicorn server integration: only registered when uvicorn is installed.
try:
    import uvicorn
except ImportError:
    pass
else:
    from .applications import Index
    # See https://stackoverflow.com/questions/58133694/graceful-shutdown-of-uvicorn-starlette-app-with-websockets
    origin_handle_exit = uvicorn.Server.handle_exit
    def handle_exit(self: uvicorn.Server, sig, frame):
        # Walk wrapper middlewares until the Index application is reached,
        # flag it to exit, then delegate to uvicorn's original handler.
        application = self.config.loaded_app
        while not isinstance(application, Index):
            application = application.app
        application.should_exit = True
        return origin_handle_exit(self, sig, frame)
    uvicorn.Server.handle_exit = handle_exit
    @click.command(help="use uvicorn to run Index.py application")
    @click.option(
        "--bind",
        default="127.0.0.1:4190",
        show_default=True,
        help="A string of the form: HOST:PORT, unix:PATH, fd://FD.",
    )
    @click.option("--autoreload/--no-autoreload", default=True, show_default=True)
    @click.option(
        "--log-level",
        type=click.Choice(["critical", "error", "warning", "info", "debug"]),
        default="info",
        show_default=True,
    )
    @click.argument("application")
    def uvicorn_cli(application: str, bind: str, autoreload: bool, log_level: str):
        # Make the working directory importable so "module:attr" strings resolve.
        sys.path.insert(0, os.getcwd())
        if bind.startswith("unix:"):
            # F pipes the path through normpath then abspath.
            bind_config = {"uds": bind[5:] | F(os.path.normpath) | F(os.path.abspath)}
            if autoreload:
                click.secho(
                    "Reload option doesnt work with unix sockets "
                    "in uvicorn: https://github.com/encode/uvicorn/issues/722",
                    fg="yellow",
                )
        elif bind.startswith("fd://"):
            bind_config = {"fd": int(bind[5:])}
            if autoreload:
                click.secho(
                    "Reload option doesnt work with fd "
                    "in uvicorn: https://github.com/encode/uvicorn/issues/368",
                    fg="yellow",
                )
        else:
            # TCP bind: HOST:PORT, or a bare host with the default port 4190.
            if ":" in bind:
                host, port = bind.split(":")
                bind_config = {"host": host, "port": int(port)}
            else:
                bind_config = {"host": bind, "port": 4190}
        uvicorn.run(
            application,
            **bind_config,
            log_level=log_level,
            interface="asgi3",
            lifespan="on",
            reload=autoreload,
        )
    index_cli.add_command(uvicorn_cli, "uvicorn")
# Optional gunicorn integration: requires gunicorn newer than 20.1.
try:
    import gunicorn
    assert gunicorn.version_info > (20, 1)
    del gunicorn
except ImportError:
    pass
else:
    # File where the gunicorn master writes its PID (used to signal it later).
    MASTER_PID_FILE = ".gunicorn.pid"
    def read_gunicorn_master_pid(pid_file: str = MASTER_PID_FILE) -> int:
        """Read the gunicorn master PID from the working directory, or exit."""
        try:
            with open(os.path.join(os.getcwd(), pid_file), "r") as file:
                return int(file.read())
        except FileNotFoundError:
            sys.exit(
                (
                    f'File "{pid_file}" not found, '
                    + "please make sure you have started gunicorn using the "
                    + "`index-cli gunicorn start ...`."
                )
            )
    @click.group(help="use gunicorn to run Index.py application")
    def gunicorn_cli():
        pass
    @gunicorn_cli.command(help="Run gunicorn")
    @click.option(
        "--bind",
        default="127.0.0.1:4190",
        show_default=True,
        help="A string of the form: HOST:PORT, unix:PATH, fd://FD.",
    )
    @click.option("--autoreload/--no-autoreload", default=False, show_default=True)
    @click.option(
        "--log-level",
        type=click.Choice(["critical", "error", "warning", "info", "debug"]),
        default="info",
        show_default=True,
    )
    @click.option("--workers", "-w", default=cpu_count(), show_default=True)
    @click.option(
        "--worker-class",
        "-k",
        default="uvicorn.workers.UvicornWorker",
        show_default=True,
    )
    @click.option("--daemon", "-d", default=False, is_flag=True, show_default=True)
    @click.option(
        "--configuration",
        "-c",
        type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
    )
    @click.argument("application")
    def start(
        workers: int,
        worker_class: str,
        daemon: bool,
        configuration: str,
        application: str,
        bind: str,
        autoreload: bool,
        log_level: str,
    ):
        # Build the gunicorn command line, then hand it to execute().
        command = (
            f"{sys.executable} -m gunicorn -k {worker_class}"
            + f" --bind {bind}"
            + f" --chdir {os.getcwd()}"
            + f" --workers {workers}"
            + f" --pid {MASTER_PID_FILE}"
            + f" --log-level {log_level}"
        )
        args = command.split(" ")
        if daemon:
            args.extend("-D --log-file gunicorn.log".split(" "))
        if autoreload:
            args.append("--reload")
        if configuration:
            args.append("-c")
            args.append(configuration.strip())
        args.append(application)
        execute(args)
    # Gunicorn signal handler
    # https://docs.gunicorn.org/en/stable/signals.html
    @gunicorn_cli.command(help="Increment the number of processes by one")
    def incr():
        os.kill(read_gunicorn_master_pid(), signal.SIGTTIN)
    @gunicorn_cli.command(help="Decrement the number of processes by one")
    def decr():
        os.kill(read_gunicorn_master_pid(), signal.SIGTTOU)
    @gunicorn_cli.command(help="Stop gunicorn processes")
    @click.option("--force", "-f", default=False, is_flag=True)
    def stop(force):
        # SIGINT = quick shutdown, SIGTERM = graceful shutdown.
        os.kill(read_gunicorn_master_pid(), signal.SIGINT if force else signal.SIGTERM)
    @gunicorn_cli.command(help="Reload configuration and recreate worker processes")
    def reload():
        os.kill(read_gunicorn_master_pid(), signal.SIGHUP)
    @gunicorn_cli.command(help="Restart gunicorn master processes and worker processes")
    @click.option("--force-stop", "-f", default=False, is_flag=True)
    def restart(force_stop):
        # SIGUSR2 forks a new master; the old one is stopped once the new
        # master has written its "<pidfile>.2" file.
        oldpid = read_gunicorn_master_pid()
        os.kill(oldpid, signal.SIGUSR2)
        # Waiting for starting new master process and worker processes
        while not os.path.exists(os.path.join(os.getcwd(), MASTER_PID_FILE + ".2")):
            time.sleep(0.5)
        # Stop old master process and worker processes
        os.kill(oldpid, signal.SIGINT if force_stop else signal.SIGTERM)
    index_cli.add_command(gunicorn_cli, "gunicorn")
index_cli.add_command(display_urls, "display-urls")
# Import the project-local "commands" module so user-defined commands self-register.
import_module("commands")
| [
"click.Choice",
"uvloop.install",
"time.sleep",
"multiprocessing.cpu_count",
"click.echo",
"sys.exit",
"os.kill",
"hypercorn.trio.serve",
"click.secho",
"click.group",
"subprocess.Popen",
"click.option",
"trio.Event",
"asyncio.get_event_loop",
"click.command",
"click.argument",
"sign... | [((825, 868), 'click.group', 'click.group', ([], {'help': 'f"""Index.py {__version__}"""'}), "(help=f'Index.py {__version__}')\n", (836, 868), False, 'import click\n'), ((450, 491), 'click.echo', 'click.echo', (['"""Execute command: """'], {'nl': '(False)'}), "('Execute command: ', nl=False)\n", (460, 491), False, 'import click\n'), ((554, 592), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'shell': '(False)'}), '(command, shell=False)\n', (570, 592), False, 'import subprocess\n'), ((688, 733), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'sigint_handler'], {}), '(signal.SIGTERM, sigint_handler)\n', (701, 733), False, 'import signal\n'), ((964, 1027), 'click.command', 'click.command', ([], {'help': '"""use hypercorn to run Index.py application"""'}), "(help='use hypercorn to run Index.py application')\n", (977, 1027), False, 'import click\n'), ((1033, 1166), 'click.option', 'click.option', (['"""--bind"""'], {'default': '"""127.0.0.1:4190"""', 'show_default': '(True)', 'help': '"""A string of the form: HOST:PORT, unix:PATH, fd://FD."""'}), "('--bind', default='127.0.0.1:4190', show_default=True, help=\n 'A string of the form: HOST:PORT, unix:PATH, fd://FD.')\n", (1045, 1166), False, 'import click\n'), ((1738, 1767), 'click.argument', 'click.argument', (['"""application"""'], {}), "('application')\n", (1752, 1767), False, 'import click\n'), ((4426, 4487), 'click.command', 'click.command', ([], {'help': '"""use uvicorn to run Index.py application"""'}), "(help='use uvicorn to run Index.py application')\n", (4439, 4487), False, 'import click\n'), ((4493, 4626), 'click.option', 'click.option', (['"""--bind"""'], {'default': '"""127.0.0.1:4190"""', 'show_default': '(True)', 'help': '"""A string of the form: HOST:PORT, unix:PATH, fd://FD."""'}), "('--bind', default='127.0.0.1:4190', show_default=True, help=\n 'A string of the form: HOST:PORT, unix:PATH, fd://FD.')\n", (4505, 4626), False, 'import click\n'), ((4666, 4743), 'click.option', 
'click.option', (['"""--autoreload/--no-autoreload"""'], {'default': '(True)', 'show_default': '(True)'}), "('--autoreload/--no-autoreload', default=True, show_default=True)\n", (4678, 4743), False, 'import click\n'), ((4926, 4955), 'click.argument', 'click.argument', (['"""application"""'], {}), "('application')\n", (4940, 4955), False, 'import click\n'), ((6935, 6995), 'click.group', 'click.group', ([], {'help': '"""use gunicorn to run Index.py application"""'}), "(help='use gunicorn to run Index.py application')\n", (6946, 6995), False, 'import click\n'), ((7086, 7219), 'click.option', 'click.option', (['"""--bind"""'], {'default': '"""127.0.0.1:4190"""', 'show_default': '(True)', 'help': '"""A string of the form: HOST:PORT, unix:PATH, fd://FD."""'}), "('--bind', default='127.0.0.1:4190', show_default=True, help=\n 'A string of the form: HOST:PORT, unix:PATH, fd://FD.')\n", (7098, 7219), False, 'import click\n'), ((7259, 7337), 'click.option', 'click.option', (['"""--autoreload/--no-autoreload"""'], {'default': '(False)', 'show_default': '(True)'}), "('--autoreload/--no-autoreload', default=False, show_default=True)\n", (7271, 7337), False, 'import click\n'), ((7597, 7698), 'click.option', 'click.option', (['"""--worker-class"""', '"""-k"""'], {'default': '"""uvicorn.workers.UvicornWorker"""', 'show_default': '(True)'}), "('--worker-class', '-k', default=\n 'uvicorn.workers.UvicornWorker', show_default=True)\n", (7609, 7698), False, 'import click\n'), ((7738, 7816), 'click.option', 'click.option', (['"""--daemon"""', '"""-d"""'], {'default': '(False)', 'is_flag': '(True)', 'show_default': '(True)'}), "('--daemon', '-d', default=False, is_flag=True, show_default=True)\n", (7750, 7816), False, 'import click\n'), ((7973, 8002), 'click.argument', 'click.argument', (['"""application"""'], {}), "('application')\n", (7987, 8002), False, 'import click\n'), ((9295, 9353), 'click.option', 'click.option', (['"""--force"""', '"""-f"""'], {'default': '(False)', 'is_flag': 
'(True)'}), "('--force', '-f', default=False, is_flag=True)\n", (9307, 9353), False, 'import click\n'), ((9721, 9784), 'click.option', 'click.option', (['"""--force-stop"""', '"""-f"""'], {'default': '(False)', 'is_flag': '(True)'}), "('--force-stop', '-f', default=False, is_flag=True)\n", (9733, 9784), False, 'import click\n'), ((777, 790), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (787, 790), False, 'import time\n'), ((2030, 2048), 'hypercorn.Config', 'hypercorn.Config', ([], {}), '()\n', (2046, 2048), False, 'import hypercorn\n'), ((6027, 6145), 'uvicorn.run', 'uvicorn.run', (['application'], {'log_level': 'log_level', 'interface': '"""asgi3"""', 'lifespan': '"""on"""', 'reload': 'autoreload'}), "(application, **bind_config, log_level=log_level, interface=\n 'asgi3', lifespan='on', reload=autoreload)\n", (6038, 6145), False, 'import uvicorn\n'), ((9867, 9898), 'os.kill', 'os.kill', (['oldpid', 'signal.SIGUSR2'], {}), '(oldpid, signal.SIGUSR2)\n', (9874, 9898), False, 'import os\n'), ((10146, 10210), 'os.kill', 'os.kill', (['oldpid', '(signal.SIGINT if force_stop else signal.SIGTERM)'], {}), '(oldpid, signal.SIGINT if force_stop else signal.SIGTERM)\n', (10153, 10210), False, 'import os\n'), ((1949, 1960), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1958, 1960), False, 'import os\n'), ((2877, 2893), 'uvloop.install', 'uvloop.install', ([], {}), '()\n', (2891, 2893), False, 'import uvloop\n'), ((3040, 3064), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (3062, 3064), False, 'import asyncio\n'), ((3095, 3119), 'asyncio.Event', 'asyncio.Event', ([], {'loop': 'loop'}), '(loop=loop)\n', (3108, 3119), False, 'import asyncio\n'), ((3524, 3536), 'trio.Event', 'trio.Event', ([], {}), '()\n', (3534, 3536), False, 'import trio\n'), ((1256, 1319), 'click.Choice', 'click.Choice', (["['critical', 'error', 'warning', 'info', 'debug']"], {}), "(['critical', 'error', 'warning', 'info', 'debug'])\n", (1268, 1319), False, 'import click\n'), 
((1477, 1520), 'click.Choice', 'click.Choice', (["['asyncio', 'uvloop', 'trio']"], {}), "(['asyncio', 'uvloop', 'trio'])\n", (1489, 1520), False, 'import click\n'), ((1655, 1725), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(True)', 'dir_okay': '(False)', 'readable': '(True)'}), '(exists=True, file_okay=True, dir_okay=False, readable=True)\n', (1665, 1725), False, 'import click\n'), ((5067, 5078), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5076, 5078), False, 'import os\n'), ((4799, 4862), 'click.Choice', 'click.Choice', (["['critical', 'error', 'warning', 'info', 'debug']"], {}), "(['critical', 'error', 'warning', 'info', 'debug'])\n", (4811, 4862), False, 'import click\n'), ((7393, 7456), 'click.Choice', 'click.Choice', (["['critical', 'error', 'warning', 'info', 'debug']"], {}), "(['critical', 'error', 'warning', 'info', 'debug'])\n", (7405, 7456), False, 'import click\n'), ((7560, 7571), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (7569, 7571), False, 'from multiprocessing import cpu_count\n'), ((7890, 7960), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(True)', 'dir_okay': '(False)', 'readable': '(True)'}), '(exists=True, file_okay=True, dir_okay=False, readable=True)\n', (7900, 7960), False, 'import click\n'), ((10067, 10082), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (10077, 10082), False, 'import time\n'), ((3303, 3364), 'hypercorn.trio.serve', 'serve', (['asgi_app', 'config'], {'shutdown_trigger': 'shutdown_event.wait'}), '(asgi_app, config, shutdown_trigger=shutdown_event.wait)\n', (3308, 3364), False, 'from hypercorn.trio import serve\n'), ((3688, 3749), 'hypercorn.trio.serve', 'serve', (['asgi_app', 'config'], {'shutdown_trigger': 'shutdown_event.wait'}), '(asgi_app, config, shutdown_trigger=shutdown_event.wait)\n', (3693, 3749), False, 'from hypercorn.trio import serve\n'), ((5248, 5386), 'click.secho', 'click.secho', (['"""Reload option doesnt work with unix sockets in 
uvicorn: https://github.com/encode/uvicorn/issues/722"""'], {'fg': '"""yellow"""'}), "(\n 'Reload option doesnt work with unix sockets in uvicorn: https://github.com/encode/uvicorn/issues/722'\n , fg='yellow')\n", (5259, 5386), False, 'import click\n'), ((6682, 6826), 'sys.exit', 'sys.exit', (['(f\'File "{pid_file}" not found, \' +\n \'please make sure you have started gunicorn using the \' +\n \'`index-cli gunicorn start ...`.\')'], {}), '(f\'File "{pid_file}" not found, \' +\n \'please make sure you have started gunicorn using the \' +\n \'`index-cli gunicorn start ...`.\')\n', (6690, 6826), False, 'import sys\n'), ((2315, 2405), 'click.secho', 'click.secho', (['"""Please use configuration file path endswith `.py` or `.toml`."""'], {'fg': '"""red"""'}), "('Please use configuration file path endswith `.py` or `.toml`.',\n fg='red')\n", (2326, 2405), False, 'import click\n'), ((5589, 5717), 'click.secho', 'click.secho', (['"""Reload option doesnt work with fd in uvicorn: https://github.com/encode/uvicorn/issues/368"""'], {'fg': '"""yellow"""'}), "(\n 'Reload option doesnt work with fd in uvicorn: https://github.com/encode/uvicorn/issues/368'\n , fg='yellow')\n", (5600, 5717), False, 'import click\n'), ((10016, 10027), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (10025, 10027), False, 'import os\n'), ((6558, 6569), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6567, 6569), False, 'import os\n'), ((8359, 8370), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8368, 8370), False, 'import os\n')] |
from fastapi import Request
from fastapi.responses import JSONResponse
from db import AuthDb
async def auth_check(request: Request, call_next):
    """Authorization middleware.

    Requests to "/" and "/docs" pass through unauthenticated; every other
    request must carry an ``authorization`` header whose token is accepted
    by ``AuthDb.fetch_data``.  Missing or unknown tokens get a 403.
    """
    if request.url.path in ("/", "/docs"):
        return await call_next(request)
    try:
        # Only the header lookup can raise KeyError; keeping the try this
        # narrow stops KeyErrors raised inside downstream handlers from
        # being misreported as 403 Unauthorized.
        token = request.headers['authorization']
    except KeyError:
        return JSONResponse({"message": "Unauthorized"}, status_code=403)
    trust = await AuthDb.fetch_data(token)
    if not trust:
        return JSONResponse({"message": "Unauthorized"}, status_code=403)
    return await call_next(request)
| [
"fastapi.responses.JSONResponse"
] | [((598, 656), 'fastapi.responses.JSONResponse', 'JSONResponse', (["{'message': 'Unauthorized'}"], {'status_code': '(403)'}), "({'message': 'Unauthorized'}, status_code=403)\n", (610, 656), False, 'from fastapi.responses import JSONResponse\n'), ((803, 861), 'fastapi.responses.JSONResponse', 'JSONResponse', (["{'message': 'Unauthorized'}"], {'status_code': '(403)'}), "({'message': 'Unauthorized'}, status_code=403)\n", (815, 861), False, 'from fastapi.responses import JSONResponse\n')] |
import sys, os
from tests.data import DNA_QUERY, PEPTIDE_QUERY
# Make the project root importable when the tests run from this directory.
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
from fastapi.testclient import TestClient
from main import app
# Shared in-process test client for the FastAPI app.
client = TestClient(app)
# Keys every optimization response is expected to contain.
REQUIRED_RESPONSE_DATA = [
    'query',
    'weights',
    'seq_type',
    'peptide_seq',
    'dna_seq',
    'stop_codon',
    'optimized_sd',
    'min_difference_sumsquares',
    'best_expression_sd',
    'optimized_ad',
    'min_difference_absvalue',
    'best_expression_ad'
]
def test_dna_optimization():
    """POST a DNA query and verify the status code and response keys."""
    response = client.post("/optimize/dna", json=DNA_QUERY)
    assert response.status_code == 200
    payload = response.json()
    # Every expected key must be present in the payload.
    assert set(REQUIRED_RESPONSE_DATA) <= set(payload)
def test_protein_optimization():
    """POST a peptide query and verify the status code and response keys.

    Renamed from a duplicated ``test_dna_optimization`` definition, which
    shadowed the DNA test above so pytest only ever collected one of them.
    """
    # validate call
    response = client.post(
        "/optimize/protein",
        json=PEPTIDE_QUERY
    )
    assert response.status_code == 200
    # validate data
    data = response.json()
    assert all(k in data for k in REQUIRED_RESPONSE_DATA)
"os.path.abspath",
"fastapi.testclient.TestClient",
"sys.path.insert"
] | [((116, 151), 'sys.path.insert', 'sys.path.insert', (['(0)', "(myPath + '/../')"], {}), "(0, myPath + '/../')\n", (131, 151), False, 'import sys, os\n'), ((226, 241), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (236, 241), False, 'from fastapi.testclient import TestClient\n'), ((89, 114), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (104, 114), False, 'import sys, os\n')] |
import sqlite3
import datetime
import time
import logging
import os
from bot_constant import CQ_ROOT
# Directory where CoolQ stores downloaded images, under its install root.
CQ_IMAGE_ROOT = os.path.join(CQ_ROOT, r'data/image')
# Module logger under the "CTB" hierarchy.
logger = logging.getLogger("CTB." + __name__)
class FileDB:
    """SQLite-backed cache of files exchanged between QQ and Telegram.

    Each row records a file's name, type, md5, Telegram file id, size and
    LRU bookkeeping (last usage timestamp and usage count).
    """

    def __init__(self, db_name: str):
        self.conn = sqlite3.connect(db_name, check_same_thread=False)
        self.cursor = self.conn.cursor()
        self.db_name = db_name
        self.table_name = 'file_ids'
        # Create the schema only if the table does not exist yet.
        self.cursor.execute(f"SELECT count(*) FROM sqlite_master WHERE type='table' AND name='{self.table_name}';")
        result = self.cursor.fetchall()
        if not result[0][0]:
            self.__create_schema()

    def __create_schema(self):
        """Create the file_ids table and its unique md5/fileid indexes.

        FileDB contains:
            filename: str
            file_type: str
            file_md5: str
            fileid_tg: str
            size: str, bytes
            last_usage_date: int, unix timestamp
            usage_count: int
        """
        self.cursor.execute(f"create table {self.table_name} (download_date int primary key,"
                            f"filename text, file_type text, file_md5 text, fileid_tg text, file_size int,"
                            f" last_usage_date int, usage_count int)")
        self.cursor.execute(f"create unique index md5_index on {self.table_name}(file_md5);")
        self.cursor.execute(f"create unique index fileid_index on {self.table_name}(fileid_tg);")
        self.conn.commit()

    @staticmethod
    def __now() -> int:
        """Current time as a unix timestamp (seconds)."""
        return int(time.mktime(datetime.datetime.now().timetuple()))

    def get_filename_by_fileid(self, fileid_tg: str):
        """Look up the stored filename for a Telegram file id.

        Bumps the LRU bookkeeping on a hit.
        :param fileid_tg: telegram file id
        :return: filename, or False when the id is unknown
        """
        # fileid_tg comes from external input: bind it as a parameter
        # instead of interpolating it into the SQL string.
        self.cursor.execute(f"select usage_count, filename from '{self.table_name}'"
                            f" where fileid_tg=?", (fileid_tg,))
        result = self.cursor.fetchall()
        if result:
            timestamp = self.__now()
            self.cursor.execute(
                f"update '{self.table_name}' set last_usage_date=?, usage_count=? where fileid_tg=?;",
                (timestamp, result[0][0] + 1, fileid_tg))
            self.conn.commit()
            return result[0][1]
        return False

    def get_fileid_by_md5(self, file_md5):
        """Look up the Telegram file id and type for a file md5.

        Bumps the LRU bookkeeping on a hit.
        :param file_md5: md5
        :return: dict with 'file_id' and 'file_type', or False when unknown
        """
        # file_md5 is derived from external data: parameterize the query.
        self.cursor.execute(f"select usage_count, fileid_tg, file_type from '{self.table_name}'"
                            f" where file_md5=?", (file_md5,))
        result = self.cursor.fetchall()
        if result:
            timestamp = self.__now()
            self.cursor.execute(
                f"update '{self.table_name}' set last_usage_date=?, usage_count=? where file_md5=?;",
                (timestamp, result[0][0] + 1, file_md5))
            self.conn.commit()
            return {'file_id': result[0][1], 'file_type': result[0][2]}
        return False

    def __add_resource(self, filename, file_type, file_md5, fileid_tg, file_size):
        """Insert one resource row with fresh LRU bookkeeping."""
        timestamp = self.__now()
        self.cursor.execute(f"insert into '{self.table_name}' "
                            f"(download_date, filename, file_type, file_md5, fileid_tg, file_size,"
                            f" last_usage_date, usage_count)"
                            f"values (?, ?, ?, ?, ?, ?, ?, ?)",
                            (timestamp, filename, file_type, file_md5, fileid_tg, file_size, timestamp, 1))
        self.conn.commit()

    def qq_add_resource(self, filename: str, file_type: str, file_md5: str, file_size: int, fileid_tg):
        """Record a resource received from QQ."""
        self.__add_resource(filename, file_type, file_md5, fileid_tg, file_size)

    def tg_add_resource(self, fileid_tg: str, filename: str, file_type: str, file_md5: str, file_size: int):
        """Record a resource acquired from Telegram."""
        self.__add_resource(filename, file_type, file_md5, fileid_tg, file_size)

    @staticmethod
    def calculate_real_size():
        """Size of the image directory on disk.

        :return: size in bytes
        """
        real_size = 0
        for root, dirs, files in os.walk(CQ_IMAGE_ROOT):
            real_size += sum([os.path.getsize(os.path.join(root, name)) for name in files])
        return real_size

    def calculate_db_size(self):
        """Sum of the recorded file sizes in the database.

        :return: size in bytes (None when the table is empty)
        """
        self.cursor.execute(f"select sum(file_size) from {self.table_name}")
        db_size = self.cursor.fetchall()[0][0]
        return db_size

    def purge_half(self):
        """Use LRU cache policy.  TODO: not implemented yet."""
        pass

    def purge_all(self):
        """Delete every cached file and rebuild an empty database."""
        self.conn.close()
        os.remove(self.db_name)
        for i in os.listdir(CQ_IMAGE_ROOT):
            path_file = os.path.join(CQ_IMAGE_ROOT, i)
            if os.path.isfile(path_file):
                os.remove(path_file)
        self.conn = sqlite3.connect(self.db_name, check_same_thread=False)
        self.cursor = self.conn.cursor()
        self.__create_schema()

    def purge_one_time(self):
        """Purge files that were only ever used once.

        :return: purged size in bytes
        """
        purged_size = 0
        self.cursor.execute(f"select download_date, filename, file_size from {self.table_name} where usage_count=1")
        data = self.cursor.fetchall()
        for entry in data:
            self.cursor.execute(f"delete from {self.table_name} where download_date=?", (str(entry[0]),))
            # Check the same joined path that is removed; the original
            # checked the bare filename against the working directory.
            full_path = os.path.join(CQ_IMAGE_ROOT, entry[1])
            if os.path.exists(full_path):
                os.remove(full_path)
                purged_size += entry[2]
        self.conn.commit()
        return purged_size

    def sync_cache(self):
        """Drop db records whose files no longer exist on disk.

        :return: size reduced in bytes
        """
        size_reduced = 0
        self.cursor.execute(f"select download_date, filename, file_size from {self.table_name}")
        data = self.cursor.fetchall()
        for entry in data:
            if not os.path.exists(os.path.join(CQ_IMAGE_ROOT, entry[1])):
                self.cursor.execute(f"delete from {self.table_name} where download_date=?", (str(entry[0]),))
                size_reduced += entry[2]
        self.conn.commit()
        return size_reduced

    def __del__(self):
        self.conn.close()
| [
"logging.getLogger",
"os.path.exists",
"os.listdir",
"sqlite3.connect",
"os.path.join",
"os.path.isfile",
"datetime.datetime.now",
"os.walk",
"os.remove"
] | [((118, 153), 'os.path.join', 'os.path.join', (['CQ_ROOT', '"""data/image"""'], {}), "(CQ_ROOT, 'data/image')\n", (130, 153), False, 'import os\n'), ((165, 201), 'logging.getLogger', 'logging.getLogger', (["('CTB.' + __name__)"], {}), "('CTB.' + __name__)\n", (182, 201), False, 'import logging\n'), ((276, 325), 'sqlite3.connect', 'sqlite3.connect', (['db_name'], {'check_same_thread': '(False)'}), '(db_name, check_same_thread=False)\n', (291, 325), False, 'import sqlite3\n'), ((4747, 4769), 'os.walk', 'os.walk', (['CQ_IMAGE_ROOT'], {}), '(CQ_IMAGE_ROOT)\n', (4754, 4769), False, 'import os\n'), ((5445, 5468), 'os.remove', 'os.remove', (['self.db_name'], {}), '(self.db_name)\n', (5454, 5468), False, 'import os\n'), ((5486, 5511), 'os.listdir', 'os.listdir', (['CQ_IMAGE_ROOT'], {}), '(CQ_IMAGE_ROOT)\n', (5496, 5511), False, 'import os\n'), ((5667, 5721), 'sqlite3.connect', 'sqlite3.connect', (['self.db_name'], {'check_same_thread': '(False)'}), '(self.db_name, check_same_thread=False)\n', (5682, 5721), False, 'import sqlite3\n'), ((5537, 5567), 'os.path.join', 'os.path.join', (['CQ_IMAGE_ROOT', 'i'], {}), '(CQ_IMAGE_ROOT, i)\n', (5549, 5567), False, 'import os\n'), ((5583, 5608), 'os.path.isfile', 'os.path.isfile', (['path_file'], {}), '(path_file)\n', (5597, 5608), False, 'import os\n'), ((6700, 6724), 'os.path.exists', 'os.path.exists', (['entry[1]'], {}), '(entry[1])\n', (6714, 6724), False, 'import os\n'), ((5626, 5646), 'os.remove', 'os.remove', (['path_file'], {}), '(path_file)\n', (5635, 5646), False, 'import os\n'), ((6752, 6789), 'os.path.join', 'os.path.join', (['CQ_IMAGE_ROOT', 'entry[1]'], {}), '(CQ_IMAGE_ROOT, entry[1])\n', (6764, 6789), False, 'import os\n'), ((7260, 7297), 'os.path.join', 'os.path.join', (['CQ_IMAGE_ROOT', 'entry[1]'], {}), '(CQ_IMAGE_ROOT, entry[1])\n', (7272, 7297), False, 'import os\n'), ((3246, 3269), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3267, 3269), False, 'import datetime\n'), ((4049, 4072), 
'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4070, 4072), False, 'import datetime\n'), ((4817, 4841), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (4829, 4841), False, 'import os\n'), ((1875, 1898), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1896, 1898), False, 'import datetime\n'), ((2565, 2588), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2586, 2588), False, 'import datetime\n')] |
import board3d as go_board
import numpy as np
import global_vars_go as gvg
def games_to_states(game_data):
    """Turn SGF game records into (board state, next move) training pairs."""
    boards = []
    next_moves = []
    for game in game_data:
        board = go_board.setup_board(game)
        for node in game.get_main_sequence():
            # Flip perspective every ply so the side to move is constant.
            board = go_board.switch_player_perspec(board)
            move = node.get_move()[1]
            if move is not None:
                boards.append(np.copy(board))
                # One-hot target plane for the played move.
                target = np.zeros(gvg.board_size * gvg.board_size).reshape(gvg.board_size, gvg.board_size)
                target[move[0], move[1]] = gvg.filled
                next_moves.append(target.reshape(gvg.board_size * gvg.board_size))
                board = go_board.make_move(board, move, gvg.bot_channel, gvg.player_channel)
                if board is None:
                    print("ERROR! Illegal move, {}, while training".format(move))
    return boards, next_moves
def new_board():
    """Return an all-zero board tensor of shape (size, size, channels)."""
    shape = (gvg.board_size, gvg.board_size, gvg.board_channels)
    return np.zeros(shape)
| [
"numpy.copy",
"board3d.make_move",
"numpy.zeros",
"board3d.setup_board",
"board3d.switch_player_perspec"
] | [((1261, 1323), 'numpy.zeros', 'np.zeros', (['(gvg.board_size, gvg.board_size, gvg.board_channels)'], {}), '((gvg.board_size, gvg.board_size, gvg.board_channels))\n', (1269, 1323), True, 'import numpy as np\n'), ((225, 268), 'board3d.setup_board', 'go_board.setup_board', (['game_data[game_index]'], {}), '(game_data[game_index])\n', (245, 268), True, 'import board3d as go_board\n'), ((354, 391), 'board3d.switch_player_perspec', 'go_board.switch_player_perspec', (['board'], {}), '(board)\n', (384, 391), True, 'import board3d as go_board\n'), ((958, 1031), 'board3d.make_move', 'go_board.make_move', (['board', 'node_move', 'gvg.bot_channel', 'gvg.player_channel'], {}), '(board, node_move, gvg.bot_channel, gvg.player_channel)\n', (976, 1031), True, 'import board3d as go_board\n'), ((579, 593), 'numpy.copy', 'np.copy', (['board'], {}), '(board)\n', (586, 593), True, 'import numpy as np\n'), ((624, 665), 'numpy.zeros', 'np.zeros', (['(gvg.board_size * gvg.board_size)'], {}), '(gvg.board_size * gvg.board_size)\n', (632, 665), True, 'import numpy as np\n')] |
import os
import struct
def readFile(path):
    """Read and return the full contents of the text file at *path*.

    Raises FileNotFoundError (carrying the offending path) when the file
    does not exist; the original raised the bare class with no message.
    """
    if not os.path.isfile(path):
        raise FileNotFoundError(path)
    with open(path, 'r') as file:
        return file.read()
def cleaner(source):
    """Strip ';' comments from assembly source and normalize whitespace.

    A token starting with ';' and everything after it on that line is
    dropped; remaining tokens are re-joined with single spaces.  The
    original mutated the token list while iterating over it, which only
    worked by accident.
    """
    cleaned_lines = []
    for line in source.split('\n'):
        tokens = line.split()
        for index, token in enumerate(tokens):
            if token.startswith(';'):
                tokens = tokens[:index]
                break
        cleaned_lines.append(' '.join(tokens))
    return '\n'.join(cleaned_lines)
def write_file(path, header, object_dict, mode):
    """Write object_dict's values to *path*, ordered by *header*.

    mode 'b': concatenate the raw packed values into a binary file.
    mode 't': unpack each value as a big-endian int and write it as a
              16-digit binary string, one per line.
    """
    if mode == 'b':
        with open(path, 'wb+') as output:
            for key in header:
                output.write(object_dict[key])
    elif mode == 't':
        with open(path, 'w+') as output:
            for key in header:
                (value,) = struct.unpack('>i', object_dict[key])
                output.write(dectobin(value, 16) + '\n')
def dectobin(decimal, bits):
    """Return *decimal* as a two's-complement binary string of *bits* digits."""
    mask = int("1" * bits, 2)
    digits = bin(decimal & mask)[2:]
    return digits.zfill(bits)
"os.path.isfile",
"struct.unpack"
] | [((60, 80), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (74, 80), False, 'import os\n'), ((1022, 1071), 'struct.unpack', 'struct.unpack', (['""">i"""', 'object_dict[memory_location]'], {}), "('>i', object_dict[memory_location])\n", (1035, 1071), False, 'import struct\n')] |
from jinja2 import Environment, FileSystemLoader, Template, TemplateNotFound
import collections
import os
import yaml
from os.path import dirname, basename
from .env import environ
from ..log import logger
def reader(fn):
    """Render the template file *fn* with the process environment, then
    parse the rendered text as YAML.

    Returns the parsed data, or None when rendering or parsing fails
    (failures are logged, not raised).
    """
    logger.debug('loading', f=fn)
    try:
        tmplenv = Environment(loader=FileSystemLoader(dirname(fn)))
        tmpl = tmplenv.get_template(str(basename(fn)))
        part = tmpl.render(**environ)
        # safe_load: config input must not be able to construct arbitrary
        # Python objects (yaml.load without a Loader is unsafe/deprecated).
        data = yaml.safe_load(part)
        return data
    except TemplateNotFound:
        logger.warn('Template not found', file=fn)
    except Exception:
        logger.exception('config')
| [
"os.path.dirname",
"os.path.basename",
"yaml.load"
] | [((442, 457), 'yaml.load', 'yaml.load', (['part'], {}), '(part)\n', (451, 457), False, 'import yaml\n'), ((374, 386), 'os.path.basename', 'basename', (['fn'], {}), '(fn)\n', (382, 386), False, 'from os.path import dirname, basename\n'), ((320, 331), 'os.path.dirname', 'dirname', (['fn'], {}), '(fn)\n', (327, 331), False, 'from os.path import dirname, basename\n')] |
import numpy as np
def white_noise(im, scale):
    """Add zero-mean Gaussian noise of std *scale*, clipped to [0, 1]."""
    noisy = im + np.random.normal(0.0, scale, im.shape)
    return np.clip(noisy, 0.0, 1.0)
def salt_and_pepper(im, prob):
    """Set a *prob* fraction of pixels to pure white or black (half each)."""
    if not 0 <= prob <= 1:
        raise ValueError("Prob must be within 0 to 1")
    # Promote grayscale images to a single-channel 3-D array.
    image = im[:, :, np.newaxis] if im.ndim == 2 else im
    height, width, _ = image.shape
    noise = np.random.rand(height, width)
    out = image.copy()
    out[noise < prob / 2, :] = 1.0          # salt (white)
    out[noise > 1 - prob / 2, :] = 0.0      # pepper (black)
    return np.squeeze(out)
| [
"numpy.random.normal",
"numpy.random.rand",
"numpy.minimum",
"numpy.squeeze",
"numpy.maximum"
] | [((111, 130), 'numpy.maximum', 'np.maximum', (['im', '(0.0)'], {}), '(im, 0.0)\n', (121, 130), True, 'import numpy as np\n'), ((140, 159), 'numpy.minimum', 'np.minimum', (['im', '(1.0)'], {}), '(im, 1.0)\n', (150, 159), True, 'import numpy as np\n'), ((381, 401), 'numpy.random.rand', 'np.random.rand', (['h', 'w'], {}), '(h, w)\n', (395, 401), True, 'import numpy as np\n'), ((548, 563), 'numpy.squeeze', 'np.squeeze', (['im_'], {}), '(im_)\n', (558, 563), True, 'import numpy as np\n'), ((63, 101), 'numpy.random.normal', 'np.random.normal', (['(0.0)', 'scale', 'im.shape'], {}), '(0.0, scale, im.shape)\n', (79, 101), True, 'import numpy as np\n')] |
from django.conf.urls import url
from accounts import views as account_views
urlpatterns = [
    # Patterns are anchored with '^' because Django matches URL regexes
    # with re.search: without the anchor, r'users/$' would also match
    # paths like '/anything/users/'.
    url(r'^users/$', account_views.users_list, name='users-list'),
    url(r'^users/new/$', account_views.user_create, name='user-create'),
    url(r'^user/(?P<pk>\d+)/$', account_views.user_single, name='user-single'),
    url(r'^user/(?P<pk>\d+)/edit/$', account_views.user_edit, name='user-edit'),
    url(r'^user/(?P<pk>\d+)/delete/$', account_views.user_delete, name='user-delete'),
]
| [
"django.conf.urls.url"
] | [((99, 158), 'django.conf.urls.url', 'url', (['"""users/$"""', 'account_views.users_list'], {'name': '"""users-list"""'}), "('users/$', account_views.users_list, name='users-list')\n", (102, 158), False, 'from django.conf.urls import url\n'), ((165, 230), 'django.conf.urls.url', 'url', (['"""users/new/$"""', 'account_views.user_create'], {'name': '"""user-create"""'}), "('users/new/$', account_views.user_create, name='user-create')\n", (168, 230), False, 'from django.conf.urls import url\n'), ((237, 310), 'django.conf.urls.url', 'url', (['"""user/(?P<pk>\\\\d+)/$"""', 'account_views.user_single'], {'name': '"""user-single"""'}), "('user/(?P<pk>\\\\d+)/$', account_views.user_single, name='user-single')\n", (240, 310), False, 'from django.conf.urls import url\n'), ((316, 390), 'django.conf.urls.url', 'url', (['"""user/(?P<pk>\\\\d+)/edit/$"""', 'account_views.user_edit'], {'name': '"""user-edit"""'}), "('user/(?P<pk>\\\\d+)/edit/$', account_views.user_edit, name='user-edit')\n", (319, 390), False, 'from django.conf.urls import url\n'), ((396, 481), 'django.conf.urls.url', 'url', (['"""user/(?P<pk>\\\\d+)/delete/$"""', 'account_views.user_delete'], {'name': '"""user-delete"""'}), "('user/(?P<pk>\\\\d+)/delete/$', account_views.user_delete, name='user-delete'\n )\n", (399, 481), False, 'from django.conf.urls import url\n')] |
from qrogue.game.logic.actors import Player, Robot
from qrogue.game.logic.actors.controllables import TestBot, LukeBot
from qrogue.game.world.map import CallbackPack
from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig
from qrogue.util.achievements import Achievement
class SaveData:
    """Singleton holding the player's persistent state: the player object,
    unlocked robots and the achievement manager, loaded from the latest
    save file on construction."""

    __ROBOT_SECTION = "[Robots]"
    __COLLECTIBLE_SECTION = "[Collectibles]"
    __ACHIEVEMENT_SECTION = "[Achievements]"

    __instance = None

    @staticmethod
    def instance() -> "SaveData":
        """Return the singleton; logs a fatal error if not yet constructed."""
        if SaveData.__instance is None:
            Logger.instance().throw(Exception("This singleton has not been initialized yet!"))
        return SaveData.__instance

    @staticmethod
    def __empty_save_file() -> str:
        # TODO: not implemented yet
        pass

    def __init__(self):
        if SaveData.__instance is not None:
            Logger.instance().throw(Exception("This class is a singleton!"))
        else:
            self.__player = Player()
            path = PathConfig.find_latest_save_file()
            content = ""
            try:
                content = PathConfig.read(path, in_user_path=True).splitlines()
            except FileNotFoundError:
                Logger.instance().error(NotImplementedError("This line should not be reachable! Please send us the log "
                                                           "files so we can fix the issue as soon as possible. "
                                                           "Thank you!"))
            # Parse everything after the achievements header.
            # NOTE(review): if the section header is missing this raises
            # ValueError — presumably save files always contain it; confirm.
            index = content.index(SaveData.__ACHIEVEMENT_SECTION)
            achievement_list = []
            for i in range(index + 1, len(content)):
                achievement = Achievement.from_string(content[i])
                if achievement:
                    achievement_list.append(achievement)
            self.__achievements = AchievementManager(achievement_list)
            self.__available_robots = [
                TestBot(CallbackPack.instance().game_over),
                LukeBot(CallbackPack.instance().game_over),
            ]
            SaveData.__instance = self

    @property
    def achievement_manager(self) -> AchievementManager:
        return self.__achievements

    @property
    def player(self) -> Player:
        return self.__player

    def get_expedition_seed(self) -> int:
        return RandomManager.instance().get_seed(msg="SaveData.get_expedition_seed()") #7 # todo implement

    def available_robots(self) -> iter:
        """Iterator over the robots the player can currently use."""
        return iter(self.__available_robots)

    def get_robot(self, index: int) -> Robot:
        """Return the robot at *index*, or None when out of range."""
        if 0 <= index < len(self.__available_robots):
            return self.__available_robots[index]
        return None

    def save(self) -> CommonPopups:
        """Write the current state to a new save file.

        Saving is refused after cheating; any write failure is reported via
        a popup rather than raised.
        """
        if CheatConfig.did_cheat():
            return CommonPopups.NoSavingWithCheats
        try:
            data = ""
            data += f"{SaveData.__ROBOT_SECTION}\n"
            data += f"{SaveData.__COLLECTIBLE_SECTION}\n"
            data += f"{SaveData.__ACHIEVEMENT_SECTION}\n"
            data += f"{self.achievement_manager.to_string()}\n"
            PathConfig.new_save_file(data)
            return CommonPopups.SavingSuccessful
        except Exception:
            # Was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            return CommonPopups.SavingFailed
| [
"qrogue.util.Logger.instance",
"qrogue.util.RandomManager.instance",
"qrogue.util.PathConfig.read",
"qrogue.util.PathConfig.find_latest_save_file",
"qrogue.game.logic.actors.Player",
"qrogue.util.CheatConfig.did_cheat",
"qrogue.util.AchievementManager",
"qrogue.game.world.map.CallbackPack.instance",
... | [((2731, 2754), 'qrogue.util.CheatConfig.did_cheat', 'CheatConfig.did_cheat', ([], {}), '()\n', (2752, 2754), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((963, 971), 'qrogue.game.logic.actors.Player', 'Player', ([], {}), '()\n', (969, 971), False, 'from qrogue.game.logic.actors import Player, Robot\n'), ((991, 1025), 'qrogue.util.PathConfig.find_latest_save_file', 'PathConfig.find_latest_save_file', ([], {}), '()\n', (1023, 1025), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((1838, 1874), 'qrogue.util.AchievementManager', 'AchievementManager', (['achievement_list'], {}), '(achievement_list)\n', (1856, 1874), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((3086, 3116), 'qrogue.util.PathConfig.new_save_file', 'PathConfig.new_save_file', (['data'], {}), '(data)\n', (3110, 3116), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((1679, 1714), 'qrogue.util.achievements.Achievement.from_string', 'Achievement.from_string', (['content[i]'], {}), '(content[i])\n', (1702, 1714), False, 'from qrogue.util.achievements import Achievement\n'), ((2330, 2354), 'qrogue.util.RandomManager.instance', 'RandomManager.instance', ([], {}), '()\n', (2352, 2354), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((589, 606), 'qrogue.util.Logger.instance', 'Logger.instance', ([], {}), '()\n', (604, 606), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((856, 873), 'qrogue.util.Logger.instance', 'Logger.instance', ([], {}), '()\n', (871, 873), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), 
((1094, 1134), 'qrogue.util.PathConfig.read', 'PathConfig.read', (['path'], {'in_user_path': '(True)'}), '(path, in_user_path=True)\n', (1109, 1134), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n'), ((1940, 1963), 'qrogue.game.world.map.CallbackPack.instance', 'CallbackPack.instance', ([], {}), '()\n', (1961, 1963), False, 'from qrogue.game.world.map import CallbackPack\n'), ((2000, 2023), 'qrogue.game.world.map.CallbackPack.instance', 'CallbackPack.instance', ([], {}), '()\n', (2021, 2023), False, 'from qrogue.game.world.map import CallbackPack\n'), ((1202, 1219), 'qrogue.util.Logger.instance', 'Logger.instance', ([], {}), '()\n', (1217, 1219), False, 'from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig\n')] |
import sqlite3
# Bootstrap the SQLite database: open/create the file, ensure the Heroi
# table exists, commit the DDL and release the connection.
conn = sqlite3.connect("rpg.db")  # step 1: open (and create) the db file

heroi_table_sql = """
CREATE TABLE IF NOT EXISTS Heroi(
    id INTEGER PRIMARY KEY,
    nome TEXT NOT NULL,
    fisico INTEGER NOT NULL,
    magia INTEGER NOT NULL,
    agilidade INTEGER NOT NULL
)
"""

cur = conn.cursor()            # step 2: grab a cursor
cur.execute(heroi_table_sql)   # step 3: run the DDL
conn.commit()                  # step 4: commit (the query alters the db)
conn.close()                   # step 5: close the connection
#5o passo: fechar a conexao
def consulta_itens_por_heroi(idHeroi):
    """Return every ItemDoHeroi row belonging to the given hero.

    Each row comes back as a dict with keys 'id', 'idItem' and 'idHeroi';
    an empty list is returned when the hero owns no items.
    """
    connection = sqlite3.connect("rpg.db")
    try:
        cursor = connection.cursor()
        # Parameterized query -- never interpolate idHeroi into the SQL.
        sql = "SELECT * FROM ItemDoHeroi WHERE idHeroi = (?)"
        cursor.execute(sql, [idHeroi])
        rows = cursor.fetchall()
    finally:
        # The original never closed the connection (resource leak).
        connection.close()
    return [{'id': row[0], 'idItem': row[1], 'idHeroi': row[2]} for row in rows]
"sqlite3.connect"
] | [((33, 58), 'sqlite3.connect', 'sqlite3.connect', (['"""rpg.db"""'], {}), "('rpg.db')\n", (48, 58), False, 'import sqlite3\n'), ((599, 624), 'sqlite3.connect', 'sqlite3.connect', (['"""rpg.db"""'], {}), "('rpg.db')\n", (614, 624), False, 'import sqlite3\n')] |
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class FactsApphook(CMSApp):
    # Hooks the facts app's URLconf into django CMS pages.
    name = _("Facts")  # label shown in the CMS admin
    urls = ["facts.urls"]

apphook_pool.register(FactsApphook)
| [
"cms.apphook_pool.apphook_pool.register",
"django.utils.translation.ugettext_lazy"
] | [((208, 243), 'cms.apphook_pool.apphook_pool.register', 'apphook_pool.register', (['FactsApphook'], {}), '(FactsApphook)\n', (229, 243), False, 'from cms.apphook_pool import apphook_pool\n'), ((170, 180), 'django.utils.translation.ugettext_lazy', '_', (['"""Facts"""'], {}), "('Facts')\n", (171, 180), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
'''
MIT License
Copyright (c) 2019 <NAME> and <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from mrjob.job import MRJob
import re
# Regex for one combined-log-format access line, e.g.:
#   1.2.3.4 - - [28/Feb/2019:12:00:00 +0000] "GET /x HTTP/1.1" 200 512 "-" "UA"
# The original pattern used backslash line continuations inside a
# non-VERBOSE triple-quoted string, embedding literal spaces/newlines in
# the pattern (e.g. a literal space after the third IP octet), so it never
# matched a real log line and the mapper's except clause dropped every row.
lineformat = re.compile(
    r'(?P<ipaddress>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) - - '
    r'\[(?P<dateandtime>\d{2}/[a-z]{3}/\d{4}:\d{2}:\d{2}:\d{2} [+-]\d{4})\] '
    r'"(?:GET|POST) (?P<url>.+) HTTP/1\.1" '
    r'(?P<statuscode>\d{3}) (?P<bytessent>\d+) '
    r'(?P<refferer>-|"[^"]*") '
    r'"(?P<useragent>[^"]*)"',
    re.IGNORECASE)
class MRVisitCounter(MRJob):
    """MapReduce job that counts log lines (visits) per client IP."""

    def mapper(self, key, line):
        # Emit (ip, 1) per parsable access-log line; unparsable lines are
        # skipped silently (best effort, as in the original).
        try:
            parsed = lineformat.search(line).groupdict()
            yield parsed['ipaddress'], 1
        except:
            pass

    def reducer(self, address, visits):
        # Total visit count for one IP address.
        yield address, sum(visits)
if __name__ == '__main__':
MRVisitCounter.run() | [
"re.search",
"re.compile"
] | [((1134, 1632), 're.compile', 're.compile', (['"""(?P<ipaddress>\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\ \\\\\n .\\\\d{1,3}) - - \\\\[(?P<dateandtime>\\\\d{2}\\\\/[a-z]{3} \\\\\n \\\\/\\\\d{4}:\\\\d{2}:\\\\d{2}:\\\\d{2} (\\\\+|\\\\-)\\\\d{4})\\\\] \\\\\n ((\\\\"(GET|POST) )(?P<url>.+)(http\\\\/1\\\\.1")) \\\\\n (?P<statuscode>\\\\d{3}) (?P<bytessent>\\\\d+) \\\\\n (?P<refferer>-|"([^"]+)") (["] \\\\\n (?P<useragent>[^"]+)["])"""', 're.IGNORECASE'], {}), '(\n """(?P<ipaddress>\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\ \\\\\n .\\\\d{1,3}) - - \\\\[(?P<dateandtime>\\\\d{2}\\\\/[a-z]{3} \\\\\n \\\\/\\\\d{4}:\\\\d{2}:\\\\d{2}:\\\\d{2} (\\\\+|\\\\-)\\\\d{4})\\\\] \\\\\n ((\\\\"(GET|POST) )(?P<url>.+)(http\\\\/1\\\\.1")) \\\\\n (?P<statuscode>\\\\d{3}) (?P<bytessent>\\\\d+) \\\\\n (?P<refferer>-|"([^"]+)") (["] \\\\\n (?P<useragent>[^"]+)["])"""\n , re.IGNORECASE)\n', (1144, 1632), False, 'import re\n'), ((1695, 1722), 're.search', 're.search', (['lineformat', 'line'], {}), '(lineformat, line)\n', (1704, 1722), False, 'import re\n')] |
from .coordinates import Coordinates
from typing import List
import csv
import pandas as pd
class Tracker:
    """Records a path of coordinates, shifted relative to an initial origin."""

    @staticmethod
    def load(csv_path: str):
        """Build a Tracker from a CSV file with 'x' and 'y' columns."""
        airsim_results = pd.read_csv(csv_path, encoding='utf-8')
        tracker = Tracker()
        for _, row in airsim_results.iterrows():
            tracker.update_coordinates(Coordinates(row['x'], row['y']))
        return tracker

    _initial_coordinates: Coordinates
    _coordinates: List[Coordinates]

    def __init__(self, initial_coordinates: "Coordinates | None" = None) -> None:
        # The original used `Coordinates(0.0, 0.0)` as the default argument,
        # which is evaluated once at definition time and shared by every
        # instance (mutable-default pitfall); build a fresh one per call.
        if initial_coordinates is None:
            initial_coordinates = Coordinates(0.0, 0.0)
        self._initial_coordinates = initial_coordinates
        self._coordinates = [Coordinates(0.0, 0.0)]

    def update_coordinates(self, coordinates: Coordinates) -> None:
        """Append *coordinates* (shifted by the origin), skipping a point
        identical to the last recorded one."""
        coordinates.x -= self._initial_coordinates.x
        coordinates.y -= self._initial_coordinates.y
        if coordinates == self.__get_last_coordinates():
            return
        self._coordinates.append(coordinates)

    def __get_last_coordinates(self):
        # Most recently recorded point.
        return self._coordinates[-1]

    def get_coordinates(self) -> List[Coordinates]:
        return self._coordinates

    def save_coordinates(self, path: str) -> None:
        """Write the recorded path to *path* as CSV (x, y, timestamp)."""
        with open(path, 'w') as csv_file:
            fieldnames = ['x', 'y', 'timestamp']
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames, lineterminator = '\n')
            writer.writeheader()
            for coordinate in self._coordinates:
                writer.writerow({
                    'x': str(coordinate.x),
                    'y': str(coordinate.y),
                    'timestamp': str(coordinate.timestamp)
                })
})
| [
"csv.DictWriter",
"pandas.read_csv"
] | [((181, 220), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {'encoding': '"""utf-8"""'}), "(csv_path, encoding='utf-8')\n", (192, 220), True, 'import pandas as pd\n'), ((1355, 1423), 'csv.DictWriter', 'csv.DictWriter', (['csv_file'], {'fieldnames': 'fieldnames', 'lineterminator': '"""\n"""'}), "(csv_file, fieldnames=fieldnames, lineterminator='\\n')\n", (1369, 1423), False, 'import csv\n')] |
'''
-*- coding: utf-8 -*-
@Time : 18-12-1 下午4:19
@Author : SamSa
@Site :
@File : like_blog.py
@Software: PyCharm
@Statement: 收藏博客
'''
from flask import session
from flask_restful import Resource, reqparse
from App.models.likeModel import Like
# Request parser for the bookmark endpoint: expects a 'blog_id' argument.
parse = reqparse.RequestParser()
parse.add_argument('blog_id')
class LikeBlogResource(Resource):
    def get(self):
        """Bookmark the requested blog for the logged-in user."""
        args = parse.parse_args()
        record = Like()
        record.u_id = session.get('user_id')
        record.blog_id = args.get('blog_id')
        record.save()
        return {
            'msg': '收藏成功'
        }
"flask.session.get",
"flask_restful.reqparse.RequestParser",
"App.models.likeModel.Like"
] | [((261, 285), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (283, 285), False, 'from flask_restful import Resource, reqparse\n'), ((385, 407), 'flask.session.get', 'session.get', (['"""user_id"""'], {}), "('user_id')\n", (396, 407), False, 'from flask import session\n'), ((501, 507), 'App.models.likeModel.Like', 'Like', ([], {}), '()\n', (505, 507), False, 'from App.models.likeModel import Like\n')] |
API_KEY = ""
DONT_PRINT_USAGE_FOR = []
REP_DIRECTORY = "C:\\GabenStorage"
MAX_UPLOAD_SIZE_MB = 300
UNITY = {
"2017.2.0f3": "C:\\Program Files\\Unity\\Editor\\Unity.exe",
"2017.4.3f1": "C:\\Program Files\\Unity2017.4.3\\Editor\\Unity.exe"
}
QUOTES = ["I'm a handsome man with a charming personality.", \
"If Nvidia makes better graphics technology, all the games are going to shine", \
"If we come out with a better game, people are going to buy more PCs.", \
"The PC is successful because we're all benefiting from the competition with each other.", \
"I think Windows 8 is a catastrophe for everyone in the PC space.", \
"The Steam store is this very safe, boring entertainment experience", \
"Photoshop should be a free-to-play game.", \
"The easiest way to stop piracy is not by putting antipiracy technology to work", \
"Ninety percent of games lose money; 10 percent make a lot of money", \
"Solar Games perhaps a best place to work", \
"The programmers of tomorrow are the wizards of the future. ", \
"Don't ever, ever try to lie to the internet.", \
"I've always wanted to be a giant space crab.", \
"George Lucas should have distributed the 'source code' to Star Wars.", \
"The PS3 is a total disaster on so many levels", \
"I'd like to thank Sony for their gracious hospitality, and for not repeatedly punching me in the face."]
def get_random_quote():
import random
return random.choice(QUOTES) | [
"random.choice"
] | [((1457, 1478), 'random.choice', 'random.choice', (['QUOTES'], {}), '(QUOTES)\n', (1470, 1478), False, 'import random\n')] |
from datetime import datetime
import sys
sys.path.insert(1, r'C:\Users\ASUS\Desktop\sources\Telegram\werewolf\Darkhelper\2\V2\Databases')
from Databases.Groups import GroupsPlayersBase , GroupsBase , GroupsControlBase
from Databases.Groups.Bet import BetBase
from Databases.Users import AdminsBase
from Databases.Users.AfksBase import Set_All_Group_AFK_Zero
from Databases.Stats import AdminStatsBase , GroupStatsBase
from Classes.Statics import Statics
from Databases.Users.UsersBase import Show_Group_ALL_User_Points , Show_All_user_Points
from Databases.Users.ShekarsBase import Delete_Shekar
class Group:
def __init__(self,Chat_id : int):
Details=GroupsBase.Show_Group_Features(int(Chat_id))
self.All_Atrebeutes=Details
self.chat_id=int(Chat_id)
self.Main = int(Details['group_id'])
self.Support = int(Details['support_id'])
self.Subscription_Date=str(Details['tamdid_date'])
self.Deadline=int(Details['davazdah'])
self.Auto_Tag=int(Details['auto_tag'])
self.Auto_DeleteTag=int(Details['auto_del'])
self.Auto_Tag_Support=int(Details['auto_tag_sup'])
self.Auto_DeleteTag_Sup=int(Details['auto_del_sup'])
self.Alarm=int(Details['alarm'])
self.Bet=int(Details['bet'])
self.Least_State=int(Details['state'])
self.State_Lock=int(Details['state_lock'])
self.Warn=int(Details['warn'])
#--------------------------------------|
# 0 - onyx |
# 1 - werewolf |
# 2 - black |
self.Bot_Kind=int(Details['bot_kind'])#|
#--------------------------------------|
self.Mute_Fun=int(Details['fun_mute'])
self.Auto_nextGame=int(Details['auto_next_game'])
self.NextGame_Response=int(Details['next_game_response'])
self.emoji1=str(Details['emoji1'])
self.emoji2=str(Details['emoji2'])
self.emoji3=str(Details['emoji3'])
self.Sooti=int(Details['sooti'])
self.Admin_Alarm=int(Details['admin_Alarm'])
self.Ghaleb=str(Details['ghaleb'])
self.JoinTime_Alarm=int(Details['jointime_sup'])
self.Dead_NextGame=int(Details['dead_next'])
self.Shekar_Pin=int(Details['pin_shekar'])
self.Nazer_pin=int(Details['pin_nazer'])
self.List_Pin=int(Details['pin_list'])
self.Role_Saver=int(Details['role_saver'])
self.Questions=int(Details['questions'])
self.Bors=int(Details['bors'])
self.Message_State=int(Details['message_state'])
self.Next_Message_Id=int(Details['auto_next_message_id'])
self.is_Question_Sended=int(Details['question_sended'])
self.Auto_Start=int(Details['auto_start'])
self.Afk_Warn=int(Details['afk_warn'])
self.Is_Join_Time=int(Details['join_time'])
self.Is_Tagging=int(Details['is_tagging'])
self.Is_Time_For_Question=bool(Details['Its_Question_Time'])
self.Players_Lock_Only=int(Details['players_state_lock'])
#-----------------------------------------------------------
Controls=GroupsControlBase.Show_Group_Control_Features(self.Main)
self.All_Controls=Controls
self.Welcome_Turn=int(Controls['welcometurn'])
self.Anti_Spam=int(Controls['anti_spam'])
self.Anti_Robot=int(Controls['anti_robot'])
self.Anti_NFSW=int(Controls['fosh_filter'])
self.Anti_Tabchi=int(Controls['anti_tabchi'])
self.Channel =str(Controls['channel'])
self.Channel_Lock=int(Controls['channellock'])
self.Group_Lock=int(Controls['lock'])
self.Voice_Lock=int(Controls['voice_lock'])
self.Sticker_Lock=int(Controls['sticker_lock'])
self.Photo_Lock=int(Controls['photo_lock'])
self.Link_Lock=int(Controls['link_lock'])
self.Forward_Lock=int(Controls['forward_lock'])
self.Video_Lock=int(Controls['video_lock'])
self.Service_Lock=int(Controls['service_lock'])
self.Spam_Count=int(Controls['spam_count'])
self.Welcome=str(Controls['welcome'])
self.Channel_Text=str(Controls['channel_text'])
#-----------------------------------------porn
self.Porn=str(Controls['porn'])
#-----------------------------
Controls=Controls['Filters']
self.Porn_All_Filters=Controls
self.Porn_Dick_Filter=str(Controls['dick'])
self.Porn_Pussy_Filter=str(Controls['pussy'])
self.Porn_Coverd_Pussy_Filter=str(Controls['coveredpossy'])
self.Porn_FBoobs_Filter=str(Controls['fboobs'])
self.Porn_MBoobs_Filter=str(Controls['mboobs'])
self.Porn_CoveredBoobs_Filter=str(Controls['coveredboobs'])
self.Porn_Stomach_Filter=str(Controls['stomack'])
self.Porn_ZirBaghal_Filter=str(Controls['baghal'])
self.Porn_Ass_Filter=str(Controls['ass'])
self.Porn_Feet_Filter=str(Controls['feet'])
self.Porn_Covered_ASS_Filter=str(Controls['coveredass'])
#-----------------------------------------------------------------
@property
def All_Players(self):
return Show_All_user_Points()
@property
def All_Group_Players(self):
return Show_Group_ALL_User_Points(self.Main)
async def Get_Players_usernames(self,bot,lists):
for i in lists:
try:
user=await bot.get_users(i)
if user.username :
yield user.mention
except:pass
#-----------------------------------------------------------------
def __int__(self) -> int:
return int(self.Support)
def __str__(self) -> str:
return str(self.Main)
#-----------------------------------------------------------------
@property
def Show_Istagging(self):
return GroupsBase.Show_one_feature('is_tagging',self.chat_id)
@property
def Show_JoinTime(self):
return GroupsBase.Show_one_feature('join_time',self.chat_id)
@property
def Join_time_Started(self):
GroupsBase.Change_Group_Feature(self.Main , 'join_time' , 1)
return 1
@property
def Join_time_Finished(self):
GroupsBase.Change_Group_Feature(self.Main , 'join_time' , 0)
return 0
#-----------------------------------------------------------------
@property
def Show_All_Admins_Points(self):
return AdminStatsBase.Show_Gap_All_Admins_Points(self.Main)
@property
def Show_Today_Admins_Points(self):
return AdminStatsBase.Show_Gap_All_Admins_Points_Today(self.Main)
@property
def Admins(self):
admins=AdminsBase.Show_All_Admins(self.Main)
return [ admins , len(admins) ]
@property
def Show_Owner(self):
return int(AdminsBase.Show_Owner(self.Main))
#-----------------------------------------------------------------
@property
def Show_Emojis(self):
return [ self.emoji1 , self.emoji2 , self.emoji3 ]
@property
def Show_Welcome(self):
wel=self.Welcome
if wel == 'none':
return None
else:return wel
@property
def Show_Ghaleb(self):
ghlb=self.Ghaleb
if ghlb == 'none':
return None
else:return ghlb
@property
def Show_Channel(self):
chnl=GroupsControlBase.Show_Channel(self.Main)
if chnl == 'none':
return None
else:return chnl
@property
def Show_Next_Game_Text(self):
if self.Bot_Kind ==0:return ' /nextgame@OnyxWereBetaBot '
elif self.Bot_Kind ==1:return ' /nextgame@werewolfbot '
elif self.Bot_Kind ==2:return ' /nextgame@Blackwwrobot \n /nextgame@blackwerewolfbot '
#-----------------------------------------------------------------
def Turn_Welcome_Turn(self):
if self.Welcome_Turn:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'welcometurn' , x)
return x
def Turn_Covered_Ass_Filter_Lock(self):
if self.Porn_Covered_ASS_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'coveredass' , x)
return x
def Turn_Dick_Filter_Lock(self):
if self.Porn_Dick_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'dick' , x)
return x
def Turn_pussy_Filter_Lock(self):
if self.Porn_Pussy_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'pussy' , x)
return x
def Turn_CoveredPussy_Filter_Lock(self):
if self.Porn_CoveredBoobs_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'coveredpossy' , x)
return x
def Turn_FBoobs_Filter_Lock(self):
if self.Porn_FBoobs_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'fboobs' , x)
return x
def Turn_MBoobs_Filter_Lock(self):
if self.Porn_MBoobs_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'mboobs' , x)
return x
def Turn_Covers_Boobs_Filter_Lock(self):
if self.Porn_CoveredBoobs_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'coveredboobs' , x)
return x
def Turn_Stomach_Filter_Lock(self):
if self.Porn_Stomach_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'stomack' , x)
return x
def Turn_ZirBaghal_Filter_Lock(self):
if self.Porn_ZirBaghal_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'baghal' , x)
return x
def Turn_Ass_Filter_Lock(self):
if self.Porn_Ass_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'ass' , x)
return x
def Turn_Feet_Filter_Lock(self):
if self.Porn_Feet_Filter:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'feet' , x)
return x
#-----------------------------------------------------------------
def Turn_Video_Lock(self):
if self.Video_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'video_lock' , x)
return x
def Turn_Service_Lock(self):
if self.Service_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'service_lock' , x)
return x
def Turn_Voice_Lock(self):
if self.Voice_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'voice_lock' , x)
return x
def Turn_Sticker_Lock(self):
if self.Sticker_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'sticker_lock' , x)
return x
def Turn_Photo_Lock(self):
if self.Photo_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'photo_lock' , x)
return x
def Turn_Link_Lock(self):
if self.Link_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'link_lock' , x)
return x
def Turn_Forward_Lock(self):
if self.Forward_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'forward_lock' , x)
return x
def Set_Anti_Spam(self,x):
if self.Anti_Robot:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'anti_spam' , x)
return x
def Turn_Anti_Robot(self):
if self.Anti_Robot:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'anti_robot' , x)
return x
def Turn_Anti_Porn(self):
if self.Porn:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'porn' , x)
return x
def Turn_Anti_NFSW(self):
if self.Anti_NFSW:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'fosh_filter' , x)
return x
def Turn_Anti_Tabchi(self):
if self.Anti_Tabchi:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'anti_tabchi' , x)
return x
def Set_Channel(self , x):
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'channel' , x)
return x
def Set_Channel_text(self , x):
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'channel_text' , x)
return x
def Set_Welcome(self , x):
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'welcome' , x)
return x
def Set_Spam_Count(self , x):
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'spam_count' , x)
return x
def Turn_Channel_Lock(self):
if self.Channel_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'channellock' , x)
return x
def Turn_Lock(self):
if self.Group_Lock:
x=0
else:
x=1
GroupsControlBase.Change_Group_Control_Feature(self.Main , 'lock' , x)
return x
#--------------------------------------------------------------------------
def Change_Message_State(self):
if self.Message_State:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'message_state' , x)
return x
def Change_Bors(self):
if self.Bors:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'bors' , x)
return x
def Change_Questions(self):
if self.Questions:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'questions' , x)
return x
def Change_Role_Saver(self):
if self.Role_Saver:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'role_saver' , x)
return x
def Change_Nazer_pin(self):
if self.Nazer_pin:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'pin_nazer' , x)
return x
def Change_Shekar_Pin(self):
if self.Shekar_Pin:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'pin_shekar' , x)
return x
def Change_Dead_NextGame(self):
if self.Dead_NextGame:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'dead_next' , x)
return x
def Change_JoinTime_Alarm(self):
if self.JoinTime_Alarm:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'jointime_sup' , x)
return x
def Set_Ghaleb(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'ghaleb' , x)
return x
def Set_Next_Message_Id(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'auto_next_message_id' , x)
return x
def Change_Afk_Warn(self):
if self.Afk_Warn:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'afk_warn' , x)
return x
def Change_Admin_Alarm(self):
if self.Admin_Alarm:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'admin_Alarm' , x)
return x
def Change_Sooti(self):
if self.Sooti:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'sooti' , x)
return x
def Set_emoji1(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'emoji1' , x)
return x
def Set_emoji2(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'emoji2' , x)
return x
def Set_emoji3(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'emoji3' , x)
return x
def Change_NextGame_Response(self):
if self.NextGame_Response:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'next_game_response' , x)
return x
def DeadLine_Ends(self):
GroupsBase.Change_Group_Feature(self.Main , 'davazdah' , 0)
return True
def Change_Auto_NextGame(self):
if self.Auto_nextGame:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_next_game' , x)
return x
def Change_Mute_Fun(self):
if self.Mute_Fun:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'fun_mute' , x)
return x
def Change_Bot_Kind(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'bot_kind' , x)
return x
def Set_Warn(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'warn' , x)
return x
def Change_State_Lock(self,x):
GroupsBase.Change_Group_Feature(self.Main , 'state_lock' , x)
return x
def Set_State(self , x):
GroupsBase.Change_Group_Feature(self.Main , 'state' , x)
return x
def Change_Bet(self):
if self.Bet:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'bet' , x)
return x
def Change_Auto_Tag(self):
if self.Auto_Tag:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_tag' , x)
return x
def Change_Auto_DeleteTag(self):
if self.Auto_DeleteTag:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_del' , x)
return x
def Change_Auto_Tag_Support(self):
if self.Auto_Tag_Support:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_tag_sup' , x)
return x
def Change_Auto_DeleteTag_Sup(self):
if self.Auto_DeleteTag_Sup:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_del_sup' , x)
return x
def Change_Alarm(self):
if self.Alarm:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'alarm' , x)
return x
def Change_Auto_Start(self):
if self.Alarm:
x=0
else:
x=1
GroupsBase.Change_Group_Feature(self.Main , 'auto_start' , x)
return x
def Tag_Started(self):
GroupsBase.Change_Group_Feature(self.Main , 'is_tagging' , 1)
return
def Tag_Stopped(self):
GroupsBase.Change_Group_Feature(self.Main , 'is_tagging' , 0)
return
#------------------------------------------------------------------------|
def Manual_Control_Change(self,row,amnt): #|
if amnt: #|
x=0 #|
else: #|
x=1 #|
GroupsControlBase.Change_Group_Control_Feature(self.Main , row , x) #|
return x #|
#|
def Manual_Change(self,row,amnt): #|
if amnt: #|
x=0 #|
else: #|
x=1 #|
GroupsBase.Change_Group_Feature(self.Main , row , x) #|
return x #|
#------------------------------------------------------------------------|
def Reset_AFKS(self):
Set_All_Group_AFK_Zero( self.Main )
return True
def END_Bet(self , team : int ):
x=BetBase.win( team , self.Main )
return x
def Game_Started(self,hash,Join_Time,players):
'time,players,main,hour,afk,hash,date'
GroupStatsBase.Add_Game(Join_Time,players,self.Main,int((datetime.now()).hour),hash)
return True
def Add_Game_AFk(self , hash):
GroupStatsBase.Add_AFK(self.Main , hash)
return True
@property
def Last_Match(self):
return GroupStatsBase.Show_Group_State_last_Game(self.Main)
@property
def Show_Games(self):
return GroupStatsBase.Show_Group_State_All_Time(self.Main)
@property
def Show_Games_Today(self):
return GroupStatsBase.Show_Group_State_Today(self.Main)
def Is_Expired(self):
another_day = datetime.datetime.now().strptime(self.Subscription_Date,"%Y-%m-%d")
Day = datetime.datetime.now()
if Day < another_day:
return False
else : return True
#-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0|
def Tamdid(self,Date=30): #0|
GroupsBase.Add_Group(self.Main , self.Support , Date) #0|
return #0|
#-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0|
def Question_Sended(self):
GroupsBase.Change_Group_Feature(self.Main , 'question_sended' , 1)
return 1
def Question_Answered(self):
GroupsBase.Change_Group_Feature(self.Main , 'question_sended' , 0)
return 0
#----------------------------------------------------------------------------
def Its_Question_time(self):
GroupsBase.Change_Group_Feature(self.Main , 'Its_Question_Time' , 1)
return 1
def Question_Time_Passes(self):
GroupsBase.Change_Group_Feature(self.Main , 'Its_Question_Time' , 0)
return 0
#-------------------------------------------------------------------------
@property
def Show_Players(self):
return GroupsPlayersBase.Show_Players(self.Main)
def Delete_Players(self):
GroupsPlayersBase.Delete_All_Players(self.Main )
return True
async def Add_Players(self,id_list,bot):
for i in id_list:
name=(await bot.get_users(i)).first_name
GroupsPlayersBase.insert_players(i,self.Main,name)
return True
@property
def Show_Deads_Name(self):
return GroupsPlayersBase.Show_Dead_Players_Name(self.chat_id)
@property
def Zerib(self):
plyrs=self.Show_Players
deads=0
alives=0
all=len(plyrs)
for i in plyrs:
if int(i[1])==1:alives+=1
else:deads+=1
if alives>=40:
zarib_bet=float((all+alives)/(deads+30))
elif alives>35:
zarib_bet=float((all+alives)/(deads+26))
elif alives>=30:
zarib_bet=float((all+alives)/(deads+25))
elif alives>=25:
zarib_bet=float((all+alives)/(deads+23))
elif alives>=15:
zarib_bet=float((all+alives)/(deads+21))
elif alives>=10:
zarib_bet=float((all+alives)/(deads+16))
elif alives>=5:
zarib_bet=float((all+alives)/(deads+13))
elif alives<5:
zarib_bet=0.01
return zarib_bet
@property
def Team_Zarib(self):
Zr=float(self.Zerib)
return [Zr * 0.70 ,Zr * 0.80 ,Zr * 0.90 ,Zr ,Zr * 1 ,Zr * 1.05 ,Zr * 1.5 ]
@property
def Group_Teams(self):
if self.Bot_Kind==0:
Role='🧛🏻♀️ ومپایر 🧛🏻♀️'
elif self.Bot_Kind==1:
Role='💖 لاور 💖'
else:
Role='💣 بمبر 💣'
return {0:'👩🏻🦰👨🏻🦱 روستا 👩🏻🦰👨🏻🦱'
,1:'👥 فرقه 👥'
,2:'🐺 گرگ 🐺'
,3:'🔪 قاتل 🔪'
,4:Role
,5:'🔥 آتش 🔥'
,6:'👺 منافق 👺'}
@property
def Start_Command(self):
if self.Bot_Kind ==0: return '/startmighty@OnyxWereBetaBot'
elif self.Bot_Kind ==1: return '/startchaos@werewolfbot'
elif self.Bot_Kind ==2: return '/startmighty@Blackwwrobot'
def delete(self):
GroupsBase.Delete_Group(self.Main)
return True
def Delete_Shekar(self):
Delete_Shekar(self.Main)
return True | [
"Databases.Users.AdminsBase.Show_All_Admins",
"sys.path.insert",
"Databases.Groups.Bet.BetBase.win",
"Databases.Users.UsersBase.Show_Group_ALL_User_Points",
"Databases.Users.AdminsBase.Show_Owner",
"Databases.Stats.AdminStatsBase.Show_Gap_All_Admins_Points",
"Databases.Groups.GroupsBase.Add_Group",
"D... | [((44, 158), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""C:\\\\Users\\\\ASUS\\\\Desktop\\\\sources\\\\Telegram\\\\werewolf\\\\Darkhelper\\\\2\\\\V2\\\\Databases"""'], {}), "(1,\n 'C:\\\\Users\\\\ASUS\\\\Desktop\\\\sources\\\\Telegram\\\\werewolf\\\\Darkhelper\\\\2\\\\V2\\\\Databases'\n )\n", (59, 158), False, 'import sys\n'), ((3218, 3274), 'Databases.Groups.GroupsControlBase.Show_Group_Control_Features', 'GroupsControlBase.Show_Group_Control_Features', (['self.Main'], {}), '(self.Main)\n', (3263, 3274), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((5263, 5285), 'Databases.Users.UsersBase.Show_All_user_Points', 'Show_All_user_Points', ([], {}), '()\n', (5283, 5285), False, 'from Databases.Users.UsersBase import Show_Group_ALL_User_Points, Show_All_user_Points\n'), ((5353, 5390), 'Databases.Users.UsersBase.Show_Group_ALL_User_Points', 'Show_Group_ALL_User_Points', (['self.Main'], {}), '(self.Main)\n', (5379, 5390), False, 'from Databases.Users.UsersBase import Show_Group_ALL_User_Points, Show_All_user_Points\n'), ((5964, 6019), 'Databases.Groups.GroupsBase.Show_one_feature', 'GroupsBase.Show_one_feature', (['"""is_tagging"""', 'self.chat_id'], {}), "('is_tagging', self.chat_id)\n", (5991, 6019), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((6082, 6136), 'Databases.Groups.GroupsBase.Show_one_feature', 'GroupsBase.Show_one_feature', (['"""join_time"""', 'self.chat_id'], {}), "('join_time', self.chat_id)\n", (6109, 6136), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((6194, 6252), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""join_time"""', '(1)'], {}), "(self.Main, 'join_time', 1)\n", (6225, 6252), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((6332, 6390), 'Databases.Groups.GroupsBase.Change_Group_Feature', 
'GroupsBase.Change_Group_Feature', (['self.Main', '"""join_time"""', '(0)'], {}), "(self.Main, 'join_time', 0)\n", (6363, 6390), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((6549, 6601), 'Databases.Stats.AdminStatsBase.Show_Gap_All_Admins_Points', 'AdminStatsBase.Show_Gap_All_Admins_Points', (['self.Main'], {}), '(self.Main)\n', (6590, 6601), False, 'from Databases.Stats import AdminStatsBase, GroupStatsBase\n'), ((6676, 6734), 'Databases.Stats.AdminStatsBase.Show_Gap_All_Admins_Points_Today', 'AdminStatsBase.Show_Gap_All_Admins_Points_Today', (['self.Main'], {}), '(self.Main)\n', (6723, 6734), False, 'from Databases.Stats import AdminStatsBase, GroupStatsBase\n'), ((6791, 6828), 'Databases.Users.AdminsBase.Show_All_Admins', 'AdminsBase.Show_All_Admins', (['self.Main'], {}), '(self.Main)\n', (6817, 6828), False, 'from Databases.Users import AdminsBase\n'), ((7498, 7539), 'Databases.Groups.GroupsControlBase.Show_Channel', 'GroupsControlBase.Show_Channel', (['self.Main'], {}), '(self.Main)\n', (7528, 7539), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((8095, 8170), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""welcometurn"""', 'x'], {}), "(self.Main, 'welcometurn', x)\n", (8141, 8170), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((8341, 8415), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""coveredass"""', 'x'], {}), "(self.Main, 'coveredass', x)\n", (8387, 8415), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((8570, 8638), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""dick"""', 'x'], {}), "(self.Main, 'dick', x)\n", 
(8616, 8638), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((8795, 8864), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""pussy"""', 'x'], {}), "(self.Main, 'pussy', x)\n", (8841, 8864), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((9035, 9111), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""coveredpossy"""', 'x'], {}), "(self.Main, 'coveredpossy', x)\n", (9081, 9111), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((9270, 9340), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""fboobs"""', 'x'], {}), "(self.Main, 'fboobs', x)\n", (9316, 9340), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((9499, 9569), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""mboobs"""', 'x'], {}), "(self.Main, 'mboobs', x)\n", (9545, 9569), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((9740, 9816), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""coveredboobs"""', 'x'], {}), "(self.Main, 'coveredboobs', x)\n", (9786, 9816), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((9977, 10048), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""stomack"""', 'x'], {}), "(self.Main, 'stomack', x)\n", (10023, 10048), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((10213, 
10283), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""baghal"""', 'x'], {}), "(self.Main, 'baghal', x)\n", (10259, 10283), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((10436, 10503), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""ass"""', 'x'], {}), "(self.Main, 'ass', x)\n", (10482, 10503), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((10658, 10726), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""feet"""', 'x'], {}), "(self.Main, 'feet', x)\n", (10704, 10726), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((10939, 11013), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""video_lock"""', 'x'], {}), "(self.Main, 'video_lock', x)\n", (10985, 11013), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((11160, 11236), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""service_lock"""', 'x'], {}), "(self.Main, 'service_lock', x)\n", (11206, 11236), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((11387, 11461), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""voice_lock"""', 'x'], {}), "(self.Main, 'voice_lock', x)\n", (11433, 11461), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((11608, 11684), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 
'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""sticker_lock"""', 'x'], {}), "(self.Main, 'sticker_lock', x)\n", (11654, 11684), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((11827, 11901), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""photo_lock"""', 'x'], {}), "(self.Main, 'photo_lock', x)\n", (11873, 11901), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((12041, 12114), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""link_lock"""', 'x'], {}), "(self.Main, 'link_lock', x)\n", (12087, 12114), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((12261, 12337), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""forward_lock"""', 'x'], {}), "(self.Main, 'forward_lock', x)\n", (12307, 12337), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((12480, 12553), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""anti_spam"""', 'x'], {}), "(self.Main, 'anti_spam', x)\n", (12526, 12553), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((12696, 12770), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""anti_robot"""', 'x'], {}), "(self.Main, 'anti_robot', x)\n", (12742, 12770), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((12906, 12974), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', 
'"""porn"""', 'x'], {}), "(self.Main, 'porn', x)\n", (12952, 12974), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13115, 13190), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""fosh_filter"""', 'x'], {}), "(self.Main, 'fosh_filter', x)\n", (13161, 13190), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13335, 13410), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""anti_tabchi"""', 'x'], {}), "(self.Main, 'anti_tabchi', x)\n", (13381, 13410), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13475, 13546), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""channel"""', 'x'], {}), "(self.Main, 'channel', x)\n", (13521, 13546), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13616, 13692), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""channel_text"""', 'x'], {}), "(self.Main, 'channel_text', x)\n", (13662, 13692), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13757, 13828), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""welcome"""', 'x'], {}), "(self.Main, 'welcome', x)\n", (13803, 13828), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((13896, 13970), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""spam_count"""', 'x'], {}), "(self.Main, 'spam_count', x)\n", (13942, 13970), False, 
'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((14117, 14192), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""channellock"""', 'x'], {}), "(self.Main, 'channellock', x)\n", (14163, 14192), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((14329, 14397), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', '"""lock"""', 'x'], {}), "(self.Main, 'lock', x)\n", (14375, 14397), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((14628, 14690), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""message_state"""', 'x'], {}), "(self.Main, 'message_state', x)\n", (14659, 14690), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((14823, 14876), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""bors"""', 'x'], {}), "(self.Main, 'bors', x)\n", (14854, 14876), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((15019, 15077), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""questions"""', 'x'], {}), "(self.Main, 'questions', x)\n", (15050, 15077), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((15222, 15281), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""role_saver"""', 'x'], {}), "(self.Main, 'role_saver', x)\n", (15253, 15281), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((15424, 15482), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', 
'"""pin_nazer"""', 'x'], {}), "(self.Main, 'pin_nazer', x)\n", (15455, 15482), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((15635, 15694), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""pin_shekar"""', 'x'], {}), "(self.Main, 'pin_shekar', x)\n", (15666, 15694), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((15845, 15903), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""dead_next"""', 'x'], {}), "(self.Main, 'dead_next', x)\n", (15876, 15903), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16056, 16117), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""jointime_sup"""', 'x'], {}), "(self.Main, 'jointime_sup', x)\n", (16087, 16117), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16181, 16236), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""ghaleb"""', 'x'], {}), "(self.Main, 'ghaleb', x)\n", (16212, 16236), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16309, 16378), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_next_message_id"""', 'x'], {}), "(self.Main, 'auto_next_message_id', x)\n", (16340, 16378), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16519, 16576), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""afk_warn"""', 'x'], {}), "(self.Main, 'afk_warn', x)\n", (16550, 16576), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16723, 16783), 'Databases.Groups.GroupsBase.Change_Group_Feature', 
'GroupsBase.Change_Group_Feature', (['self.Main', '"""admin_Alarm"""', 'x'], {}), "(self.Main, 'admin_Alarm', x)\n", (16754, 16783), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((16918, 16972), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""sooti"""', 'x'], {}), "(self.Main, 'sooti', x)\n", (16949, 16972), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17036, 17091), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""emoji1"""', 'x'], {}), "(self.Main, 'emoji1', x)\n", (17067, 17091), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17155, 17210), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""emoji2"""', 'x'], {}), "(self.Main, 'emoji2', x)\n", (17186, 17210), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17274, 17329), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""emoji3"""', 'x'], {}), "(self.Main, 'emoji3', x)\n", (17305, 17329), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17488, 17555), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""next_game_response"""', 'x'], {}), "(self.Main, 'next_game_response', x)\n", (17519, 17555), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17618, 17675), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""davazdah"""', '(0)'], {}), "(self.Main, 'davazdah', 0)\n", (17649, 17675), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((17830, 17893), 
'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_next_game"""', 'x'], {}), "(self.Main, 'auto_next_game', x)\n", (17861, 17893), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18034, 18091), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""fun_mute"""', 'x'], {}), "(self.Main, 'fun_mute', x)\n", (18065, 18091), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18160, 18217), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""bot_kind"""', 'x'], {}), "(self.Main, 'bot_kind', x)\n", (18191, 18217), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18278, 18331), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""warn"""', 'x'], {}), "(self.Main, 'warn', x)\n", (18309, 18331), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18406, 18465), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""state_lock"""', 'x'], {}), "(self.Main, 'state_lock', x)\n", (18437, 18465), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18528, 18582), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""state"""', 'x'], {}), "(self.Main, 'state', x)\n", (18559, 18582), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18713, 18765), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""bet"""', 'x'], {}), "(self.Main, 'bet', x)\n", (18744, 18765), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((18906, 
18963), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_tag"""', 'x'], {}), "(self.Main, 'auto_tag', x)\n", (18937, 18963), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((19120, 19177), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_del"""', 'x'], {}), "(self.Main, 'auto_del', x)\n", (19151, 19177), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((19334, 19395), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_tag_sup"""', 'x'], {}), "(self.Main, 'auto_tag_sup', x)\n", (19365, 19395), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((19556, 19617), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_del_sup"""', 'x'], {}), "(self.Main, 'auto_del_sup', x)\n", (19587, 19617), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((19752, 19806), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""alarm"""', 'x'], {}), "(self.Main, 'alarm', x)\n", (19783, 19806), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((19946, 20005), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""auto_start"""', 'x'], {}), "(self.Main, 'auto_start', x)\n", (19977, 20005), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((20066, 20125), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""is_tagging"""', '(1)'], {}), "(self.Main, 'is_tagging', 1)\n", (20097, 20125), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, 
GroupsControlBase\n'), ((20187, 20246), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""is_tagging"""', '(0)'], {}), "(self.Main, 'is_tagging', 0)\n", (20218, 20246), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((20754, 20819), 'Databases.Groups.GroupsControlBase.Change_Group_Control_Feature', 'GroupsControlBase.Change_Group_Control_Feature', (['self.Main', 'row', 'x'], {}), '(self.Main, row, x)\n', (20800, 20819), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((21394, 21444), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', 'row', 'x'], {}), '(self.Main, row, x)\n', (21425, 21444), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((21669, 21702), 'Databases.Users.AfksBase.Set_All_Group_AFK_Zero', 'Set_All_Group_AFK_Zero', (['self.Main'], {}), '(self.Main)\n', (21691, 21702), False, 'from Databases.Users.AfksBase import Set_All_Group_AFK_Zero\n'), ((21781, 21809), 'Databases.Groups.Bet.BetBase.win', 'BetBase.win', (['team', 'self.Main'], {}), '(team, self.Main)\n', (21792, 21809), False, 'from Databases.Groups.Bet import BetBase\n'), ((22103, 22142), 'Databases.Stats.GroupStatsBase.Add_AFK', 'GroupStatsBase.Add_AFK', (['self.Main', 'hash'], {}), '(self.Main, hash)\n', (22125, 22142), False, 'from Databases.Stats import AdminStatsBase, GroupStatsBase\n'), ((22223, 22275), 'Databases.Stats.GroupStatsBase.Show_Group_State_last_Game', 'GroupStatsBase.Show_Group_State_last_Game', (['self.Main'], {}), '(self.Main)\n', (22264, 22275), False, 'from Databases.Stats import AdminStatsBase, GroupStatsBase\n'), ((22338, 22389), 'Databases.Stats.GroupStatsBase.Show_Group_State_All_Time', 'GroupStatsBase.Show_Group_State_All_Time', (['self.Main'], {}), '(self.Main)\n', (22378, 22389), False, 'from Databases.Stats import AdminStatsBase, 
GroupStatsBase\n'), ((22456, 22504), 'Databases.Stats.GroupStatsBase.Show_Group_State_Today', 'GroupStatsBase.Show_Group_State_Today', (['self.Main'], {}), '(self.Main)\n', (22493, 22504), False, 'from Databases.Stats import AdminStatsBase, GroupStatsBase\n'), ((22644, 22667), 'datetime.datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (22665, 22667), False, 'from datetime import datetime\n'), ((22916, 22967), 'Databases.Groups.GroupsBase.Add_Group', 'GroupsBase.Add_Group', (['self.Main', 'self.Support', 'Date'], {}), '(self.Main, self.Support, Date)\n', (22936, 22967), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23166, 23230), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""question_sended"""', '(1)'], {}), "(self.Main, 'question_sended', 1)\n", (23197, 23230), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23297, 23361), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""question_sended"""', '(0)'], {}), "(self.Main, 'question_sended', 0)\n", (23328, 23361), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23508, 23574), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""Its_Question_Time"""', '(1)'], {}), "(self.Main, 'Its_Question_Time', 1)\n", (23539, 23574), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23644, 23710), 'Databases.Groups.GroupsBase.Change_Group_Feature', 'GroupsBase.Change_Group_Feature', (['self.Main', '"""Its_Question_Time"""', '(0)'], {}), "(self.Main, 'Its_Question_Time', 0)\n", (23675, 23710), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23871, 23912), 'Databases.Groups.GroupsPlayersBase.Show_Players', 
'GroupsPlayersBase.Show_Players', (['self.Main'], {}), '(self.Main)\n', (23901, 23912), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((23955, 24002), 'Databases.Groups.GroupsPlayersBase.Delete_All_Players', 'GroupsPlayersBase.Delete_All_Players', (['self.Main'], {}), '(self.Main)\n', (23991, 24002), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((24308, 24362), 'Databases.Groups.GroupsPlayersBase.Show_Dead_Players_Name', 'GroupsPlayersBase.Show_Dead_Players_Name', (['self.chat_id'], {}), '(self.chat_id)\n', (24348, 24362), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((26107, 26141), 'Databases.Groups.GroupsBase.Delete_Group', 'GroupsBase.Delete_Group', (['self.Main'], {}), '(self.Main)\n', (26130, 26141), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((26208, 26232), 'Databases.Users.ShekarsBase.Delete_Shekar', 'Delete_Shekar', (['self.Main'], {}), '(self.Main)\n', (26221, 26232), False, 'from Databases.Users.ShekarsBase import Delete_Shekar\n'), ((6934, 6966), 'Databases.Users.AdminsBase.Show_Owner', 'AdminsBase.Show_Owner', (['self.Main'], {}), '(self.Main)\n', (6955, 6966), False, 'from Databases.Users import AdminsBase\n'), ((24167, 24219), 'Databases.Groups.GroupsPlayersBase.insert_players', 'GroupsPlayersBase.insert_players', (['i', 'self.Main', 'name'], {}), '(i, self.Main, name)\n', (24199, 24219), False, 'from Databases.Groups import GroupsPlayersBase, GroupsBase, GroupsControlBase\n'), ((22561, 22584), 'datetime.datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (22582, 22584), False, 'from datetime import datetime\n'), ((22003, 22017), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (22015, 22017), False, 'from datetime import datetime\n')] |
from unittest import mock
from . import mock_2_sql
# odbc.connect
# connection.cursor()
# cursor.execute()
# results.description
# results.fetchall
@mock.patch('mocking.mock_2_sql.odbc')
def test_get_emp_name_wih_max_sal(odbc):
result = mock.Mock()
result.description = [["name"], ["salary"]]
result.fetchall.return_value = [
["Bob", 300], ["Rob", 200], ["William", 100]]
cursor = mock.Mock()
cursor.execute.return_value = result
connection = mock.Mock()
connection.cursor.return_value = cursor
odbc.connect.return_value = connection
assert "Bob" == mock_2_sql.get_employee_with_highest_salary()
| [
"unittest.mock.patch",
"unittest.mock.Mock"
] | [((152, 189), 'unittest.mock.patch', 'mock.patch', (['"""mocking.mock_2_sql.odbc"""'], {}), "('mocking.mock_2_sql.odbc')\n", (162, 189), False, 'from unittest import mock\n'), ((244, 255), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (253, 255), False, 'from unittest import mock\n'), ((408, 419), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (417, 419), False, 'from unittest import mock\n'), ((478, 489), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (487, 489), False, 'from unittest import mock\n')] |
from datetime import timedelta
from django.core.management.base import BaseCommand
from django.db.models import Q
from django.utils.timezone import now
from django_scopes import scopes_disabled
from pretix.base.models import Event, Quota
from pretix.base.services.quotas import QuotaAvailability
class Command(BaseCommand):
help = "Hide sold out events"
def add_arguments(self, parser):
parser.add_argument("--dry-run", action="store_true")
parser.add_argument("--allow-republish", action="store_true")
@scopes_disabled()
def handle(self, *args, **options):
eqs = Event.objects.filter(plugins__contains="pretix_hide_sold_out", live=True)
if not options.get("allow_republish"):
eqs = eqs.filter(is_public=True)
for e in eqs:
if e.has_subevents:
subevents = (
e.subevents_annotated("web")
.filter(
active=True,
is_public=True,
)
.filter(
Q(presale_end__gte=now())
| Q(
Q(presale_end__isnull=True)
& Q(Q(date_to__gte=now()) | Q(date_from__gte=now()))
)
)
)
subevents = list(subevents)
quotas_to_compute = []
for se in subevents:
quotas_to_compute += [
q
for q in se.active_quotas
]
if quotas_to_compute:
qa = QuotaAvailability()
qa.queue(*quotas_to_compute)
qa.compute(allow_cache=True)
any_available = False
for se in subevents:
if quotas_to_compute:
se._quota_cache = qa.results
if se.best_availability_state in (
Quota.AVAILABILITY_RESERVED,
Quota.AVAILABILITY_OK,
):
any_available = True
break
else:
quotas_to_compute = e.quotas.all()
if quotas_to_compute:
qa = QuotaAvailability()
qa.queue(*quotas_to_compute)
qa.compute(allow_cache=True)
any_available = any(
r[0] in (Quota.AVAILABILITY_RESERVED, Quota.AVAILABILITY_OK)
for r in qa.results.values()
)
if any_available and not e.is_public and options.get("allow_republish"):
if options.get("dry_run"):
print(f"Event {e.organizer.slug}/{e.slug} will be made public.")
else:
e.is_public = True
e.save()
elif not any_available and e.is_public:
if options.get("dry_run"):
print(f"Event {e.organizer.slug}/{e.slug} will be made non-public.")
else:
e.is_public = False
e.save()
| [
"django_scopes.scopes_disabled",
"pretix.base.services.quotas.QuotaAvailability",
"django.utils.timezone.now",
"django.db.models.Q",
"pretix.base.models.Event.objects.filter"
] | [((536, 553), 'django_scopes.scopes_disabled', 'scopes_disabled', ([], {}), '()\n', (551, 553), False, 'from django_scopes import scopes_disabled\n'), ((608, 681), 'pretix.base.models.Event.objects.filter', 'Event.objects.filter', ([], {'plugins__contains': '"""pretix_hide_sold_out"""', 'live': '(True)'}), "(plugins__contains='pretix_hide_sold_out', live=True)\n", (628, 681), False, 'from pretix.base.models import Event, Quota\n'), ((1670, 1689), 'pretix.base.services.quotas.QuotaAvailability', 'QuotaAvailability', ([], {}), '()\n', (1687, 1689), False, 'from pretix.base.services.quotas import QuotaAvailability\n'), ((2343, 2362), 'pretix.base.services.quotas.QuotaAvailability', 'QuotaAvailability', ([], {}), '()\n', (2360, 2362), False, 'from pretix.base.services.quotas import QuotaAvailability\n'), ((1107, 1112), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1110, 1112), False, 'from django.utils.timezone import now\n'), ((1171, 1198), 'django.db.models.Q', 'Q', ([], {'presale_end__isnull': '(True)'}), '(presale_end__isnull=True)\n', (1172, 1198), False, 'from django.db.models import Q\n'), ((1246, 1251), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1249, 1251), False, 'from django.utils.timezone import now\n'), ((1272, 1277), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1275, 1277), False, 'from django.utils.timezone import now\n')] |
from django.contrib import admin
from .models import Cinema, ShowTime
class CinemaAdmin(admin.ModelAdmin):
list_display = ('title', 'genre', 'trailer', 'description', 'mpaa',)
search_fields = ('title',)
empty_value_display = '-пусто-'
class ShowTimeAdmin(admin.ModelAdmin):
list_display = ('cinema', 'datetime', 'price', 'format',)
autocomplete_fields = ('cinema',)
admin.site.register(Cinema, CinemaAdmin)
admin.site.register(ShowTime, ShowTimeAdmin)
| [
"django.contrib.admin.site.register"
] | [((393, 433), 'django.contrib.admin.site.register', 'admin.site.register', (['Cinema', 'CinemaAdmin'], {}), '(Cinema, CinemaAdmin)\n', (412, 433), False, 'from django.contrib import admin\n'), ((434, 478), 'django.contrib.admin.site.register', 'admin.site.register', (['ShowTime', 'ShowTimeAdmin'], {}), '(ShowTime, ShowTimeAdmin)\n', (453, 478), False, 'from django.contrib import admin\n')] |
import scrapy
import scrapy.spiders
from techradarscraper.items import TechLoader
class TechRadarSpider(scrapy.spiders.Spider):
name = 'techradar'
start_urls = ['https://www.thoughtworks.com/radar/a-z']
def parse(self, response):
links = response.css('.a-z-links > ul > li.blip.hit > a')
for link in links:
title = link.css('::text').extract_first()
href = link.css('::attr(href)').extract_first()
url = response.urljoin(href)
yield scrapy.Request(url, self.parse_tech)
def parse_tech(self, response):
return TechLoader(response=response).load_item()
if __name__ == '__main__':
from scrapy.utils.project import get_project_settings
settings = get_project_settings()
settings.setdict({
'LOG_LEVEL': 'INFO',
'OUTPUT_FILENAME': '../../out/techradar.json',
'ITEM_PIPELINES': {
'techradarscraper.pipelines.JsonWriterPipeline': 300,
},
})
from scrapy.crawler import CrawlerProcess
process = CrawlerProcess(settings)
process.crawl(TechRadarSpider)
process.start()
| [
"scrapy.crawler.CrawlerProcess",
"scrapy.utils.project.get_project_settings",
"techradarscraper.items.TechLoader",
"scrapy.Request"
] | [((746, 768), 'scrapy.utils.project.get_project_settings', 'get_project_settings', ([], {}), '()\n', (766, 768), False, 'from scrapy.utils.project import get_project_settings\n'), ((1049, 1073), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', (['settings'], {}), '(settings)\n', (1063, 1073), False, 'from scrapy.crawler import CrawlerProcess\n'), ((513, 549), 'scrapy.Request', 'scrapy.Request', (['url', 'self.parse_tech'], {}), '(url, self.parse_tech)\n', (527, 549), False, 'import scrapy\n'), ((602, 631), 'techradarscraper.items.TechLoader', 'TechLoader', ([], {'response': 'response'}), '(response=response)\n', (612, 631), False, 'from techradarscraper.items import TechLoader\n')] |
import os
import federate_learning as fl
from federate_learning.orchestrator.control_strategy import ControlType
from federate_learning.orchestrator.control_strategy.control_strategy import Target
from federate_learning.orchestrator.control_strategy.control_strategy_factory import ControlStrategyFactory
num_rounds = int(os.getenv('FL_ROUNDS') or 15)
num_epochs = int(os.getenv('FL_EPOCHS') or 1)
batch_size = int(os.getenv('FL_BATCHSIZE') or 32)
k_fit = float(os.getenv('FL_K_FIT') or 1)
k_eval = float(os.getenv('FL_K_EVAL') or 1)
min_devices = int(os.getenv('FL_MIN') or 10)
control_type = int(os.getenv('FL_CONTROL') or ControlType.DYNAMIC_QUADRATIC_ROUNDS)
target_accuracy = float(os.getenv('FL_TACCURACY') or 0.8)
target_num_rounds = int(os.getenv('FL_TROUNDS') or 10)
target_network_cost = int(os.getenv('FL_TNETWORK') or 100)
model = str(os.getenv('FL_MODEL') or "mnist")
export_metrics = bool(os.getenv('FL_EXPORT_METRICS') or False)
terminate_on_finish = bool(os.getenv('FL_TERMINATE') or False)
control_strategy = ControlStrategyFactory.factory(num_rounds=num_rounds,
min_devices=min_devices,
num_epochs=num_epochs,
batch_size=batch_size,
k_fit=k_fit,
k_eval=k_eval,
control_type=control_type,
target=Target(accuracy=target_accuracy,
num_round=target_num_rounds,
network_cost=target_network_cost))
mnist_model = fl.Model(name=model,
framework="TF",
control_strategy=control_strategy)
models_for_training = [mnist_model]
app = fl.orchestrator.OrchestratorApp(__name__)
app.orchestrator.config(available_models=models_for_training,
export_metrics=export_metrics,
terminate_on_finish=terminate_on_finish)
app.run()
| [
"federate_learning.orchestrator.OrchestratorApp",
"federate_learning.Model",
"federate_learning.orchestrator.control_strategy.control_strategy.Target",
"os.getenv"
] | [((1805, 1876), 'federate_learning.Model', 'fl.Model', ([], {'name': 'model', 'framework': '"""TF"""', 'control_strategy': 'control_strategy'}), "(name=model, framework='TF', control_strategy=control_strategy)\n", (1813, 1876), True, 'import federate_learning as fl\n'), ((1966, 2007), 'federate_learning.orchestrator.OrchestratorApp', 'fl.orchestrator.OrchestratorApp', (['__name__'], {}), '(__name__)\n', (1997, 2007), True, 'import federate_learning as fl\n'), ((323, 345), 'os.getenv', 'os.getenv', (['"""FL_ROUNDS"""'], {}), "('FL_ROUNDS')\n", (332, 345), False, 'import os\n'), ((370, 392), 'os.getenv', 'os.getenv', (['"""FL_EPOCHS"""'], {}), "('FL_EPOCHS')\n", (379, 392), False, 'import os\n'), ((416, 441), 'os.getenv', 'os.getenv', (['"""FL_BATCHSIZE"""'], {}), "('FL_BATCHSIZE')\n", (425, 441), False, 'import os\n'), ((463, 484), 'os.getenv', 'os.getenv', (['"""FL_K_FIT"""'], {}), "('FL_K_FIT')\n", (472, 484), False, 'import os\n'), ((506, 528), 'os.getenv', 'os.getenv', (['"""FL_K_EVAL"""'], {}), "('FL_K_EVAL')\n", (515, 528), False, 'import os\n'), ((553, 572), 'os.getenv', 'os.getenv', (['"""FL_MIN"""'], {}), "('FL_MIN')\n", (562, 572), False, 'import os\n'), ((599, 622), 'os.getenv', 'os.getenv', (['"""FL_CONTROL"""'], {}), "('FL_CONTROL')\n", (608, 622), False, 'import os\n'), ((688, 713), 'os.getenv', 'os.getenv', (['"""FL_TACCURACY"""'], {}), "('FL_TACCURACY')\n", (697, 713), False, 'import os\n'), ((746, 769), 'os.getenv', 'os.getenv', (['"""FL_TROUNDS"""'], {}), "('FL_TROUNDS')\n", (755, 769), False, 'import os\n'), ((803, 827), 'os.getenv', 'os.getenv', (['"""FL_TNETWORK"""'], {}), "('FL_TNETWORK')\n", (812, 827), False, 'import os\n'), ((848, 869), 'os.getenv', 'os.getenv', (['"""FL_MODEL"""'], {}), "('FL_MODEL')\n", (857, 869), False, 'import os\n'), ((904, 934), 'os.getenv', 'os.getenv', (['"""FL_EXPORT_METRICS"""'], {}), "('FL_EXPORT_METRICS')\n", (913, 934), False, 'import os\n'), ((972, 997), 'os.getenv', 'os.getenv', (['"""FL_TERMINATE"""'], 
{}), "('FL_TERMINATE')\n", (981, 997), False, 'import os\n'), ((1566, 1666), 'federate_learning.orchestrator.control_strategy.control_strategy.Target', 'Target', ([], {'accuracy': 'target_accuracy', 'num_round': 'target_num_rounds', 'network_cost': 'target_network_cost'}), '(accuracy=target_accuracy, num_round=target_num_rounds, network_cost=\n target_network_cost)\n', (1572, 1666), False, 'from federate_learning.orchestrator.control_strategy.control_strategy import Target\n')] |
# views.py
#Importar conector MySQL
import mysql.connector
import requests
import json
from flask import render_template, request
from flask_table import Table, Col
from app import app
##Conexión a la BD
inf_Activos_Fijos = mysql.connector.connect(
host="localhost",
user="root",
passwd="<PASSWORD>",
database="inf_Activos_Fijos"
)
cursor = inf_Activos_Fijos.cursor()
@app.route('/')
def index1():
return render_template("index.html")
@app.route('/index.html')
def index2():
return render_template("index.html")
@app.route('/404.html')
def p404():
return render_template("404.html")
@app.route('/Activo_Fijo.html')
def Activo_fijo():
cursor.execute("select * from `activos_fijos`;")
data = cursor.fetchall()
print(*data)
class activos_fijosTable(Table):
id = Col('ID')
descripcion = Col('Descripcion')
departamento = Col('Departamento')
tipo_activo = Col('Tipo de Activo')
fecha_registro = Col('Fecha de Registro')
valor_compra = Col('Valor Compra')
depreciacion_acumulada = Col('Depreciación Acumulada')
class activos_fijo(object):
def __init__(self, id, descripcion, departamento, tipo_activo, fecha_registro, valor_compra, depreciacion_acumulada):
self.id = id
self.descripcion = descripcion
self.departamento = departamento
self.tipo_activo = tipo_activo
self.fecha_registro = fecha_registro
self.valor_compra = valor_compra
self.depreciacion_acumulada = depreciacion_acumulada
a = ""
b = ""
c = ""
d = ""
e = ""
f = ""
g = ""
for a, b, c, d, e, f, g in data:
print(a,b,c,d,e,f,g)
activos_fijo = [activos_fijo(a, b, c, d, e, f, g)]
table = activos_fijosTable(activos_fijo)
return render_template("Activo_fijo.html", table = table)
@app.route('/Depreciacion.html')
def depreciacion():
cursor.execute("select * from `calculo_depreciacion`;")
data = cursor.fetchall()
print(*data)
class calculo_depreciacionTable(Table):
id = Col('ID')
ano_proceso = Col('Ano de proceso')
mes_proceso= Col('Mes de proceso')
activo_fijo = Col('Activo Fijo')
fecha_proceso = Col('Fecha de proceso')
monto_depreciado = Col('Monto Depreciado')
depreciacion_acumulada = Col('Depreciación Acumulada')
cuenta_compra = Col('Cuenta Compra')
cuenta_depreciacion = Col('Cuenta Depreciación')
class calculo_depreciacion(object):
def __init__(self, id, ano_proceso, mes_proceso, activo_fijo, fecha_proceso, monto_depreciado, depreciacion_acumulada, cuenta_compra, cuenta_depreciacion):
self.id = id
self.ano_proceso = ano_proceso
self.mes_proceso = mes_proceso
self.activo_fijo = activo_fijo
self.fecha_proceso = fecha_proceso
self.monto_depreciado = monto_depreciado
self.depreciacion_acumulada = depreciacion_acumulada
self.cuenta_compra = cuenta_compra
self.cuenta_depreciacion = cuenta_depreciacion
a = ""
b = ""
c = ""
d = ""
e = ""
f = ""
g = ""
h = ""
i = ""
for a, b, c, d, e, f, g, h, i in data:
print(a,b,c,d,e,f,g,h, i)
depreciacion = [calculo_depreciacion(a, b, c, d, e, f, g, h, i)]
table = calculo_depreciacionTable(depreciacion)
return render_template("Depreciacion.html", table = table)
@app.route('/btn_agregar_dp', methods=['POST'])
def btn_agregar_dp():
ano_proceso = int(request.form['ano_proceso'])
activos_fijos = request.form['activos_fijos']
mes_proceso = request.form['mes_proceso']
fecha_proceso = request.form['fecha_proceso']
monto_depreciado = request.form['monto_dep']
depreciacion_acumulada = request.form['depreciacion_acumulada']
cuenta_compra = int(request.form['cuenta_compra'])
cuenta_depreciacion = request.form['cuenta_dep']
cursor.execute("Insert into inf_Activos_Fijos.calculo_depreciacion (cd_ano_proceso, cd_mes_proceso, id_activos_fijos, cd_fecha_proceso, cd_monto_depreciado, cd_depreciacion_acumulada, cd_cuenta_compra, cd_cuenta_depreciacion) values('"+str(ano_proceso)+"', '"+mes_proceso+"', '"+activos_fijos+"', '"+fecha_proceso+"', "+monto_depreciado+", "+depreciacion_acumulada+", '"+str(cuenta_compra)+"', '"+cuenta_depreciacion+"');")
inf_Activos_Fijos.commit()
return render_template("Depreciacion.html")
@app.route('/index.html')
def dash():
cuenta_compra = int(request.form['cuenta_compra'])
ano_proceso = int(request.form['ano_proceso'])
depreciacion_acumulada = request.form['depreciacion_acumulada']
valor_compra = cuenta_compra
ano_dep = ano_proceso
monto_dep = valor_compra / ano_dep
dep_ac = monto_dep
dep_rest = valor_compra - dep_ac
return render_template("index.html", depreciacion_acumulada=depreciacion_acumulada, monto_dep=monto_dep, dep_rest=dep_rest)
@app.route('/btn_agregar_af', methods=['POST'])
def btn_agregar_af():
descripcion = request.form['descripcion']
departamento = request.form['departamento']
vcompra = request.form['valor_compra']
depreciacion_acumulada = request.form['depreciacion_acumulada']
tactivo = request.form['row-1-office']
fregistro = request.form['fecharegistro']
cursor.execute("Insert into inf_Activos_Fijos.activos_fijos (act_descripcion, act_departamento, act_tipo_activo, act_fecha_registro, act_valor_compra, act_depreciacion_acumulada) values('"+descripcion+"', '"+departamento+"', '"+tactivo+"', '"+fregistro+"', "+vcompra+", "+depreciacion_acumulada+");")
inf_Activos_Fijos.commit()
return render_template("Activo_fijo.html")
@app.route('/btn_agregar_ta', methods=['POST'])
def btn_agregar_ta():
descripcion = request.form['descripcion']
ccompra = request.form['cuenta_compra']
cdepreciacion = request.form['cuenta_depreciacion']
estado = request.form['row-1-office']
cursor.execute("Insert into inf_Activos_Fijos.tipo_activos (ta_descripcion, ta_CCCompra, ta_CCDepreciacion, ta_estado) values('"+descripcion+"', '"+ccompra+"', '"+cdepreciacion+"', '"+estado+"');")
inf_Activos_Fijos.commit()
return render_template("tipos-de-activos.html")
@app.route('/Empleados.html')
def Empleados():
cursor.execute("select * from `empleados`;")
data = cursor.fetchall()
print(*data)
class empleadosTable(Table):
id = Col('ID')
nombre = Col('Nombre')
cedula = Col('Cedula')
departamento = Col('Departamento')
tipo_persona = Col('Tipo de Persona')
fecha_ingreso = Col('Fecha de Ingreso')
estado = Col('Estado')
class empleado(object):
def __init__(self, id, nombre, cedula, departamento, tipo_persona, fecha_ingreso, estado):
self.id = id
self.nombre = nombre
self.cedula = cedula
self.departamento = departamento
self.tipo_persona = tipo_persona
self.fecha_ingreso = fecha_ingreso
self.estado = estado
a = ""
b = ""
c = ""
d = ""
e = ""
f = ""
g = ""
for a, b, c, d, e, f, g in data:
print(a,b,c,d,e,f,g)
empleados = [empleado(a, b, c, d, e, f, g)]
table = empleadosTable(empleados)
return render_template("Empleados.html", table = table)
@app.route('/Administracion.html')
def administracion():
cursor.execute("select * from `departamentos`;")
data = cursor.fetchall()
print(*data)
class departamentosTable(Table):
id = Col('ID')
descripcion = Col('Descripcion')
estado = Col('Estado')
class departamento(object):
def __init__(self, id, descripcion, estado):
self.id = id
self.descripcion = descripcion
self.estado = estado
a = ""
b = ""
c = ""
for a, b, c in data:
print(a,b,c)
departamentos = [departamento(a, b, c)]
table = departamentosTable(departamentos)
return render_template("Administracion.html", table = table)
@app.route('/get_data_ta')
def get_data_ta():
cursor.execute("select * from `tipo_activos`;")
data = cursor.fetchall()
@app.route('/btn_agregar_e', methods=['POST'])
def btn_agregar_e():
nombre = str(request.form.get('nombre', False))
cedula = str(request.form.get('cedula', False))
departamento = str(request.form.get('departamento', False))
fingreso = str(request.form.get('fingreso', False))
tipopersona = str(request.form.get('row-1-office', False))
estado = str(request.form.get('row-2-office', False))
cursor.execute("Insert into inf_Activos_Fijos.empleados (emp_nombre, emp_cedula, emp_departamento, emp_tipo_persona, emp_fecha_ingreso, emp_estado) values('"+nombre+"', '"+cedula+"', '"+departamento+"', '"+tipopersona+"','2019-08-08', '"+estado+"');")
inf_Activos_Fijos.commit()
return render_template("Empleados.html")
@app.route('/btn_agregar_a', methods=['POST'])
def btn_agregar_a():
descripcion = str(request.form.get('descripcion', False))
estado = str(request.form.get('row-1-office', False))
cursor.execute("Insert into inf_Activos_Fijos.departamentos (dep_descripcion, dep_estado) values('"+descripcion+"', '"+estado+"');")
inf_Activos_Fijos.commit()
return render_template("Administracion.html")
@app.route('/btn_agregar_f', methods=['POST'])
def btn_agregar_f():
descripcion = request.form['descripcion']
estado = request.form['row-1-office']
cursor.execute("Insert into inf_Activos_Fijos.departamentos (dep_descripcion, dep_estado) values('"+descripcion+"', '"+estado+"');")
inf_Activos_Fijos.commit()
return render_template("Finanzas.html")
@app.route('/btn_agregar_rh', methods=['POST'])
def btn_agregar_rh():
descripcion = request.form['descripcion']
estado = request.form['row-1-office']
cursor.execute("Insert into inf_Activos_Fijos.departamentos (dep_descripcion, dep_estado) values('"+descripcion+"', '"+estado+"');")
inf_Activos_Fijos.commit()
return render_template("Recursos_humanos.html")
@app.route('/Finanzas.html')
def finanzas():
cursor.execute("select * from `departamentos`;")
data = cursor.fetchall()
print(*data)
class departamentosTable(Table):
id = Col('ID')
descripcion = Col('Descripcion')
estado = Col('Estado')
class departamento(object):
def __init__(self, id, descripcion, estado):
self.id = id
self.descripcion = descripcion
self.estado = estado
a = ""
b = ""
c = ""
for a, b, c in data:
print(a,b,c)
departamentos = [departamento(a, b, c)]
table = departamentosTable(departamentos)
return render_template("Finanzas.html", table = table)
@app.route('/Recursos_humanos.html')
def recursos_humanos():
cursor.execute("select * from `departamentos`;")
data = cursor.fetchall()
print(*data)
class departamentosTable(Table):
id = Col('ID')
descripcion = Col('Descripcion')
estado = Col('Estado')
class departamento(object):
def __init__(self, id, descripcion, estado):
self.id = id
self.descripcion = descripcion
self.estado = estado
a = ""
b = ""
c = ""
for a, b, c in data:
print(a,b,c)
departamentos = [departamento(a, b, c)]
table = departamentosTable(departamentos)
return render_template("Recursos_humanos.html", table = table)
@app.route('/Asientos_contables.html')
def tables():
return render_template("Asientos_contables.html")
@app.route('/btn_agregar_ac', methods=['POST'])
def btn_agregar_ac():
descripcion = request.form['descripcion']
monto_asiento = request.form['monto_asiento']
jsonstring = '''{
"Cuentas": [
{ "id": 65, "cuenta": "Gasto depreciación Activos Fijos", "tipo": "DB", "monto": '''+monto_asiento+''' },
{ "id": 66, "cuenta": "Depreciación Acumulada Activos Fijos", "tipo": "CR", "monto": '''+monto_asiento+''' }
],
"Descripcion": "'''+descripcion+'''",
"Auxiliar": 8
}
'''
with open('asientos.json', 'w') as f:
f.write(jsonstring)
json1_file = open('asientos.json')
json1_str = json1_file.read()
json1_data = json.loads(json1_str)
r = requests.post('https://sistemacontabilidad20190808055834.azurewebsites.net/api/asientocontable', json=json1_data )
print (r)
return render_template("Asientos_contables.html")
@app.route('/Tipos-de-activos.html')
def tipos_de_activos():
cursor.execute("select * from `tipo_activos`;")
data = cursor.fetchall()
print(*data)
class tipo_activosTable(Table):
id = Col('ID')
descripcion = Col('Descripcion')
cuenta_compra = Col('Cuenta Compra')
cuenta_depreciacion = Col('Cuenta Depreciacion')
estado = Col('Estado')
class tipo_activo(object):
def __init__(self, id, descripcion, cuenta_compra, cuenta_depreciacion, estado):
self.id = id
self.descripcion = descripcion
self.cuenta_compra = cuenta_compra
self.cuenta_depreciacion = cuenta_depreciacion
self.estado = estado
a = ""
b = ""
c = ""
d = ""
e = ""
for a, b, c, d, e in data:
print(a,b,c,d,e)
activos = [tipo_activo(a, b, c, d, e)]
table = tipo_activosTable(activos)
return render_template("tipos-de-activos.html", table = table)
@app.route('/utilities-animation.html')
def utilities_animation():
return render_template("utilities-animation.html")
@app.route('/utilities-border.html')
def utilities_border():
return render_template("utilities-border.html")
@app.route('/utilities-color.html')
def utilities_color():
return render_template("utilities-color.html")
@app.route('/utilities-other.html')
def utilities_other():
return render_template("utilities-other.html")
| [
"flask.render_template",
"json.loads",
"requests.post",
"flask.request.form.get",
"app.app.route",
"flask_table.Col"
] | [((384, 398), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (393, 398), False, 'from app import app\n'), ((456, 480), 'app.app.route', 'app.route', (['"""/index.html"""'], {}), "('/index.html')\n", (465, 480), False, 'from app import app\n'), ((538, 560), 'app.app.route', 'app.route', (['"""/404.html"""'], {}), "('/404.html')\n", (547, 560), False, 'from app import app\n'), ((614, 644), 'app.app.route', 'app.route', (['"""/Activo_Fijo.html"""'], {}), "('/Activo_Fijo.html')\n", (623, 644), False, 'from app import app\n'), ((1892, 1923), 'app.app.route', 'app.route', (['"""/Depreciacion.html"""'], {}), "('/Depreciacion.html')\n", (1901, 1923), False, 'from app import app\n'), ((3506, 3552), 'app.app.route', 'app.route', (['"""/btn_agregar_dp"""'], {'methods': "['POST']"}), "('/btn_agregar_dp', methods=['POST'])\n", (3515, 3552), False, 'from app import app\n'), ((4507, 4531), 'app.app.route', 'app.route', (['"""/index.html"""'], {}), "('/index.html')\n", (4516, 4531), False, 'from app import app\n'), ((5006, 5052), 'app.app.route', 'app.route', (['"""/btn_agregar_af"""'], {'methods': "['POST']"}), "('/btn_agregar_af', methods=['POST'])\n", (5015, 5052), False, 'from app import app\n'), ((5755, 5801), 'app.app.route', 'app.route', (['"""/btn_agregar_ta"""'], {'methods': "['POST']"}), "('/btn_agregar_ta', methods=['POST'])\n", (5764, 5801), False, 'from app import app\n'), ((6300, 6328), 'app.app.route', 'app.route', (['"""/Empleados.html"""'], {}), "('/Empleados.html')\n", (6309, 6328), False, 'from app import app\n'), ((7411, 7444), 'app.app.route', 'app.route', (['"""/Administracion.html"""'], {}), "('/Administracion.html')\n", (7420, 7444), False, 'from app import app\n'), ((8125, 8150), 'app.app.route', 'app.route', (['"""/get_data_ta"""'], {}), "('/get_data_ta')\n", (8134, 8150), False, 'from app import app\n'), ((8253, 8298), 'app.app.route', 'app.route', (['"""/btn_agregar_e"""'], {'methods': "['POST']"}), "('/btn_agregar_e', 
methods=['POST'])\n", (8262, 8298), False, 'from app import app\n'), ((9000, 9045), 'app.app.route', 'app.route', (['"""/btn_agregar_a"""'], {'methods': "['POST']"}), "('/btn_agregar_a', methods=['POST'])\n", (9009, 9045), False, 'from app import app\n'), ((9408, 9453), 'app.app.route', 'app.route', (['"""/btn_agregar_f"""'], {'methods': "['POST']"}), "('/btn_agregar_f', methods=['POST'])\n", (9417, 9453), False, 'from app import app\n'), ((9778, 9824), 'app.app.route', 'app.route', (['"""/btn_agregar_rh"""'], {'methods': "['POST']"}), "('/btn_agregar_rh', methods=['POST'])\n", (9787, 9824), False, 'from app import app\n'), ((10158, 10185), 'app.app.route', 'app.route', (['"""/Finanzas.html"""'], {}), "('/Finanzas.html')\n", (10167, 10185), False, 'from app import app\n'), ((10854, 10889), 'app.app.route', 'app.route', (['"""/Recursos_humanos.html"""'], {}), "('/Recursos_humanos.html')\n", (10863, 10889), False, 'from app import app\n'), ((11574, 11611), 'app.app.route', 'app.route', (['"""/Asientos_contables.html"""'], {}), "('/Asientos_contables.html')\n", (11583, 11611), False, 'from app import app\n'), ((11682, 11728), 'app.app.route', 'app.route', (['"""/btn_agregar_ac"""'], {'methods': "['POST']"}), "('/btn_agregar_ac', methods=['POST'])\n", (11691, 11728), False, 'from app import app\n'), ((12623, 12658), 'app.app.route', 'app.route', (['"""/Tipos-de-activos.html"""'], {}), "('/Tipos-de-activos.html')\n", (12632, 12658), False, 'from app import app\n'), ((13608, 13646), 'app.app.route', 'app.route', (['"""/utilities-animation.html"""'], {}), "('/utilities-animation.html')\n", (13617, 13646), False, 'from app import app\n'), ((13731, 13766), 'app.app.route', 'app.route', (['"""/utilities-border.html"""'], {}), "('/utilities-border.html')\n", (13740, 13766), False, 'from app import app\n'), ((13845, 13879), 'app.app.route', 'app.route', (['"""/utilities-color.html"""'], {}), "('/utilities-color.html')\n", (13854, 13879), False, 'from app import app\n'), 
((13956, 13990), 'app.app.route', 'app.route', (['"""/utilities-other.html"""'], {}), "('/utilities-other.html')\n", (13965, 13990), False, 'from app import app\n'), ((424, 453), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (439, 453), False, 'from flask import render_template, request\n'), ((506, 535), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (521, 535), False, 'from flask import render_template, request\n'), ((584, 611), 'flask.render_template', 'render_template', (['"""404.html"""'], {}), "('404.html')\n", (599, 611), False, 'from flask import render_template, request\n'), ((1839, 1887), 'flask.render_template', 'render_template', (['"""Activo_fijo.html"""'], {'table': 'table'}), "('Activo_fijo.html', table=table)\n", (1854, 1887), False, 'from flask import render_template, request\n'), ((3452, 3501), 'flask.render_template', 'render_template', (['"""Depreciacion.html"""'], {'table': 'table'}), "('Depreciacion.html', table=table)\n", (3467, 3501), False, 'from flask import render_template, request\n'), ((4468, 4504), 'flask.render_template', 'render_template', (['"""Depreciacion.html"""'], {}), "('Depreciacion.html')\n", (4483, 4504), False, 'from flask import render_template, request\n'), ((4887, 5007), 'flask.render_template', 'render_template', (['"""index.html"""'], {'depreciacion_acumulada': 'depreciacion_acumulada', 'monto_dep': 'monto_dep', 'dep_rest': 'dep_rest'}), "('index.html', depreciacion_acumulada=depreciacion_acumulada,\n monto_dep=monto_dep, dep_rest=dep_rest)\n", (4902, 5007), False, 'from flask import render_template, request\n'), ((5717, 5752), 'flask.render_template', 'render_template', (['"""Activo_fijo.html"""'], {}), "('Activo_fijo.html')\n", (5732, 5752), False, 'from flask import render_template, request\n'), ((6257, 6297), 'flask.render_template', 'render_template', (['"""tipos-de-activos.html"""'], {}), "('tipos-de-activos.html')\n", 
(6272, 6297), False, 'from flask import render_template, request\n'), ((7360, 7406), 'flask.render_template', 'render_template', (['"""Empleados.html"""'], {'table': 'table'}), "('Empleados.html', table=table)\n", (7375, 7406), False, 'from flask import render_template, request\n'), ((8069, 8120), 'flask.render_template', 'render_template', (['"""Administracion.html"""'], {'table': 'table'}), "('Administracion.html', table=table)\n", (8084, 8120), False, 'from flask import render_template, request\n'), ((8964, 8997), 'flask.render_template', 'render_template', (['"""Empleados.html"""'], {}), "('Empleados.html')\n", (8979, 8997), False, 'from flask import render_template, request\n'), ((9367, 9405), 'flask.render_template', 'render_template', (['"""Administracion.html"""'], {}), "('Administracion.html')\n", (9382, 9405), False, 'from flask import render_template, request\n'), ((9743, 9775), 'flask.render_template', 'render_template', (['"""Finanzas.html"""'], {}), "('Finanzas.html')\n", (9758, 9775), False, 'from flask import render_template, request\n'), ((10115, 10155), 'flask.render_template', 'render_template', (['"""Recursos_humanos.html"""'], {}), "('Recursos_humanos.html')\n", (10130, 10155), False, 'from flask import render_template, request\n'), ((10804, 10849), 'flask.render_template', 'render_template', (['"""Finanzas.html"""'], {'table': 'table'}), "('Finanzas.html', table=table)\n", (10819, 10849), False, 'from flask import render_template, request\n'), ((11516, 11569), 'flask.render_template', 'render_template', (['"""Recursos_humanos.html"""'], {'table': 'table'}), "('Recursos_humanos.html', table=table)\n", (11531, 11569), False, 'from flask import render_template, request\n'), ((11637, 11679), 'flask.render_template', 'render_template', (['"""Asientos_contables.html"""'], {}), "('Asientos_contables.html')\n", (11652, 11679), False, 'from flask import render_template, request\n'), ((12405, 12426), 'json.loads', 'json.loads', (['json1_str'], {}), 
'(json1_str)\n', (12415, 12426), False, 'import json\n'), ((12436, 12559), 'requests.post', 'requests.post', (['"""https://sistemacontabilidad20190808055834.azurewebsites.net/api/asientocontable"""'], {'json': 'json1_data'}), "(\n 'https://sistemacontabilidad20190808055834.azurewebsites.net/api/asientocontable'\n , json=json1_data)\n", (12449, 12559), False, 'import requests\n'), ((12577, 12619), 'flask.render_template', 'render_template', (['"""Asientos_contables.html"""'], {}), "('Asientos_contables.html')\n", (12592, 12619), False, 'from flask import render_template, request\n'), ((13550, 13603), 'flask.render_template', 'render_template', (['"""tipos-de-activos.html"""'], {'table': 'table'}), "('tipos-de-activos.html', table=table)\n", (13565, 13603), False, 'from flask import render_template, request\n'), ((13685, 13728), 'flask.render_template', 'render_template', (['"""utilities-animation.html"""'], {}), "('utilities-animation.html')\n", (13700, 13728), False, 'from flask import render_template, request\n'), ((13802, 13842), 'flask.render_template', 'render_template', (['"""utilities-border.html"""'], {}), "('utilities-border.html')\n", (13817, 13842), False, 'from flask import render_template, request\n'), ((13914, 13953), 'flask.render_template', 'render_template', (['"""utilities-color.html"""'], {}), "('utilities-color.html')\n", (13929, 13953), False, 'from flask import render_template, request\n'), ((14025, 14064), 'flask.render_template', 'render_template', (['"""utilities-other.html"""'], {}), "('utilities-other.html')\n", (14040, 14064), False, 'from flask import render_template, request\n'), ((813, 822), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (816, 822), False, 'from flask_table import Table, Col\n'), ((845, 863), 'flask_table.Col', 'Col', (['"""Descripcion"""'], {}), "('Descripcion')\n", (848, 863), False, 'from flask_table import Table, Col\n'), ((887, 906), 'flask_table.Col', 'Col', (['"""Departamento"""'], {}), 
"('Departamento')\n", (890, 906), False, 'from flask_table import Table, Col\n'), ((929, 950), 'flask_table.Col', 'Col', (['"""Tipo de Activo"""'], {}), "('Tipo de Activo')\n", (932, 950), False, 'from flask_table import Table, Col\n'), ((976, 1000), 'flask_table.Col', 'Col', (['"""Fecha de Registro"""'], {}), "('Fecha de Registro')\n", (979, 1000), False, 'from flask_table import Table, Col\n'), ((1024, 1043), 'flask_table.Col', 'Col', (['"""Valor Compra"""'], {}), "('Valor Compra')\n", (1027, 1043), False, 'from flask_table import Table, Col\n'), ((1077, 1106), 'flask_table.Col', 'Col', (['"""Depreciación Acumulada"""'], {}), "('Depreciación Acumulada')\n", (1080, 1106), False, 'from flask_table import Table, Col\n'), ((2107, 2116), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (2110, 2116), False, 'from flask_table import Table, Col\n'), ((2139, 2160), 'flask_table.Col', 'Col', (['"""Ano de proceso"""'], {}), "('Ano de proceso')\n", (2142, 2160), False, 'from flask_table import Table, Col\n'), ((2182, 2203), 'flask_table.Col', 'Col', (['"""Mes de proceso"""'], {}), "('Mes de proceso')\n", (2185, 2203), False, 'from flask_table import Table, Col\n'), ((2226, 2244), 'flask_table.Col', 'Col', (['"""Activo Fijo"""'], {}), "('Activo Fijo')\n", (2229, 2244), False, 'from flask_table import Table, Col\n'), ((2269, 2292), 'flask_table.Col', 'Col', (['"""Fecha de proceso"""'], {}), "('Fecha de proceso')\n", (2272, 2292), False, 'from flask_table import Table, Col\n'), ((2320, 2343), 'flask_table.Col', 'Col', (['"""Monto Depreciado"""'], {}), "('Monto Depreciado')\n", (2323, 2343), False, 'from flask_table import Table, Col\n'), ((2377, 2406), 'flask_table.Col', 'Col', (['"""Depreciación Acumulada"""'], {}), "('Depreciación Acumulada')\n", (2380, 2406), False, 'from flask_table import Table, Col\n'), ((2431, 2451), 'flask_table.Col', 'Col', (['"""Cuenta Compra"""'], {}), "('Cuenta Compra')\n", (2434, 2451), False, 'from flask_table import Table, Col\n'), 
((2482, 2508), 'flask_table.Col', 'Col', (['"""Cuenta Depreciación"""'], {}), "('Cuenta Depreciación')\n", (2485, 2508), False, 'from flask_table import Table, Col\n'), ((6487, 6496), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (6490, 6496), False, 'from flask_table import Table, Col\n'), ((6514, 6527), 'flask_table.Col', 'Col', (['"""Nombre"""'], {}), "('Nombre')\n", (6517, 6527), False, 'from flask_table import Table, Col\n'), ((6545, 6558), 'flask_table.Col', 'Col', (['"""Cedula"""'], {}), "('Cedula')\n", (6548, 6558), False, 'from flask_table import Table, Col\n'), ((6582, 6601), 'flask_table.Col', 'Col', (['"""Departamento"""'], {}), "('Departamento')\n", (6585, 6601), False, 'from flask_table import Table, Col\n'), ((6625, 6647), 'flask_table.Col', 'Col', (['"""Tipo de Persona"""'], {}), "('Tipo de Persona')\n", (6628, 6647), False, 'from flask_table import Table, Col\n'), ((6672, 6695), 'flask_table.Col', 'Col', (['"""Fecha de Ingreso"""'], {}), "('Fecha de Ingreso')\n", (6675, 6695), False, 'from flask_table import Table, Col\n'), ((6713, 6726), 'flask_table.Col', 'Col', (['"""Estado"""'], {}), "('Estado')\n", (6716, 6726), False, 'from flask_table import Table, Col\n'), ((7616, 7625), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (7619, 7625), False, 'from flask_table import Table, Col\n'), ((7648, 7666), 'flask_table.Col', 'Col', (['"""Descripcion"""'], {}), "('Descripcion')\n", (7651, 7666), False, 'from flask_table import Table, Col\n'), ((7684, 7697), 'flask_table.Col', 'Col', (['"""Estado"""'], {}), "('Estado')\n", (7687, 7697), False, 'from flask_table import Table, Col\n'), ((8337, 8370), 'flask.request.form.get', 'request.form.get', (['"""nombre"""', '(False)'], {}), "('nombre', False)\n", (8353, 8370), False, 'from flask import render_template, request\n'), ((8389, 8422), 'flask.request.form.get', 'request.form.get', (['"""cedula"""', '(False)'], {}), "('cedula', False)\n", (8405, 8422), False, 'from flask import 
render_template, request\n'), ((8447, 8486), 'flask.request.form.get', 'request.form.get', (['"""departamento"""', '(False)'], {}), "('departamento', False)\n", (8463, 8486), False, 'from flask import render_template, request\n'), ((8507, 8542), 'flask.request.form.get', 'request.form.get', (['"""fingreso"""', '(False)'], {}), "('fingreso', False)\n", (8523, 8542), False, 'from flask import render_template, request\n'), ((8566, 8605), 'flask.request.form.get', 'request.form.get', (['"""row-1-office"""', '(False)'], {}), "('row-1-office', False)\n", (8582, 8605), False, 'from flask import render_template, request\n'), ((8624, 8663), 'flask.request.form.get', 'request.form.get', (['"""row-2-office"""', '(False)'], {}), "('row-2-office', False)\n", (8640, 8663), False, 'from flask import render_template, request\n'), ((9089, 9127), 'flask.request.form.get', 'request.form.get', (['"""descripcion"""', '(False)'], {}), "('descripcion', False)\n", (9105, 9127), False, 'from flask import render_template, request\n'), ((9146, 9185), 'flask.request.form.get', 'request.form.get', (['"""row-1-office"""', '(False)'], {}), "('row-1-office', False)\n", (9162, 9185), False, 'from flask import render_template, request\n'), ((10351, 10360), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (10354, 10360), False, 'from flask_table import Table, Col\n'), ((10383, 10401), 'flask_table.Col', 'Col', (['"""Descripcion"""'], {}), "('Descripcion')\n", (10386, 10401), False, 'from flask_table import Table, Col\n'), ((10419, 10432), 'flask_table.Col', 'Col', (['"""Estado"""'], {}), "('Estado')\n", (10422, 10432), False, 'from flask_table import Table, Col\n'), ((11063, 11072), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (11066, 11072), False, 'from flask_table import Table, Col\n'), ((11095, 11113), 'flask_table.Col', 'Col', (['"""Descripcion"""'], {}), "('Descripcion')\n", (11098, 11113), False, 'from flask_table import Table, Col\n'), ((11131, 11144), 
'flask_table.Col', 'Col', (['"""Estado"""'], {}), "('Estado')\n", (11134, 11144), False, 'from flask_table import Table, Col\n'), ((12830, 12839), 'flask_table.Col', 'Col', (['"""ID"""'], {}), "('ID')\n", (12833, 12839), False, 'from flask_table import Table, Col\n'), ((12862, 12880), 'flask_table.Col', 'Col', (['"""Descripcion"""'], {}), "('Descripcion')\n", (12865, 12880), False, 'from flask_table import Table, Col\n'), ((12905, 12925), 'flask_table.Col', 'Col', (['"""Cuenta Compra"""'], {}), "('Cuenta Compra')\n", (12908, 12925), False, 'from flask_table import Table, Col\n'), ((12956, 12982), 'flask_table.Col', 'Col', (['"""Cuenta Depreciacion"""'], {}), "('Cuenta Depreciacion')\n", (12959, 12982), False, 'from flask_table import Table, Col\n'), ((13000, 13013), 'flask_table.Col', 'Col', (['"""Estado"""'], {}), "('Estado')\n", (13003, 13013), False, 'from flask_table import Table, Col\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-28 15:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('manage_room', '0004_auto_20170127_1505'),
]
operations = [
migrations.RemoveField(
model_name='slide',
name='prev_id',
),
migrations.AlterField(
model_name='slide',
name='next_id',
field=models.PositiveSmallIntegerField(unique=True),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.PositiveSmallIntegerField"
] | [((304, 362), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""slide"""', 'name': '"""prev_id"""'}), "(model_name='slide', name='prev_id')\n", (326, 362), False, 'from django.db import migrations, models\n'), ((508, 553), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'unique': '(True)'}), '(unique=True)\n', (540, 553), False, 'from django.db import migrations, models\n')] |
import os
os.system("sudo apt-get update")
#os.system('sudo apt-get install openjdk-8-jre -y')
os.system('wget --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz')
os.system('tar -zxf jdk-8u131-linux-x64.tar.gz')
os.system('export JAVA_HOME=/home/jupyter/Predict-Churn/jdk1.8.0_131/')
os.system('export PATH="$JAVA_HOME/bin:$PATH"')
os.system('pip install http://h2o-release.s3.amazonaws.com/h2o/rel-ueno/5/Python/h2o-3.10.4.5-py2.py3-none-any.whl')
| [
"os.system"
] | [((10, 42), 'os.system', 'os.system', (['"""sudo apt-get update"""'], {}), "('sudo apt-get update')\n", (19, 42), False, 'import os\n'), ((95, 296), 'os.system', 'os.system', (['"""wget --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz"""'], {}), '(\n \'wget --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz\'\n )\n', (104, 296), False, 'import os\n'), ((287, 335), 'os.system', 'os.system', (['"""tar -zxf jdk-8u131-linux-x64.tar.gz"""'], {}), "('tar -zxf jdk-8u131-linux-x64.tar.gz')\n", (296, 335), False, 'import os\n'), ((336, 407), 'os.system', 'os.system', (['"""export JAVA_HOME=/home/jupyter/Predict-Churn/jdk1.8.0_131/"""'], {}), "('export JAVA_HOME=/home/jupyter/Predict-Churn/jdk1.8.0_131/')\n", (345, 407), False, 'import os\n'), ((408, 455), 'os.system', 'os.system', (['"""export PATH="$JAVA_HOME/bin:$PATH\\""""'], {}), '(\'export PATH="$JAVA_HOME/bin:$PATH"\')\n', (417, 455), False, 'import os\n'), ((456, 582), 'os.system', 'os.system', (['"""pip install http://h2o-release.s3.amazonaws.com/h2o/rel-ueno/5/Python/h2o-3.10.4.5-py2.py3-none-any.whl"""'], {}), "(\n 'pip install http://h2o-release.s3.amazonaws.com/h2o/rel-ueno/5/Python/h2o-3.10.4.5-py2.py3-none-any.whl'\n )\n", (465, 582), False, 'import os\n')] |
"""
Procuret Python
Test With Supplier Module
author: <EMAIL>
"""
from procuret.ancillary.command_line import CommandLine
from procuret.tests.variants.with_session import TestWithSession
class TestWithSupplier(TestWithSession):
def __init__(self) -> None:
cl = CommandLine.load()
self._supplier_id = cl.require(
key='--supplier-id',
of_type=int,
type_name='integer'
)
return super().__init__()
supplier_id = property(lambda s: s._supplier_id)
# _tws_cached_supplier: Optional[Supplier] = None
# def _tws_load_supplier(self) -> Supplier:
# if self._tws_cached_supplier is None:
# raise NotImplementedError
# return self._tws_cached_supplier
| [
"procuret.ancillary.command_line.CommandLine.load"
] | [((277, 295), 'procuret.ancillary.command_line.CommandLine.load', 'CommandLine.load', ([], {}), '()\n', (293, 295), False, 'from procuret.ancillary.command_line import CommandLine\n')] |
"""Tests the musicbrainz plugin."""
import datetime
from unittest.mock import patch
import musicbrainzngs # noqa: F401
import pytest
import tests.plugins.musicbrainz.resources as mb_rsrc
from moe.plugins import musicbrainz as moe_mb
@pytest.fixture
def mock_mb_by_id():
"""Mock the musicbrainzngs api call `get_release_by_id`."""
with patch(
"moe.plugins.musicbrainz.mb_core.musicbrainzngs.get_release_by_id",
autospec=True,
) as mock_mb_by_id:
yield mock_mb_by_id
class TestImportCandidates:
"""Test the ``import_candidtates`` hook implementation."""
def test_get_matching_albums(self, mock_album, tmp_config):
"""Get matching albums when searching for candidates to import."""
config = tmp_config("default_plugins = ['import', 'musicbrainz']")
with patch.object(moe_mb.mb_core, "get_matching_album") as mock_gma:
mock_gma.return_value = mock_album
candidates = config.plugin_manager.hook.import_candidates(
config=config, album=mock_album
)
mock_gma.assert_called_once_with(mock_album)
assert candidates == [mock_album]
class TestGetMatchingAlbum:
"""Test `get_matching_album()`."""
@pytest.mark.network
def test_network(self, mock_album):
"""Make sure we can actually hit the real API.
Since `get_matching_album` also calls `get_album_by_id`, this test serves as a
network test for both.
"""
mock_album.artist = "<NAME>"
mock_album.title = "My Beautiful Dark Twisted Fantasy"
mb_album = moe_mb.get_matching_album(mock_album)
# don't test every field since we can't actually guarantee the accuracy of
# musicbrainz's search results every time
assert mb_album.artist == mock_album.artist
assert mb_album.title == mock_album.title
def test_album_search(self, mock_album, mock_mb_by_id):
"""Searching for a release uses the expected parameters."""
mock_album.artist = "<NAME>"
mock_album.title = "My Beautiful Dark Twisted Fantasy"
mock_album.date = datetime.date(2010, 11, 22)
search_criteria = {
"artist": "<NAME>",
"release": "My Beautiful Dark Twisted Fantasy",
"date": "2010-11-22",
}
mock_mb_by_id.return_value = mb_rsrc.full_release.release
with patch(
"moe.plugins.musicbrainz.mb_core.musicbrainzngs.search_releases",
return_value=mb_rsrc.full_release.search,
autospec=True,
) as mock_mb_search:
mb_album = moe_mb.get_matching_album(mock_album)
mock_mb_search.assert_called_once_with(limit=1, **search_criteria)
assert mb_album == mb_rsrc.full_release.album
def test_dont_search_if_mbid(self, mock_album):
"""Use ``mb_album_id`` to search by id if it exists."""
mock_album.mb_album_id = "1"
with patch(
"moe.plugins.musicbrainz.mb_core.get_album_by_id",
) as mock_mb_by_id:
moe_mb.get_matching_album(mock_album)
mock_mb_by_id.assert_called_once_with(mock_album.mb_album_id)
class TestGetAlbumById:
    """Test `get_album_by_id()`.

    To inspect what a musicbrainz api query actually returns, use::

        def test_print_result(self):
            id = "3af9a6ca-c38a-41a7-a53c-32a97e869e8e"
            includes = ["artist-credits", "recordings"]
            print(musicbrainzngs.get_release_by_id(id, includes))
            assert 0

    Add whatever ``includes`` the behaviour under test requires.
    """

    def test_album_search(self, mock_mb_by_id):
        """Fetching a release by id returns the expected album."""
        release_id = "2fcfcaaa-6594-4291-b79f-2d354139e108"
        mock_mb_by_id.return_value = mb_rsrc.full_release.release

        album = moe_mb.get_album_by_id(release_id)

        mock_mb_by_id.assert_called_once_with(
            release_id, includes=moe_mb.mb_core.RELEASE_INCLUDES
        )
        assert album == mb_rsrc.full_release.album

    def test_partial_date_year_mon(self, mock_mb_by_id):
        """A release date missing the day defaults the day to 1."""
        release_id = "112dec42-65f2-3bde-8d7d-26deddde10b2"
        mock_mb_by_id.return_value = mb_rsrc.partial_date.partial_date_year_mon

        album = moe_mb.get_album_by_id(release_id)

        assert album.date == datetime.date(1992, 12, 1)

    def test_partial_date_year(self, mock_mb_by_id):
        """A release date missing day and month defaults both to 1."""
        release_id = "112dec42-65f2-3bde-8d7d-26deddde10b2"
        mock_mb_by_id.return_value = mb_rsrc.partial_date.partial_date_year

        album = moe_mb.get_album_by_id(release_id)

        assert album.date == datetime.date(1992, 1, 1)

    def test_multi_disc_release(self, mock_mb_by_id):
        """Releases spanning multiple discs are mapped with per-disc tracks."""
        release_id = "3af9a6ca-c38a-41a7-a53c-32a97e869e8e"
        mock_mb_by_id.return_value = mb_rsrc.multi_disc.release

        album = moe_mb.get_album_by_id(release_id)

        assert album.disc_total == 2
        assert any(track.disc == 1 for track in album.tracks)
        assert any(track.disc == 2 for track in album.tracks)
class TestPluginRegistration:
    """Test the `plugin_registration` hook implementation."""

    def test_musicbrainz_core(self, tmp_config):
        """Listing ``musicbrainz`` in the config enables the core plugin."""
        cfg = tmp_config(settings='default_plugins = ["musicbrainz"]')

        assert cfg.plugin_manager.has_plugin("musicbrainz_core")
| [
"moe.plugins.musicbrainz.get_album_by_id",
"datetime.date",
"unittest.mock.patch.object",
"moe.plugins.musicbrainz.get_matching_album",
"unittest.mock.patch"
] | [((349, 441), 'unittest.mock.patch', 'patch', (['"""moe.plugins.musicbrainz.mb_core.musicbrainzngs.get_release_by_id"""'], {'autospec': '(True)'}), "('moe.plugins.musicbrainz.mb_core.musicbrainzngs.get_release_by_id',\n autospec=True)\n", (354, 441), False, 'from unittest.mock import patch\n'), ((1610, 1647), 'moe.plugins.musicbrainz.get_matching_album', 'moe_mb.get_matching_album', (['mock_album'], {}), '(mock_album)\n', (1635, 1647), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((2139, 2166), 'datetime.date', 'datetime.date', (['(2010)', '(11)', '(22)'], {}), '(2010, 11, 22)\n', (2152, 2166), False, 'import datetime\n'), ((3910, 3945), 'moe.plugins.musicbrainz.get_album_by_id', 'moe_mb.get_album_by_id', (['mb_album_id'], {}), '(mb_album_id)\n', (3932, 3945), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((4405, 4440), 'moe.plugins.musicbrainz.get_album_by_id', 'moe_mb.get_album_by_id', (['mb_album_id'], {}), '(mb_album_id)\n', (4427, 4440), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((4793, 4828), 'moe.plugins.musicbrainz.get_album_by_id', 'moe_mb.get_album_by_id', (['mb_album_id'], {}), '(mb_album_id)\n', (4815, 4828), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((5144, 5179), 'moe.plugins.musicbrainz.get_album_by_id', 'moe_mb.get_album_by_id', (['mb_album_id'], {}), '(mb_album_id)\n', (5166, 5179), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((829, 879), 'unittest.mock.patch.object', 'patch.object', (['moe_mb.mb_core', '"""get_matching_album"""'], {}), "(moe_mb.mb_core, 'get_matching_album')\n", (841, 879), False, 'from unittest.mock import patch\n'), ((2411, 2543), 'unittest.mock.patch', 'patch', (['"""moe.plugins.musicbrainz.mb_core.musicbrainzngs.search_releases"""'], {'return_value': 'mb_rsrc.full_release.search', 'autospec': '(True)'}), "('moe.plugins.musicbrainz.mb_core.musicbrainzngs.search_releases',\n return_value=mb_rsrc.full_release.search, autospec=True)\n", (2416, 2543), False, 
'from unittest.mock import patch\n'), ((2629, 2666), 'moe.plugins.musicbrainz.get_matching_album', 'moe_mb.get_matching_album', (['mock_album'], {}), '(mock_album)\n', (2654, 2666), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((2965, 3021), 'unittest.mock.patch', 'patch', (['"""moe.plugins.musicbrainz.mb_core.get_album_by_id"""'], {}), "('moe.plugins.musicbrainz.mb_core.get_album_by_id')\n", (2970, 3021), False, 'from unittest.mock import patch\n'), ((3075, 3112), 'moe.plugins.musicbrainz.get_matching_album', 'moe_mb.get_matching_album', (['mock_album'], {}), '(mock_album)\n', (3100, 3112), True, 'from moe.plugins import musicbrainz as moe_mb\n'), ((4474, 4500), 'datetime.date', 'datetime.date', (['(1992)', '(12)', '(1)'], {}), '(1992, 12, 1)\n', (4487, 4500), False, 'import datetime\n'), ((4862, 4887), 'datetime.date', 'datetime.date', (['(1992)', '(1)', '(1)'], {}), '(1992, 1, 1)\n', (4875, 4887), False, 'import datetime\n')] |
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'formulaSetupDialog.ui'
##
## Created by: Qt User Interface Compiler version 5.15.1
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
import sys
import os
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
# Make the sibling ``pjrd`` package importable when this file is run directly.
sys.path.append('../pjrd')
from pjrd.helpers import test, dbConnection
from pjrd.formulaEditor import formulaEditorDialog
# NOTE(review): presumably a macOS rendering workaround (layer-backed Qt
# views); harmless elsewhere -- confirm it is still needed.
os.environ['QT_MAC_WANTS_LAYER'] = '1'
class formulaSetupDialog(QDialog):
    """Startup dialog asking whether the user is creating a brand-new formula
    or revising an existing one, then launching ``formulaEditorDialog`` with
    the chosen name/category (new) or previous formula (revision).
    """
    def __init__(self, mainWindow):
        """Build the Designer-generated UI and attach the data/signal logic.

        Args:
            mainWindow: Application main window; forwarded to the formula
                editor dialog so it can interact with the rest of the app.
        """
        super(formulaSetupDialog, self).__init__()
        self.mainWindow = mainWindow
        self.setupUi(self)
        self.setupLogic()
    def setupUi(self, formulaSetupDialog):
        """Create and lay out all widgets.

        Generated from ``formulaSetupDialog.ui`` by the Qt UI compiler;
        manual additions (initial disabled panels, radio-button wiring) are
        at the end of the method.
        """
        if not formulaSetupDialog.objectName():
            formulaSetupDialog.setObjectName(u"formulaSetupDialog")
        formulaSetupDialog.resize(672, 419)
        sizePolicy = QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(formulaSetupDialog.sizePolicy().hasHeightForWidth())
        formulaSetupDialog.setSizePolicy(sizePolicy)
        formulaSetupDialog.setMaximumSize(QSize(678, 419))
        self.verticalLayout = QVBoxLayout(formulaSetupDialog)
        self.verticalLayout.setObjectName(u"verticalLayout")
        # Dialog header label plus a horizontal separator line.
        self.dialogHeaderLabel = QLabel(formulaSetupDialog)
        self.dialogHeaderLabel.setObjectName(u"dialogHeaderLabel")
        font = QFont()
        font.setPointSize(13)
        self.dialogHeaderLabel.setFont(font)
        self.dialogHeaderLabel.setAutoFillBackground(True)
        self.dialogHeaderLabel.setAlignment(Qt.AlignCenter)
        self.verticalLayout.addWidget(self.dialogHeaderLabel)
        self.line_2 = QFrame(formulaSetupDialog)
        self.line_2.setObjectName(u"line_2")
        self.line_2.setFrameShape(QFrame.HLine)
        self.line_2.setFrameShadow(QFrame.Sunken)
        self.verticalLayout.addWidget(self.line_2)
        # Prompt row with the "New Formula" / "Revision" radio buttons.
        self.headerFrame = QFrame(formulaSetupDialog)
        self.headerFrame.setObjectName(u"headerFrame")
        sizePolicy1 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
        sizePolicy1.setHorizontalStretch(0)
        sizePolicy1.setVerticalStretch(0)
        sizePolicy1.setHeightForWidth(self.headerFrame.sizePolicy().hasHeightForWidth())
        self.headerFrame.setSizePolicy(sizePolicy1)
        self.headerFrame.setFrameShape(QFrame.StyledPanel)
        self.horizontalLayout_2 = QHBoxLayout(self.headerFrame)
        self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
        self.revisionPromptLabel = QLabel(self.headerFrame)
        self.revisionPromptLabel.setObjectName(u"revisionPromptLabel")
        self.horizontalLayout_2.addWidget(self.revisionPromptLabel)
        self.headerRadioBtnContainer = QWidget(self.headerFrame)
        self.headerRadioBtnContainer.setObjectName(u"headerRadioBtnContainer")
        self.horizontalLayout = QHBoxLayout(self.headerRadioBtnContainer)
        self.horizontalLayout.setObjectName(u"horizontalLayout")
        self.newFormulaRadioBtn = QRadioButton(self.headerRadioBtnContainer)
        self.newFormulaRadioBtn.setObjectName(u"newFormulaRadioBtn")
        self.horizontalLayout.addWidget(self.newFormulaRadioBtn)
        self.revisionRadioBtn = QRadioButton(self.headerRadioBtnContainer)
        self.revisionRadioBtn.setObjectName(u"revisionRadioBtn")
        self.horizontalLayout.addWidget(self.revisionRadioBtn)
        self.horizontalLayout_2.addWidget(self.headerRadioBtnContainer)
        self.verticalLayout.addWidget(self.headerFrame)
        # Body: two side-by-side panels, one per workflow.
        self.bodyContainerWidget = QWidget(formulaSetupDialog)
        self.bodyContainerWidget.setObjectName(u"bodyContainerWidget")
        self.horizontalLayout_4 = QHBoxLayout(self.bodyContainerWidget)
        self.horizontalLayout_4.setObjectName(u"horizontalLayout_4")
        # Left panel: name + category for a brand-new formula.
        self.newFormulaContainerFrame = QFrame(self.bodyContainerWidget)
        self.newFormulaContainerFrame.setObjectName(u"newFormulaContainerFrame")
        self.newFormulaContainerFrame.setFrameShape(QFrame.StyledPanel)
        self.verticalLayout_3 = QVBoxLayout(self.newFormulaContainerFrame)
        self.verticalLayout_3.setObjectName(u"verticalLayout_3")
        self.formulaContainerLabel = QLabel(self.newFormulaContainerFrame)
        self.formulaContainerLabel.setObjectName(u"formulaContainerLabel")
        self.formulaContainerLabel.setAlignment(Qt.AlignCenter)
        self.verticalLayout_3.addWidget(self.formulaContainerLabel)
        self.formulaNamePromptLabel = QLabel(self.newFormulaContainerFrame)
        self.formulaNamePromptLabel.setObjectName(u"formulaNamePromptLabel")
        self.verticalLayout_3.addWidget(self.formulaNamePromptLabel)
        self.formulaNameLineEdit = QLineEdit(self.newFormulaContainerFrame)
        self.formulaNameLineEdit.setObjectName(u"formulaNameLineEdit")
        self.verticalLayout_3.addWidget(self.formulaNameLineEdit)
        self.categoryLabel1 = QLabel(self.newFormulaContainerFrame)
        self.categoryLabel1.setObjectName(u"categoryLabel1")
        self.verticalLayout_3.addWidget(self.categoryLabel1)
        self.newCategoryComboBox = QComboBox(self.newFormulaContainerFrame)
        self.newCategoryComboBox.setObjectName(u"newCategoryComboBox")
        self.newCategoryComboBox.setEditable(True)
        self.verticalLayout_3.addWidget(self.newCategoryComboBox)
        self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.verticalLayout_3.addItem(self.verticalSpacer)
        self.horizontalSpacer_2 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.verticalLayout_3.addItem(self.horizontalSpacer_2)
        self.horizontalLayout_4.addWidget(self.newFormulaContainerFrame)
        # Right panel: pick the category and the existing formula to revise.
        self.revisionContainerFrame = QFrame(self.bodyContainerWidget)
        self.revisionContainerFrame.setObjectName(u"revisionContainerFrame")
        self.revisionContainerFrame.setFrameShape(QFrame.StyledPanel)
        self.verticalLayout_2 = QVBoxLayout(self.revisionContainerFrame)
        self.verticalLayout_2.setObjectName(u"verticalLayout_2")
        self.revisionContainerLabel = QLabel(self.revisionContainerFrame)
        self.revisionContainerLabel.setObjectName(u"revisionContainerLabel")
        self.revisionContainerLabel.setAlignment(Qt.AlignCenter)
        self.verticalLayout_2.addWidget(self.revisionContainerLabel)
        self.categoryLabel2 = QLabel(self.revisionContainerFrame)
        self.categoryLabel2.setObjectName(u"categoryLabel2")
        self.verticalLayout_2.addWidget(self.categoryLabel2)
        self.revisionCategoryComboBox = QComboBox(self.revisionContainerFrame)
        self.revisionCategoryComboBox.setObjectName(u"revisionCategoryComboBox")
        self.revisionCategoryComboBox.setEditable(False)
        self.revisionCategoryComboBox.setInsertPolicy(QComboBox.NoInsert)
        self.verticalLayout_2.addWidget(self.revisionCategoryComboBox)
        self.revisedFormulaPromptLabel = QLabel(self.revisionContainerFrame)
        self.revisedFormulaPromptLabel.setObjectName(u"revisedFormulaPromptLabel")
        self.verticalLayout_2.addWidget(self.revisedFormulaPromptLabel)
        self.formulasToDateComboBox = QComboBox(self.revisionContainerFrame)
        self.formulasToDateComboBox.setObjectName(u"formulasToDateComboBox")
        self.formulasToDateComboBox.setEditable(True)
        self.formulasToDateComboBox.setInsertPolicy(QComboBox.NoInsert)
        self.verticalLayout_2.addWidget(self.formulasToDateComboBox)
        self.previousVersionLabel = QLabel(self.revisionContainerFrame)
        self.previousVersionLabel.setObjectName(u"previousVersionLabel")
        self.verticalLayout_2.addWidget(self.previousVersionLabel)
        self.previousVersionPlaceholderLabel = QLabel(self.revisionContainerFrame)
        self.previousVersionPlaceholderLabel.setObjectName(u"previousVersionPlaceholderLabel")
        self.verticalLayout_2.addWidget(self.previousVersionPlaceholderLabel)
        self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.verticalLayout_2.addItem(self.horizontalSpacer)
        self.horizontalLayout_4.addWidget(self.revisionContainerFrame)
        self.verticalLayout.addWidget(self.bodyContainerWidget)
        # OK / Cancel buttons.
        self.buttonBox = QDialogButtonBox(formulaSetupDialog)
        self.buttonBox.setObjectName(u"buttonBox")
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
        self.verticalLayout.addWidget(self.buttonBox)
        self.retranslateUi(formulaSetupDialog)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.close)
        #self.buttonBox.accepted.connect(formulaSetupDialog.accept)
        #self.buttonBox.rejected.connect(formulaSetupDialog.close)
        # Both panels start disabled until a radio button is chosen.
        self.revisionContainerFrame.setDisabled(True)
        self.newFormulaContainerFrame.setDisabled(True)
        # Enable exactly one panel depending on which radio button is active.
        self.revisionRadioBtn.toggled.connect(self.newFormulaContainerFrame.setDisabled)
        self.revisionRadioBtn.toggled.connect(self.revisionContainerFrame.setEnabled)
        self.newFormulaRadioBtn.toggled.connect(self.revisionContainerFrame.setDisabled)
        self.newFormulaRadioBtn.toggled.connect(self.newFormulaContainerFrame.setEnabled)
        QMetaObject.connectSlotsByName(formulaSetupDialog)
    # setupUi
    def retranslateUi(self, formulaSetupDialog):
        """Set all user-visible strings (generated; routed through Qt's
        translation machinery so the dialog can be localised)."""
        formulaSetupDialog.setWindowTitle(QCoreApplication.translate("formulaSetupDialog", u"Setup", None))
        self.dialogHeaderLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"Formula Setup", None))
        self.revisionPromptLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"Is this a revision of a previous formula, or an entirely new formula?", None))
        self.newFormulaRadioBtn.setText(QCoreApplication.translate("formulaSetupDialog", u"New Formula", None))
        self.revisionRadioBtn.setText(QCoreApplication.translate("formulaSetupDialog", u"Revision", None))
        self.formulaContainerLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"New Formula", None))
        self.formulaNamePromptLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"What is the new formula name?", None))
        self.categoryLabel1.setText(QCoreApplication.translate("formulaSetupDialog", u"Category", None))
        self.revisionContainerLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"Revision", None))
        self.categoryLabel2.setText(QCoreApplication.translate("formulaSetupDialog", u"Category", None))
        self.revisedFormulaPromptLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"Select formula to be revised", None))
        self.previousVersionLabel.setText(QCoreApplication.translate("formulaSetupDialog", u"Previous Version", None))
        self.previousVersionPlaceholderLabel.setText("")
    # retranslateUi
    def setupLogic(self):
        """
        -----------------------------
        Purpose:
            - Event setup, QModel data fill
        Arguments:
            - None
        Return Value:
            - None
        """
        # creates the item models for the category boxes
        categoryModel = QStandardItemModel()
        with dbConnection('FormulaSchema').cursor() as cursor:
            cursor.execute('SELECT DISTINCT category_name, category.category_id FROM formula INNER JOIN category ON formula.formula_category_id = category.category_id')
            categories = cursor.fetchall()
        for category in categories:
            categoryItem = QStandardItem()
            categoryItem.setText(category['category_name'])
            # Keep the full DB row on the item so handlers can read category_id.
            categoryItem.setData(category, Qt.UserRole)
            categoryModel.appendRow(categoryItem)
        # Both combo boxes share the same model; the revision box starts empty.
        self.newCategoryComboBox.setModel(categoryModel)
        self.revisionCategoryComboBox.setModel(categoryModel)
        self.revisionCategoryComboBox.setCurrentIndex(-1)
        # signal setup
        self.revisionCategoryComboBox.currentIndexChanged.connect(self.comboBoxUpdate)
        self.formulasToDateComboBox.currentIndexChanged.connect(self.updatePlaceholderLabel)
    def comboBoxUpdate(self):
        """
        -----------------------------
        Purpose:
            - Signal setup
        Arguments:
            - None
        Return Value:
            - None
        """
        # Bail out quietly if no category is selected yet.
        revisionComboboxSelection = self.revisionCategoryComboBox.itemData(self.revisionCategoryComboBox.currentIndex(), Qt.UserRole)
        if revisionComboboxSelection is None:
            return
        else:
            categoryID = revisionComboboxSelection['category_id']
        if categoryID is None:
            return
        # Rebuild the formula list for the chosen category, with a leading
        # non-editable 'SELECT' placeholder row (UserRole data 0).
        prevFormulasModel = QStandardItemModel()
        blankItem = QStandardItem()
        blankItem.setText('SELECT')
        blankItem.setEditable(False)
        blankItem.setData(0, Qt.UserRole)
        prevFormulasModel.appendRow(blankItem)
        # fills the data from
        with dbConnection('FormulaSchema').cursor() as cursor:
            cursor.execute('SELECT formula.formula_id, formula_name, formula.version_number, formula.formula_category_id, formula.version_of_id, category.category_name, category.category_id FROM formula LEFT JOIN category ON category.category_id = formula.formula_category_id WHERE formula.formula_category_id = %s', (categoryID,))
            formulas = cursor.fetchall()
        for formula in formulas:
            formulaItem = QStandardItem()
            formulaItem.setText(formula['formula_name'].title())
            # Full DB row stored on the item; updatePlaceholderLabel reads it.
            formulaItem.setData(formula, Qt.UserRole)
            prevFormulasModel.appendRow(formulaItem)
        self.formulasToDateComboBox.setModel(prevFormulasModel)
    # updates the version number label
    def updatePlaceholderLabel(self):
        """
        -----------------------------
        Purpose:
            - Updates the version number label
        Arguments:
            - None
        Return Value:
            - None
        """
        # 0 is the 'SELECT' placeholder row; None means nothing is selected.
        itemData = self.formulasToDateComboBox.itemData(self.formulasToDateComboBox.currentIndex(), Qt.UserRole)
        if itemData == 0 or itemData is None:
            return
        versionNumber = itemData['version_number']
        if versionNumber:
            self.previousVersionPlaceholderLabel.setText(str(versionNumber))
        else:
            self.previousVersionPlaceholderLabel.setText('None')
    # form accept
    def accept(self):
        """
        -----------------------------
        Purpose:
            - Method for calling the Formula Editor window
        Arguments:
            - None
        Return Value:
            - None
        """
        # if neither new formula or revision is indicated, throws an error message and returns
        if self.revisionRadioBtn.isChecked() is False and self.newFormulaRadioBtn.isChecked() is False:
            msg = QMessageBox()
            msg.setText('Please indicate whether this formula is new or a revision/iteration')
            msg.exec_()
            return
        else:
            isRevision = self.revisionRadioBtn.isChecked()
            if isRevision is False:
                # if the formula is new but no name was inputted
                if self.formulaNameLineEdit.text() == '' or self.formulaNameLineEdit.text() is None:
                    msg = QMessageBox()
                    msg.setText('Input a formula name to continue')
                    msg.exec_()
                    return
                # if everything goes right
                else:
                    # Launch the editor for a brand-new formula, then close
                    # this setup dialog.
                    name = self.formulaNameLineEdit.text().title()
                    formulaEditor = formulaEditorDialog(self.mainWindow, formulaName = name, revision = False, category = self.newCategoryComboBox.currentText())
                    self.close()
                    formulaEditor.exec_()
            else:
                # if formula is a revision, but no previous formula was chosen
                if self.formulasToDateComboBox.currentIndex() == -1:
                    msg = QMessageBox()
                    msg.setText('Select a previous formula that you are revising')
                    msg.exec_()
                    return
                # if everything goes right
                else:
                    # Launch the editor seeded with the previous version's
                    # id and name, then close this setup dialog.
                    prevID = self.formulasToDateComboBox.currentData(Qt.UserRole)['version_number']
                    prevName = self.formulasToDateComboBox.currentText().title()
                    #if isRevision is False:
                    formulaEditor = formulaEditorDialog(self.mainWindow, formulaName = prevName, revision = isRevision, prevRevisionID = prevID, category = self.revisionCategoryComboBox.currentText())
                    formulaEditor.exec_()
                    self.close()
'''
app = QApplication(sys.argv)
gui = formulaSetupDialog(app)
gui.show()
sys.exit(app.exec_())
#test(formulaSetupDialog)''' | [
"pjrd.helpers.dbConnection",
"sys.path.append"
] | [((508, 534), 'sys.path.append', 'sys.path.append', (['"""../pjrd"""'], {}), "('../pjrd')\n", (523, 534), False, 'import sys\n'), ((11730, 11759), 'pjrd.helpers.dbConnection', 'dbConnection', (['"""FormulaSchema"""'], {}), "('FormulaSchema')\n", (11742, 11759), False, 'from pjrd.helpers import test, dbConnection\n'), ((13513, 13542), 'pjrd.helpers.dbConnection', 'dbConnection', (['"""FormulaSchema"""'], {}), "('FormulaSchema')\n", (13525, 13542), False, 'from pjrd.helpers import test, dbConnection\n')] |
"""
@Time : 203/21/19 17:11
@Author : TaylorMei
@Email : <EMAIL>
@Project : iccv
@File : crop_image.py
@Function:
"""
import os
import numpy as np
import skimage.io

# Source depth images and destination folder for the cropped output.
input_path = '/media/iccd/TAYLORMEI/depth/image'
output_path = '/media/iccd/TAYLORMEI/depth/crop'
if not os.path.exists(output_path):
    os.mkdir(output_path)

imglist = os.listdir(input_path)
for i, imgname in enumerate(imglist):
    print(i, imgname)
    image = skimage.io.imread(os.path.join(input_path, imgname))
    print(np.sum(image[80, :, :]))
    # Scan from the top for the first row that carries real content:
    # neither all-black (sum == 0) nor uniformly saturated.
    # NOTE(review): 367200 presumably corresponds to an all-white row
    # (e.g. 480 px * 3 channels * 255) -- confirm against the image width.
    for j in range(640):
        row_sum = np.sum(image[j, :, :])  # compute once, was evaluated twice per row
        if row_sum != 0 and row_sum != 367200:
            print(j)
            break
    # crop = image[80:560, :, :]
    # skimage.io.imsave(os.path.join(output_path, imgname), crop)
| [
"os.path.exists",
"os.listdir",
"os.path.join",
"numpy.sum",
"os.mkdir"
] | [((356, 378), 'os.listdir', 'os.listdir', (['input_path'], {}), '(input_path)\n', (366, 378), False, 'import os\n'), ((290, 317), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (304, 317), False, 'import os\n'), ((323, 344), 'os.mkdir', 'os.mkdir', (['output_path'], {}), '(output_path)\n', (331, 344), False, 'import os\n'), ((469, 502), 'os.path.join', 'os.path.join', (['input_path', 'imgname'], {}), '(input_path, imgname)\n', (481, 502), False, 'import os\n'), ((514, 537), 'numpy.sum', 'np.sum', (['image[80, :, :]'], {}), '(image[80, :, :])\n', (520, 537), True, 'import numpy as np\n'), ((575, 597), 'numpy.sum', 'np.sum', (['image[j, :, :]'], {}), '(image[j, :, :])\n', (581, 597), True, 'import numpy as np\n'), ((606, 628), 'numpy.sum', 'np.sum', (['image[j, :, :]'], {}), '(image[j, :, :])\n', (612, 628), True, 'import numpy as np\n')] |
import os
import pytest
from pyriksprot import interface
from pyriksprot.corpus import parlaclarin
from ..utility import RIKSPROT_PARLACLARIN_FAKE_FOLDER, RIKSPROT_PARLACLARIN_FOLDER
# Short alias -- path joining is used in nearly every test below.
jj = os.path.join
def test_to_protocol_in_depth_validation_of_correct_parlaclarin_xml():
    """A well-formed fake ParlaCLARIN file maps onto a fully populated protocol."""
    path = jj(RIKSPROT_PARLACLARIN_FAKE_FOLDER, "prot-1958-fake.xml")

    proto: interface.Protocol = parlaclarin.ProtocolMapper.to_protocol(path)

    assert proto is not None
    assert len(proto.utterances) == 4
    assert len(proto) == 4
    assert proto.name == 'prot-1958-fake'
    assert proto.date == '1958'
    assert proto.has_text, 'has text'
    assert proto.checksum(), 'checksum'
    # FIXME: More checks
@pytest.mark.parametrize('filename', ["prot-197879--14.xml"])
def test_parlaclarin_xml_with_no_utterances(filename):
    """A protocol without speech content yields an empty, text-less protocol."""
    # Protocols are stored under a sub-folder named after the session year
    # (the second dash-separated token of the filename).
    year_folder = filename.split('-')[1]
    path: str = jj(RIKSPROT_PARLACLARIN_FOLDER, "protocols", year_folder, filename)

    protocol = parlaclarin.ProtocolMapper.to_protocol(path, segment_skip_size=0)

    assert len(protocol.utterances) == 0, "utterances empty"
    assert not protocol.has_text
    # FIXME: More checks
def test_to_protocol_by_untangle():
    """The untangle-based parser extracts the same fake-protocol metadata."""
    path = jj(RIKSPROT_PARLACLARIN_FAKE_FOLDER, "prot-1958-fake.xml")

    protocol: parlaclarin.XmlUntangleProtocol = parlaclarin.XmlUntangleProtocol(path)

    assert protocol is not None
    assert len(protocol.utterances) == 4
    assert len(protocol) == 4
    assert protocol.name == 'prot-1958-fake'
    assert protocol.date == '1958'
    assert protocol.has_text, 'has text'
| [
"pytest.mark.parametrize",
"pyriksprot.corpus.parlaclarin.XmlUntangleProtocol",
"pyriksprot.corpus.parlaclarin.ProtocolMapper.to_protocol"
] | [((724, 784), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""filename"""', "['prot-197879--14.xml']"], {}), "('filename', ['prot-197879--14.xml'])\n", (747, 784), False, 'import pytest\n'), ((980, 1045), 'pyriksprot.corpus.parlaclarin.ProtocolMapper.to_protocol', 'parlaclarin.ProtocolMapper.to_protocol', (['path'], {'segment_skip_size': '(0)'}), '(path, segment_skip_size=0)\n', (1018, 1045), False, 'from pyriksprot.corpus import parlaclarin\n'), ((1327, 1368), 'pyriksprot.corpus.parlaclarin.XmlUntangleProtocol', 'parlaclarin.XmlUntangleProtocol', (['filename'], {}), '(filename)\n', (1358, 1368), False, 'from pyriksprot.corpus import parlaclarin\n')] |
from django.contrib import admin
# Register your models here.
from .models import DpDienste,DpBesatzung
class BesatzungInline(admin.TabularInline):
    """Edit crew (Besatzung) rows inline on the duty (Dienst) change page."""
    model = DpBesatzung
    extra = 0  # no blank extra rows by default
class DiensteAdmin(admin.ModelAdmin):
    """Admin change list for duties, filterable by roster, year, month and day."""
    list_display = ('tag', 'schicht', 'ordner', 'ordner_name')
    list_filter = ('ordner__dienstplan', 'ordner__jahr', 'ordner__monat', 'tag')
    # Crew members are edited inline on each duty's page.
    inlines = [BesatzungInline]
# Expose duties in the Django admin using the customised admin class above.
admin.site.register(DpDienste, DiensteAdmin)
| [
"django.contrib.admin.site.register"
] | [((408, 452), 'django.contrib.admin.site.register', 'admin.site.register', (['DpDienste', 'DiensteAdmin'], {}), '(DpDienste, DiensteAdmin)\n', (427, 452), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from scrapy.http import HtmlResponse
class JSMiddleware(object):
    """Scrapy downloader middleware that renders pages with PhantomJS.

    Returning an ``HtmlResponse`` from ``process_request`` short-circuits
    Scrapy's default downloader, so the spider receives the JavaScript-rendered
    DOM instead of the raw HTTP body.
    """

    def process_request(self, request, spider):
        print("PhantomJS is starting")
        configure = ['--load-images=false', '--disk-cache=true']
        driver = webdriver.PhantomJS(service_args=configure)
        try:
            driver.set_window_size(1400, 900)
            driver.get(request.url)
            body = driver.page_source
            # Capture the final URL (after any redirects) before quitting.
            current_url = driver.current_url
        finally:
            # BUG FIX: the original never called quit(), leaking one
            # PhantomJS browser process for every request rendered.
            driver.quit()
        return HtmlResponse(current_url, body=body, encoding='utf-8', request=request)
| [
"selenium.webdriver.PhantomJS",
"selenium.webdriver.support.ui.WebDriverWait",
"scrapy.http.HtmlResponse"
] | [((491, 534), 'selenium.webdriver.PhantomJS', 'webdriver.PhantomJS', ([], {'service_args': 'configure'}), '(service_args=configure)\n', (510, 534), False, 'from selenium import webdriver\n'), ((550, 575), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', '(10)'], {}), '(driver, 10)\n', (563, 575), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((697, 775), 'scrapy.http.HtmlResponse', 'HtmlResponse', (['driver.current_url'], {'body': 'body', 'encoding': '"""utf-8"""', 'request': 'request'}), "(driver.current_url, body=body, encoding='utf-8', request=request)\n", (709, 775), False, 'from scrapy.http import HtmlResponse\n')] |
from collections import defaultdict
from coalib.bearlib.aspects.collections import aspectlist
class bearclass(type):
    """
    Metaclass for :class:`coalib.bears.Bear.Bear` and therefore all bear
    classes.
    Pushing bears into the future... ;)
    """
    # by default a bear class has no aspects
    aspects = defaultdict(lambda: aspectlist([]))
    def __new__(mcs, clsname, bases, clsattrs, *varargs, aspects=None):
        # ``aspects`` is accepted (and dropped) here only so it never reaches
        # ``type.__new__``, which rejects unexpected keyword arguments; the
        # actual processing happens in ``__init__`` below.
        return type.__new__(mcs, clsname, bases, clsattrs, *varargs)
    def __init__(cls, clsname, bases, clsattrs, *varargs, aspects=None):
        """
        Initializes the ``.aspects`` dict on new bear classes from the mapping
        given to the keyword-only `aspects` argument.
        """
        type.__init__(cls, clsname, bases, clsattrs, *varargs)
        if aspects is not None:
            # Normalize every mapped value to an ``aspectlist``; lookups of
            # unmapped aspects fall back to an empty aspectlist.
            cls.aspects = defaultdict(
                lambda: aspectlist([]),
                ((k, aspectlist(v)) for (k, v) in dict(aspects).items()))
| [
"coalib.bearlib.aspects.collections.aspectlist"
] | [((343, 357), 'coalib.bearlib.aspects.collections.aspectlist', 'aspectlist', (['[]'], {}), '([])\n', (353, 357), False, 'from coalib.bearlib.aspects.collections import aspectlist\n'), ((890, 904), 'coalib.bearlib.aspects.collections.aspectlist', 'aspectlist', (['[]'], {}), '([])\n', (900, 904), False, 'from coalib.bearlib.aspects.collections import aspectlist\n'), ((927, 940), 'coalib.bearlib.aspects.collections.aspectlist', 'aspectlist', (['v'], {}), '(v)\n', (937, 940), False, 'from coalib.bearlib.aspects.collections import aspectlist\n')] |
from pathlib import Path
import pandas as pd
import config
from mbs.mbs import *
import streamlit as st
from streamlit_autorefresh import st_autorefresh
import yaml
# ====================================
# Authentication
# ====================================
# Twitter API credentials (values come from the local ``config`` module).
AK = config.API_KEY # not really necessary
AKS = config.API_KEY_SECRET # not really necessary
BT = config.BEARER_TOKEN
# MonkeyLearn credentials and model ids (sentiment + keyword extraction).
MKL_AK = config.MONKEYLEARN_API_KEY
MKL_ST_MODEL_ID = config.MONKEYLEARN_SENTIMENT_MODEL_ID
MKL_EX_MODEL_ID = config.MONKEYLEARN_KEYWORD_EXTRACTOR_MODEL_ID
# Local files read/written by the dashboard.
DATA_DIR = Path('./data')
LOG_FILE = Path('./log/log_file.txt')
LOG_FILE_COLOR = Path('./log/log_file_color.txt')
NOTE_FILE = Path('./note/summary.yaml')
DEEPL_AK = config.DEEPL_API_KEY
# Base pixel size used for the figures and the map panel.
size = 640
input_file = DATA_DIR/'tweet_bus_de_en.csv'
# Number of most recent tweets shown in the per-tweet sentiment panel.
n_last = 30
# center = (11.57540, 48.13714)
# ============================================
# scratch streamlit
# --------------------------------------------
# Columns of the tweet export actually needed downstream.
usecols = ['id', 'created_at', 'geo', 'place', 'coordinates', 'text',
           'text_en', 'truncated', 'name', 'screen_name']
df = pd.read_csv(input_file,
                 parse_dates=['created_at'],
                 usecols=usecols)
# =============================================
# monkey learn
# --------------------------------------------
# Annotate tweets with MonkeyLearn sentiment, then extracted keywords.
# NOTE(review): the ``*_dummy`` helpers come from ``mbs.mbs``; presumably
# cached/offline stand-ins for the live MonkeyLearn calls -- confirm.
df_stx = get_mkl_st_dummy(df, MKL_AK)
df_kex = get_mkl_ex_dummy(df_stx, MKL_AK)
df_kex.to_csv(DATA_DIR/'mbs_kex.csv', index=False)
df_geo = extract_place(df_kex)
df_geo.to_csv(DATA_DIR/'mbs_geo.csv', index=False)
# Numeric sentiment per tweet, aggregated into 12-hour buckets.
df_pn = add_sentiment_digit(df_kex)
df_agg = aggregate_sentiment(df_pn, freq='12H')
# '120S' '2H' '1D'
df_agg.to_csv(DATA_DIR/'mbs_agg.csv', index=False)
# --------------------------------------------
# calculate daily aggregate
# --------------------------------------------
fig_agg = visualize_agg(df_agg, size)
fig_count = visualize_count(df_agg, size)
# --------------------------------------------
fig_pn = visualize_pn(df_pn, size, vertical=True)
# --------------------------------------------
# wordcloud
# --------------------------------------------
wc = create_wordcloud(df_kex, size)
fig_wc = visualize_wc(wc)
# --------------------------------------------
# folium
# --------------------------------------------
m_1 = plot_sentiment(df_kex)
# --------------------------------------------
# logfile
# --------------------------------------------
# Plain-text polling log (one line per poll) and an HTML-colored variant.
# BUG FIX: removed leftover debug counter (``i = 0`` / ``print(i+1)``) that
# unconditionally printed "1" on every run.
with open(LOG_FILE, 'r') as f:
    log_text = f.readlines()
log_text = [s.replace('\n', '') for s in log_text]
with open(LOG_FILE_COLOR, 'r') as f:
    log_text_color = f.readlines()
# Newlines become <br /> so the log renders line-by-line in markdown/HTML.
log_text_color = [s.replace('\n', '<br />') for s in log_text_color]
# --------------------------------------------
# read YAML markdown text
with open(NOTE_FILE, 'r') as s:
    try:
        note = yaml.safe_load(s)
    except yaml.YAMLError as e:
        # Fail fast: the dashboard cannot render without the note texts, and
        # swallowing the error would only defer to a NameError on ``note``.
        print(e)
        raise
# =====================================================================
# streamlit scratch
# =====================================================================
st.set_page_config(layout='wide')
# Re-run the app every 20 minutes (interval is in milliseconds), at most 16 times.
count = st_autorefresh(interval=1000 * 1200, limit=16, key="sagasitemiyou")
# --------------------------------------------
# 1. row : cautions
# --------------------------------------------
col1, col2, col3 = st.columns((0.9, 0.9, 0.9))
with col1:
    st.markdown(note['note1'], unsafe_allow_html=True)
with col2:
    st.markdown(note['note2'], unsafe_allow_html=True)
with col3:
    st.markdown(note['note3'], unsafe_allow_html=True)
st.markdown("""___""")
# --------------------------------------------
# 2. row questions and conclusions
# --------------------------------------------
st.title('How People like Munich Bus Service')
col1, col2, col3 = st.columns([0.9, 0.1, 1.4])
with col1:
    st.markdown(note['questions'], unsafe_allow_html=True)
with col3:
    st.markdown(note['conclusions'], unsafe_allow_html=True)
# --------------------------------------------
# 3. row
# --------------------------------------------
st.markdown('### Overall Sentiment')
st.plotly_chart(fig_agg, use_container_width=True)
st.markdown('### How many Tweets about Bus?')
st.plotly_chart(fig_count, use_container_width=True)
# --------------------------------------------
# 5. row : report and polling log
# --------------------------------------------
col1, col2 = st.columns((1, 0.9))
log = '\n'.join(log_text)
# Show only the four most recent colored log lines.
log_color = ' '.join(log_text_color[-4:])
with col1:
    st.markdown(note['map_caption'])
with col2:
    st.markdown('### Polling log')
    st.markdown(log_color, unsafe_allow_html=True)
# --------------------------------------------
# 4. row
# --------------------------------------------
# Wordcloud term frequencies, most frequent first.
df_words = pd.DataFrame(dict(word=wc.words_.keys(), frac=wc.words_.values()))
df_words.sort_values(['frac'], ascending=False, inplace=True)
col1, col2, col3 = st.columns((2, 0.9, 0.9))
with col1:
    st.markdown('### Where people are satisfied/dissatified?')
    m_1.to_streamlit(height=size*1)
with col2:
    text = f'### Last {n_last} Tweets'
    st.markdown(text)
    st.plotly_chart(fig_pn, use_container_width=True)
with col3:
    st.markdown('### Satisfied/dissatified with...')
    st.image(wc.to_image())
    st.dataframe(df_words)
# --------------------------------------------
# 6. row
# --------------------------------------------
st.markdown('### All Data')
# Drop index/identity columns; errors='ignore' tolerates missing columns.
st.dataframe(df_kex.drop(
    ['Unnamed: 0', 'name', 'screen_name'], axis=1, errors='ignore'),
    height=size)
# --------------------------------------------
| [
"streamlit_autorefresh.st_autorefresh",
"streamlit.markdown",
"pandas.read_csv",
"pathlib.Path",
"yaml.safe_load",
"streamlit.plotly_chart",
"streamlit.dataframe",
"streamlit.set_page_config",
"streamlit.columns",
"streamlit.title"
] | [((551, 565), 'pathlib.Path', 'Path', (['"""./data"""'], {}), "('./data')\n", (555, 565), False, 'from pathlib import Path\n'), ((577, 603), 'pathlib.Path', 'Path', (['"""./log/log_file.txt"""'], {}), "('./log/log_file.txt')\n", (581, 603), False, 'from pathlib import Path\n'), ((621, 653), 'pathlib.Path', 'Path', (['"""./log/log_file_color.txt"""'], {}), "('./log/log_file_color.txt')\n", (625, 653), False, 'from pathlib import Path\n'), ((666, 693), 'pathlib.Path', 'Path', (['"""./note/summary.yaml"""'], {}), "('./note/summary.yaml')\n", (670, 693), False, 'from pathlib import Path\n'), ((1074, 1142), 'pandas.read_csv', 'pd.read_csv', (['input_file'], {'parse_dates': "['created_at']", 'usecols': 'usecols'}), "(input_file, parse_dates=['created_at'], usecols=usecols)\n", (1085, 1142), True, 'import pandas as pd\n'), ((3001, 3034), 'streamlit.set_page_config', 'st.set_page_config', ([], {'layout': '"""wide"""'}), "(layout='wide')\n", (3019, 3034), True, 'import streamlit as st\n'), ((3043, 3110), 'streamlit_autorefresh.st_autorefresh', 'st_autorefresh', ([], {'interval': '(1000 * 1200)', 'limit': '(16)', 'key': '"""sagasitemiyou"""'}), "(interval=1000 * 1200, limit=16, key='sagasitemiyou')\n", (3057, 3110), False, 'from streamlit_autorefresh import st_autorefresh\n'), ((3245, 3272), 'streamlit.columns', 'st.columns', (['(0.9, 0.9, 0.9)'], {}), '((0.9, 0.9, 0.9))\n', (3255, 3272), True, 'import streamlit as st\n'), ((3474, 3492), 'streamlit.markdown', 'st.markdown', (['"""___"""'], {}), "('___')\n", (3485, 3492), True, 'import streamlit as st\n'), ((3627, 3673), 'streamlit.title', 'st.title', (['"""How People like Munich Bus Service"""'], {}), "('How People like Munich Bus Service')\n", (3635, 3673), True, 'import streamlit as st\n'), ((3693, 3720), 'streamlit.columns', 'st.columns', (['[0.9, 0.1, 1.4]'], {}), '([0.9, 0.1, 1.4])\n', (3703, 3720), True, 'import streamlit as st\n'), ((3968, 4004), 'streamlit.markdown', 'st.markdown', (['"""### Overall 
Sentiment"""'], {}), "('### Overall Sentiment')\n", (3979, 4004), True, 'import streamlit as st\n'), ((4005, 4055), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig_agg'], {'use_container_width': '(True)'}), '(fig_agg, use_container_width=True)\n', (4020, 4055), True, 'import streamlit as st\n'), ((4056, 4101), 'streamlit.markdown', 'st.markdown', (['"""### How many Tweets about Bus?"""'], {}), "('### How many Tweets about Bus?')\n", (4067, 4101), True, 'import streamlit as st\n'), ((4102, 4154), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig_count'], {'use_container_width': '(True)'}), '(fig_count, use_container_width=True)\n', (4117, 4154), True, 'import streamlit as st\n'), ((4297, 4317), 'streamlit.columns', 'st.columns', (['(1, 0.9)'], {}), '((1, 0.9))\n', (4307, 4317), True, 'import streamlit as st\n'), ((4796, 4821), 'streamlit.columns', 'st.columns', (['(2, 0.9, 0.9)'], {}), '((2, 0.9, 0.9))\n', (4806, 4821), True, 'import streamlit as st\n'), ((5285, 5312), 'streamlit.markdown', 'st.markdown', (['"""### All Data"""'], {}), "('### All Data')\n", (5296, 5312), True, 'import streamlit as st\n'), ((3288, 3338), 'streamlit.markdown', 'st.markdown', (["note['note1']"], {'unsafe_allow_html': '(True)'}), "(note['note1'], unsafe_allow_html=True)\n", (3299, 3338), True, 'import streamlit as st\n'), ((3355, 3405), 'streamlit.markdown', 'st.markdown', (["note['note2']"], {'unsafe_allow_html': '(True)'}), "(note['note2'], unsafe_allow_html=True)\n", (3366, 3405), True, 'import streamlit as st\n'), ((3422, 3472), 'streamlit.markdown', 'st.markdown', (["note['note3']"], {'unsafe_allow_html': '(True)'}), "(note['note3'], unsafe_allow_html=True)\n", (3433, 3472), True, 'import streamlit as st\n'), ((3736, 3790), 'streamlit.markdown', 'st.markdown', (["note['questions']"], {'unsafe_allow_html': '(True)'}), "(note['questions'], unsafe_allow_html=True)\n", (3747, 3790), True, 'import streamlit as st\n'), ((3807, 3863), 'streamlit.markdown', 'st.markdown', 
(["note['conclusions']"], {'unsafe_allow_html': '(True)'}), "(note['conclusions'], unsafe_allow_html=True)\n", (3818, 3863), True, 'import streamlit as st\n'), ((4401, 4433), 'streamlit.markdown', 'st.markdown', (["note['map_caption']"], {}), "(note['map_caption'])\n", (4412, 4433), True, 'import streamlit as st\n'), ((4450, 4480), 'streamlit.markdown', 'st.markdown', (['"""### Polling log"""'], {}), "('### Polling log')\n", (4461, 4480), True, 'import streamlit as st\n'), ((4485, 4531), 'streamlit.markdown', 'st.markdown', (['log_color'], {'unsafe_allow_html': '(True)'}), '(log_color, unsafe_allow_html=True)\n', (4496, 4531), True, 'import streamlit as st\n'), ((4837, 4895), 'streamlit.markdown', 'st.markdown', (['"""### Where people are satisfied/dissatified?"""'], {}), "('### Where people are satisfied/dissatified?')\n", (4848, 4895), True, 'import streamlit as st\n'), ((4987, 5004), 'streamlit.markdown', 'st.markdown', (['text'], {}), '(text)\n', (4998, 5004), True, 'import streamlit as st\n'), ((5009, 5058), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig_pn'], {'use_container_width': '(True)'}), '(fig_pn, use_container_width=True)\n', (5024, 5058), True, 'import streamlit as st\n'), ((5075, 5123), 'streamlit.markdown', 'st.markdown', (['"""### Satisfied/dissatified with..."""'], {}), "('### Satisfied/dissatified with...')\n", (5086, 5123), True, 'import streamlit as st\n'), ((5156, 5178), 'streamlit.dataframe', 'st.dataframe', (['df_words'], {}), '(df_words)\n', (5168, 5178), True, 'import streamlit as st\n'), ((2769, 2786), 'yaml.safe_load', 'yaml.safe_load', (['s'], {}), '(s)\n', (2783, 2786), False, 'import yaml\n')] |
from rest_framework import serializers
from .models import (
Flow,
DailyFlowRuns,
Group,
DailyGroupCount,
Channel,
DailyChannelCount,
Label,
)
class FlowSerializer(serializers.ModelSerializer):
    """Serializer for :class:`Flow`.

    ``is_active`` is read-only; ``uuid`` is accepted on input but is
    stripped before updates so the identifier can never be rewritten.
    """

    class Meta:
        model = Flow
        fields = ["uuid", "name", "is_active"]
        read_only_fields = ["is_active"]

    def update(self, instance, validated_data):
        # Drop "uuid" (when present) so clients cannot change the flow's
        # identifier.  pop(..., None) keeps partial updates that omit the
        # uuid from raising KeyError (the old bare pop("uuid") did).
        validated_data.pop("uuid", None)
        return super().update(instance, validated_data)
class DailyFlowRunsSerializer(serializers.ModelSerializer):
    """Serializer for DailyFlowRuns with its related flow nested inline."""
    flow = FlowSerializer()
    class Meta:
        model = DailyFlowRuns
        fields = ["flow", "active", "completed", "interrupted", "expired", "day"]
class MostAccessedFlowStatusSerializer(serializers.Serializer):
    """Plain (non-model) serializer: a flow's identity plus aggregated
    run counts per status."""
    uuid = serializers.CharField(max_length=255)
    name = serializers.CharField(max_length=255)
    active = serializers.IntegerField()
    completed = serializers.IntegerField()
    interrupted = serializers.IntegerField()
    expired = serializers.IntegerField()
class GroupSerializer(serializers.ModelSerializer):
    """Minimal serializer exposing a Group's uuid and name."""
    class Meta:
        model = Group
        fields = ["uuid", "name"]
class DailyGroupCountSerializer(serializers.ModelSerializer):
    """Serializer for DailyGroupCount with its related group nested inline."""
    group = GroupSerializer()
    class Meta:
        model = DailyGroupCount
        fields = ["group", "count", "day"]
class ChannelSerializer(serializers.ModelSerializer):
    """Minimal serializer exposing a Channel's uuid and name."""
    class Meta:
        model = Channel
        fields = ["uuid", "name"]
class DailyChannelCountSerializer(serializers.ModelSerializer):
    """Serializer for DailyChannelCount with its related channel nested inline."""
    channel = ChannelSerializer()
    class Meta:
        model = DailyChannelCount
        fields = ["channel", "count", "day"]
class LabelCountSerializer(serializers.ModelSerializer):
    """Label plus a message count.

    NOTE(review): ``msg_count`` is declared explicitly, so it is expected
    on the serialized instance — presumably supplied by a queryset
    annotation; confirm against the view that uses this serializer.
    """
    msg_count = serializers.IntegerField()
    class Meta:
        model = Label
        fields = ["uuid", "name", "msg_count"]
| [
"rest_framework.serializers.IntegerField",
"rest_framework.serializers.CharField"
] | [((786, 823), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (807, 823), False, 'from rest_framework import serializers\n'), ((835, 872), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (856, 872), False, 'from rest_framework import serializers\n'), ((886, 912), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (910, 912), False, 'from rest_framework import serializers\n'), ((929, 955), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (953, 955), False, 'from rest_framework import serializers\n'), ((974, 1000), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (998, 1000), False, 'from rest_framework import serializers\n'), ((1015, 1041), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (1039, 1041), False, 'from rest_framework import serializers\n'), ((1755, 1781), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (1779, 1781), False, 'from rest_framework import serializers\n')] |
import numpy as np
import gzip
from Bio import SeqIO
from pathlib import Path
import os
import subprocess
import tarfile
from io import BytesIO
#for parallel computing
from joblib import Parallel, delayed
import multiprocessing
# number of joblib workers used by the (experimental) parallel branch of compute_energy
num_cores_energy = multiprocessing.cpu_count()
from tqdm import tqdm
import pandas as pd
import sys
# The 20 standard amino acids (one-letter codes) followed by the gap symbol.
valid_aa = list("ACDEFGHIKLMNPQRSTVWY") + ['-']
# The same residues as three-letter codes, gap last (index-aligned with valid_aa).
aa_3 = ['ALA', 'CYS', 'ASP', 'GLU', 'PHE', 'GLY', 'HIS', 'ILE', 'LYS', 'LEU',
        'MET', 'ASN', 'PRO', 'GLN', 'ARG', 'SER', 'THR', 'VAL', 'TRP', 'TYR', '-']
# One-letter code -> state index used to address the h/J parameter arrays.
d_aa_num = dict((letter, state) for state, letter in enumerate(valid_aa))
# Three-letter -> one-letter code lookup.
d_3to1 = dict(zip(aa_3, valid_aa))
def read_dca_par(path_h_DCA, path_J_DCA):
    """Load the DCA fields (h) and couplings (J) from two .tar.gz archives.

    Each archive is expected to contain a single ``.npy`` payload; if an
    archive holds several members, the last readable one wins (as in the
    original implementation).  Returns (h, J); an archive with no readable
    member yields None for that array (the original raised NameError).

    Fixes vs. the original: the tar archives are opened with a context
    manager so their handles are closed, and the duplicated extraction
    logic is factored into one helper.
    """
    def _load_npy_from_targz(archive_path):
        # extract the (last) readable member and np.load it in memory
        arr = None
        with tarfile.open(archive_path, "r:gz") as tar:
            for member in tar.getmembers():
                handle = tar.extractfile(member)
                if handle is not None:
                    arr = np.load(BytesIO(handle.read()))
        return arr

    h = _load_npy_from_targz(path_h_DCA)
    J = _load_npy_from_targz(path_J_DCA)
    return h, J
def compute_sm_energy_dict(seq, h, J):
    """For all SINGLE MUTANTS of ``seq``, return a dict mapping
    (position, mutated_aa) -> E(mutant) - E(wild type), where the energy
    difference is rounded to 4 decimals.

    Scans every position and every state in ``valid_aa`` (gap included),
    so the dict holds ~21*L entries and each entry costs a full energy
    evaluation -- this can be VERY SLOW and the dict BIG.  See
    ``compute_sm_energy`` for a cheaper, targeted variant.

    Fix vs. the original: leftover debug ``print`` statements (position
    index and every mutant energy) were removed.
    """
    E0 = compute_energy(seq, h, J)
    d_sm = {}
    for i in range(0, len(seq)):
        # substitute every possible state (including the gap) at position i
        for aa in valid_aa:
            new_seq = seq[:i] + aa + seq[(i+1):]
            E = compute_energy(new_seq, h, J)
            d_sm[i, aa] = np.round(E - E0, 4)
    return d_sm
def compute_sm_energy(seq, h, J, idx, aa):
    """Given the reference sequence plus mutation positions ``idx`` and
    mutated residues ``aa``, return the SUM of the full DCA energies of
    the corresponding single mutants, rounded to 4 decimals.

    Note: despite the original docstring, the wild-type energy is NOT
    subtracted from the returned value -- callers receive the raw sum
    (compute_energy_local_msa stores it as-is in the 'e_sum_sm' column).

    Fix vs. the original: a wild-type energy E0 was computed up front
    (one full O(L^2) energy evaluation) and then never used; that dead
    computation was removed.
    """
    E_sum_sm = 0
    for i, a_i in zip(idx, aa):
        new_seq = seq[:i] + a_i + seq[(i+1):]
        E_sum_sm += compute_energy(new_seq, h, J)
    return np.round(E_sum_sm, 4)
def compute_energy(seq, h, J, parallel = False):
    """Potts/DCA energy of ``seq``: E = -sum_i h_i(a_i) - sum_{i<j} J_ij(a_i, a_j).

    Implicitly returns None when ``seq`` contains a non-standard amino
    acid (the all_standard_aa guard silently skips the computation).
    The parallel branch is explicitly marked broken below -- keep
    parallel=False.
    """
    if all_standard_aa(seq):
        if(parallel == True):
            #DO NOT USE FOR NOW!!!
            #something weird... E_parallel != E_non_parallel
            # parallel actually slower than non parallel (execution time limited by memory access and not processor time??)
            E = 0
            all_ei = Parallel(n_jobs=num_cores_energy)(delayed(compute_energy_given_ai)(seq, h, J, idx_ai) for idx_ai in range(0,len(seq)))
            E = np.sum(all_ei)
            return E
        if(parallel == False):
            E = 0
            for idx_aa1 in range(0, len(seq)):
                aa1 = seq[idx_aa1]
                # single-site (field) contribution, negative-sign convention
                E -= h[d_aa_num[aa1], idx_aa1]
                for idx_aa2 in range(idx_aa1+1, len(seq)):
                    aa2 = seq[idx_aa2]
                    # pairwise coupling; each unordered pair counted once
                    E -= J[d_aa_num[aa1], d_aa_num[aa2], idx_aa1, idx_aa2]
            return E
def compute_energy_given_ai(seq, h, J, idx_ai):
    """Energy contribution of position ``idx_ai``:
    e_i = -h_i(a_i) - sum_{j>i} J_ij(a_i, a_j).

    e.g. idx_ai=1 gives E_1 = -h_1 - J_12 - J_13 ...; summing e_i over all
    positions reproduces the serial branch of compute_energy, which uses
    the convention E = -sum h - sum J.

    Fix vs. the original: the field term started from +h while the serial
    code subtracts it, which is the likely cause of the documented
    "E_parallel != E_non_parallel" mismatch in compute_energy.
    """
    ai = seq[idx_ai]
    # negative field term to match compute_energy's serial sign convention
    ei = -h[d_aa_num[ai], idx_ai]
    for idx_aj in range(idx_ai + 1, len(seq)):
        aj = seq[idx_aj]
        ei -= J[d_aa_num[ai], d_aa_num[aj], idx_ai, idx_aj]
    return ei
def compute_entropy_context_ind(path_msa):
    """Context-independent (profile) entropy per column of the MSA at ``path_msa``."""
    return compute_entropy_from_freq(compute_freq(path_msa))
def compute_entropy_from_freq(fi, remove_gaps=True, base2=True):
    """Per-column Shannon entropy of a (q x N) frequency matrix.

    With ``remove_gaps`` the gap row (index 20) is dropped and each column
    renormalized over the 20 amino-acid states.  ``base2`` converts the
    natural-log entropy to bits.  Returns a list of N entropies.
    """
    if remove_gaps:
        # keep the 20 amino-acid rows and renormalize every column
        fi = fi[:20, :] / np.sum(fi[:20, :], axis=0)
    n_states, n_cols = fi.shape
    entropies = []
    for col in range(n_cols):
        col_entropy = 0
        for state in range(n_states):
            col_entropy -= fi[state, col] * np.log(fi[state, col])
        if base2:
            col_entropy /= np.log(2)
        entropies.append(col_entropy)
    return entropies
def compute_entropy_context_dep(ref_seq, h,J ):
    ''' compute context-DEPENDENT entropy (from hhblits ref_seq, h, J)

    For each position i, builds the single-site distribution
    fi_plm[a, i] proportional to exp(h[a, i] + sum_{j != i} J[a, aj, i, j]),
    i.e. a softmax over states a with the rest of ref_seq fixed as the
    background, then returns the per-position entropy of that
    distribution (via compute_entropy_from_freq).
    '''
    q, N = h.shape
    fi_plm = np.zeros(h.shape)
    #same conventions than in Eq.5.8 (PhD thesis)
    for i in range(0,N):
        #compute denominator (partition function over all q states at i)
        denom = 0
        for b in range(0,q):
            arg_denom = h[b,i]
            for j in range(0,N):
                if(j!=i):
                    aj = d_aa_num[ref_seq[j]]
                    arg_denom += J[b, aj ,i, j]
            denom += np.exp(arg_denom)
        # compute numerator (unnormalized weight of each state ai)
        for ai in range(0,q):
            arg_num = h[ai,i]
            for j in range(0,N):
                if(j!=i):
                    aj = d_aa_num[ref_seq[j]]
                    arg_num += J[ai, aj ,i, j]
            num = np.exp(arg_num)
            fi_plm[ai,i] = num/denom
    #return the entropy
    S = compute_entropy_from_freq(fi_plm)
    return S
def compute_num_gap(seq):
    """Return how many gap symbols ('-') occur in ``seq``."""
    return sum(1 for symbol in seq if symbol == '-')
def compute_gap_fraction(seq):
    """Fraction of positions of ``seq`` that are gaps ('-'), in [0, 1].

    An empty sequence has no positions, so its gap fraction is 0.0
    (the original raised ZeroDivisionError on "").
    """
    if not seq:
        return 0.0
    return compute_num_gap(seq) / len(seq)
def compute_diff(ref_seq, seq):
    """Case-insensitive position-wise diff of two aligned strings.

    Returns three parallel lists: the mismatch positions, the residues
    from ``ref_seq`` (wild type) and the residues from ``seq`` (mutant).
    """
    positions = []
    from_ref = []
    from_mut = []
    for position, (ref_char, mut_char) in enumerate(zip(ref_seq, seq)):
        if ref_char.lower() != mut_char.lower():
            positions.append(position)
            from_ref.append(ref_char)
            from_mut.append(mut_char)
    return positions, from_ref, from_mut
def compute_dist(ref_seq, seq):
    """Case-insensitive Hamming distance between two aligned strings."""
    mismatches = 0
    for ref_char, mut_char in zip(ref_seq, seq):
        if ref_char.lower() != mut_char.lower():
            mismatches += 1
    return mismatches
def compute_dist_excluding_gaps(ref_seq, seq):
    """Case-insensitive Hamming distance that skips any column where
    either string carries a gap ('-')."""
    return sum(
        1
        for x, y in zip(ref_seq, seq)
        if x != '-' and y != '-' and x.lower() != y.lower()
    )
def compute_seqid(ref_seq, seq):
    """Sequence identity in [0, 1]: 1 minus the gap-excluded mismatch
    count divided by len(seq)."""
    mismatches = compute_dist_excluding_gaps(ref_seq, seq)
    return 1 - mismatches / len(seq)
def compute_freq(path_msa):
    """Single-site (per-column) frequencies of the fasta MSA at ``path_msa``.

    Returns a (21, L) array over the states of ``valid_aa`` (gap
    included).  A small pseudocount keeps every frequency strictly
    positive (avoids 0*log(0) in downstream entropy code), and columns
    are normalized to sum to 1.

    Fix vs. the original: the MSA file is opened with a context manager
    instead of a bare ``open`` whose handle was never closed.
    """
    with open(path_msa, 'r') as handle:
        records_msa = list(SeqIO.parse(handle, "fasta"))
    fi = np.zeros((len(d_aa_num), len(records_msa[0].seq)))
    for rec in records_msa:
        for idx_aa, amino_a in enumerate(rec.seq):
            fi[d_aa_num[amino_a], idx_aa] += 1
    # add a (small) pseudocount to take into account 0 frequencies (0*log(0))
    alpha = 0.0001
    fi = (1 - alpha) * fi + alpha / 2
    # normalize each column
    fi /= fi.sum(axis=0)
    return fi
def all_standard_aa(seq):
    """Return True iff every character of ``seq`` is a standard state.

    Gaps pass: '-' is both in ``valid_aa`` and explicitly allowed.
    Fix vs. the original: removed the unreachable ``break`` that sat
    after ``return False``; the logic is otherwise unchanged.
    """
    for char in seq:
        if (char not in valid_aa) and char != '-':
            return False
    return True
def split_proteome(path_ref_proteome, name_ref_proteome, tmp_path):
    """Split a multi-record reference proteome fasta into one temporary
    single-record file per protein (named 'ref_<id>') under ``tmp_path``.

    Returns 0.
    """
    proteome_path = os.path.join(path_ref_proteome, name_ref_proteome)
    with open(proteome_path, "r") as input_handle:
        for record_ref in SeqIO.parse(input_handle, "fasta"):
            name = record_ref.id
            # one tmp fasta per reference protein
            with open(os.path.join(tmp_path, "ref_" + name), "w") as f_tmp:
                f_tmp.write(">" + name + "\n")
                f_tmp.write(str(record_ref.seq))
    return 0
def run_hhblits(path_hhblits, path_ref_prot, path_db, path_msa_out, num_cores):
    """Run hhblits against ``path_db`` and write the filtered
    distant-homolog MSA (fasta) to ``path_msa_out``.

    Filtering keeps sequences that are <10% gaps, <90% identical to the
    reference, and contain only standard amino acids.  Returns 0.

    Fixes vs. the original: the reference fasta was re-parsed (with a
    leaked file handle) inside the per-record loop -- it is now parsed
    once up front; FNULL and the output file are closed via context
    managers; the docstring no longer promises a sequence count that was
    never returned.
    """
    # 1) run hhblits (its stdout/stderr are discarded)
    with open(os.devnull, 'w') as FNULL:
        subprocess.run([path_hhblits, '-i', path_ref_prot, '-d', path_db, '-oa3m', path_ref_prot+".a3m", '-cpu' , str(num_cores)], stdout=FNULL, stderr=subprocess.STDOUT)
    # parse the reference sequence once
    with open(path_ref_prot, 'r') as ref_handle:
        records_ref = list(SeqIO.parse(ref_handle, "fasta"))
    ref_seq = str(records_ref[0].seq)
    # 2) parse and filter the hhblits msa
    with open(path_msa_out, 'w') as file_out:
        with open(path_ref_prot+".a3m", "r") as input_handle:
            for record in SeqIO.parse(input_handle, "fasta"):
                seq = str(record.seq)
                # hhblits output is a3m; drop inserted (lowercase/dot) columns to make it fasta
                seq = ''.join(char for char in seq if (char.isupper() or char =='-'))
                # keep: gap fraction < 10%, identity to reference < 90%, standard aa only
                if( (compute_gap_fraction(seq) < 0.1) and (compute_seqid(ref_seq, seq) < 0.9) and all_standard_aa(seq)):
                    file_out.write(str(">"+record.id)+'\n')
                    file_out.write(str(seq)+'\n')
    return 0
def filterMSA(path_ref_prot, path_msa_in, path_msa_out, include_refseq=True, max_grap_fraction = 0.2, max_seq_id = 0.9):
    """Copy MSA records from ``path_msa_in`` to ``path_msa_out``.

    Optionally keeps the first record (assumed to be the reference), then
    keeps every sequence whose gap fraction is < ``max_grap_fraction``,
    whose identity to the reference is < ``max_seq_id`` and that contains
    only standard amino acids.  Returns 0.

    NOTE(review): the reference fasta is opened via a bare ``open`` whose
    handle is never closed; the comment block below quotes the old
    hard-coded thresholds (10 gaps / 90%), not the parameter defaults.
    """
    file_out = open(path_msa_out, 'w')
    # parse and filter the msa
    records_ref = list(SeqIO.parse(open(path_ref_prot,'r'), "fasta"))
    ref_seq = str(records_ref[0].seq)
    with open(path_msa_in, "r") as input_handle:
        count = 1
        for idx_record, record in enumerate(SeqIO.parse(input_handle, "fasta")):
            seq = str(record.seq)
            #remove dot and lower
            seq = ''.join(char for char in seq if (char.isupper() or char =='-'))
            # do the filtering
            # - remove the sequences which are to gapped (i.e. sequence must contain less than 10 gap)
            # - remove sequence which are close the reference sequence (i.e. sequence_identity must be less than 90%)
            # - remove sequences containing non standard aa
            if include_refseq and count == 1: # Keep the first seq, i.e., the reference sequence
                file_out.write(str(">"+record.id)+'\n')
                file_out.write(str(seq)+'\n')
                count += 1
            elif( (compute_gap_fraction(seq) < max_grap_fraction) and (compute_seqid(ref_seq, seq) < max_seq_id) and all_standard_aa(seq)):
                file_out.write(str(">"+record.id)+'\n')
                file_out.write(str(seq)+'\n')
    file_out.close()
    return 0
def filterMSA_gisaid(path_ref_prot, path_msa_in, path_msa_out, max_grap_fraction = 0.2, min_seq_id = 0.9):
    """GISAID variant of filterMSA: always keeps the first record (the
    reference) and then keeps sequences CLOSE to it -- identity must be
    > ``min_seq_id`` (filterMSA keeps distant ones, < max_seq_id) --
    subject to the same gap-fraction and standard-aa filters.  Returns 0.

    NOTE(review): as in filterMSA, the reference fasta handle opened by
    the bare ``open`` is never closed, and the comment block quotes old
    hard-coded thresholds rather than the parameter defaults.
    """
    file_out = open(path_msa_out, 'w')
    # parse and filter the msa
    records_ref = list(SeqIO.parse(open(path_ref_prot,'r'), "fasta"))
    ref_seq = str(records_ref[0].seq)
    with open(path_msa_in, "r") as input_handle:
        count = 1
        for idx_record, record in enumerate(SeqIO.parse(input_handle, "fasta")):
            seq = str(record.seq)
            #remove dot and lower
            seq = ''.join(char for char in seq if (char.isupper() or char =='-'))
            # do the filtering
            # - remove the sequences which are to gapped (i.e. sequence must contain less than 10 gap)
            # - remove sequence which are far the reference sequence (i.e. sequence_identity must greater than 90%)
            # - remove sequences containing non standard aa
            if count == 1: # Keep the first seq, i.e., the reference sequence
                file_out.write(str(">"+record.id)+'\n')
                file_out.write(str(seq)+'\n')
                count += 1
            elif( (compute_gap_fraction(seq) < max_grap_fraction) and (compute_seqid(ref_seq, seq) > min_seq_id) and all_standard_aa(seq)):
                file_out.write(str(">"+record.id)+'\n')
                file_out.write(str(seq)+'\n')
    file_out.close()
    return 0
def do_DCA_inference(path_msa, path_dca_par, min_num_seq, num_cores):
    """Run plmDCA inference (Julia script) on the MSA at ``path_msa``.

    Skips MSAs with at most ``min_num_seq`` sequences (prints an error).
    Julia's stdout/stderr are captured in ``<path_msa>.out``, prefixed
    with the MSA file name.  Returns 0.

    Fix vs. the original: the log file was re-opened with mode 'w' for
    the subprocess, which truncated it and threw away the header line
    just written; per the original comment's intent it is now re-opened
    in append mode ('a').
    """
    # 1) number of sequences: fasta stores two lines per record.
    # N.b. in Uniclust30 Meff ~ M
    with open(path_msa) as msa_handle:
        M = len(msa_handle.readlines()) / 2
    # 2) do the inference with DCA, only for msa with more than min_num_seq sequences
    if M > min_num_seq:
        out_file = path_msa + '.out'
        # write the header (msa file name) first and close, so it is
        # flushed before the long-running subprocess appends its output
        with open(out_file, 'w') as f_julia:
            f_julia.write(path_msa.split("/")[-1] + ".fa" + '\n')
        with open(out_file, 'a') as f_julia:
            subprocess.run(["julia",'-p', str(num_cores), './src/plmDCA_inference.jl',path_msa, path_dca_par], stdout=f_julia, stderr=subprocess.STDOUT)
    else:
        print('... ERROR! too few seqs (M={0})!'.format(str(M)))
    return 0
def run_phmmer(path_phmmer, path_ref_prot, path_db, path_msa_out, path_tmp_stockholm, path_tmp_msa, num_cores):
    ''' run phmmer, get the local homologs MSA (form E coli strains)

    Pipeline: (1) phmmer writes a Stockholm alignment, (2) a perl helper
    converts it to fasta, (3) sequences with fewer than 10 gaps, more
    than 90% identity to the reference and only standard amino acids are
    written to path_msa_out, (4) both temp files are deleted.  Returns 0.

    NOTE(review): the FNULL handle and the ``open(path_tmp_msa,'w')``
    passed as stdout are never closed; the reference fasta is re-parsed
    (with a leaked handle) on EVERY record of the loop -- hoisting it
    above the loop looks safe; the 'rm' subprocess calls could simply be
    os.remove.
    '''
    file_out = open(path_msa_out, 'w')
    #1) run phmmer
    FNULL = open(os.devnull, 'w')
    subprocess.run([path_phmmer, '-A', path_tmp_stockholm, '--cpu', str(num_cores), path_ref_prot, path_db], stdout=FNULL, stderr=subprocess.STDOUT)
    #2) convert stockholm to fasta
    subprocess.run(['./src/stockholm2fasta.pl', '-g', path_tmp_stockholm ], stdout=open(path_tmp_msa,'w'),stderr=subprocess.STDOUT)
    #3) parse and filter the phmmer msa
    with open(path_tmp_msa, "r") as input_handle:
        for idx_record, record in enumerate(SeqIO.parse(input_handle, "fasta")):
            seq = str(record.seq)
            #remove dot and lower
            seq = ''.join(char for char in seq if (char.isupper() or char =='-'))
            # 2.1) do the filtering
            records_ref = list(SeqIO.parse(open(path_ref_prot,'r'), "fasta"))
            ref_seq = str(records_ref[0].seq)
            # - remove the sequences which are to gapped (i.e. sequence must contain less than 10 gap)
            # - remove sequence which are FAR the reference sequence (i.e. sequence_identity must be MORE than 90%)
            # - remove sequences containing non standard aa
            if( (compute_num_gap(seq) < 10) and (compute_seqid(ref_seq, seq) > 0.9) and all_standard_aa(seq)):
                file_out.write(str(">"+record.id)+'\n')
                file_out.write(str(seq)+'\n')
    file_out.close()
    #rm prefiltering msa
    subprocess.run(['rm' , path_tmp_msa])
    #rm stockholm (too big!)
    subprocess.run(['rm' , path_tmp_stockholm])
    return 0
def compute_energy_local_msa(ref_prot_file, output_file, ali_file, h, J, verbose):
    """Compute DCA energies for every sequence in the local (strain) MSA
    and write them as CSV to ``output_file``.

    Per record: full energy ``e``, energy gap to the reference ``e-e0``,
    sum of single-mutant energies ``e_sum_sm``, mutation count ``dist``
    and the mutated positions/residues.  With verbose=True one row is
    emitted per strain name found in the record header; with
    verbose=False one row per unique sequence plus its multiplicity.
    Returns 0.

    Fixes vs. the original: the reference fasta handle is closed; the
    dead ``E0 = 0`` initializer is gone; ``e-e0`` is computed ONCE after
    the loop instead of being rebuilt for the whole list on every record
    (which made the loop O(M^2)).
    """
    with open(ref_prot_file, 'r') as ref_handle:
        records_ref = list(SeqIO.parse(ref_handle, "fasta"))
    ref_seq = str(records_ref[0].seq)
    ref_name = str(records_ref[0].id)
    path_msa_local = os.path.join(ali_file)
    all_seq_name = []
    all_seq_num_occurences = []
    all_seq_e = []
    all_seq_dist = []
    all_seq_ref_prot = []
    all_seq_sum_sm = []
    all_seq_mut_idx = []
    all_seq_mut_aa = []
    # wild-type energy, used once below for the e-e0 column
    E0 = compute_energy(ref_seq, h, J)
    with open(path_msa_local, "r") as f:
        for record in tqdm(SeqIO.parse(f, "fasta")):
            seq = str(record.seq)
            E = compute_energy(seq, h, J)
            idx, aa1, aa2 = compute_diff(ref_seq, seq)
            # sum of the single-mutant energies for this mutation set
            E_sum_sm = compute_sm_energy(ref_seq, h, J, idx, aa2)
            # number of mutations
            dist = len(idx)
            # the header lists the strains sharing this sequence, '-'-separated
            name_seq_list = (str(record.id).split('/')[0]).split('-')
            if verbose:
                # one row per strain name
                for name_seq in name_seq_list:
                    all_seq_ref_prot.append(ref_name)
                    all_seq_name.append(name_seq)
                    all_seq_e.append(np.round(E, 4))
                    all_seq_dist.append(int(dist))
                    all_seq_sum_sm.append(np.round(E_sum_sm, 4))
                    all_seq_mut_idx.append(idx)
                    all_seq_mut_aa.append(aa2)
            else:
                # one row per unique sequence, with its multiplicity
                all_seq_ref_prot.append(ref_name)
                all_seq_num_occurences.append(len(name_seq_list))
                all_seq_e.append(np.round(E, 4))
                all_seq_dist.append(int(dist))
                all_seq_sum_sm.append(np.round(E_sum_sm, 4))
                all_seq_mut_idx.append(idx)
                all_seq_mut_aa.append(aa2)
    # energy gap to the wild type, computed once for the whole column
    all_seq_e_e0 = np.round(all_seq_e - E0, 4)
    if verbose:
        df = pd.DataFrame({'ref':all_seq_ref_prot, 'seq_name':all_seq_name, 'e':all_seq_e, 'e-e0':all_seq_e_e0, 'e_sum_sm': all_seq_sum_sm, 'dist':all_seq_dist, 'idx_mut':all_seq_mut_idx, 'aa_mut': all_seq_mut_aa})
    else:
        df = pd.DataFrame({'ref':all_seq_ref_prot, 'num_occurences':all_seq_num_occurences, 'e':all_seq_e, 'e-e0':all_seq_e_e0, 'e_sum_sm': all_seq_sum_sm,'dist':all_seq_dist, 'idx_mut':all_seq_mut_idx, 'aa_mut': all_seq_mut_aa})
    df.to_csv(os.path.join(output_file), index=False)
    return 0
def compute_energy_ind_msa(ref_prot_file, ali_file, output_file, h, J):
    """Compute DCA energies for sequences sampled from the profile
    (site-independent) model and write them as CSV to ``output_file``.

    Columns: reference name, energy ``e``, gap to the wild type ``e-e0``
    and Hamming distance ``dist``.  Returns 0.

    Fixes vs. the original: the reference fasta handle is closed, and
    the ``e-e0`` column is computed once after the loop instead of being
    rebuilt for the whole list on every record.
    """
    with open(ref_prot_file, 'r') as ref_handle:
        records_ref = list(SeqIO.parse(ref_handle, "fasta"))
    ref_seq = str(records_ref[0].seq)
    ref_name = str(records_ref[0].id)
    path_msa_local_ind = os.path.join(ali_file)
    all_seq_ref_prot = []
    all_seq_e = []
    all_seq_dist = []
    E0 = compute_energy(ref_seq, h, J)
    with open(path_msa_local_ind, "r") as f:
        for record in tqdm(SeqIO.parse(f, "fasta")):
            seq = str(record.seq)
            dist = compute_dist(ref_seq, seq)
            E = compute_energy(seq, h, J)
            all_seq_ref_prot.append(ref_name)
            all_seq_e.append(np.round(E, 4))
            all_seq_dist.append(int(dist))
    all_seq_e_e0 = np.round(all_seq_e - E0, 4)
    df_ind = pd.DataFrame({'ref':all_seq_ref_prot, 'e':all_seq_e, 'e-e0':all_seq_e_e0, 'dist':all_seq_dist})
    df_ind.to_csv(os.path.join(output_file), index=False)
    return 0
def compute_energy_rand_msa(ref_prot_file, ali_file, output_file, h, J):
    """Compute DCA energies for sequences sampled from the random model
    and write them to ``output_file``/e_<ref>_rand.csv.

    Note: unlike the sibling functions, ``output_file`` is treated as a
    DIRECTORY here and the file name is derived from the reference id
    (behavior preserved from the original).  Returns 0.

    Fixes vs. the original: the reference fasta handle is closed, and
    the ``e-e0`` column is computed once after the loop instead of being
    rebuilt for the whole list on every record.
    """
    with open(ref_prot_file, 'r') as ref_handle:
        records_ref = list(SeqIO.parse(ref_handle, "fasta"))
    ref_seq = str(records_ref[0].seq)
    ref_name = str(records_ref[0].id)
    path_msa_local_rand = os.path.join(ali_file)
    all_seq_ref_prot = []
    all_seq_e = []
    all_seq_dist = []
    E0 = compute_energy(ref_seq, h, J)
    with open(path_msa_local_rand, "r") as f:
        for record in tqdm(SeqIO.parse(f, "fasta")):
            seq = str(record.seq)
            dist = compute_dist(ref_seq, seq)
            E = compute_energy(seq, h, J)
            all_seq_ref_prot.append(ref_name)
            all_seq_e.append(np.round(E, 4))
            all_seq_dist.append(int(dist))
    all_seq_e_e0 = np.round(all_seq_e - E0, 4)
    df_rand = pd.DataFrame({'ref':all_seq_ref_prot, 'e':all_seq_e, 'e-e0':all_seq_e_e0, 'dist':all_seq_dist})
    df_rand.to_csv(os.path.join(output_file, 'e_'+ref_name+'_rand.csv'), index=False)
    return 0
def compute_all_entropies(ref_prot_file, ali_file, ali_file_local, output_file, h, J):
    """Compute three per-position entropies and save them as one CSV.

    s_ind: context-independent entropy from the distant-homolog (hhblits) MSA
    s_dep: context-dependent entropy from (ref_seq, h, J)
    s_local_obs: entropy observed in the local (strain) MSA
    All values are rounded to 4 decimals.  Returns 0.

    Fix vs. the original: the reference fasta handle is closed via a
    context manager instead of being leaked.
    """
    with open(ref_prot_file, 'r') as ref_handle:
        records_ref = list(SeqIO.parse(ref_handle, "fasta"))
    ref_seq = str(records_ref[0].seq)
    ref_name = str(records_ref[0].id)
    # context-INDEPENDENT entropy (from msa_hhblits)
    path_msa_hhblits = os.path.join(ali_file)
    S_ind = np.round(compute_entropy_context_ind(path_msa_hhblits), 4)
    # context-DEPENDENT entropy (from ref_seq, h, J)
    S_dep = np.round(compute_entropy_context_dep(ref_seq, h, J), 4)
    # entropy observed in the local MSA (i.e. observed polymorphism)
    path_msa_local = os.path.join(ali_file_local)
    S_local_obs = np.round(compute_entropy_context_ind(path_msa_local), 4)
    all_seq_ref_prot = [ref_name] * len(ref_seq)
    all_seq_idx = list(range(len(ref_seq)))
    df_s = pd.DataFrame({'ref':all_seq_ref_prot, 'idx': all_seq_idx, 's_ind':S_ind, 's_dep':S_dep, 's_local_obs':S_local_obs})
    df_s.to_csv(os.path.join(output_file), index=False)
    return 0
| [
"tarfile.open",
"subprocess.run",
"os.path.join",
"io.BytesIO",
"multiprocessing.cpu_count",
"numpy.log",
"numpy.exp",
"numpy.sum",
"numpy.zeros",
"joblib.Parallel",
"Bio.SeqIO.parse",
"pandas.DataFrame",
"joblib.delayed",
"numpy.load",
"numpy.round"
] | [((247, 274), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (272, 274), False, 'import multiprocessing\n'), ((748, 780), 'tarfile.open', 'tarfile.open', (['path_h_DCA', '"""r:gz"""'], {}), "(path_h_DCA, 'r:gz')\n", (760, 780), False, 'import tarfile\n'), ((998, 1030), 'tarfile.open', 'tarfile.open', (['path_J_DCA', '"""r:gz"""'], {}), "(path_J_DCA, 'r:gz')\n", (1010, 1030), False, 'import tarfile\n'), ((2228, 2249), 'numpy.round', 'np.round', (['E_sum_sm', '(4)'], {}), '(E_sum_sm, 4)\n', (2236, 2249), True, 'import numpy as np\n'), ((4259, 4276), 'numpy.zeros', 'np.zeros', (['h.shape'], {}), '(h.shape)\n', (4267, 4276), True, 'import numpy as np\n'), ((15588, 15624), 'subprocess.run', 'subprocess.run', (["['rm', path_tmp_msa]"], {}), "(['rm', path_tmp_msa])\n", (15602, 15624), False, 'import subprocess\n'), ((15667, 15709), 'subprocess.run', 'subprocess.run', (["['rm', path_tmp_stockholm]"], {}), "(['rm', path_tmp_stockholm])\n", (15681, 15709), False, 'import subprocess\n'), ((16248, 16270), 'os.path.join', 'os.path.join', (['ali_file'], {}), '(ali_file)\n', (16260, 16270), False, 'import os\n'), ((18837, 18859), 'os.path.join', 'os.path.join', (['ali_file'], {}), '(ali_file)\n', (18849, 18859), False, 'import os\n'), ((19903, 19925), 'os.path.join', 'os.path.join', (['ali_file'], {}), '(ali_file)\n', (19915, 19925), False, 'import os\n'), ((21123, 21145), 'os.path.join', 'os.path.join', (['ali_file'], {}), '(ali_file)\n', (21135, 21145), False, 'import os\n'), ((21423, 21451), 'os.path.join', 'os.path.join', (['ali_file_local'], {}), '(ali_file_local)\n', (21435, 21451), False, 'import os\n'), ((21654, 21777), 'pandas.DataFrame', 'pd.DataFrame', (["{'ref': all_seq_ref_prot, 'idx': all_seq_idx, 's_ind': S_ind, 's_dep':\n S_dep, 's_local_obs': S_local_obs}"], {}), "({'ref': all_seq_ref_prot, 'idx': all_seq_idx, 's_ind': S_ind,\n 's_dep': S_dep, 's_local_obs': S_local_obs})\n", (21666, 21777), True, 'import pandas as pd\n'), ((7654, 
7688), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_handle', '"""fasta"""'], {}), "(input_handle, 'fasta')\n", (7665, 7688), False, 'from Bio import SeqIO\n'), ((17981, 18196), 'pandas.DataFrame', 'pd.DataFrame', (["{'ref': all_seq_ref_prot, 'seq_name': all_seq_name, 'e': all_seq_e, 'e-e0':\n all_seq_e_e0, 'e_sum_sm': all_seq_sum_sm, 'dist': all_seq_dist,\n 'idx_mut': all_seq_mut_idx, 'aa_mut': all_seq_mut_aa}"], {}), "({'ref': all_seq_ref_prot, 'seq_name': all_seq_name, 'e':\n all_seq_e, 'e-e0': all_seq_e_e0, 'e_sum_sm': all_seq_sum_sm, 'dist':\n all_seq_dist, 'idx_mut': all_seq_mut_idx, 'aa_mut': all_seq_mut_aa})\n", (17993, 18196), True, 'import pandas as pd\n'), ((18222, 18457), 'pandas.DataFrame', 'pd.DataFrame', (["{'ref': all_seq_ref_prot, 'num_occurences': all_seq_num_occurences, 'e':\n all_seq_e, 'e-e0': all_seq_e_e0, 'e_sum_sm': all_seq_sum_sm, 'dist':\n all_seq_dist, 'idx_mut': all_seq_mut_idx, 'aa_mut': all_seq_mut_aa}"], {}), "({'ref': all_seq_ref_prot, 'num_occurences':\n all_seq_num_occurences, 'e': all_seq_e, 'e-e0': all_seq_e_e0,\n 'e_sum_sm': all_seq_sum_sm, 'dist': all_seq_dist, 'idx_mut':\n all_seq_mut_idx, 'aa_mut': all_seq_mut_aa})\n", (18234, 18457), True, 'import pandas as pd\n'), ((18453, 18478), 'os.path.join', 'os.path.join', (['output_file'], {}), '(output_file)\n', (18465, 18478), False, 'import os\n'), ((19361, 19388), 'numpy.round', 'np.round', (['(all_seq_e - E0)', '(4)'], {}), '(all_seq_e - E0, 4)\n', (19369, 19388), True, 'import numpy as np\n'), ((19406, 19509), 'pandas.DataFrame', 'pd.DataFrame', (["{'ref': all_seq_ref_prot, 'e': all_seq_e, 'e-e0': all_seq_e_e0, 'dist':\n all_seq_dist}"], {}), "({'ref': all_seq_ref_prot, 'e': all_seq_e, 'e-e0': all_seq_e_e0,\n 'dist': all_seq_dist})\n", (19418, 19509), True, 'import pandas as pd\n'), ((19520, 19545), 'os.path.join', 'os.path.join', (['output_file'], {}), '(output_file)\n', (19532, 19545), False, 'import os\n'), ((20428, 20455), 'numpy.round', 'np.round', (['(all_seq_e - E0)', 
'(4)'], {}), '(all_seq_e - E0, 4)\n', (20436, 20455), True, 'import numpy as np\n'), ((20474, 20577), 'pandas.DataFrame', 'pd.DataFrame', (["{'ref': all_seq_ref_prot, 'e': all_seq_e, 'e-e0': all_seq_e_e0, 'dist':\n all_seq_dist}"], {}), "({'ref': all_seq_ref_prot, 'e': all_seq_e, 'e-e0': all_seq_e_e0,\n 'dist': all_seq_dist})\n", (20486, 20577), True, 'import pandas as pd\n'), ((20589, 20645), 'os.path.join', 'os.path.join', (['output_file', "('e_' + ref_name + '_rand.csv')"], {}), "(output_file, 'e_' + ref_name + '_rand.csv')\n", (20601, 20645), False, 'import os\n'), ((21786, 21811), 'os.path.join', 'os.path.join', (['output_file'], {}), '(output_file)\n', (21798, 21811), False, 'import os\n'), ((935, 951), 'io.BytesIO', 'BytesIO', (['content'], {}), '(content)\n', (942, 951), False, 'from io import BytesIO\n'), ((968, 987), 'numpy.load', 'np.load', (['load_bytes'], {}), '(load_bytes)\n', (975, 987), True, 'import numpy as np\n'), ((1185, 1201), 'io.BytesIO', 'BytesIO', (['content'], {}), '(content)\n', (1192, 1201), False, 'from io import BytesIO\n'), ((1218, 1237), 'numpy.load', 'np.load', (['load_bytes'], {}), '(load_bytes)\n', (1225, 1237), True, 'import numpy as np\n'), ((1802, 1821), 'numpy.round', 'np.round', (['(E - E0)', '(4)'], {}), '(E - E0, 4)\n', (1810, 1821), True, 'import numpy as np\n'), ((2752, 2766), 'numpy.sum', 'np.sum', (['all_ei'], {}), '(all_ei)\n', (2758, 2766), True, 'import numpy as np\n'), ((3852, 3878), 'numpy.sum', 'np.sum', (['fi[:20, :]'], {'axis': '(0)'}), '(fi[:20, :], axis=0)\n', (3858, 3878), True, 'import numpy as np\n'), ((4060, 4069), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (4066, 4069), True, 'import numpy as np\n'), ((4633, 4650), 'numpy.exp', 'np.exp', (['arg_denom'], {}), '(arg_denom)\n', (4639, 4650), True, 'import numpy as np\n'), ((4909, 4924), 'numpy.exp', 'np.exp', (['arg_num'], {}), '(arg_num)\n', (4915, 4924), True, 'import numpy as np\n'), ((7554, 7604), 'os.path.join', 'os.path.join', 
(['path_ref_proteome', 'name_ref_proteome'], {}), '(path_ref_proteome, name_ref_proteome)\n', (7566, 7604), False, 'import os\n'), ((8672, 8706), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_handle', '"""fasta"""'], {}), "(input_handle, 'fasta')\n", (8683, 8706), False, 'from Bio import SeqIO\n'), ((10099, 10133), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_handle', '"""fasta"""'], {}), "(input_handle, 'fasta')\n", (10110, 10133), False, 'from Bio import SeqIO\n'), ((11595, 11629), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_handle', '"""fasta"""'], {}), "(input_handle, 'fasta')\n", (11606, 11629), False, 'from Bio import SeqIO\n'), ((14639, 14673), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_handle', '"""fasta"""'], {}), "(input_handle, 'fasta')\n", (14650, 14673), False, 'from Bio import SeqIO\n'), ((16569, 16592), 'Bio.SeqIO.parse', 'SeqIO.parse', (['f', '"""fasta"""'], {}), "(f, 'fasta')\n", (16580, 16592), False, 'from Bio import SeqIO\n'), ((19059, 19082), 'Bio.SeqIO.parse', 'SeqIO.parse', (['f', '"""fasta"""'], {}), "(f, 'fasta')\n", (19070, 19082), False, 'from Bio import SeqIO\n'), ((20126, 20149), 'Bio.SeqIO.parse', 'SeqIO.parse', (['f', '"""fasta"""'], {}), "(f, 'fasta')\n", (20137, 20149), False, 'from Bio import SeqIO\n'), ((2617, 2650), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'num_cores_energy'}), '(n_jobs=num_cores_energy)\n', (2625, 2650), False, 'from joblib import Parallel, delayed\n'), ((4008, 4024), 'numpy.log', 'np.log', (['fi[q, i]'], {}), '(fi[q, i])\n', (4014, 4024), True, 'import numpy as np\n'), ((7887, 7924), 'os.path.join', 'os.path.join', (['tmp_path', 'name_tmp_file'], {}), '(tmp_path, name_tmp_file)\n', (7899, 7924), False, 'import os\n'), ((17465, 17492), 'numpy.round', 'np.round', (['(all_seq_e - E0)', '(4)'], {}), '(all_seq_e - E0, 4)\n', (17473, 17492), True, 'import numpy as np\n'), ((17828, 17855), 'numpy.round', 'np.round', (['(all_seq_e - E0)', '(4)'], {}), '(all_seq_e - E0, 4)\n', (17836, 17855), True, 'import numpy as 
np\n'), ((19280, 19294), 'numpy.round', 'np.round', (['E', '(4)'], {}), '(E, 4)\n', (19288, 19294), True, 'import numpy as np\n'), ((20347, 20361), 'numpy.round', 'np.round', (['E', '(4)'], {}), '(E, 4)\n', (20355, 20361), True, 'import numpy as np\n'), ((17675, 17689), 'numpy.round', 'np.round', (['E', '(4)'], {}), '(E, 4)\n', (17683, 17689), True, 'import numpy as np\n'), ((17775, 17796), 'numpy.round', 'np.round', (['E_sum_sm', '(4)'], {}), '(E_sum_sm, 4)\n', (17783, 17796), True, 'import numpy as np\n'), ((2651, 2683), 'joblib.delayed', 'delayed', (['compute_energy_given_ai'], {}), '(compute_energy_given_ai)\n', (2658, 2683), False, 'from joblib import Parallel, delayed\n'), ((17193, 17207), 'numpy.round', 'np.round', (['E', '(4)'], {}), '(E, 4)\n', (17201, 17207), True, 'import numpy as np\n'), ((17309, 17330), 'numpy.round', 'np.round', (['E_sum_sm', '(4)'], {}), '(E_sum_sm, 4)\n', (17317, 17330), True, 'import numpy as np\n')] |
import torch
import torch.nn.functional as F
from cogdl.utils import spmm
from . import BaseLayer
class GINELayer(BaseLayer):
    r"""Modified GIN convolution from the `"Graph convolutions that can finally
    model local structure" paper <https://arxiv.org/pdf/2011.15069.pdf>`__.

    Parameters
    ----------
    apply_func : callable, optional
        Layer or function applied to the aggregated node features.
    eps : float32, optional
        Initial :math:`\epsilon` value.
    train_eps : bool, optional
        When True, :math:`\epsilon` is a learnable parameter.
    """

    def __init__(self, apply_func=None, eps=0, train_eps=True):
        super(GINELayer, self).__init__()
        eps_tensor = torch.FloatTensor([eps])
        if train_eps:
            # Learnable epsilon.
            self.eps = torch.nn.Parameter(eps_tensor)
        else:
            # Fixed epsilon; registered as a buffer so it follows .to()/state_dict.
            self.register_buffer("eps", eps_tensor)
        self.apply_func = apply_func

    def forward(self, graph, x):
        # Aggregate neighbour features, then add the (1 + eps)-scaled self term.
        out = spmm(graph, x) + (1 + self.eps) * x
        if self.apply_func is not None:
            out = self.apply_func(out)
        return out

    def message(self, x, attr):
        # Edge-aware message: node feature plus edge attribute, rectified.
        return F.relu(x + attr)
| [
"torch.FloatTensor",
"torch.nn.functional.relu",
"cogdl.utils.spmm"
] | [((1052, 1066), 'cogdl.utils.spmm', 'spmm', (['graph', 'x'], {}), '(graph, x)\n', (1056, 1066), False, 'from cogdl.utils import spmm\n'), ((1247, 1263), 'torch.nn.functional.relu', 'F.relu', (['(x + attr)'], {}), '(x + attr)\n', (1253, 1263), True, 'import torch.nn.functional as F\n'), ((752, 776), 'torch.FloatTensor', 'torch.FloatTensor', (['[eps]'], {}), '([eps])\n', (769, 776), False, 'import torch\n'), ((832, 856), 'torch.FloatTensor', 'torch.FloatTensor', (['[eps]'], {}), '([eps])\n', (849, 856), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-24 19:29
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Drops the generic-foreign-key bookkeeping fields (content type +
    # instance id) that previously linked Tracking / HistoricalTracking
    # records to their subject objects.

    dependencies = [
        ('isisdata', '0060_auto_20170324_1741'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='historicaltracking',
            name='subject_content_type',
        ),
        migrations.RemoveField(
            model_name='historicaltracking',
            name='subject_instance_id',
        ),
        migrations.RemoveField(
            model_name='tracking',
            name='subject_content_type',
        ),
        migrations.RemoveField(
            model_name='tracking',
            name='subject_instance_id',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((293, 382), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""historicaltracking"""', 'name': '"""subject_content_type"""'}), "(model_name='historicaltracking', name=\n 'subject_content_type')\n", (315, 382), False, 'from django.db import migrations\n'), ((422, 510), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""historicaltracking"""', 'name': '"""subject_instance_id"""'}), "(model_name='historicaltracking', name=\n 'subject_instance_id')\n", (444, 510), False, 'from django.db import migrations\n'), ((550, 624), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""tracking"""', 'name': '"""subject_content_type"""'}), "(model_name='tracking', name='subject_content_type')\n", (572, 624), False, 'from django.db import migrations\n'), ((669, 742), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""tracking"""', 'name': '"""subject_instance_id"""'}), "(model_name='tracking', name='subject_instance_id')\n", (691, 742), False, 'from django.db import migrations\n')] |
import base64
import binascii
from typing import Union
from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, \
CompositeKey, DoesNotExist
from db.fields import BytesField
# Lazily-bound database handle; the concrete database is attached at runtime
# via database_proxy.initialize(...).
database_proxy = DatabaseProxy()


class BaseModel(Model):
    # All models in this module share the proxy-backed database connection.
    class Meta:
        database = database_proxy
class Entry(BaseModel):
    """A single received line of base64-encoded payload for a context."""

    id = IntegerField(primary_key=True)
    context = CharField(32)
    # Sender address (up to 40 chars, fits an IPv6 textual address).
    source = CharField(40)
    v6 = BooleanField()
    received_at = DateTimeField()
    line = IntegerField()
    # Stored base64-encoded; see _decoded() for the raw payload.
    data = BytesField(64)

    def summary(self):
        """One-line human-readable description of this entry."""
        # if self.line == 0:
        #     return f"{self.received_at}: received metadata for context {self.context}: {self.data.decode('ascii')}"
        return f"{self.received_at}: received line {self.line} from {str(self.source)} with content '{self.binary()}' for '{self.context}'"

    def _decoded(self, encoding: Union[str, None] = None) -> Union[bytes, str, None]:
        """Base64-decode `data`; optionally decode the result as text.

        Returns bytes when `encoding` is None, a str when an encoding is
        given, or None if the payload is not valid base64 / not decodable.
        """
        try:
            data = base64.b64decode(self.data, validate=True)
            if encoding:
                return data.decode(encoding)
            return data
        except (binascii.Error, UnicodeDecodeError):
            return None

    def ascii(self) -> Union[str, None]:
        """Decoded payload as ASCII text, or None if not valid."""
        return self._decoded('ascii')

    def binary(self) -> Union[bytes, None]:
        """Decoded payload as raw bytes, or None if not valid base64."""
        return self._decoded()

    def to_json(self):
        """JSON-serializable dict; timestamps become POSIX floats."""
        return dict(
            id=self.id,
            source=self.source,
            v6=self.v6,
            received_at=self.received_at.timestamp(),
            context=self.context,
            line=self.line,
            data=self.data.decode("ascii"),
        )
class Line(BaseModel):
    """Maps a (context, line) pair to the Entry selected for that slot."""

    context = CharField(32)
    line = IntegerField()
    entry = ForeignKeyField(Entry)
    selected_at = DateTimeField()

    class Meta:
        # One selection per (context, line) combination.
        primary_key = CompositeKey('context', 'line')

    def summary(self):
        """One-line human-readable description of this selection."""
        return f"{self.selected_at}: {self.context}:{self.line} -> {self.entry}"

    def to_json(self):
        """JSON-serializable dict; the linked entry is embedded recursively."""
        return {
            "context": self.context,
            "line": self.line,
            "entry": self.entry.to_json(),
            "selected_at": self.selected_at.timestamp(),
        }
class Meta(BaseModel):
    """Per-context metadata: announcing source and expected line count."""

    context = CharField(32, primary_key=True)
    source = CharField(40)
    v6 = BooleanField()
    lines = IntegerField()
    updated_at = DateTimeField()

    def summary(self):
        """One-line human-readable description of this metadata record."""
        return f"{self.updated_at}: received metadata for context {self.context}: {self.lines}"

    def to_json(self):
        """JSON-serializable dict; the timestamp becomes a POSIX float."""
        return {
            "context": self.context,
            "source": self.source,
            "v6": self.v6,
            "lines": self.lines,
            "updated_at": self.updated_at.timestamp(),
        }

    def get_missing(self):
        """Return the line numbers (1..lines) not yet stored for this context."""
        claimed = {row[0] for row in
                   Line.select(Line.line).where(Line.context == self.context).tuples()}
        return list(set(range(1, self.lines + 1)) - claimed)
"peewee.BooleanField",
"peewee.CharField",
"peewee.DatabaseProxy",
"db.fields.BytesField",
"peewee.CompositeKey",
"peewee.ForeignKeyField",
"peewee.IntegerField",
"base64.b64decode",
"peewee.DateTimeField"
] | [((253, 268), 'peewee.DatabaseProxy', 'DatabaseProxy', ([], {}), '()\n', (266, 268), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((380, 410), 'peewee.IntegerField', 'IntegerField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (392, 410), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((425, 438), 'peewee.CharField', 'CharField', (['(32)'], {}), '(32)\n', (434, 438), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((452, 465), 'peewee.CharField', 'CharField', (['(40)'], {}), '(40)\n', (461, 465), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((475, 489), 'peewee.BooleanField', 'BooleanField', ([], {}), '()\n', (487, 489), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((508, 523), 'peewee.DateTimeField', 'DateTimeField', ([], {}), '()\n', (521, 523), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((535, 549), 'peewee.IntegerField', 'IntegerField', ([], {}), '()\n', (547, 549), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((561, 575), 'db.fields.BytesField', 'BytesField', (['(64)'], {}), '(64)\n', (571, 575), False, 'from db.fields import BytesField\n'), ((1681, 1694), 'peewee.CharField', 'CharField', (['(32)'], {}), '(32)\n', (1690, 1694), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, 
DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((1706, 1720), 'peewee.IntegerField', 'IntegerField', ([], {}), '()\n', (1718, 1720), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((1733, 1755), 'peewee.ForeignKeyField', 'ForeignKeyField', (['Entry'], {}), '(Entry)\n', (1748, 1755), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((1774, 1789), 'peewee.DateTimeField', 'DateTimeField', ([], {}), '()\n', (1787, 1789), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((2216, 2247), 'peewee.CharField', 'CharField', (['(32)'], {'primary_key': '(True)'}), '(32, primary_key=True)\n', (2225, 2247), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((2261, 2274), 'peewee.CharField', 'CharField', (['(40)'], {}), '(40)\n', (2270, 2274), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((2284, 2298), 'peewee.BooleanField', 'BooleanField', ([], {}), '()\n', (2296, 2298), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((2311, 2325), 'peewee.IntegerField', 'IntegerField', ([], {}), '()\n', (2323, 2325), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((2343, 2358), 'peewee.DateTimeField', 'DateTimeField', ([], {}), '()\n', (2356, 2358), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, 
ForeignKeyField, CompositeKey, DoesNotExist\n'), ((1829, 1860), 'peewee.CompositeKey', 'CompositeKey', (['"""context"""', '"""line"""'], {}), "('context', 'line')\n", (1841, 1860), False, 'from peewee import DatabaseProxy, Model, CharField, BooleanField, DateTimeField, IntegerField, ForeignKeyField, CompositeKey, DoesNotExist\n'), ((979, 1021), 'base64.b64decode', 'base64.b64decode', (['self.data'], {'validate': '(True)'}), '(self.data, validate=True)\n', (995, 1021), False, 'import base64\n')] |
import requests

# Makes a request to an API endpoint.
# Takes an authorization header if needed.

url = "http://127.0.0.1:8000/api/test"
payload = {}
headers = {
    "Accept": "application/json",
    # Specify an access token if needed
    "Authorization": "token",
}

response = requests.request("GET", url, headers=headers, data=payload)
print(f"Status: {response.status_code}")
print(response)
| [
"requests.request"
] | [((270, 329), 'requests.request', 'requests.request', (['"""GET"""', 'url'], {'headers': 'headers', 'data': 'payload'}), "('GET', url, headers=headers, data=payload)\n", (286, 329), False, 'import requests\n')] |
import cloudpassage
import json
import os
# Module-level fixtures for the smoke tests below: resolve the test config
# and policy fixture paths relative to this file, then read the Halo API
# credentials from the portal config.
policy_file_name = "firewall.json"
config_file_name = "portal.yaml.local"
tests_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../"))
config_file = os.path.join(tests_dir, "configs/", config_file_name)
policy_file = os.path.join(tests_dir, 'policies/', policy_file_name)
session_info = cloudpassage.ApiKeyManager(config_file=config_file)
key_id = session_info.key_id
secret_key = session_info.secret_key
api_hostname = session_info.api_hostname
api_port = session_info.api_port
# Firewall policy fixture, flattened to a single-line JSON string.
with open(policy_file, 'r') as p_file:
    firewall_policy_body = p_file.read().replace('\n', '')
def create_firewall_policy_object():
    """Build a FirewallPolicy API client from the shared smoke-test credentials."""
    halo_session = cloudpassage.HaloSession(
        key_id,
        secret_key,
        api_host=api_hostname,
        api_port=api_port,
        integration_string="SDK-Smoke",
    )
    return cloudpassage.FirewallPolicy(halo_session)
def create_firewall_rule_object():
    """Build a FirewallRule API client from the shared smoke-test credentials."""
    halo_session = cloudpassage.HaloSession(
        key_id,
        secret_key,
        api_host=api_hostname,
        api_port=api_port,
        integration_string="SDK-Smoke",
    )
    return cloudpassage.FirewallRule(halo_session)
def create_firewall_zone_object():
    """Build a FirewallZone API client from the shared smoke-test credentials."""
    halo_session = cloudpassage.HaloSession(
        key_id,
        secret_key,
        api_host=api_hostname,
        api_port=api_port,
        integration_string="SDK-Smoke",
    )
    return cloudpassage.FirewallZone(halo_session)
def create_firewall_service_object():
    """Build a FirewallService API client from the shared smoke-test credentials."""
    halo_session = cloudpassage.HaloSession(
        key_id,
        secret_key,
        api_host=api_hostname,
        api_port=api_port,
        integration_string="SDK-Smoke",
    )
    return cloudpassage.FirewallService(halo_session)
def create_firewall_interface_object():
    """Build a FirewallInterface API client from the shared smoke-test credentials."""
    halo_session = cloudpassage.HaloSession(
        key_id,
        secret_key,
        api_host=api_hostname,
        api_port=api_port,
        integration_string="SDK-Smoke",
    )
    return cloudpassage.FirewallInterface(halo_session)
def get_target_linux_firewall_policy():
    """Return the ID of the first Linux firewall policy, or None if absent."""
    policy_api = create_firewall_policy_object()
    for policy in policy_api.list_all():
        if policy["platform"] == 'linux':
            return policy["id"]
    return None
def remove_policy_by_name(policy_name):
    """Delete every firewall policy whose name equals policy_name."""
    policy_api = create_firewall_policy_object()
    for policy in policy_api.list_all():
        if policy["name"] == policy_name:
            policy_api.delete(policy["id"])
class TestIntegrationFirewallPolicy:
    """Live-API smoke tests for cloudpassage.FirewallPolicy CRUD operations."""

    def test_instantiation(self):
        session = cloudpassage.HaloSession(key_id, secret_key)
        assert cloudpassage.FirewallPolicy(session)

    def test_firewall_policy_list_all(self):
        """This test requires that a firewall policy exist in your Halo
        account.  If you don't have a firewall policy in your Halo account,
        this test will fail.
        """
        firewall_policy = create_firewall_policy_object()
        firewall_policy_list = firewall_policy.list_all()
        assert "id" in firewall_policy_list[0]

    def test_firewall_policy_describe(self):
        """This test requires that a firewall policy exist in your Halo
        account.  If you don't have a firewall policy in your Halo account,
        this test will fail.
        """
        firewall_policy = create_firewall_policy_object()
        firewall_policy_list = firewall_policy.list_all()
        target_firewall_policy_id = firewall_policy_list[0]["id"]
        target_policy = firewall_policy.describe(target_firewall_policy_id)
        assert "id" in target_policy

    def test_firewall_policy_create_update_delete(self):
        """Create a policy from the JSON fixture, rename it, then delete it."""
        firewall_policy = create_firewall_policy_object()
        # Clear out leftovers from earlier failed runs so names don't collide.
        remove_policy_by_name("cpapi_test_1")
        remove_policy_by_name("NewName")
        this_policy = json.loads(firewall_policy_body)
        this_policy["firewall_policy"]["name"] = "cpapi_test_1"
        new_policy_id = firewall_policy.create(json.dumps(this_policy))
        policy_update = {"firewall_policy": {"name": "NewName",
                                             "id": new_policy_id}}
        firewall_policy.update(policy_update)
        # delete() returns None on success.
        delete_error = firewall_policy.delete(new_policy_id)
        assert delete_error is None
class TestIntegrationFirewallRule:
    """Live-API smoke tests for cloudpassage.FirewallRule CRUD operations."""

    def test_instantiation(self):
        session = cloudpassage.HaloSession(key_id, secret_key)
        assert cloudpassage.FirewallRule(session)

    def test_list_firewall_policy_rules(self):
        # Requires an existing Linux firewall policy in the account.
        firewall_rule = create_firewall_rule_object()
        target_firewall_policy_id = get_target_linux_firewall_policy()
        policy_rules = firewall_rule.list_all(target_firewall_policy_id)
        assert "id" in policy_rules[0]

    def test_get_firewall_policy_rule_describe(self):
        firewall_rule = create_firewall_rule_object()
        target_firewall_policy_id = get_target_linux_firewall_policy()
        policy_rules = firewall_rule.list_all(target_firewall_policy_id)
        target_rule_id = policy_rules[0]["id"]
        rule_details = firewall_rule.describe(target_firewall_policy_id,
                                              target_rule_id)
        assert "id" in rule_details

    def test_firewall_policy_rule_create_mod_delete(self):
        """Create a policy, clone its first rule, modify it, then clean up."""
        modification_body = {"firewall_rule": {
            "comment": "Your momma makes firewall rules"}}
        firewall_policy = create_firewall_policy_object()
        remove_policy_by_name("cpapi_test_2")
        firewall_rule = create_firewall_rule_object()
        this_policy = json.loads(firewall_policy_body)
        this_policy["firewall_policy"]["name"] = "cpapi_test_2"
        target_policy_id = firewall_policy.create(json.dumps(this_policy))
        # Re-use the first imported rule as a template; the API rejects the
        # read-only "url" field, and "position" is required on create.
        rule_imported = firewall_rule.list_all(target_policy_id)[0]
        del rule_imported["url"]
        rule_imported["position"] = 1
        rule_body = {"firewall_rule": rule_imported}
        print(rule_body)
        target_rule_id = firewall_rule.create(target_policy_id, rule_body)
        modification_error = firewall_rule.update(target_policy_id,
                                                  target_rule_id,
                                                  modification_body)
        delete_rule_error = firewall_rule.delete(target_policy_id,
                                                 target_rule_id)
        delete_policy_error = firewall_policy.delete(target_policy_id)
        # All mutating calls return None on success.
        assert modification_error is None
        assert delete_rule_error is None
        assert delete_policy_error is None
class TestIntegraationFirewallZone:
    """Live-API smoke tests for cloudpassage.FirewallZone CRUD operations.

    NOTE(review): class name has a typo ("Integraation"); left unchanged
    because pytest discovers it by name and renaming would alter collection.
    """

    def test_instantiation(self):
        session = cloudpassage.HaloSession(key_id, secret_key)
        assert cloudpassage.FirewallZone(session)

    def test_list_all_ip_zones(self):
        firewall_zone = create_firewall_zone_object()
        list_of_zones = firewall_zone.list_all()
        assert "id" in list_of_zones[0]

    def test_get_zone_details(self):
        firewall_zone = create_firewall_zone_object()
        target_zone_id = firewall_zone.list_all()[0]["id"]
        details = firewall_zone.describe(target_zone_id)
        assert "id" in details

    def test_firewall_zone_create_update_delete(self):
        """Create a loopback zone, rename it, then delete it."""
        firewall_zone = create_firewall_zone_object()
        firewall_zone_body = {"firewall_zone": {"name": "CPAPI TEST",
                                                "ip_address": "127.0.0.1"}}
        target_zone_id = firewall_zone.create(firewall_zone_body)
        zone_update = {"firewall_zone": {"name": "NewName",
                                         "id": target_zone_id}}
        firewall_zone.update(zone_update)
        # delete() returns None on success.
        delete_error = firewall_zone.delete(target_zone_id)
        assert delete_error is None
class TestIntegrationFirewallService:
    """Live-API smoke tests for cloudpassage.FirewallService CRUD operations."""

    def test_instantiation(self):
        session = cloudpassage.HaloSession(key_id, secret_key)
        assert cloudpassage.FirewallService(session)

    def test_list_all_services(self):
        firewall_service = create_firewall_service_object()
        list_of_services = firewall_service.list_all()
        assert "id" in list_of_services[0]

    def test_get_service_details(self):
        firewall_service = create_firewall_service_object()
        target_service_id = firewall_service.list_all()[0]["id"]
        details = firewall_service.describe(target_service_id)
        assert "id" in details

    # NOTE(review): method name says "zone" but it exercises services; left
    # unchanged because pytest discovers tests by name.
    def test_firewall_zone_create_update_delete(self):
        """Create a TCP service, rename it, then delete it."""
        firewall_service = create_firewall_service_object()
        firewall_service_body = {"firewall_service": {"name": "<NAME>",
                                                      "protocol": "TCP",
                                                      "port": "1234"}}
        target_service_id = firewall_service.create(firewall_service_body)
        service_update = {"firewall_service": {"name": "NewName",
                                               "id": target_service_id}}
        firewall_service.update(service_update)
        # delete() returns None on success.
        delete_error = firewall_service.delete(target_service_id)
        assert delete_error is None
class TestIntegrationFirewallInterface:
    """Live-API smoke tests for cloudpassage.FirewallInterface operations."""

    def test_instantiation(self):
        session = cloudpassage.HaloSession(key_id, secret_key)
        assert cloudpassage.FirewallInterface(session)

    def test_list_all_interfaces(self):
        interface = create_firewall_interface_object()
        list_of_interfaces = interface.list_all()
        assert "id" in list_of_interfaces[0]

    def test_get_interface_details(self):
        interface = create_firewall_interface_object()
        target_interface_id = interface.list_all()[0]["id"]
        details = interface.describe(target_interface_id)
        assert "id" in details

    def test_firewall_interface_create_delete(self):
        """Create an interface, then delete it (no update API exercised)."""
        interface = create_firewall_interface_object()
        interface_body = {"firewall_interface": {"name": "eth12"}}
        target_interface_id = interface.create(interface_body)
        # delete() returns None on success.
        delete_error = interface.delete(target_interface_id)
        assert delete_error is None
| [
"cloudpassage.FirewallPolicy",
"cloudpassage.ApiKeyManager",
"json.loads",
"cloudpassage.FirewallInterface",
"json.dumps",
"os.path.join",
"cloudpassage.FirewallService",
"cloudpassage.HaloSession",
"os.path.dirname",
"cloudpassage.FirewallRule",
"cloudpassage.FirewallZone"
] | [((207, 260), 'os.path.join', 'os.path.join', (['tests_dir', '"""configs/"""', 'config_file_name'], {}), "(tests_dir, 'configs/', config_file_name)\n", (219, 260), False, 'import os\n'), ((275, 329), 'os.path.join', 'os.path.join', (['tests_dir', '"""policies/"""', 'policy_file_name'], {}), "(tests_dir, 'policies/', policy_file_name)\n", (287, 329), False, 'import os\n'), ((346, 397), 'cloudpassage.ApiKeyManager', 'cloudpassage.ApiKeyManager', ([], {'config_file': 'config_file'}), '(config_file=config_file)\n', (372, 397), False, 'import cloudpassage\n'), ((691, 813), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {'api_host': 'api_hostname', 'api_port': 'api_port', 'integration_string': '"""SDK-Smoke"""'}), "(key_id, secret_key, api_host=api_hostname,\n api_port=api_port, integration_string='SDK-Smoke')\n", (715, 813), False, 'import cloudpassage\n'), ((956, 992), 'cloudpassage.FirewallPolicy', 'cloudpassage.FirewallPolicy', (['session'], {}), '(session)\n', (983, 992), False, 'import cloudpassage\n'), ((1078, 1200), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {'api_host': 'api_hostname', 'api_port': 'api_port', 'integration_string': '"""SDK-Smoke"""'}), "(key_id, secret_key, api_host=api_hostname,\n api_port=api_port, integration_string='SDK-Smoke')\n", (1102, 1200), False, 'import cloudpassage\n'), ((1341, 1375), 'cloudpassage.FirewallRule', 'cloudpassage.FirewallRule', (['session'], {}), '(session)\n', (1366, 1375), False, 'import cloudpassage\n'), ((1459, 1581), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {'api_host': 'api_hostname', 'api_port': 'api_port', 'integration_string': '"""SDK-Smoke"""'}), "(key_id, secret_key, api_host=api_hostname,\n api_port=api_port, integration_string='SDK-Smoke')\n", (1483, 1581), False, 'import cloudpassage\n'), ((1722, 1756), 'cloudpassage.FirewallZone', 'cloudpassage.FirewallZone', (['session'], {}), 
'(session)\n', (1747, 1756), False, 'import cloudpassage\n'), ((1843, 1965), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {'api_host': 'api_hostname', 'api_port': 'api_port', 'integration_string': '"""SDK-Smoke"""'}), "(key_id, secret_key, api_host=api_hostname,\n api_port=api_port, integration_string='SDK-Smoke')\n", (1867, 1965), False, 'import cloudpassage\n'), ((2109, 2146), 'cloudpassage.FirewallService', 'cloudpassage.FirewallService', (['session'], {}), '(session)\n', (2137, 2146), False, 'import cloudpassage\n'), ((2238, 2360), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {'api_host': 'api_hostname', 'api_port': 'api_port', 'integration_string': '"""SDK-Smoke"""'}), "(key_id, secret_key, api_host=api_hostname,\n api_port=api_port, integration_string='SDK-Smoke')\n", (2262, 2360), False, 'import cloudpassage\n'), ((2506, 2545), 'cloudpassage.FirewallInterface', 'cloudpassage.FirewallInterface', (['session'], {}), '(session)\n', (2536, 2545), False, 'import cloudpassage\n'), ((158, 183), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (173, 183), False, 'import os\n'), ((3194, 3238), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {}), '(key_id, secret_key)\n', (3218, 3238), False, 'import cloudpassage\n'), ((3254, 3290), 'cloudpassage.FirewallPolicy', 'cloudpassage.FirewallPolicy', (['session'], {}), '(session)\n', (3281, 3290), False, 'import cloudpassage\n'), ((4444, 4476), 'json.loads', 'json.loads', (['firewall_policy_body'], {}), '(firewall_policy_body)\n', (4454, 4476), False, 'import json\n'), ((4976, 5020), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {}), '(key_id, secret_key)\n', (5000, 5020), False, 'import cloudpassage\n'), ((5036, 5070), 'cloudpassage.FirewallRule', 'cloudpassage.FirewallRule', (['session'], {}), '(session)\n', (5061, 5070), False, 'import cloudpassage\n'), 
((6191, 6223), 'json.loads', 'json.loads', (['firewall_policy_body'], {}), '(firewall_policy_body)\n', (6201, 6223), False, 'import json\n'), ((7277, 7321), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {}), '(key_id, secret_key)\n', (7301, 7321), False, 'import cloudpassage\n'), ((7337, 7371), 'cloudpassage.FirewallZone', 'cloudpassage.FirewallZone', (['session'], {}), '(session)\n', (7362, 7371), False, 'import cloudpassage\n'), ((8469, 8513), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {}), '(key_id, secret_key)\n', (8493, 8513), False, 'import cloudpassage\n'), ((8529, 8566), 'cloudpassage.FirewallService', 'cloudpassage.FirewallService', (['session'], {}), '(session)\n', (8557, 8566), False, 'import cloudpassage\n'), ((9814, 9858), 'cloudpassage.HaloSession', 'cloudpassage.HaloSession', (['key_id', 'secret_key'], {}), '(key_id, secret_key)\n', (9838, 9858), False, 'import cloudpassage\n'), ((9874, 9913), 'cloudpassage.FirewallInterface', 'cloudpassage.FirewallInterface', (['session'], {}), '(session)\n', (9904, 9913), False, 'import cloudpassage\n'), ((4588, 4611), 'json.dumps', 'json.dumps', (['this_policy'], {}), '(this_policy)\n', (4598, 4611), False, 'import json\n'), ((6338, 6361), 'json.dumps', 'json.dumps', (['this_policy'], {}), '(this_policy)\n', (6348, 6361), False, 'import json\n')] |
import os
import random
import argparse
import time
from datetime import datetime
from tqdm import tqdm
import paddle
paddle.disable_static()
import paddle.nn.functional as F
import paddle.optimizer as optim
from pgl.utils.data import Dataloader
import numpy as np
from models import DeepFRI
from data_preprocessing import MyDataset
from custom_metrics import do_compute_metrics
from utils import add_saved_args_and_params
def do_compute(model, batch):
    """Run the model on all batch elements but the last one.

    The last batch element is the label tensor; everything before it is
    forwarded to the model. Returns a (logits, labels) pair.
    """
    inputs, labels = batch[:-1], batch[-1]
    return model(*inputs), labels
def run_batch(model, data_loader, desc):
    """Collect sigmoid probabilities and labels over a loader and score them.

    Iterates the loader with a progress bar, concatenates per-batch
    predictions and ground truth, and returns the computed metrics.
    """
    probs, truths = [], []
    for batch in tqdm(data_loader, desc=f"{desc}"):
        logits, labels = do_compute(model, batch)
        probs.append(F.sigmoid(logits).tolist())
        truths.append(labels.tolist())
    return do_compute_metrics(np.concatenate(truths), np.concatenate(probs))
def test(model, test_data_loader):
    """Run a single evaluation pass over the test split and print the scores."""
    model.eval()
    with paddle.no_grad():
        metrics = run_batch(model, test_data_loader, "test")
    print(f"#### Test results")
    print("f_max: {0:.4f}, auprc: {1:.4f}".format(*metrics))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--cuda", type=str, default="0", help="GPU ID to train on.")
parser.add_argument("-bs", "--batch_size", type=int, default=64, help="Batch size.")
parser.add_argument(
"--test_file",
type=str,
default="data/nrPDB-GO_2019.06.18_test.txt",
help="File with list of protein chains for training.",
)
parser.add_argument(
"--protein_chain_graphs",
type=str,
default="data/chain_graphs",
help="Path to graph reprsentations of proteins.",
)
parser.add_argument(
"--model_name",
type=str,
required=True,
help="Path to saved/trained methods with parameters.",
)
parser.add_argument(
"--label_data_path",
type=str,
required=True,
help="Mapping containing protein chains with associated their labels. Choose from [molecular_function.npz, cellular_component.npz, biological_process.npz]",
)
parser.add_argument(
"-lm",
"--lm_model_name",
type=str,
help="Path to the pre-trained LSTM-Language Model.",
)
parser.add_argument(
"--use_cache",
type=int,
default=0,
choices=[0, 1],
help="Whether to save protein graph in memory for fast reading.",
)
args = parser.parse_args()
args.use_cache = bool(args.use_cache)
if int(args.cuda) == -1:
paddle.set_device("cpu")
else:
paddle.set_device("gpu:%s" % args.cuda)
test_chain_list = [p.strip() for p in open(args.test_file)]
saved_state_dict = paddle.load(args.model_name)
# In-place assignment
add_saved_args_and_params(args, saved_state_dict)
test_dataset = MyDataset(
test_chain_list,
args.n_channels,
args.pad_len,
args.protein_chain_graphs,
args.cmap_thresh,
args.label_data_path,
args.use_cache,
)
test_loader = Dataloader(
test_dataset, batch_size=args.batch_size, collate_fn=test_dataset.collate_fn
)
args.n_labels = test_dataset.n_labels
model = DeepFRI(args)
model.set_state_dict(saved_state_dict["model"])
model.eval()
print(f"\n{args.task}: Testing on {len(test_dataset)} protein samples.")
print(f"Starting at {datetime.now()}\n")
print(args)
test(model, test_loader)
| [
"custom_metrics.do_compute_metrics",
"paddle.no_grad",
"argparse.ArgumentParser",
"paddle.nn.functional.sigmoid",
"tqdm.tqdm",
"utils.add_saved_args_and_params",
"datetime.datetime.now",
"paddle.disable_static",
"data_preprocessing.MyDataset",
"numpy.concatenate",
"paddle.load",
"models.DeepFR... | [((120, 143), 'paddle.disable_static', 'paddle.disable_static', ([], {}), '()\n', (141, 143), False, 'import paddle\n'), ((623, 656), 'tqdm.tqdm', 'tqdm', (['data_loader'], {'desc': 'f"""{desc}"""'}), "(data_loader, desc=f'{desc}')\n", (627, 656), False, 'from tqdm import tqdm\n'), ((827, 854), 'numpy.concatenate', 'np.concatenate', (['logits_list'], {}), '(logits_list)\n', (841, 854), True, 'import numpy as np\n'), ((874, 902), 'numpy.concatenate', 'np.concatenate', (['ground_truth'], {}), '(ground_truth)\n', (888, 902), True, 'import numpy as np\n'), ((917, 962), 'custom_metrics.do_compute_metrics', 'do_compute_metrics', (['ground_truth', 'logits_list'], {}), '(ground_truth, logits_list)\n', (935, 962), False, 'from custom_metrics import do_compute_metrics\n'), ((1279, 1358), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (1302, 1358), False, 'import argparse\n'), ((2964, 2992), 'paddle.load', 'paddle.load', (['args.model_name'], {}), '(args.model_name)\n', (2975, 2992), False, 'import paddle\n'), ((3023, 3072), 'utils.add_saved_args_and_params', 'add_saved_args_and_params', (['args', 'saved_state_dict'], {}), '(args, saved_state_dict)\n', (3048, 3072), False, 'from utils import add_saved_args_and_params\n'), ((3092, 3242), 'data_preprocessing.MyDataset', 'MyDataset', (['test_chain_list', 'args.n_channels', 'args.pad_len', 'args.protein_chain_graphs', 'args.cmap_thresh', 'args.label_data_path', 'args.use_cache'], {}), '(test_chain_list, args.n_channels, args.pad_len, args.\n protein_chain_graphs, args.cmap_thresh, args.label_data_path, args.\n use_cache)\n', (3101, 3242), False, 'from data_preprocessing import MyDataset\n'), ((3315, 3408), 'pgl.utils.data.Dataloader', 'Dataloader', (['test_dataset'], {'batch_size': 'args.batch_size', 'collate_fn': 'test_dataset.collate_fn'}), '(test_dataset, 
batch_size=args.batch_size, collate_fn=\n test_dataset.collate_fn)\n', (3325, 3408), False, 'from pgl.utils.data import Dataloader\n'), ((3473, 3486), 'models.DeepFRI', 'DeepFRI', (['args'], {}), '(args)\n', (3480, 3486), False, 'from models import DeepFRI\n'), ((1046, 1062), 'paddle.no_grad', 'paddle.no_grad', ([], {}), '()\n', (1060, 1062), False, 'import paddle\n'), ((2792, 2816), 'paddle.set_device', 'paddle.set_device', (['"""cpu"""'], {}), "('cpu')\n", (2809, 2816), False, 'import paddle\n'), ((2835, 2874), 'paddle.set_device', 'paddle.set_device', (["('gpu:%s' % args.cuda)"], {}), "('gpu:%s' % args.cuda)\n", (2852, 2874), False, 'import paddle\n'), ((3660, 3674), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3672, 3674), False, 'from datetime import datetime\n'), ((735, 752), 'paddle.nn.functional.sigmoid', 'F.sigmoid', (['logits'], {}), '(logits)\n', (744, 752), True, 'import paddle.nn.functional as F\n')] |
from pathlib import Path
from ruamel import yaml
# Load the packaged default configuration once at import time; the file lives
# two directories above this module (config.yaml at the project root).
with Path(__file__).parent.parent.joinpath("config.yaml").resolve().open("r") as fin:
    __DEFAULT_CONFIG: dict = yaml.safe_load(fin)
def set_default_config(ip: str, port: int, log_directory: str):
    """Override the shared defaults for ip, port and log directory in place."""
    global __DEFAULT_CONFIG
    __DEFAULT_CONFIG.update({"ip": ip, "port": port, "log_directory": log_directory})
def get_default_config():
    """Return the module-wide default configuration dict (mutable, shared)."""
    return __DEFAULT_CONFIG
| [
"ruamel.yaml.safe_load",
"pathlib.Path"
] | [((170, 189), 'ruamel.yaml.safe_load', 'yaml.safe_load', (['fin'], {}), '(fin)\n', (184, 189), False, 'from ruamel import yaml\n'), ((60, 74), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (64, 74), False, 'from pathlib import Path\n')] |