code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward
from analyze.util import create_cdfs, save_dict
from utils import safe_reindex
from agent.const import DELTA_SLR
from featnames import LOOKUP
def main():
d = dict()
obs = load_valid_data(byr=False)
# value comparison
for delta in DELTA_SLR:
sim_dir = get_sim_dir(delta=delta)
data = load_valid_data(sim_dir=sim_dir)
vals = load_values(delta=delta)
valid_vals = safe_reindex(vals, idx=data[LOOKUP].index)
elem = {'All': vals, 'Valid': valid_vals}
d['cdf_values_{}'.format(delta)] = create_cdfs(elem)
sale_norm_obs, cont_value_obs = \
get_norm_reward(data=obs, values=vals)
sale_norm_agent, cont_value_agent = \
get_norm_reward(data=data, values=vals)
# cdf of values
elem = {'Data': sale_norm_obs.append(cont_value_obs * delta),
'Agent': sale_norm_agent.append(cont_value_agent * delta)}
d['cdf_realval_{}'.format(delta)] = create_cdfs(elem)
# cdf of values for unsold items
elem = {'Data': cont_value_obs, 'Agent': cont_value_agent}
d['cdf_unsoldvals_{}'.format(delta)] = create_cdfs(elem)
# cdf of values for unsold items
elem = {'Data': safe_reindex(vals, idx=sale_norm_obs.index),
'Agent': safe_reindex(vals, idx=sale_norm_agent.index)}
d['cdf_soldvals_{}'.format(delta)] = create_cdfs(elem)
# save
save_dict(d, 'slrvals')
if __name__ == '__main__':
main()
| [
"agent.util.get_norm_reward",
"analyze.util.save_dict",
"agent.util.get_sim_dir",
"agent.util.load_values",
"analyze.util.create_cdfs",
"utils.safe_reindex",
"agent.util.load_valid_data"
] | [((264, 290), 'agent.util.load_valid_data', 'load_valid_data', ([], {'byr': '(False)'}), '(byr=False)\n', (279, 290), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((1510, 1533), 'analyze.util.save_dict', 'save_dict', (['d', '"""slrvals"""'], {}), "(d, 'slrvals')\n", (1519, 1533), False, 'from analyze.util import create_cdfs, save_dict\n'), ((361, 385), 'agent.util.get_sim_dir', 'get_sim_dir', ([], {'delta': 'delta'}), '(delta=delta)\n', (372, 385), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((401, 433), 'agent.util.load_valid_data', 'load_valid_data', ([], {'sim_dir': 'sim_dir'}), '(sim_dir=sim_dir)\n', (416, 433), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((449, 473), 'agent.util.load_values', 'load_values', ([], {'delta': 'delta'}), '(delta=delta)\n', (460, 473), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((496, 538), 'utils.safe_reindex', 'safe_reindex', (['vals'], {'idx': 'data[LOOKUP].index'}), '(vals, idx=data[LOOKUP].index)\n', (508, 538), False, 'from utils import safe_reindex\n'), ((632, 649), 'analyze.util.create_cdfs', 'create_cdfs', (['elem'], {}), '(elem)\n', (643, 649), False, 'from analyze.util import create_cdfs, save_dict\n'), ((705, 743), 'agent.util.get_norm_reward', 'get_norm_reward', ([], {'data': 'obs', 'values': 'vals'}), '(data=obs, values=vals)\n', (720, 743), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((802, 841), 'agent.util.get_norm_reward', 'get_norm_reward', ([], {'data': 'data', 'values': 'vals'}), '(data=data, values=vals)\n', (817, 841), False, 'from agent.util import load_values, get_sim_dir, load_valid_data, get_norm_reward\n'), ((1056, 1073), 'analyze.util.create_cdfs', 'create_cdfs', (['elem'], {}), '(elem)\n', (1067, 1073), False, 'from analyze.util import create_cdfs, 
save_dict\n'), ((1230, 1247), 'analyze.util.create_cdfs', 'create_cdfs', (['elem'], {}), '(elem)\n', (1241, 1247), False, 'from analyze.util import create_cdfs, save_dict\n'), ((1476, 1493), 'analyze.util.create_cdfs', 'create_cdfs', (['elem'], {}), '(elem)\n', (1487, 1493), False, 'from analyze.util import create_cdfs, save_dict\n'), ((1314, 1357), 'utils.safe_reindex', 'safe_reindex', (['vals'], {'idx': 'sale_norm_obs.index'}), '(vals, idx=sale_norm_obs.index)\n', (1326, 1357), False, 'from utils import safe_reindex\n'), ((1384, 1429), 'utils.safe_reindex', 'safe_reindex', (['vals'], {'idx': 'sale_norm_agent.index'}), '(vals, idx=sale_norm_agent.index)\n', (1396, 1429), False, 'from utils import safe_reindex\n')] |
import requests
from bs4 import BeautifulSoup
# stackoverflow scrape begins
headers = dict()
headers[
"User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36"
def get_ques_text_and_link(q):
res = requests.get("https://stackoverflow.com/search?q=" + q,
headers=headers) # this is exactly the url when we search a question on stack overflow
soup = BeautifulSoup(res.text, "html.parser")
questions_data = {
"questions": []
}
question = soup.select_one(".question-summary")
q = question.select_one('.question-hyperlink')
link = "https://stackoverflow.com" + q["href"]
ques_text = q.getText()
# print(ques_text)
# print(link)
return ques_text, link
def get_answer(url):
# content = urllib.request.urlopen(url)
source = requests.get(url, headers=headers).text
soup = BeautifulSoup(source, features='lxml')
try:
accepted_content = soup.find_all('div', class_="js-post-body")
return accepted_content[0].text.strip(), accepted_content[1].text.strip()
except Exception as error:
print(str(error))
return None, 'not answered'
def stackoverflow(query):
ques_text, link = get_ques_text_and_link(query)
stackoverflow_list = ["question is : " + ques_text, "For Details visit : " + link]
detailed_ques, answer = get_answer(link)
if (detailed_ques is None):
return ['The question is not answered in stackoverflow']
stackoverflow_list.append("Detailed Question : " + detailed_ques)
stackoverflow_list.append("Answer is : " + answer)
return stackoverflow_list | [
"bs4.BeautifulSoup",
"requests.get"
] | [((284, 356), 'requests.get', 'requests.get', (["('https://stackoverflow.com/search?q=' + q)"], {'headers': 'headers'}), "('https://stackoverflow.com/search?q=' + q, headers=headers)\n", (296, 356), False, 'import requests\n'), ((462, 500), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""html.parser"""'], {}), "(res.text, 'html.parser')\n", (475, 500), False, 'from bs4 import BeautifulSoup\n'), ((937, 975), 'bs4.BeautifulSoup', 'BeautifulSoup', (['source'], {'features': '"""lxml"""'}), "(source, features='lxml')\n", (950, 975), False, 'from bs4 import BeautifulSoup\n'), ((886, 920), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (898, 920), False, 'import requests\n')] |
import math
import os
import time
import numpy as np
import pybullet as p
import pybullet_utils.bullet_client as bc
from gripper_module import load_gripper
from misc.urdf_editor import UrdfEditor
import utils
from fusion import TSDFVolume
class Gripper(object):
"""
A moving mount and a gripper.
the mount has 4 joints:
0: prismatic x;
1: prismatic y;
2: prismatic z;
3: revolute z;
the gripper is defined by the `gripper_type`.
"""
def __init__(self, gripper_type, bullet_client, home_position, num_side_images, voxel_size=0.004, trunc_margin_scale=5, **kwargs):
self._bullet_client = bullet_client
self._gripper_type = gripper_type
self._gripper_size = kwargs['gripper_size']
self._home_position = home_position
self._default_orientation = [0,0,0]
self._num_side_images = num_side_images
# load gripper
self._gripper = load_gripper(gripper_type)(self._bullet_client, **kwargs)
gripper_body_id = self._gripper.load(self._home_position)
# load mount
mount_urdf = 'assets/gripper/mount.urdf'
mount_body_id = self._bullet_client.loadURDF(
mount_urdf,
basePosition=self._home_position,
useFixedBase=True
)
# combine mount and gripper by a joint
ed_mount = UrdfEditor()
ed_mount.initializeFromBulletBody(mount_body_id, self._bullet_client._client)
ed_gripper = UrdfEditor()
ed_gripper.initializeFromBulletBody(gripper_body_id, self._bullet_client._client)
self._gripper_parent_index = 4
newjoint = ed_mount.joinUrdf(
childEditor=ed_gripper,
parentLinkIndex=self._gripper_parent_index,
jointPivotXYZInParent=self._gripper.get_pos_offset(),
jointPivotRPYInParent=self._bullet_client.getEulerFromQuaternion(self._gripper.get_orn_offset()),
jointPivotXYZInChild=[0, 0, 0],
jointPivotRPYInChild=[0, 0, 0],
parentPhysicsClientId=self._bullet_client._client,
childPhysicsClientId=self._bullet_client._client
)
newjoint.joint_type = self._bullet_client.JOINT_FIXED
newjoint.joint_name = "joint_mount_gripper"
urdfname = f".tmp_combined_{self._gripper_type}_{self._gripper_size:.4f}_{np.random.random():.10f}_{time.time():.10f}.urdf"
ed_mount.saveUrdf(urdfname)
# remove mount and gripper bodies
self._bullet_client.removeBody(mount_body_id)
self._bullet_client.removeBody(gripper_body_id)
self._body_id = self._bullet_client.loadURDF(
urdfname,
useFixedBase=True,
basePosition=self._home_position,
baseOrientation=self._bullet_client.getQuaternionFromEuler([0, 0, 0])
)
# remove the combined URDF
os.remove(urdfname)
# configure the gripper (e.g. friction)
self._gripper.configure(self._body_id, self._gripper_parent_index+1)
# define force and speed (movement of mount)
self._force = 10000
self._speed = 0.005
self._tsdf_size = [64, 64, 32]
self._voxel_size = voxel_size
self._trunc_margin_scale = trunc_margin_scale
bond = np.array(self._tsdf_size) * self._voxel_size
self._vol_bnds = np.array([[-bond[0]/2, bond[0]/2],
[-bond[1]/2, bond[1]/2],
[0, bond[2]]])
self._vol_bnds += np.array(self._home_position).reshape(3, -1)
# Add RGB-D camera (mimic RealSense D415) for gripper
self._gripper_cam_lookat = self._vol_bnds.mean(1)
self._gripper_cam_image_size = (512, 512)
self._gripper_cam_z_near = 0.01
self._gripper_cam_z_far = 10.0
self._gripper_cam_fov_w = 69.40
self._gripper_cam_focal_length = (float(self._gripper_cam_image_size[1])/2)/np.tan((np.pi*self._gripper_cam_fov_w/180)/2)
self._gripper_cam_fov_h = (math.atan((float(self._gripper_cam_image_size[0])/2)/self._gripper_cam_focal_length)*2/np.pi)*180
self._gripper_cam_projection_matrix = self._bullet_client.computeProjectionMatrixFOV(
fov=self._gripper_cam_fov_h,
aspect=float(self._gripper_cam_image_size[1])/float(self._gripper_cam_image_size[0]),
nearVal=self._gripper_cam_z_near,
farVal=self._gripper_cam_z_far
) # notes: 1) FOV is vertical FOV 2) aspect must be float
self._gripper_cam_intrinsics = np.array([[self._gripper_cam_focal_length, 0, float(self._gripper_cam_image_size[1])/2],
[0, self._gripper_cam_focal_length, float(self._gripper_cam_image_size[0])/2],
[0, 0, 1]])
self.fix_joints(range(self._bullet_client.getNumJoints(self._body_id)))
def get_gripper_cam_data(self, cam_position, cam_lookat, cam_up_direction):
cam_view_matrix = self._bullet_client.computeViewMatrix(cam_position, cam_lookat, cam_up_direction)
cam_pose_matrix = np.linalg.inv(np.array(cam_view_matrix).reshape(4, 4).T)
# TODO: fix flipped up and forward vectors (quick hack)
cam_pose_matrix[:, 1:3] = -cam_pose_matrix[:, 1:3]
camera_data = self._bullet_client.getCameraImage(self._gripper_cam_image_size[1],self._gripper_cam_image_size[0],
cam_view_matrix,self._gripper_cam_projection_matrix,
shadow=1,flags=self._bullet_client.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX,
renderer=self._bullet_client.ER_BULLET_HARDWARE_OPENGL)
rgb_pixels = np.array(camera_data[2]).reshape((self._gripper_cam_image_size[0], self._gripper_cam_image_size[1], 4))
color_image = rgb_pixels[:,:,:3] # remove alpha channel
z_buffer = np.array(camera_data[3]).reshape((self._gripper_cam_image_size[0], self._gripper_cam_image_size[1]))
segmentation_mask = None # camera_data[4] - not implemented yet with renderer=p.ER_BULLET_HARDWARE_OPENGL
depth_image = (2.0*self._gripper_cam_z_near*self._gripper_cam_z_far)/(self._gripper_cam_z_far+self._gripper_cam_z_near-(2.0*z_buffer-1.0)*(self._gripper_cam_z_far-self._gripper_cam_z_near))
return color_image, depth_image, segmentation_mask, cam_pose_matrix
def get_tsdf(self, open_scale):
self.move(self._home_position, 0)
self.close()
self.open(open_scale=open_scale)
self._gripper_tsdf = TSDFVolume(self._vol_bnds, voxel_size=self._voxel_size)
# take side images
cam_up_direction = [0, 0, 1]
side_look_directions = np.linspace(0, 2*np.pi, num=self._num_side_images, endpoint=False)
cam_distance = 1
for direction in side_look_directions:
cam_position = [
self._home_position[0] + cam_distance * np.cos(direction),
self._home_position[1] + cam_distance * np.sin(direction),
self._home_position[2]
]
color_image, depth_image, _, cam_pose_matrix = self.get_gripper_cam_data(cam_position, self._gripper_cam_lookat, cam_up_direction)
self._gripper_tsdf.integrate(color_image, depth_image, self._gripper_cam_intrinsics, cam_pose_matrix, obs_weight=1.)
# take image from top
color_image, depth_image, _, cam_pose_matrix = self.get_gripper_cam_data([0, 0, 2], self._gripper_cam_lookat, [1, 0, 0])
self._gripper_tsdf.integrate(color_image, depth_image, self._gripper_cam_intrinsics, cam_pose_matrix, obs_weight=2.)
# take image from bottom
color_image, depth_image, _, cam_pose_matrix = self.get_gripper_cam_data([0, 0, 0], self._gripper_cam_lookat, [1, 0, 0])
self._gripper_tsdf.integrate(color_image, depth_image, self._gripper_cam_intrinsics, cam_pose_matrix, obs_weight=2.)
tsdf_vol_cpu, _ = self._gripper_tsdf.get_volume()
tsdf_vol_cpu = np.transpose(tsdf_vol_cpu, [1, 0, 2]) # swap x-axis and y-axis to make it consitent with scene_tsdf
return tsdf_vol_cpu
def open(self, open_scale):
self._gripper.open(self._body_id, self._gripper_parent_index+1, open_scale=open_scale)
def close(self):
self._gripper.close(self._body_id, self._gripper_parent_index+1)
def move(self, target_position, rotation_angle, stop_at_contact=False):
"""
:param target_position: (x, y, z). the position of the bottom center, not the base!
:param rotation_angle: rotation in z axis \in [0, 2 * \pi]. For 2-finger gripper, angle=0 --> parallel to x-axis
"""
target_position = np.array(target_position) - np.array(self._home_position)
joint_ids = [0, 1, 2, 3]
target_states = [target_position[0], target_position[1], target_position[2], rotation_angle%(2*np.pi)]
self._bullet_client.setJointMotorControlArray(
self._body_id,
joint_ids,
self._bullet_client.POSITION_CONTROL,
targetPositions=target_states,
forces=[self._force] * len(joint_ids),
positionGains=[self._speed] * len(joint_ids)
)
for i in range(240 * 6):
current_states = np.array([self._bullet_client.getJointState(self._body_id, joint_id)[0] for joint_id in joint_ids])
states_diff = np.abs(target_states - current_states)
# stop moving gripper if gripper collide with other objects
if stop_at_contact:
is_in_contact = False
points = self._bullet_client.getContactPoints(bodyA=self._body_id)
if len(points) > 0:
for p in points:
if p[9] > 0:
is_in_contact = True
break
if is_in_contact:
break
if np.all(states_diff < 1e-4):
break
self._gripper.step_constraints(self._body_id, self._gripper_parent_index+1)
self._bullet_client.stepSimulation()
self.fix_joints(joint_ids)
def fix_joints(self, joint_ids):
current_states = np.array([self._bullet_client.getJointState(self._body_id, joint_id)[0] for joint_id in joint_ids])
self._bullet_client.setJointMotorControlArray(
self._body_id,
joint_ids,
self._bullet_client.POSITION_CONTROL,
targetPositions=current_states,
forces=[self._force] * len(joint_ids),
positionGains=[self._speed] * len(joint_ids)
)
def primitive_grasping(self, target_position, rotation_angle, open_scale=1.0, stop_at_contact=False):
"""
:param target_position: (x, y, z). the position of the bottom center, not the base!
:param rotation_angle: rotation in z axis \in [0, 2 * \pi]
:return successs or not (True/False)
"""
self.move([target_position[0], target_position[1], self._home_position[2]], rotation_angle)
self.open(open_scale)
self.move(target_position, rotation_angle, stop_at_contact=stop_at_contact)
self.close()
self.move([target_position[0], target_position[1], self._home_position[2]], rotation_angle)
def remove(self):
self._bullet_client.removeBody(self._body_id)
def get_vis_pts(self, open_scale):
pts = self._gripper.get_vis_pts(open_scale)
angle = self._default_orientation[-1] # only add rotation around z axis
rotated_pts = np.transpose(np.dot(np.asarray(
[[np.cos(angle),-np.sin(angle)],
[np.sin(angle), np.cos(angle)]]),np.transpose(pts)))
return rotated_pts | [
"numpy.abs",
"numpy.tan",
"numpy.random.random",
"fusion.TSDFVolume",
"misc.urdf_editor.UrdfEditor",
"numpy.array",
"numpy.linspace",
"numpy.cos",
"gripper_module.load_gripper",
"numpy.sin",
"numpy.all",
"numpy.transpose",
"time.time",
"os.remove"
] | [((1374, 1386), 'misc.urdf_editor.UrdfEditor', 'UrdfEditor', ([], {}), '()\n', (1384, 1386), False, 'from misc.urdf_editor import UrdfEditor\n'), ((1494, 1506), 'misc.urdf_editor.UrdfEditor', 'UrdfEditor', ([], {}), '()\n', (1504, 1506), False, 'from misc.urdf_editor import UrdfEditor\n'), ((2897, 2916), 'os.remove', 'os.remove', (['urdfname'], {}), '(urdfname)\n', (2906, 2916), False, 'import os\n'), ((3378, 3464), 'numpy.array', 'np.array', (['[[-bond[0] / 2, bond[0] / 2], [-bond[1] / 2, bond[1] / 2], [0, bond[2]]]'], {}), '([[-bond[0] / 2, bond[0] / 2], [-bond[1] / 2, bond[1] / 2], [0,\n bond[2]]])\n', (3386, 3464), True, 'import numpy as np\n'), ((6627, 6682), 'fusion.TSDFVolume', 'TSDFVolume', (['self._vol_bnds'], {'voxel_size': 'self._voxel_size'}), '(self._vol_bnds, voxel_size=self._voxel_size)\n', (6637, 6682), False, 'from fusion import TSDFVolume\n'), ((6779, 6847), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)'], {'num': 'self._num_side_images', 'endpoint': '(False)'}), '(0, 2 * np.pi, num=self._num_side_images, endpoint=False)\n', (6790, 6847), True, 'import numpy as np\n'), ((8076, 8113), 'numpy.transpose', 'np.transpose', (['tsdf_vol_cpu', '[1, 0, 2]'], {}), '(tsdf_vol_cpu, [1, 0, 2])\n', (8088, 8113), True, 'import numpy as np\n'), ((948, 974), 'gripper_module.load_gripper', 'load_gripper', (['gripper_type'], {}), '(gripper_type)\n', (960, 974), False, 'from gripper_module import load_gripper\n'), ((3308, 3333), 'numpy.array', 'np.array', (['self._tsdf_size'], {}), '(self._tsdf_size)\n', (3316, 3333), True, 'import numpy as np\n'), ((3968, 4017), 'numpy.tan', 'np.tan', (['(np.pi * self._gripper_cam_fov_w / 180 / 2)'], {}), '(np.pi * self._gripper_cam_fov_w / 180 / 2)\n', (3974, 4017), True, 'import numpy as np\n'), ((8776, 8801), 'numpy.array', 'np.array', (['target_position'], {}), '(target_position)\n', (8784, 8801), True, 'import numpy as np\n'), ((8804, 8833), 'numpy.array', 'np.array', (['self._home_position'], {}), 
'(self._home_position)\n', (8812, 8833), True, 'import numpy as np\n'), ((9492, 9530), 'numpy.abs', 'np.abs', (['(target_states - current_states)'], {}), '(target_states - current_states)\n', (9498, 9530), True, 'import numpy as np\n'), ((10024, 10052), 'numpy.all', 'np.all', (['(states_diff < 0.0001)'], {}), '(states_diff < 0.0001)\n', (10030, 10052), True, 'import numpy as np\n'), ((2361, 2379), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (2377, 2379), True, 'import numpy as np\n'), ((2387, 2398), 'time.time', 'time.time', ([], {}), '()\n', (2396, 2398), False, 'import time\n'), ((3549, 3578), 'numpy.array', 'np.array', (['self._home_position'], {}), '(self._home_position)\n', (3557, 3578), True, 'import numpy as np\n'), ((5775, 5799), 'numpy.array', 'np.array', (['camera_data[2]'], {}), '(camera_data[2])\n', (5783, 5799), True, 'import numpy as np\n'), ((5962, 5986), 'numpy.array', 'np.array', (['camera_data[3]'], {}), '(camera_data[3])\n', (5970, 5986), True, 'import numpy as np\n'), ((11805, 11822), 'numpy.transpose', 'np.transpose', (['pts'], {}), '(pts)\n', (11817, 11822), True, 'import numpy as np\n'), ((5161, 5186), 'numpy.array', 'np.array', (['cam_view_matrix'], {}), '(cam_view_matrix)\n', (5169, 5186), True, 'import numpy as np\n'), ((7003, 7020), 'numpy.cos', 'np.cos', (['direction'], {}), '(direction)\n', (7009, 7020), True, 'import numpy as np\n'), ((7078, 7095), 'numpy.sin', 'np.sin', (['direction'], {}), '(direction)\n', (7084, 7095), True, 'import numpy as np\n'), ((11728, 11741), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (11734, 11741), True, 'import numpy as np\n'), ((11773, 11786), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (11779, 11786), True, 'import numpy as np\n'), ((11788, 11801), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (11794, 11801), True, 'import numpy as np\n'), ((11743, 11756), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (11749, 11756), True, 'import numpy as np\n')] |
from flask import Flask
from flaskslack.flaskslack import FlaskSlack
from flaskslack.slack import ResponseType, Slack
app = Flask(__name__)
# If you want to get your config in a non default way,
# you can create a slack client with: Slack('slack_oauth_token', 'slack_signing_secret')
slack = Slack.create()
flask_slack = FlaskSlack(app, slack)
# set verify_signature to False if you want to do some local testing
@flask_slack.slack_route('/slack/endpoint', response_type=ResponseType.IN_CHANNEL, verify_signature=True)
def get_channel_member_ids(form_content):
"""
:param form_content: a dict containing the data payload from the slack HTTP POST
see: https://api.slack.com/slash-commands#app_command_handling
:return: It should return a dict. The dict should contain a 'text' field, and/or a list of 'attachments'.
see: https://api.slack.com/slash-commands#responding_immediate_response
"""
channel_id = form_content["channel_id"]
members_form_content = slack.try_api_call("conversations.members", channel=channel_id)
channel_member_ids = members_form_content["members"]
text_response = f"The channel_member_ids for channel_id {channel_id} is: {channel_member_ids}"
return Slack.create_response(text_response)
if __name__ == "__main__":
app.run(host="localhost")
| [
"flaskslack.flaskslack.FlaskSlack",
"flaskslack.slack.Slack.create",
"flaskslack.slack.Slack.create_response",
"flask.Flask"
] | [((126, 141), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (131, 141), False, 'from flask import Flask\n'), ((294, 308), 'flaskslack.slack.Slack.create', 'Slack.create', ([], {}), '()\n', (306, 308), False, 'from flaskslack.slack import ResponseType, Slack\n'), ((323, 345), 'flaskslack.flaskslack.FlaskSlack', 'FlaskSlack', (['app', 'slack'], {}), '(app, slack)\n', (333, 345), False, 'from flaskslack.flaskslack import FlaskSlack\n'), ((1239, 1275), 'flaskslack.slack.Slack.create_response', 'Slack.create_response', (['text_response'], {}), '(text_response)\n', (1260, 1275), False, 'from flaskslack.slack import ResponseType, Slack\n')] |
from __future__ import annotations
from typing import Union, List, Tuple, Set, Dict
from datetime import date
from IPython.display import display, Markdown as md
from itertools import chain
from rich.table import Table
from rich.console import Console
from wow import SPECIALIZATION_DATA, CLASS_DATA, ENCOUNTER_DATA, cDifficulty
from wow.query import Query, Predicate
from .record import Record
class Encounter:
"""
Represents a group of Records in a single Encounter.
A encounter starts with the event ENCOUNTER_START and finishes with a ENCOUNTER_END event.
"""
def from_db() -> List[Encounter]:
pass
def __init__(self, log: Query, beg: Record, end: Record):
self.beg = beg
self.timestamp_begin = date.datetime.strptime(
beg.timestamp, '%m/%d %H:%M:%S.%f'
).replace(year=2022)
self.end = end
self.timestamp_end = date.datetime.strptime(
end.timestamp, '%m/%d %H:%M:%S.%f'
).replace(year=2022)
try:
self.id = int(beg.data[1])
self.name = beg.data[2]
except:
print(beg)
raise Exception('BOOM.')
self.log = log.slice(self.beg.idx, self.end.idx + 1).qlist()
def __repr__(self) -> str:
return self.text
def __iter__(self):
return self.iter()
@property
def q(self) -> Query:
return self.log
# NOTE: There is a minor difference in miliseconds between
# duration (calculated from encounter_start / encounter_end timestamp) and
# log_duration (extracted from the log)
@property
def duration(self) -> date.timedelta:
return self.timestamp_end - self.timestamp_begin
@property
def log_duration(self):
return str(
date.timedelta(microseconds=int(self.end[6]) * 1000)
)
@property
def title(self) -> str:
return f'{self.name} {self.duration.seconds // 60}:{self.duration.seconds % 60:02d}'
@property
def text(self):
return """
<style>
sb {{ color: steelblue }}
o {{ color: Orange }}
g {{ color: Green }}
</style>
# <sb>{0.difficulty} {0.name} {0.result} (id: {0.id})</sb>
- {0.beg}
- {1} entries in **{0.log_duration}** / {0.duration}
- {0.end}
""".format(self, self.q.len())
@property
def difficulty(self):
return cDifficulty.get(self.beg[3], None)
@property
def result(self) -> str:
return ['Wipe', 'Kill'][int(self.end[5])]
def md(self):
display(md(self.text))
def iter(self):
return self.q.iter()
def getReport(self) -> EncounterReport:
return ENCOUNTER_DATA.get(self.id, EncounterReport)(self)
def export(self) -> List[Tuple[str, str, str]]:
return self.q.map(
(
Predicate.getTimestampString(),
Predicate.getEvent(),
Predicate.getRawData()
)
).iter()
class EncounterReport:
"""
A pré-defined Query
"""
def __init__(self, encounters: Union[Encounter, List[Encounter], Tuple[Encounter], Query]) -> None:
if type(encounters) in [list, tuple]:
self.data = encounters
else:
self.data = [encounters]
@property
def q(self):
return Query(chain(*self.data))
@property
def e(self):
if(len(self.data) == 1):
return self.data[0]
else:
return None
def report(self):
self.showEncounters()
def listPlayers(self) -> Set:
"""
Returns a set with all players in the fight
"""
playerSpec = self.q.filter(
Predicate.isEventIn(['COMBATANT_INFO'])
).map(
(
Predicate.getActorId(),
Predicate.getDataIndex(24)
)
).dict()
players = Table(title="Players")
players.add_column("Name")
players.add_column("ID")
players.add_column("Class")
players.add_column("Spec")
players.add_column("Role")
# print(playerData)
playerList = self.q.filter(
Predicate.isPlayerAction()
).map(
Predicate.getActorInfo()
).set()
playerData = {}
for p in playerList:
name, _, id = p
specID = playerSpec[id]
className, specName, role = SPECIALIZATION_DATA[int(specID)]
color = CLASS_DATA[className]
playerData.update(
{id: (name, id, className, specName, role, color)})
for p in sorted(playerData.items(), key=lambda p: '{0}-{1}-{2}'.format(p[1][4], p[1][2], p[1][4])):
name, id, className, specName, role, color = p[1]
players.add_row(name, id, className, specName, role, style=color)
Console().print(players)
return playerList
def listEncounters(self):
return self.q.filter(Predicate.isEncounterStart()).list()
def showEncounters(self):
for e in self.data:
display(e.md())
def getSpellDamage(self):
spellDamage = self.q.filter(
Predicate.all([
Predicate.any([Predicate.isPlayerAction(),
Predicate.isPetAction()]),
Predicate.isTargetHostile(),
Predicate.isEventIn([
'SPELL_DAMAGE',
'SPELL_PERIODIC_DAMAGE',
'RANGE_DAMAGE'
]),
])
).map((
Predicate.getActorId(),
Predicate.getActor(),
lambda x: int(x[29])
)).groupby(
lambda x: tuple(x[0:2]),
lambda x: x[2],
# lambda x: help.human_format(sum(x))
sum
).sort(
lambda x: x[1],
True
).map(
lambda x: (*x[0], x[1]) # help.human_format(x[1])
).pandas(['Unit ID', 'Name', 'Total (Spell)'])
return spellDamage
def getMeleeDamage(self):
def consolidate(x):
return [x.action, x.actor_id][x.action[0:3] == '000']
meleeDamage = self.q.filter(
Predicate.all([
Predicate.any([Predicate.isPlayerAction(),
Predicate.isPetAction()]),
Predicate.isTargetHostile(),
Predicate.isEventIn([
'SWING_DAMAGE',
]),
])
).map(
(
Predicate.getActorId(),
consolidate,
Predicate.getActor(),
lambda x: int(x[26])
)
).groupby(
lambda x: tuple(x[0:3]),
lambda x: x[3],
# lambda x: help.human_format(sum(x))
sum
).sort(
lambda x: x[1],
True
).map(
lambda x: (*x[0], x[1]) # help.human_format(x[1])
).pandas(['Unit ID', 'Player ID', 'Name', 'Total (Melee)'])
return meleeDamage
def getDamage(self):
return (
self.getSpellDamage(),
self.getMeleeDamage()
)
def hostile_action(self) -> Query:
# Hostile NPCs & Their Actions
return self.q.filter(
Predicate.isCreatureAction()
).filter(
Predicate.isActorHostile()
).map(
(Predicate.getActor(), Predicate.getEvent(), Predicate.getAction())
).groupby(
lambda x: x[0],
lambda x: x[1],
set
).dict()
def hostile_action2(self) -> Query:
return self.q.filter(
Predicate.isCreatureAction()
).filter(
Predicate.isActorHostile()
).map(
(Predicate.getActionId(), Predicate.getAction(),
Predicate.getActor(), Predicate.getEvent())
).qset(
).sort(
lambda x: x[1]
).list()
def actor_actions(self, actor) -> Query:
return self.q.filter(
Predicate.isActor(actor)
).map(
(
Predicate.getAction(),
Predicate.getEvent(),
Predicate.getTarget()
)
)
def actions(self, event, action) -> Query:
return self.q.filter(
Predicate.isEventIn(event) # 'SPELL_AURA_APPLIED'
).filter(
Predicate.isAction(action) # '"Sorrowful Procession"'
).map(
(
Predicate.getTimestamp(),
Predicate.getActorId(),
Predicate.getTarget()
)
)
@ classmethod
def groupActionByActor(self, event, action) -> Dict[str, Tuple]:
return self.actions(self.q, event, action).groupby(
lambda x: x[1],
lambda x: x[2],
).dict()
| [
"itertools.chain",
"wow.query.Predicate.isEventIn",
"wow.cDifficulty.get",
"wow.query.Predicate.isActor",
"wow.query.Predicate.getActor",
"wow.query.Predicate.getActionId",
"wow.query.Predicate.isPetAction",
"wow.query.Predicate.getActorInfo",
"IPython.display.Markdown",
"wow.query.Predicate.getDa... | [((2352, 2386), 'wow.cDifficulty.get', 'cDifficulty.get', (['self.beg[3]', 'None'], {}), '(self.beg[3], None)\n', (2367, 2386), False, 'from wow import SPECIALIZATION_DATA, CLASS_DATA, ENCOUNTER_DATA, cDifficulty\n'), ((3865, 3887), 'rich.table.Table', 'Table', ([], {'title': '"""Players"""'}), "(title='Players')\n", (3870, 3887), False, 'from rich.table import Table\n'), ((2516, 2529), 'IPython.display.Markdown', 'md', (['self.text'], {}), '(self.text)\n', (2518, 2529), True, 'from IPython.display import display, Markdown as md\n'), ((2641, 2685), 'wow.ENCOUNTER_DATA.get', 'ENCOUNTER_DATA.get', (['self.id', 'EncounterReport'], {}), '(self.id, EncounterReport)\n', (2659, 2685), False, 'from wow import SPECIALIZATION_DATA, CLASS_DATA, ENCOUNTER_DATA, cDifficulty\n'), ((3297, 3314), 'itertools.chain', 'chain', (['*self.data'], {}), '(*self.data)\n', (3302, 3314), False, 'from itertools import chain\n'), ((756, 814), 'datetime.date.datetime.strptime', 'date.datetime.strptime', (['beg.timestamp', '"""%m/%d %H:%M:%S.%f"""'], {}), "(beg.timestamp, '%m/%d %H:%M:%S.%f')\n", (778, 814), False, 'from datetime import date\n'), ((909, 967), 'datetime.date.datetime.strptime', 'date.datetime.strptime', (['end.timestamp', '"""%m/%d %H:%M:%S.%f"""'], {}), "(end.timestamp, '%m/%d %H:%M:%S.%f')\n", (931, 967), False, 'from datetime import date\n'), ((4829, 4838), 'rich.console.Console', 'Console', ([], {}), '()\n', (4836, 4838), False, 'from rich.console import Console\n'), ((8106, 8127), 'wow.query.Predicate.getAction', 'Predicate.getAction', ([], {}), '()\n', (8125, 8127), False, 'from wow.query import Query, Predicate\n'), ((8145, 8165), 'wow.query.Predicate.getEvent', 'Predicate.getEvent', ([], {}), '()\n', (8163, 8165), False, 'from wow.query import Query, Predicate\n'), ((8183, 8204), 'wow.query.Predicate.getTarget', 'Predicate.getTarget', ([], {}), '()\n', (8202, 8204), False, 'from wow.query import Query, Predicate\n'), ((8500, 8524), 
'wow.query.Predicate.getTimestamp', 'Predicate.getTimestamp', ([], {}), '()\n', (8522, 8524), False, 'from wow.query import Query, Predicate\n'), ((8542, 8564), 'wow.query.Predicate.getActorId', 'Predicate.getActorId', ([], {}), '()\n', (8562, 8564), False, 'from wow.query import Query, Predicate\n'), ((8582, 8603), 'wow.query.Predicate.getTarget', 'Predicate.getTarget', ([], {}), '()\n', (8601, 8603), False, 'from wow.query import Query, Predicate\n'), ((4194, 4218), 'wow.query.Predicate.getActorInfo', 'Predicate.getActorInfo', ([], {}), '()\n', (4216, 4218), False, 'from wow.query import Query, Predicate\n'), ((4941, 4969), 'wow.query.Predicate.isEncounterStart', 'Predicate.isEncounterStart', ([], {}), '()\n', (4967, 4969), False, 'from wow.query import Query, Predicate\n'), ((8036, 8060), 'wow.query.Predicate.isActor', 'Predicate.isActor', (['actor'], {}), '(actor)\n', (8053, 8060), False, 'from wow.query import Query, Predicate\n'), ((8400, 8426), 'wow.query.Predicate.isAction', 'Predicate.isAction', (['action'], {}), '(action)\n', (8418, 8426), False, 'from wow.query import Query, Predicate\n'), ((2802, 2832), 'wow.query.Predicate.getTimestampString', 'Predicate.getTimestampString', ([], {}), '()\n', (2830, 2832), False, 'from wow.query import Query, Predicate\n'), ((2850, 2870), 'wow.query.Predicate.getEvent', 'Predicate.getEvent', ([], {}), '()\n', (2868, 2870), False, 'from wow.query import Query, Predicate\n'), ((2888, 2910), 'wow.query.Predicate.getRawData', 'Predicate.getRawData', ([], {}), '()\n', (2908, 2910), False, 'from wow.query import Query, Predicate\n'), ((3748, 3770), 'wow.query.Predicate.getActorId', 'Predicate.getActorId', ([], {}), '()\n', (3768, 3770), False, 'from wow.query import Query, Predicate\n'), ((3788, 3814), 'wow.query.Predicate.getDataIndex', 'Predicate.getDataIndex', (['(24)'], {}), '(24)\n', (3810, 3814), False, 'from wow.query import Query, Predicate\n'), ((3663, 3702), 'wow.query.Predicate.isEventIn', 'Predicate.isEventIn', 
(["['COMBATANT_INFO']"], {}), "(['COMBATANT_INFO'])\n", (3682, 3702), False, 'from wow.query import Query, Predicate\n'), ((4140, 4166), 'wow.query.Predicate.isPlayerAction', 'Predicate.isPlayerAction', ([], {}), '()\n', (4164, 4166), False, 'from wow.query import Query, Predicate\n'), ((8319, 8345), 'wow.query.Predicate.isEventIn', 'Predicate.isEventIn', (['event'], {}), '(event)\n', (8338, 8345), False, 'from wow.query import Query, Predicate\n'), ((7395, 7415), 'wow.query.Predicate.getActor', 'Predicate.getActor', ([], {}), '()\n', (7413, 7415), False, 'from wow.query import Query, Predicate\n'), ((7417, 7437), 'wow.query.Predicate.getEvent', 'Predicate.getEvent', ([], {}), '()\n', (7435, 7437), False, 'from wow.query import Query, Predicate\n'), ((7439, 7460), 'wow.query.Predicate.getAction', 'Predicate.getAction', ([], {}), '()\n', (7458, 7460), False, 'from wow.query import Query, Predicate\n'), ((7340, 7366), 'wow.query.Predicate.isActorHostile', 'Predicate.isActorHostile', ([], {}), '()\n', (7364, 7366), False, 'from wow.query import Query, Predicate\n'), ((7767, 7790), 'wow.query.Predicate.getActionId', 'Predicate.getActionId', ([], {}), '()\n', (7788, 7790), False, 'from wow.query import Query, Predicate\n'), ((7792, 7813), 'wow.query.Predicate.getAction', 'Predicate.getAction', ([], {}), '()\n', (7811, 7813), False, 'from wow.query import Query, Predicate\n'), ((7828, 7848), 'wow.query.Predicate.getActor', 'Predicate.getActor', ([], {}), '()\n', (7846, 7848), False, 'from wow.query import Query, Predicate\n'), ((7850, 7870), 'wow.query.Predicate.getEvent', 'Predicate.getEvent', ([], {}), '()\n', (7868, 7870), False, 'from wow.query import Query, Predicate\n'), ((7281, 7309), 'wow.query.Predicate.isCreatureAction', 'Predicate.isCreatureAction', ([], {}), '()\n', (7307, 7309), False, 'from wow.query import Query, Predicate\n'), ((7712, 7738), 'wow.query.Predicate.isActorHostile', 'Predicate.isActorHostile', ([], {}), '()\n', (7736, 7738), False, 'from 
wow.query import Query, Predicate\n'), ((5540, 5562), 'wow.query.Predicate.getActorId', 'Predicate.getActorId', ([], {}), '()\n', (5560, 5562), False, 'from wow.query import Query, Predicate\n'), ((5576, 5596), 'wow.query.Predicate.getActor', 'Predicate.getActor', ([], {}), '()\n', (5594, 5596), False, 'from wow.query import Query, Predicate\n'), ((6511, 6533), 'wow.query.Predicate.getActorId', 'Predicate.getActorId', ([], {}), '()\n', (6531, 6533), False, 'from wow.query import Query, Predicate\n'), ((6580, 6600), 'wow.query.Predicate.getActor', 'Predicate.getActor', ([], {}), '()\n', (6598, 6600), False, 'from wow.query import Query, Predicate\n'), ((7653, 7681), 'wow.query.Predicate.isCreatureAction', 'Predicate.isCreatureAction', ([], {}), '()\n', (7679, 7681), False, 'from wow.query import Query, Predicate\n'), ((5294, 5321), 'wow.query.Predicate.isTargetHostile', 'Predicate.isTargetHostile', ([], {}), '()\n', (5319, 5321), False, 'from wow.query import Query, Predicate\n'), ((5339, 5417), 'wow.query.Predicate.isEventIn', 'Predicate.isEventIn', (["['SPELL_DAMAGE', 'SPELL_PERIODIC_DAMAGE', 'RANGE_DAMAGE']"], {}), "(['SPELL_DAMAGE', 'SPELL_PERIODIC_DAMAGE', 'RANGE_DAMAGE'])\n", (5358, 5417), False, 'from wow.query import Query, Predicate\n'), ((6328, 6355), 'wow.query.Predicate.isTargetHostile', 'Predicate.isTargetHostile', ([], {}), '()\n', (6353, 6355), False, 'from wow.query import Query, Predicate\n'), ((6373, 6410), 'wow.query.Predicate.isEventIn', 'Predicate.isEventIn', (["['SWING_DAMAGE']"], {}), "(['SWING_DAMAGE'])\n", (6392, 6410), False, 'from wow.query import Query, Predicate\n'), ((5192, 5218), 'wow.query.Predicate.isPlayerAction', 'Predicate.isPlayerAction', ([], {}), '()\n', (5216, 5218), False, 'from wow.query import Query, Predicate\n'), ((5251, 5274), 'wow.query.Predicate.isPetAction', 'Predicate.isPetAction', ([], {}), '()\n', (5272, 5274), False, 'from wow.query import Query, Predicate\n'), ((6226, 6252), 'wow.query.Predicate.isPlayerAction', 
'Predicate.isPlayerAction', ([], {}), '()\n', (6250, 6252), False, 'from wow.query import Query, Predicate\n'), ((6285, 6308), 'wow.query.Predicate.isPetAction', 'Predicate.isPetAction', ([], {}), '()\n', (6306, 6308), False, 'from wow.query import Query, Predicate\n')] |
# -*- coding: utf-8 -*-
#!/usr/bin/env python3
"""
Created on Sun Feb 10 09:32:48 2019
@author: yansl
Target:
1. apply YCrCB & HSV color space to create standard color space
"""
import cv2
import numpy as np
#%%
"""
secondary functions
"""
def channel_merge(*, B, G, R):
    """Stack three single-channel planes into one 3-channel float32 image.

    :param B: plane stored in the first (blue) channel
    :param G: plane stored in the second (green) channel
    :param R: plane stored in the third (red) channel
    :return: (H, W, 3) float32 array with the planes along the last axis
    """
    # astype() returns a copy by default, so the original's extra .copy()
    # calls were redundant work.  np.dstack yields the same contiguous
    # (H, W, 3) layout as cv2.merge while keeping this helper OpenCV-free.
    return np.dstack([B.astype(np.float32),
                      G.astype(np.float32),
                      R.astype(np.float32)])
#%%
"""
primary functions
"""
def standard_color_space_ycrcb(t_Y, r_Cr, r_Cb):
    """Build a BGR image from separate Y, Cr and Cb planes (float32)."""
    # Stack the planes, then let OpenCV perform the YCrCb -> BGR conversion.
    ycrcb_image = channel_merge(B=t_Y, G=r_Cr, R=r_Cb)
    return cv2.cvtColor(ycrcb_image, cv2.COLOR_YCrCb2BGR)
def standard_color_space_hsv(r_H, r_S, t_V):
    """Build a BGR image from separate H, S and V planes (float32)."""
    #format: float32
    #HSV
    # Stack the planes into one HSV image, then convert to BGR via OpenCV.
    temp_hsv = channel_merge(B = r_H, G = r_S, R = t_V)
    rgb_based_hsv = cv2.cvtColor(temp_hsv, cv2.COLOR_HSV2BGR)
    return rgb_based_hsv | [
"cv2.merge",
"cv2.cvtColor"
] | [((453, 473), 'cv2.merge', 'cv2.merge', (['[B, G, R]'], {}), '([B, G, R])\n', (462, 473), False, 'import cv2\n'), ((699, 744), 'cv2.cvtColor', 'cv2.cvtColor', (['temp_ycrcb', 'cv2.COLOR_YCrCb2BGR'], {}), '(temp_ycrcb, cv2.COLOR_YCrCb2BGR)\n', (711, 744), False, 'import cv2\n'), ((931, 972), 'cv2.cvtColor', 'cv2.cvtColor', (['temp_hsv', 'cv2.COLOR_HSV2BGR'], {}), '(temp_hsv, cv2.COLOR_HSV2BGR)\n', (943, 972), False, 'import cv2\n')] |
#from Members import Uyeler as uye
import csv
import errno
import os
# Directory that holds one CSV workout-program file per member.
baseProgramPath = "Programlar/"
# Day-name rows written before each day's exercises in the program files.
days = [["Pazartesi"], ["Salı"], ["Çarşamba"], ["Perşembe"], ["Cuma"], ["Cumartesi"], ["Pazar"]]
# '|'-separated CSV dialect used by all program file I/O below.
# NOTE(review): quotechar='' with QUOTE_NONE looks odd — confirm the csv
# module accepts an empty quotechar on the targeted Python version.
csv.register_dialect('myDialect', delimiter='|', quoting=csv.QUOTE_NONE, skipinitialspace=True, quotechar='')
class Program:
    """Helpers for member body metrics and for reading/writing workout programs.

    All methods are static; file I/O uses the module-level ``baseProgramPath``
    directory and the ``myDialect`` CSV dialect registered at import time.
    """

    @staticmethod
    def netYagHesaplama(cinsiyet: str, boy: float, kilo: float) -> float:
        """Approximate the net fat mass (kg) of a member's body.

        :param cinsiyet: gender of the member (0 = male, otherwise female)
        :param boy: height of the member in cm
        :param kilo: body weight of the member in kg
        :return: approximate fat mass in kg
        """
        return kilo - Program.yagsizKiloHesapla(cinsiyet, boy, kilo)

    @staticmethod
    def yagsizKiloHesapla(cinsiyet: str, boy: float, kilo: float) -> float:
        """Approximate the lean (fat-free) body mass of a member.

        Male:   (1.10 * weight(kg)) - 128 * (weight^2 / (100 * height(m))^2)
        Female: (1.07 * weight(kg)) - 148 * (weight^2 / (100 * height(m))^2)

        ``boy`` is given in cm, which equals ``100 * height(m)`` in the formula.

        :param cinsiyet: gender of the member (0 = male, otherwise female)
        :param boy: height in cm
        :param kilo: body weight in kg
        :return: approximate lean body mass in kg
        """
        if cinsiyet == 0:  # male
            return (kilo * 1.10) - 128 * ((kilo ** 2) / boy ** 2)
        else:  # female
            return (1.07 * kilo) - 148 * ((kilo ** 2) / boy ** 2)

    @staticmethod
    def boyKiloEndeks(boy: float, kilo: float) -> str:
        """Compute the body-mass index and return the matching Turkish message.

        :param boy: height in cm
        :param kilo: body weight in kg
        :return: the BMI value embedded in a category message
        """
        bmi = kilo / ((boy / 100) ** 2)
        if bmi < 18.5:
            return f"Vücut kitle endeksiniz: {bmi:.2f}, zayıfsınız."
        elif bmi < 25:
            return f"Vücut kitle endeksiniz: {bmi:.2f}, normalsiniz."
        elif bmi < 30:
            return f"Vücut kitle endeksiniz: {bmi:.2f}, fazla kilolusunuz."
        elif bmi < 35:
            return f"Vücut kitle endeksiniz: {bmi:.2f}, Şişmansınız(1. derece obez)."
        elif bmi < 45:
            return f"Vücut kitle endeksiniz: {bmi:.2f}, Şişmansınız(2. derece obez)."
        else:
            # BUG FIX: the original tested `45 < bmi`, so bmi == 45.0 fell
            # through every branch and returned None.
            return f"Vücut kitle endeksiniz: {bmi:.2f}, Aşırı şişmansınız(3. derece obez), sağlığınız tehlikede."

    @staticmethod
    def programSil(Id: str):
        """Delete a member's workout-program file from disk.

        :param Id: member identity number
        """
        try:
            # Use the shared base path instead of a duplicated literal.
            os.remove(baseProgramPath + Id + ".csv")
        except Exception as e:
            # BUG FIX: `'Silme hatası: ' % e` raised TypeError because the
            # format string had no conversion specifier.
            print(f'Silme hatası: {e}')

    @staticmethod
    def programYaz(prog: list, Id: str):
        """Write a member's workout program to disk as CSV.

        Each day's exercise row is preceded by the day name and followed by
        a ';' separator row.

        :param prog: workout program, one row of exercises per day
        :param Id: member identity number
        """
        with open(baseProgramPath + Id + ".csv", mode='w', encoding='utf-8', newline='') as writeFile:
            # BUG FIX: the dialect is registered as 'myDialect'; the original
            # 'myDialectp' name made csv.writer raise an unknown-dialect error.
            writer = csv.writer(writeFile, 'myDialect')
            semicolon = [';']
            for ind, row in enumerate(prog):
                writer.writerow(days[ind])
                writer.writerow(row)
                writer.writerow(semicolon)

    @staticmethod
    def programOku(Id: str) -> str:
        """Read a member's workout program from disk and flatten it to a string.

        Day-name rows are dropped; one day's exercises are joined with '*'
        and the days are joined with '|'.  Characters left over from the
        str(list) representation are stripped afterwards.

        :param Id: member identity number
        :return: the program as a single string (or the initial empty list
                 if the file could not be read, matching prior behaviour)
        """
        liste = []
        prog = []
        try:
            with open(baseProgramPath + Id + ".csv", mode='r', encoding='utf-8') as readerFile:
                # BUG FIX: use the registered 'myDialect' dialect name.
                reader = csv.reader(readerFile, 'myDialect')
                for row in reader:
                    liste.append(row)
            # BUG FIX: the original popped rows from `liste` while enumerating
            # it, which can skip rows; filtering builds a clean copy instead.
            liste = [row for row in liste if row not in days]
            temp = ""
            count = 0
            for ind, row in enumerate(liste):
                if ';' in row:
                    # A ';' row terminates one day's exercises.
                    for i in range(count, ind):
                        temp += str(liste[i])
                        if i < ind - 1:
                            temp += "*"
                    prog.append(temp)
                    count = ind + 1
                    temp = ""
            prog = '|'.join(str(elem) for elem in prog)
            # Strip characters introduced by the str(list) representation.
            prog = prog.replace('[', '')
            prog = prog.replace('\'', '')
            prog = prog.replace(']', '')
            prog = prog.replace(' ', ';')
        except OSError as exc:  # e.g. the Programlar directory is missing
            if exc.errno != errno.EEXIST:
                os.mkdir(os.getcwd() + "/Programlar/")
        return prog
| [
"csv.register_dialect",
"csv.writer",
"os.getcwd",
"csv.reader",
"os.remove"
] | [((199, 312), 'csv.register_dialect', 'csv.register_dialect', (['"""myDialect"""'], {'delimiter': '"""|"""', 'quoting': 'csv.QUOTE_NONE', 'skipinitialspace': '(True)', 'quotechar': '""""""'}), "('myDialect', delimiter='|', quoting=csv.QUOTE_NONE,\n skipinitialspace=True, quotechar='')\n", (219, 312), False, 'import csv\n'), ((3047, 3085), 'os.remove', 'os.remove', (["('Programlar/' + Id + '.csv')"], {}), "('Programlar/' + Id + '.csv')\n", (3056, 3085), False, 'import os\n'), ((3492, 3527), 'csv.writer', 'csv.writer', (['writeFile', '"""myDialectp"""'], {}), "(writeFile, 'myDialectp')\n", (3502, 3527), False, 'import csv\n'), ((4155, 4191), 'csv.reader', 'csv.reader', (['readerFile', '"""myDialectp"""'], {}), "(readerFile, 'myDialectp')\n", (4165, 4191), False, 'import csv\n'), ((4948, 4959), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4957, 4959), False, 'import os\n')] |
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.dialects.postgresql import CIDR
from sqlalchemy.orm import relationship, Session
from models import Network
from db import db
import ipaddress as ip
class Subnet(db.Model):
    """Declarative model for a subnet belonging to a :class:`Network`.

    The primary key is the composite string "<network_id>:<name>", derived
    by the event listeners below before insert/update.
    """
    __tablename__ = "subnet"
    id = Column(String, primary_key=True)
    network_id = Column(String, ForeignKey('network.id'), nullable=False)
    # BUG FIX: `native_id` was declared twice in the original class body;
    # the duplicate assignment is removed.
    native_id = Column(String)
    name = Column(String, nullable=False)
    cidr = Column(CIDR, nullable=False)
    defaults = Column(Text)

    network = relationship("Network", back_populates="subnets")
    fleets = relationship("Fleet", secondary='subnets_fleets', back_populates="subnets")

    def __repr__(self):
        return self.id

    def net(self):
        """Return this subnet's CIDR as an :class:`ipaddress.IPv4Network`."""
        return ip.IPv4Network(address=self.cidr)
class SubnetUnavailableError(RuntimeError):
    """Raised when a subnet's CIDR is already in use within its network.

    ``errors`` carries any additional detail supplied by the caller.
    """

    def __init__(self, message, errors):
        RuntimeError.__init__(self, message)
        self.errors = errors
@db.event.listens_for(Subnet, 'before_update')
def my_before_update_listener(mapper, connection, subnet):
    """Keep ``subnet.id`` in sync with ``network_id``/``name`` before an UPDATE is flushed."""
    __update_id__(subnet)

# TODO: this shouldn't be a rest call, refactor it
@db.event.listens_for(Subnet, 'before_insert')
def my_before_insert_listener(mapper, connection, subnet):
    """Reject an INSERT whose CIDR already exists in the target network,
    otherwise derive the subnet's composite id.

    :raises SubnetUnavailableError: if the CIDR is already in use.
    """
    newsubnet = ip.IPv4Network(address=subnet.cidr)
    session = db.session
    network = session.query(Network).filter_by(id=subnet.network_id).first()
    # CIDRs already present in this network (rows without an id are still
    # pending and ignored).  The loop variable no longer shadows `subnet`.
    used = [ip.IPv4Network(existing.cidr) for existing in network.subnets
            if existing.id is not None]
    if newsubnet in used:
        # BUG FIX: the original built the exception with the undefined name
        # `null` (a NameError at runtime) and then *returned* it; SQLAlchemy
        # ignores listener return values, so the error is raised instead.
        raise SubnetUnavailableError("Already Used", [])
    __update_id__(subnet)
def __update_id__(subnet):
    """Derive the composite primary key "<network_id>:<name>" for *subnet*."""
    subnet.id = "{}:{}".format(subnet.network_id, subnet.name)
| [
"sqlalchemy.orm.relationship",
"db.db.event.listens_for",
"ipaddress.IPv4Network"
] | [((980, 1025), 'db.db.event.listens_for', 'db.event.listens_for', (['Subnet', '"""before_update"""'], {}), "(Subnet, 'before_update')\n", (1000, 1025), False, 'from db import db\n'), ((1165, 1210), 'db.db.event.listens_for', 'db.event.listens_for', (['Subnet', '"""before_insert"""'], {}), "(Subnet, 'before_insert')\n", (1185, 1210), False, 'from db import db\n'), ((570, 619), 'sqlalchemy.orm.relationship', 'relationship', (['"""Network"""'], {'back_populates': '"""subnets"""'}), "('Network', back_populates='subnets')\n", (582, 619), False, 'from sqlalchemy.orm import relationship, Session\n'), ((633, 708), 'sqlalchemy.orm.relationship', 'relationship', (['"""Fleet"""'], {'secondary': '"""subnets_fleets"""', 'back_populates': '"""subnets"""'}), "('Fleet', secondary='subnets_fleets', back_populates='subnets')\n", (645, 708), False, 'from sqlalchemy.orm import relationship, Session\n'), ((1286, 1321), 'ipaddress.IPv4Network', 'ip.IPv4Network', ([], {'address': 'subnet.cidr'}), '(address=subnet.cidr)\n', (1300, 1321), True, 'import ipaddress as ip\n'), ((792, 825), 'ipaddress.IPv4Network', 'ip.IPv4Network', ([], {'address': 'self.cidr'}), '(address=self.cidr)\n', (806, 825), True, 'import ipaddress as ip\n'), ((1448, 1475), 'ipaddress.IPv4Network', 'ip.IPv4Network', (['subnet.cidr'], {}), '(subnet.cidr)\n', (1462, 1475), True, 'import ipaddress as ip\n')] |
# -*- coding: UTF-8 -*-
import cv2
import face_recognition
# Source clip and its basic properties.
video = cv2.VideoCapture("short_biden.mp4")
fps = video.get(cv2.CAP_PROP_FPS)
frameCount = video.get(cv2.CAP_PROP_FRAME_COUNT)
size = (int(video.get(cv2.CAP_PROP_FRAME_WIDTH)), int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)))
# Reference photo of the person to detect in the clip.
biden_image = face_recognition.load_image_file("biden.jpg")
# Output writer mirroring the input clip's fps and frame size.
videoWriter = cv2.VideoWriter('result.avi', cv2.VideoWriter_fourcc(*'XVID'), fps, size)
# videoWriter = cv2.VideoWriter('result.mp4', cv2.VideoWriter_fourcc(*'MP4V'), fps, size)
# OpenCV: FFMPEG: tag 0x5634504d/'MP4V' is not supported with codec id 12 and format 'mp4 / MP4 (MPEG-4 Part 14)'
# OpenCV: FFMPEG: fallback to use tag 0x7634706d/'mp4v'
# Grab the first frame; `success` is False for an unreadable/empty clip.
success, frame = video.read()
# Initialize some variables
face_locations = []
face_encodings = []
face_names = []
index = 1
if success:
    # def encoding_imgs(src_imgs):
    # TODO: decouple this block into its own function
    # Get the face encodings for each face in each image file
    # Since there could be more than one face in each image, it returns a list of encodings.
    # But since I know each image only has one face, I only care about the first encoding in each image, so I grab index 0.
    try:
        # Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
        gb_frame = frame[:, :, ::-1]
        # first_face_encoding = face_recognition.face_encodings(gb_frame)[0]
        biden_face_encoding = face_recognition.face_encodings(biden_image)[0]
        # obama_face_encoding = face_recognition.face_encodings(obama_image)[0]
        # unknown_face_encoding = face_recognition.face_encodings(unknown_image)[0]
    except IndexError:
        print("I wasn't able to locate any faces in at least one of the images. Check the image files. Aborting...")
        quit()
    # Encodings that per-frame detections are compared against.
    known_faces = [
        # first_face_encoding,
        biden_face_encoding,
        # obama_face_encoding
    ]
    # Variables used to record the start/end times of each segment in which
    # the target face is visible.
    time_zones = []
    pre_time_i = -1  # frame counter at the previous sighting (-1 = none yet)
    cur_time_i = 0  # frames processed so far
    left_time = ()  # (h, m, s, ms) at the start of the current segment
    right_time = ()  # (h, m, s, ms) at the latest extension of the segment
def get_ms():
milliseconds = video.get(cv2.CAP_PROP_POS_MSEC)
seconds = milliseconds//1000
milliseconds = milliseconds%1000
minutes = 0
hours = 0
if seconds >= 60:
minutes = seconds//60
seconds = seconds % 60
if minutes >= 60:
hours = minutes//60
minutes = minutes % 60
return (int(hours), int(minutes), int(seconds), int(milliseconds))
while success:
# cv2.putText(frame, 'fps: ' + str(fps), (0, 200), cv2.FONT_HERSHEY_SIMPLEX, 2, (255,0,255), 5)
# cv2.putText(frame, 'count: ' + str(frameCount), (0, 300), cv2.FONT_HERSHEY_SIMPLEX,2, (255,0,255), 5)
# cv2.putText(frame, 'frame: ' + str(index), (0, 400), cv2.FONT_HERSHEY_SIMPLEX, 2, (255,0,255), 5)
# cv2.putText(frame, 'time: ' + str(round(index / 24.0, 2)) + "s", (0,500), cv2.FONT_HERSHEY_SIMPLEX, 2, (255,0,255), 5)
# results is an array of True/False telling if the unknown face matched anyone in the known_faces array
# Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
rgb_frame = frame[:, :, ::-1]
# results = face_recognition.compare_faces(known_faces, gb_frame)
'''
Traceback (most recent call last):
File "v2v_test.py", line 54, in <module>
results = face_recognition.compare_faces(known_faces, gb_frame)
File "/Users/linmin/opt/anaconda3/envs/opencv/lib/python3.6/site-packages/face_recognition/api.py", line 226, in compare_faces
return list(face_distance(known_face_encodings, face_encoding_to_check) <= tolerance)
File "/Users/linmin/opt/anaconda3/envs/opencv/lib/python3.6/site-packages/face_recognition/api.py", line 75, in face_distance
return np.linalg.norm(face_encodings - face_to_compare, axis=1)
ValueError: operands could not be broadcast together with shapes (1,128) (720,1280,3)
'''
# Find all the faces and face encodings in the current frame of video
face_locations = face_recognition.face_locations(rgb_frame)
face_encodings = face_recognition.face_encodings(rgb_frame, face_locations)
face_names = []
cur_time_i += 1
for face_encoding in face_encodings:
# See if the face is a match for the known face(s)
match = face_recognition.compare_faces(known_faces, face_encoding, tolerance=0.50)
# If you had more than 2 faces, you could make this logic a lot prettier
# but I kept it simple for the demo
name = None
if match[0]:
name = "binden"
# elif match[1]:
# name = "<NAME>"
face_names.append(name)
# Label the results
for (top, right, bottom, left), name in zip(face_locations, face_names):
if not name:
continue
if pre_time_i == -1:
pre_time_i = cur_time_i
left_time = get_ms()
elif pre_time_i + 1 == cur_time_i:
pre_time_i += 1
right_time = get_ms()
else:
time_zones.append((left_time, right_time))
# 下一轮时间区间开始
pre_time_i = cur_time_i
left_time = get_ms()
# Draw a box around the face
cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
# Draw a label with a name below the face
cv2.rectangle(frame, (left, bottom - 25), (right, bottom), (0, 0, 255), cv2.FILLED)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(frame, name, (left + 6, bottom - 6), font, 0.5, (255, 255, 255), 1)
# if not results[0]:
# cv2.imshow("not binden", frame)
# # 一秒显示帧数张: 因为1000是1秒,然后后面是帧数
# # 所以 每帧`1/fps`秒, 1秒 fps帧,所以 fps帧 * 1/fps秒 = 1秒
# # cv2.waitKey(1000 / int(fps))
# cv2.waitKey(1000)
# else:
# videoWriter.write(frame)
# TODO: 这里直接不写帧,然后直接后面通过time_zone用FFmpeg来写帧,这样就可以保存视频了
# TODO: 但是速度问题需要考虑
videoWriter.write(frame)
success, frame = video.read()
# Write the resulting image to the output video file
print("Writing frame {} / {}".format(index, frameCount))
index += 1
# Report the start/end of every segment in which the target face was seen.
print("各片段时间区间集合如下: ")
for start, end in time_zones:
    print(f"({start[0]}:{start[1]}:{start[2]}.{start[3]}->"
          f"{end[0]}:{end[1]}:{end[2]}.{end[3]})")
video.release()
| [
"cv2.rectangle",
"face_recognition.face_locations",
"cv2.putText",
"face_recognition.face_encodings",
"cv2.VideoCapture",
"cv2.VideoWriter_fourcc",
"face_recognition.load_image_file",
"face_recognition.compare_faces"
] | [((68, 103), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""short_biden.mp4"""'], {}), "('short_biden.mp4')\n", (84, 103), False, 'import cv2\n'), ((295, 340), 'face_recognition.load_image_file', 'face_recognition.load_image_file', (['"""biden.jpg"""'], {}), "('biden.jpg')\n", (327, 340), False, 'import face_recognition\n'), ((388, 419), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'XVID'"], {}), "(*'XVID')\n", (410, 419), False, 'import cv2\n'), ((3977, 4019), 'face_recognition.face_locations', 'face_recognition.face_locations', (['rgb_frame'], {}), '(rgb_frame)\n', (4008, 4019), False, 'import face_recognition\n'), ((4041, 4099), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (['rgb_frame', 'face_locations'], {}), '(rgb_frame, face_locations)\n', (4072, 4099), False, 'import face_recognition\n'), ((4257, 4330), 'face_recognition.compare_faces', 'face_recognition.compare_faces', (['known_faces', 'face_encoding'], {'tolerance': '(0.5)'}), '(known_faces, face_encoding, tolerance=0.5)\n', (4287, 4330), False, 'import face_recognition\n'), ((5170, 5236), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(left, top)', '(right, bottom)', '(0, 0, 255)', '(2)'], {}), '(frame, (left, top), (right, bottom), (0, 0, 255), 2)\n', (5183, 5236), False, 'import cv2\n'), ((5296, 5384), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(left, bottom - 25)', '(right, bottom)', '(0, 0, 255)', 'cv2.FILLED'], {}), '(frame, (left, bottom - 25), (right, bottom), (0, 0, 255), cv2\n .FILLED)\n', (5309, 5384), False, 'import cv2\n'), ((5427, 5506), 'cv2.putText', 'cv2.putText', (['frame', 'name', '(left + 6, bottom - 6)', 'font', '(0.5)', '(255, 255, 255)', '(1)'], {}), '(frame, name, (left + 6, bottom - 6), font, 0.5, (255, 255, 255), 1)\n', (5438, 5506), False, 'import cv2\n'), ((1426, 1470), 'face_recognition.face_encodings', 'face_recognition.face_encodings', (['biden_image'], {}), '(biden_image)\n', (1457, 1470), False, 'import face_recognition\n')] |
import threading
from . import _impl
from .entry import NetworkTableEntry
from .messages import *
__all__ = ["BadMessageError", "StreamEOF", "NetworkTableConnection",
"ReadManager", "PROTOCOL_REVISION"]
class StreamEOF(IOError):
    """Raised when the underlying stream ends before a full read completes."""
    pass
class ReadStream:
    """Wrapper around a file-like object that signals truncated reads."""

    def __init__(self, f):
        self.f = f

    def read(self, size=-1):
        """Read *size* bytes; raise StreamEOF if fewer than requested arrive."""
        chunk = self.f.read(size)
        if size is not None and size > 0 and len(chunk) != size:
            raise StreamEOF("end of file")
        return chunk

    def readStruct(self, s):
        """Read exactly ``s.size`` bytes and unpack them with struct *s*."""
        raw = self.f.read(s.size)
        if len(raw) != s.size:
            raise StreamEOF("end of file")
        return s.unpack(raw)
class NetworkTableConnection:
    """An abstraction for the NetworkTable protocol

    Wraps a stream pair and knows how to write every outgoing message type
    and to read/dispatch every incoming one.  All writes are serialised
    through ``write_lock``.
    """
    def __init__(self, stream, typeManager):
        # `stream` provides getInputStream()/getOutputStream();
        # `typeManager` resolves wire type ids to entry-type objects.
        self.stream = stream
        self.rstream = ReadStream(stream.getInputStream())
        self.wstream = stream.getOutputStream()
        self.typeManager = typeManager
        self.write_lock = _impl.create_rlock('write_lock')
        self.isValid = True

    def close(self):
        """Close the underlying stream; safe to call more than once."""
        if self.isValid:
            self.isValid = False
            self.stream.close()

    def flush(self):
        """Flush any buffered outgoing bytes."""
        with self.write_lock:
            self.wstream.flush()

    def sendKeepAlive(self):
        """Write and flush a keep-alive message."""
        with self.write_lock:
            self.wstream.write(KEEP_ALIVE.getBytes())
            self.wstream.flush()

    def sendClientHello(self):
        """Write and flush a client hello carrying PROTOCOL_REVISION."""
        with self.write_lock:
            self.wstream.write(CLIENT_HELLO.getBytes(PROTOCOL_REVISION))
            self.wstream.flush()

    def sendServerHelloComplete(self):
        """Write and flush a server-hello-complete message."""
        with self.write_lock:
            self.wstream.write(SERVER_HELLO_COMPLETE.getBytes())
            self.wstream.flush()

    def sendProtocolVersionUnsupported(self):
        """Tell the peer its protocol revision is unsupported (and which one we speak)."""
        with self.write_lock:
            self.wstream.write(PROTOCOL_UNSUPPORTED.getBytes(PROTOCOL_REVISION))
            self.wstream.flush()

    def sendEntry(self, entryBytes):
        """Write a pre-serialised entry message (caller flushes separately)."""
        # use entry.getAssignBytes or entry.getUpdateBytes
        with self.write_lock:
            self.wstream.write(entryBytes)

    def read(self, adapter):
        """Read one message from the stream and dispatch it to *adapter*.

        :raises BadMessageError: for an unknown message type, an unknown
            data type id, or an update for an entry id the adapter does
            not know.
        """
        messageType = self.rstream.read(1)
        if messageType == KEEP_ALIVE.HEADER:
            adapter.keepAlive()
        elif messageType == CLIENT_HELLO.HEADER:
            protocolRevision = CLIENT_HELLO.read(self.rstream)[0]
            adapter.clientHello(protocolRevision)
        elif messageType == SERVER_HELLO_COMPLETE.HEADER:
            adapter.serverHelloComplete()
        elif messageType == PROTOCOL_UNSUPPORTED.HEADER:
            protocolRevision = PROTOCOL_UNSUPPORTED.read(self.rstream)[0]
            adapter.protocolVersionUnsupported(protocolRevision)
        elif messageType == ENTRY_ASSIGNMENT.HEADER:
            # Assignment introduces a new entry: name, type, id, seq, value.
            entryName, (typeId, entryId, entrySequenceNumber) = \
                ENTRY_ASSIGNMENT.read(self.rstream)
            entryType = self.typeManager.getType(typeId)
            if entryType is None:
                raise BadMessageError("Unknown data type: 0x%x" % typeId)
            value = entryType.readValue(self.rstream)
            adapter.offerIncomingAssignment(NetworkTableEntry(entryName, entryType, value, id=entryId, sequenceNumber=entrySequenceNumber))
        elif messageType == FIELD_UPDATE.HEADER:
            # Update carries a new value for an already-assigned entry.
            entryId, entrySequenceNumber = FIELD_UPDATE.read(self.rstream)
            entry = adapter.getEntry(entryId)
            if entry is None:
                raise BadMessageError("Received update for unknown entry id: %d " % entryId)
            value = entry.getType().readValue(self.rstream)
            adapter.offerIncomingUpdate(entry, entrySequenceNumber, value)
        else:
            raise BadMessageError("Unknown Network Table Message Type: %s" % (messageType))
class ReadManager:
    """Background thread that keeps pulling messages from a connection."""

    def __init__(self, adapter, connection, name=None):
        """Create (but do not start) the reader thread.

        :param adapter: receives dispatched messages and error callbacks
            (:class:`.ServerConnectionAdapter` or :class:`.ClientConnectionAdapter`)
        :param connection: the :class:`NetworkTableConnection` to read from
        :param name: optional thread name
        """
        self.adapter = adapter
        self.connection = connection
        self.running = True
        self.thread = threading.Thread(target=self.run, name=name, daemon=True)

    def start(self):
        """Begin reading in the background."""
        self.thread.start()

    def stop(self):
        """Ask the loop to finish and wait for the thread to exit."""
        self.running = False
        try:
            self.thread.join()
        except RuntimeError:
            # The thread was never started; nothing to wait for.
            pass

    def run(self):
        """Read messages until stopped, routing errors to the adapter."""
        while self.running:
            try:
                self.connection.read(self.adapter)
            except BadMessageError as err:
                self.adapter.badMessage(err)
            except IOError as err:
                self.adapter.ioError(err)
| [
"threading.Thread"
] | [((4419, 4463), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.run', 'name': 'name'}), '(target=self.run, name=name)\n', (4435, 4463), False, 'import threading\n')] |
"""
Scattering GUI
"""
import sys, os
import matplotlib.pyplot as plt # Plotting
import numpy as np
if sys.version_info[0] < 3:
import Tkinter as tk
else:
import tkinter as tk
from .. import functions_general as fg
from .. import functions_crystallography as fc
from .basic_widgets import StringViewer
from .basic_widgets import (TF, BF, SF, LF, HF,
bkg, ety, btn, opt, btn2,
btn_active, opt_active, txtcol,
btn_txt, ety_txt, opt_txt)
class ScatteringGui:
"""
Simulate scattering of various forms
"""
def __init__(self, xtl):
"""Initialise"""
self.xtl = xtl
# Create Tk inter instance
self.root = tk.Tk()
self.root.wm_title('Scattering %s' % xtl.name)
# self.root.minsize(width=640, height=480)
self.root.maxsize(width=self.root.winfo_screenwidth(), height=self.root.winfo_screenheight())
self.root.tk_setPalette(
background=bkg,
foreground=txtcol,
activeBackground=opt_active,
activeForeground=txtcol)
frame = tk.Frame(self.root)
frame.pack(side=tk.LEFT, anchor=tk.N)
# Variatbles
self.energy_kev = tk.DoubleVar(frame, 8.0)
self.edge = tk.StringVar(frame, 'Edge')
self.type = tk.StringVar(frame, 'X-Ray')
self.orientation = tk.StringVar(frame, 'None')
self.direction_h = tk.IntVar(frame, 0)
self.direction_k = tk.IntVar(frame, 0)
self.direction_l = tk.IntVar(frame, 1)
self.theta_offset = tk.DoubleVar(frame, 0.0)
self.theta_min = tk.DoubleVar(frame, -180.0)
self.theta_max = tk.DoubleVar(frame, 180.0)
self.twotheta_min = tk.DoubleVar(frame, -180.0)
self.twotheta_max = tk.DoubleVar(frame, 180.0)
self.powder_units = tk.StringVar(frame, 'Two-Theta')
self.powderaverage = tk.BooleanVar(frame, True)
self.powder_width = tk.DoubleVar(frame, 0.01)
self.hkl_check = tk.StringVar(frame, '0 0 1')
self.hkl_result = tk.StringVar(frame, 'I:%10.0f TTH:%8.2f' % (0, 0))
self.val_i = tk.IntVar(frame, 0)
self.hkl_magnetic = tk.StringVar(frame, '0 0 1')
self.azim_zero = tk.StringVar(frame, '1 0 0')
self.isres = tk.BooleanVar(frame, True)
self.psival = tk.DoubleVar(frame, 0.0)
self.polval = tk.StringVar(frame, u'\u03c3-\u03c0')
self.resF0 = tk.DoubleVar(frame, 0.0)
self.resF1 = tk.DoubleVar(frame, 1.0)
self.resF2 = tk.DoubleVar(frame, 0.0)
self.magresult = tk.StringVar(frame, 'I = --')
# X-ray edges:
self.xr_edges, self.xr_energies = self.xtl.Properties.xray_edges()
self.xr_edges.insert(0, 'Cu Ka')
self.xr_edges.insert(1, 'Mo Ka')
self.xr_energies.insert(0, fg.Cu)
self.xr_energies.insert(1, fg.Mo)
line = tk.Frame(frame)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Scattering', font=LF)
var.pack(side=tk.LEFT)
var = tk.Button(line, text='Supernova', font=BF, command=self.fun_supernova, bg=btn,
activebackground=btn_active)
var.pack(side=tk.RIGHT)
var = tk.Button(line, text='Wish', font=BF, command=self.fun_wish, bg=btn, activebackground=btn_active)
var.pack(side=tk.RIGHT)
var = tk.Button(line, text='I16', font=BF, command=self.fun_i16, bg=btn, activebackground=btn_active)
var.pack(side=tk.RIGHT)
# ---Settings---
box = tk.LabelFrame(frame, text='Settings')
box.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5)
# Energy
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Energy (keV):', font=SF)
var.pack(side=tk.LEFT)
var = tk.OptionMenu(line, self.edge, *self.xr_edges, command=self.fun_edge)
var.config(font=SF, width=5, bg=opt, activebackground=opt_active)
var["menu"].config(bg=opt, bd=0, activebackground=opt_active)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.energy_kev, font=TF, width=8, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Type
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
types = ['X-Ray', 'Neutron', 'XRay Magnetic', 'Neutron Magnetic', 'XRay Resonant', 'XRay Dispersion']
var = tk.Label(line, text='Type:', font=SF)
var.pack(side=tk.LEFT)
var = tk.OptionMenu(line, self.type, *types)
var.config(font=SF, width=10, bg=opt, activebackground=opt_active)
var["menu"].config(bg=opt, bd=0, activebackground=opt_active)
var.pack(side=tk.LEFT)
# Units
xaxistypes = ['two-theta', 'd-spacing', 'Q']
var = tk.Label(line, text='Units:', font=SF)
var.pack(side=tk.LEFT)
var = tk.OptionMenu(line, self.powder_units, *xaxistypes)
var.config(font=SF, width=10, bg=opt, activebackground=opt_active)
var["menu"].config(bg=opt, bd=0, activebackground=opt_active)
var.pack(side=tk.LEFT)
# Orientation
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Geometry:', font=SF)
var.pack(side=tk.LEFT)
orients = ['None', 'Reflection', 'Transmission']
var = tk.OptionMenu(line, self.orientation, *orients)
var.config(font=SF, width=10, bg=opt, activebackground=opt_active)
var["menu"].config(bg=opt, bd=0, activebackground=opt_active)
var.pack(side=tk.LEFT)
# Direction
var = tk.Label(line, text='Direction:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.direction_h, font=TF, width=2, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.direction_k, font=TF, width=2, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.direction_l, font=TF, width=2, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Theta offset
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Offset:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.theta_offset, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Theta min
var = tk.Label(line, text='Min Theta:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.theta_min, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Theta max
var = tk.Label(line, text='Max Theta:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.theta_max, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# TwoTheta min
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Min TwoTheta:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.twotheta_min, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# TwoTheta max
var = tk.Entry(line, textvariable=self.twotheta_max, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.RIGHT)
var = tk.Label(line, text='Max TwoTheta:', font=SF)
var.pack(side=tk.RIGHT)
# Powder width
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Label(line, text='Powder peak width:', font=SF)
var.pack(side=tk.LEFT, padx=3)
var = tk.Entry(line, textvariable=self.powder_width, font=TF, width=5, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Powder average tickbox
var = tk.Checkbutton(line, text='Powder average', variable=self.powderaverage, font=SF)
var.pack(side=tk.LEFT, padx=6)
# ---Intensities---
box = tk.LabelFrame(frame, text='Intensities')
box.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5)
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
var = tk.Button(line, text='Display Intensities', font=BF, command=self.fun_intensities, bg=btn2,
activebackground=btn_active)
var.pack(side=tk.LEFT)
var = tk.Button(line, text='Plot Powder', font=BF, command=self.fun_powder, bg=btn,
activebackground=btn_active)
var.pack(side=tk.LEFT)
# hkl check
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.X, pady=5)
hklbox = tk.LabelFrame(line, text='Quick Check')
hklbox.pack(side=tk.RIGHT)
var = tk.Entry(hklbox, textvariable=self.hkl_check, font=TF, width=6, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
var.bind('<Return>', self.fun_hklcheck)
var.bind('<KP_Enter>', self.fun_hklcheck)
var = tk.Label(hklbox, textvariable=self.hkl_result, font=TF, width=22)
var.pack(side=tk.LEFT)
var = tk.Button(hklbox, text='Check HKL', font=BF, command=self.fun_hklcheck, bg=btn,
activebackground=btn_active)
var.pack(side=tk.LEFT, pady=2)
# ---Planes---
box = tk.LabelFrame(frame, text='Reciprocal Space Planes')
box.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5)
line = tk.Frame(box)
line.pack(side=tk.TOP, pady=5)
# ---HKL Planes---
# i value
var = tk.Label(line, text='i:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(line, textvariable=self.val_i, font=TF, width=3, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# directions
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT, padx=3)
var = tk.Button(vframe, text='HKi', font=BF, command=self.fun_hki, width=5, bg=btn, activebackground=btn_active)
var.pack()
var = tk.Button(vframe, text='HiL', font=BF, command=self.fun_hil, width=5, bg=btn, activebackground=btn_active)
var.pack()
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT)
var = tk.Button(vframe, text='iKL', font=BF, command=self.fun_ikl, width=5, bg=btn, activebackground=btn_active)
var.pack()
var = tk.Button(vframe, text='HHi', font=BF, command=self.fun_hhi, width=5, bg=btn, activebackground=btn_active)
var.pack()
# ---X-ray Magnetic scattering----
if np.any(self.xtl.Structure.mxmymz()):
box = tk.LabelFrame(frame, text='X-Ray Magnetic Scattering')
box.pack(side=tk.TOP, fill=tk.BOTH, padx=3)
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.BOTH, pady=5)
# Resonant HKL, azimuthal reference
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT, fill=tk.Y, padx=3)
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(hframe, text=' HKL:', font=SF, width=11)
var.pack(side=tk.LEFT)
var = tk.Entry(hframe, textvariable=self.hkl_magnetic, font=TF, width=6, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
var.bind('<Return>', self.fun_hklmag)
var.bind('<KP_Enter>', self.fun_hklmag)
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(vframe, text='Azim. Ref.:', font=SF, width=11)
var.pack(side=tk.LEFT)
var = tk.Entry(vframe, textvariable=self.azim_zero, font=TF, width=6, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
# Resonant value
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT, fill=tk.Y, padx=3)
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(hframe, text='F0:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(hframe, textvariable=self.resF0, font=TF, width=3, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(hframe, text='F1:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(hframe, textvariable=self.resF1, font=TF, width=3, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(hframe, text='F2:', font=SF)
var.pack(side=tk.LEFT)
var = tk.Entry(hframe, textvariable=self.resF2, font=TF, width=3, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT, fill=tk.Y, padx=3)
# Polarisation
poltypes = [u'\u03c3-\u03c3', u'\u03c3-\u03c0', u'\u03c0-\u03c3', u'\u03c0-\u03c0']
hframe = tk.Frame(vframe)
hframe.pack()
var = tk.Label(hframe, text='Polarisation:', font=SF)
var.pack(side=tk.LEFT)
var = tk.OptionMenu(hframe, self.polval, *poltypes)
var.config(font=SF, width=5, bg=opt, activebackground=opt_active)
var["menu"].config(bg=opt, bd=0, activebackground=opt_active)
var.pack(side=tk.LEFT)
hframe = tk.Frame(vframe)
hframe.pack()
# Resonant tickbox
var = tk.Checkbutton(hframe, text='Resonant', variable=self.isres, font=SF)
var.pack(side=tk.LEFT, padx=6)
# psi
var = tk.Label(hframe, text='psi:', font=SF, width=4)
var.pack(side=tk.LEFT)
var = tk.Entry(hframe, textvariable=self.psival, font=TF, width=4, bg=ety, fg=ety_txt)
var.pack(side=tk.LEFT)
var.bind('<Return>', self.fun_hklmag)
var.bind('<KP_Enter>', self.fun_hklmag)
line = tk.Frame(box)
line.pack(side=tk.TOP, fill=tk.BOTH, pady=5)
vframe = tk.Frame(line)
vframe.pack(side=tk.LEFT, fill=tk.Y, padx=3)
# Mag. Inten button
var = tk.Button(vframe, text='Calc. Mag. Inten.', font=BF, command=self.fun_hklmag, bg=btn,
activebackground=btn_active)
var.pack(side=tk.LEFT, padx=5)
# Magnetic Result
var = tk.Label(vframe, textvariable=self.magresult, font=SF, width=12)
var.pack(side=tk.LEFT, fill=tk.Y)
# Azimuth Button
var = tk.Button(line, text='Simulate\n Azimuth', font=BF, command=self.fun_azimuth, width=7, bg=btn,
activebackground=btn_active)
var.pack(side=tk.RIGHT)
def fun_set(self):
""""Set gui parameters from crystal"""
self.type.set(self.xtl._scattering_type)
# self.energy_kev.set(8)
self.theta_offset.set(self.xtl._scattering_theta_offset)
self.theta_min.set(self.xtl._scattering_min_theta)
self.theta_max.set(self.xtl._scattering_max_theta)
self.twotheta_min.set(self.xtl._scattering_min_two_theta)
self.twotheta_max.set(self.xtl._scattering_max_two_theta)
if self.orientation.get() == 'Reflection':
self.direction_h.set(self.xtl._scattering_specular_direction[0])
self.direction_k.set(self.xtl._scattering_specular_direction[1])
self.direction_l.set(self.xtl._scattering_specular_direction[2])
else:
self.direction_h.set(self.xtl._scattering_parallel_direction[0])
self.direction_k.set(self.xtl._scattering_parallel_direction[1])
self.direction_l.set(self.xtl._scattering_parallel_direction[2])
def fun_get(self):
"""Set crytal parameters from gui"""
scat = self.xtl.Scatter
scat._scattering_type = self.type.get()
scat._energy_kev = self.energy_kev.get()
scat._scattering_theta_offset = self.theta_offset.get()
scat._scattering_min_theta = self.theta_min.get()
scat._scattering_max_theta = self.theta_max.get()
scat._scattering_min_twotheta = self.twotheta_min.get()
scat._scattering_max_twotheta = self.twotheta_max.get()
scat._powder_units = self.powder_units.get()
if self.orientation.get() == 'Reflection':
scat._scattering_specular_direction[0] = self.direction_h.get()
scat._scattering_specular_direction[1] = self.direction_k.get()
scat._scattering_specular_direction[2] = self.direction_l.get()
elif self.orientation.get() == 'Transmission':
scat._scattering_parallel_direction[0] = self.direction_h.get()
scat._scattering_parallel_direction[1] = self.direction_k.get()
scat._scattering_parallel_direction[2] = self.direction_l.get()
def fun_i16(self):
""""Add I16 parameters"""
self.type.set('X-Ray')
self.energy_kev.set(8.0)
self.edge.set('Edge')
self.powder_units.set('Two-Theta')
self.powderaverage.set(False)
self.orientation.set('Reflection')
self.theta_offset.set(0.0)
self.theta_min.set(-20.0)
self.theta_max.set(150.0)
self.twotheta_min.set(0.0)
self.twotheta_max.set(130.0)
def fun_wish(self):
""""Add Wish parameters"""
self.type.set('Neutron')
self.energy_kev.set(17.7)
self.edge.set('Edge')
self.powder_units.set('d-spacing')
self.orientation.set('None')
self.theta_offset.set(0.0)
self.theta_min.set(-180.0)
self.theta_max.set(180.0)
self.twotheta_min.set(10.0)
self.twotheta_max.set(170.0)
def fun_supernova(self):
"""Add SuperNova parameters"""
self.type.set('X-Ray')
idx = self.xr_edges.index('Mo Ka')
self.edge.set('Mo Ka')
self.energy_kev.set(self.xr_energies[idx])
self.powder_units.set('Two-Theta')
self.orientation.set('None')
self.theta_offset.set(0.0)
self.theta_min.set(-180.0)
self.theta_max.set(180.0)
self.twotheta_min.set(-170.0)
self.twotheta_max.set(170.0)
def fun_edge(self, event=None):
"""X-ray edge option menu"""
edge = self.edge.get()
if self.edge.get() in self.xr_edges:
idx = self.xr_edges.index(edge)
self.energy_kev.set(self.xr_energies[idx])
def fun_hklcheck(self, event=None):
""""Show single hkl intensity"""
self.fun_get()
hkl = self.hkl_check.get()
hkl = hkl.replace(',', ' ') # remove commas
hkl = hkl.replace('(', '').replace(')', '') # remove brackets
hkl = hkl.replace('[', '').replace(']', '') # remove brackets
hkl = np.fromstring(hkl, sep=' ')
I = self.xtl.Scatter.intensity(hkl)
unit = self.powder_units.get()
energy = self.energy_kev.get()
tth = self.xtl.Cell.tth(hkl, energy)
if unit.lower() in ['tth', 'angle', 'twotheta', 'theta', 'two-theta']:
self.hkl_result.set('I:%10.0f TTH:%8.2f' % (I, tth))
elif unit.lower() in ['d', 'dspace', 'd-spacing', 'dspacing']:
q = fc.calqmag(tth, energy)
d = fc.q2dspace(q)
self.hkl_result.set(u'I:%10.0f d:%8.2f \u00c5' % (I, d))
else:
q = fc.calqmag(tth, energy)
self.hkl_result.set(u'I:%8.0f Q:%8.2f \u00c5\u207B\u00B9' % (I, q))
def fun_intensities(self):
"""Display intensities"""
self.fun_get()
if self.orientation.get() == 'Reflection':
string = self.xtl.Scatter.print_ref_reflections(min_intensity=-1, max_intensity=None)
elif self.orientation.get() == 'Transmission':
string = self.xtl.Scatter.print_tran_reflections(min_intensity=-1, max_intensity=None)
else:
units = self.powder_units.get()
string = self.xtl.Scatter.print_all_reflections(min_intensity=-1, max_intensity=None, units=units)
StringViewer(string, 'Intensities %s' % self.xtl.name)
def fun_powder(self):
"""Plot Powder"""
self.fun_get()
energy = self.energy_kev.get()
min_q = fc.calqmag(self.twotheta_min.get(), energy)
max_q = fc.calqmag(self.twotheta_max.get(), energy)
pow_avg = self.powderaverage.get()
pow_wid = self.powder_width.get()
#if min_q < 0: min_q = 0.0
self.xtl.Plot.simulate_powder(energy, peak_width=pow_wid, powder_average=pow_avg)
plt.show()
def fun_hki(self):
"""Plot hki plane"""
self.fun_get()
i = self.val_i.get()
self.xtl.Plot.simulate_hk0(i)
plt.show()
def fun_hil(self):
"""Plot hil plane"""
self.fun_get()
i = self.val_i.get()
self.xtl.Plot.simulate_h0l(i)
plt.show()
def fun_ikl(self):
"""Plot ikl plane"""
self.fun_get()
i = self.val_i.get()
self.xtl.Plot.simulate_0kl(i)
plt.show()
def fun_hhi(self):
"""Plot hhl plane"""
self.fun_get()
i = self.val_i.get()
self.xtl.Plot.simulate_hhl(i)
plt.show()
def fun_hklmag(self, event=None):
""""Magnetic scattering"""
energy_kev = self.energy_kev.get()
hkl = self.hkl_magnetic.get()
hkl = hkl.replace(',', ' ') # remove commas
hkl = hkl.replace('(', '').replace(')', '') # remove brackets
hkl = hkl.replace('[', '').replace(']', '') # remove brackets
hkl = np.fromstring(hkl, sep=' ')
azi = self.azim_zero.get()
azi = azi.replace(',', ' ') # remove commas
azi = azi.replace('(', '').replace(')', '') # remove brackets
azi = azi.replace('[', '').replace(']', '') # remove brackets
azi = np.fromstring(azi, sep=' ')
psi = self.psival.get()
pol = self.polval.get()
if pol == u'\u03c3-\u03c3':
pol = 's-s'
elif pol == u'\u03c3-\u03c0':
pol = 's-p'
elif pol == u'\u03c0-\u03c3':
pol = 'p-s'
else:
pol = 'p-p'
F0 = self.resF0.get()
F1 = self.resF1.get()
F2 = self.resF2.get()
isres = self.isres.get()
if isres:
# Resonant scattering
maginten = self.xtl.Scatter.xray_resonant_magnetic(
hkl,
energy_kev=energy_kev,
azim_zero=azi, psi=psi,
polarisation=pol,
F0=F0, F1=F1, F2=F2)
else:
# Non-Resonant scattering
maginten = self.xtl.Scatter.xray_nonresonant_magnetic(
hkl,
energy_kev=energy_kev,
azim_zero=azi, psi=psi,
polarisation=pol)
self.magresult.set('I = %9.4g' % maginten)
def fun_azimuth(self):
"""Simulate azimuthal magnetic scattering"""
energy_kev = self.energy_kev.get()
hkl = self.hkl_magnetic.get()
hkl = hkl.replace(',', ' ') # remove commas
hkl = hkl.replace('(', '').replace(')', '') # remove brackets
hkl = hkl.replace('[', '').replace(']', '') # remove brackets
hkl = np.fromstring(hkl, sep=' ')
azi = self.azim_zero.get()
azi = azi.replace(',', ' ') # remove commas
azi = azi.replace('(', '').replace(')', '') # remove brackets
azi = azi.replace('[', '').replace(']', '') # remove brackets
azi = np.fromstring(azi, sep=' ')
pol = self.polval.get()
if pol == u'\u03c3-\u03c3':
pol = 's-s'
elif pol == u'\u03c3-\u03c0':
pol = 's-p'
elif pol == u'\u03c0-\u03c3':
pol = 'p-s'
else:
pol = 'p-p'
F0 = self.resF0.get()
F1 = self.resF1.get()
F2 = self.resF2.get()
isres = self.isres.get()
if isres:
# Resonant scattering
self.xtl.Plot.simulate_azimuth_resonant(
hkl,
energy_kev=energy_kev,
azim_zero=azi,
polarisation=pol,
F0=F0, F1=F1, F2=F2)
plt.show()
else:
# Non-Resonant scattering
self.xtl.Plot.simulate_azimuth_nonresonant(
hkl,
energy_kev=energy_kev,
azim_zero=azi,
polarisation=pol)
plt.show()
| [
"tkinter.IntVar",
"tkinter.LabelFrame",
"tkinter.Entry",
"tkinter.Checkbutton",
"tkinter.BooleanVar",
"tkinter.Button",
"tkinter.StringVar",
"tkinter.Tk",
"tkinter.Label",
"tkinter.DoubleVar",
"tkinter.OptionMenu",
"numpy.fromstring",
"tkinter.Frame",
"matplotlib.pyplot.show"
] | [((743, 750), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (748, 750), True, 'import tkinter as tk\n'), ((1146, 1165), 'tkinter.Frame', 'tk.Frame', (['self.root'], {}), '(self.root)\n', (1154, 1165), True, 'import tkinter as tk\n'), ((1260, 1284), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(8.0)'], {}), '(frame, 8.0)\n', (1272, 1284), True, 'import tkinter as tk\n'), ((1305, 1332), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""Edge"""'], {}), "(frame, 'Edge')\n", (1317, 1332), True, 'import tkinter as tk\n'), ((1353, 1381), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""X-Ray"""'], {}), "(frame, 'X-Ray')\n", (1365, 1381), True, 'import tkinter as tk\n'), ((1409, 1436), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""None"""'], {}), "(frame, 'None')\n", (1421, 1436), True, 'import tkinter as tk\n'), ((1464, 1483), 'tkinter.IntVar', 'tk.IntVar', (['frame', '(0)'], {}), '(frame, 0)\n', (1473, 1483), True, 'import tkinter as tk\n'), ((1511, 1530), 'tkinter.IntVar', 'tk.IntVar', (['frame', '(0)'], {}), '(frame, 0)\n', (1520, 1530), True, 'import tkinter as tk\n'), ((1558, 1577), 'tkinter.IntVar', 'tk.IntVar', (['frame', '(1)'], {}), '(frame, 1)\n', (1567, 1577), True, 'import tkinter as tk\n'), ((1606, 1630), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(0.0)'], {}), '(frame, 0.0)\n', (1618, 1630), True, 'import tkinter as tk\n'), ((1656, 1683), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(-180.0)'], {}), '(frame, -180.0)\n', (1668, 1683), True, 'import tkinter as tk\n'), ((1709, 1735), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(180.0)'], {}), '(frame, 180.0)\n', (1721, 1735), True, 'import tkinter as tk\n'), ((1764, 1791), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(-180.0)'], {}), '(frame, -180.0)\n', (1776, 1791), True, 'import tkinter as tk\n'), ((1820, 1846), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(180.0)'], {}), '(frame, 180.0)\n', (1832, 1846), True, 'import tkinter as tk\n'), ((1875, 1907), 
'tkinter.StringVar', 'tk.StringVar', (['frame', '"""Two-Theta"""'], {}), "(frame, 'Two-Theta')\n", (1887, 1907), True, 'import tkinter as tk\n'), ((1937, 1963), 'tkinter.BooleanVar', 'tk.BooleanVar', (['frame', '(True)'], {}), '(frame, True)\n', (1950, 1963), True, 'import tkinter as tk\n'), ((1992, 2017), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(0.01)'], {}), '(frame, 0.01)\n', (2004, 2017), True, 'import tkinter as tk\n'), ((2043, 2071), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""0 0 1"""'], {}), "(frame, '0 0 1')\n", (2055, 2071), True, 'import tkinter as tk\n'), ((2098, 2148), 'tkinter.StringVar', 'tk.StringVar', (['frame', "('I:%10.0f TTH:%8.2f' % (0, 0))"], {}), "(frame, 'I:%10.0f TTH:%8.2f' % (0, 0))\n", (2110, 2148), True, 'import tkinter as tk\n'), ((2170, 2189), 'tkinter.IntVar', 'tk.IntVar', (['frame', '(0)'], {}), '(frame, 0)\n', (2179, 2189), True, 'import tkinter as tk\n'), ((2218, 2246), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""0 0 1"""'], {}), "(frame, '0 0 1')\n", (2230, 2246), True, 'import tkinter as tk\n'), ((2272, 2300), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""1 0 0"""'], {}), "(frame, '1 0 0')\n", (2284, 2300), True, 'import tkinter as tk\n'), ((2322, 2348), 'tkinter.BooleanVar', 'tk.BooleanVar', (['frame', '(True)'], {}), '(frame, True)\n', (2335, 2348), True, 'import tkinter as tk\n'), ((2371, 2395), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(0.0)'], {}), '(frame, 0.0)\n', (2383, 2395), True, 'import tkinter as tk\n'), ((2418, 2445), 'tkinter.StringVar', 'tk.StringVar', (['frame', 'u"""σ-π"""'], {}), "(frame, u'σ-π')\n", (2430, 2445), True, 'import tkinter as tk\n'), ((2477, 2501), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(0.0)'], {}), '(frame, 0.0)\n', (2489, 2501), True, 'import tkinter as tk\n'), ((2523, 2547), 'tkinter.DoubleVar', 'tk.DoubleVar', (['frame', '(1.0)'], {}), '(frame, 1.0)\n', (2535, 2547), True, 'import tkinter as tk\n'), ((2569, 2593), 'tkinter.DoubleVar', 
'tk.DoubleVar', (['frame', '(0.0)'], {}), '(frame, 0.0)\n', (2581, 2593), True, 'import tkinter as tk\n'), ((2619, 2648), 'tkinter.StringVar', 'tk.StringVar', (['frame', '"""I = --"""'], {}), "(frame, 'I = --')\n", (2631, 2648), True, 'import tkinter as tk\n'), ((2931, 2946), 'tkinter.Frame', 'tk.Frame', (['frame'], {}), '(frame)\n', (2939, 2946), True, 'import tkinter as tk\n'), ((3012, 3054), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Scattering"""', 'font': 'LF'}), "(line, text='Scattering', font=LF)\n", (3020, 3054), True, 'import tkinter as tk\n'), ((3101, 3213), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""Supernova"""', 'font': 'BF', 'command': 'self.fun_supernova', 'bg': 'btn', 'activebackground': 'btn_active'}), "(line, text='Supernova', font=BF, command=self.fun_supernova, bg=\n btn, activebackground=btn_active)\n", (3110, 3213), True, 'import tkinter as tk\n'), ((3279, 3380), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""Wish"""', 'font': 'BF', 'command': 'self.fun_wish', 'bg': 'btn', 'activebackground': 'btn_active'}), "(line, text='Wish', font=BF, command=self.fun_wish, bg=btn,\n activebackground=btn_active)\n", (3288, 3380), True, 'import tkinter as tk\n'), ((3423, 3522), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""I16"""', 'font': 'BF', 'command': 'self.fun_i16', 'bg': 'btn', 'activebackground': 'btn_active'}), "(line, text='I16', font=BF, command=self.fun_i16, bg=btn,\n activebackground=btn_active)\n", (3432, 3522), True, 'import tkinter as tk\n'), ((3591, 3628), 'tkinter.LabelFrame', 'tk.LabelFrame', (['frame'], {'text': '"""Settings"""'}), "(frame, text='Settings')\n", (3604, 3628), True, 'import tkinter as tk\n'), ((3722, 3735), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (3730, 3735), True, 'import tkinter as tk\n'), ((3800, 3845), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Energy (keV):"""', 'font': 'SF'}), "(line, text='Energy (keV):', font=SF)\n", (3808, 3845), True, 'import tkinter 
as tk\n'), ((3891, 3960), 'tkinter.OptionMenu', 'tk.OptionMenu', (['line', 'self.edge', '*self.xr_edges'], {'command': 'self.fun_edge'}), '(line, self.edge, *self.xr_edges, command=self.fun_edge)\n', (3904, 3960), True, 'import tkinter as tk\n'), ((4150, 4237), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.energy_kev', 'font': 'TF', 'width': '(8)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.energy_kev, font=TF, width=8, bg=ety, fg=\n ety_txt)\n', (4158, 4237), True, 'import tkinter as tk\n'), ((4295, 4308), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (4303, 4308), True, 'import tkinter as tk\n'), ((4483, 4520), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Type:"""', 'font': 'SF'}), "(line, text='Type:', font=SF)\n", (4491, 4520), True, 'import tkinter as tk\n'), ((4566, 4604), 'tkinter.OptionMenu', 'tk.OptionMenu', (['line', 'self.type', '*types'], {}), '(line, self.type, *types)\n', (4579, 4604), True, 'import tkinter as tk\n'), ((4865, 4903), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Units:"""', 'font': 'SF'}), "(line, text='Units:', font=SF)\n", (4873, 4903), True, 'import tkinter as tk\n'), ((4949, 5000), 'tkinter.OptionMenu', 'tk.OptionMenu', (['line', 'self.powder_units', '*xaxistypes'], {}), '(line, self.powder_units, *xaxistypes)\n', (4962, 5000), True, 'import tkinter as tk\n'), ((5215, 5228), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (5223, 5228), True, 'import tkinter as tk\n'), ((5293, 5334), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Geometry:"""', 'font': 'SF'}), "(line, text='Geometry:', font=SF)\n", (5301, 5334), True, 'import tkinter as tk\n'), ((5437, 5484), 'tkinter.OptionMenu', 'tk.OptionMenu', (['line', 'self.orientation', '*orients'], {}), '(line, self.orientation, *orients)\n', (5450, 5484), True, 'import tkinter as tk\n'), ((5696, 5738), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Direction:"""', 'font': 'SF'}), "(line, text='Direction:', 
font=SF)\n", (5704, 5738), True, 'import tkinter as tk\n'), ((5784, 5872), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.direction_h', 'font': 'TF', 'width': '(2)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.direction_h, font=TF, width=2, bg=ety, fg=\n ety_txt)\n', (5792, 5872), True, 'import tkinter as tk\n'), ((5913, 6001), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.direction_k', 'font': 'TF', 'width': '(2)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.direction_k, font=TF, width=2, bg=ety, fg=\n ety_txt)\n', (5921, 6001), True, 'import tkinter as tk\n'), ((6042, 6130), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.direction_l', 'font': 'TF', 'width': '(2)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.direction_l, font=TF, width=2, bg=ety, fg=\n ety_txt)\n', (6050, 6130), True, 'import tkinter as tk\n'), ((6196, 6209), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (6204, 6209), True, 'import tkinter as tk\n'), ((6274, 6313), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Offset:"""', 'font': 'SF'}), "(line, text='Offset:', font=SF)\n", (6282, 6313), True, 'import tkinter as tk\n'), ((6359, 6448), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.theta_offset', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.theta_offset, font=TF, width=5, bg=ety, fg\n =ety_txt)\n', (6367, 6448), True, 'import tkinter as tk\n'), ((6510, 6552), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Min Theta:"""', 'font': 'SF'}), "(line, text='Min Theta:', font=SF)\n", (6518, 6552), True, 'import tkinter as tk\n'), ((6598, 6684), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.theta_min', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.theta_min, font=TF, width=5, bg=ety, fg=\n ety_txt)\n', (6606, 6684), True, 'import tkinter as tk\n'), ((6746, 6788), 'tkinter.Label', 
'tk.Label', (['line'], {'text': '"""Max Theta:"""', 'font': 'SF'}), "(line, text='Max Theta:', font=SF)\n", (6754, 6788), True, 'import tkinter as tk\n'), ((6834, 6920), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.theta_max', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.theta_max, font=TF, width=5, bg=ety, fg=\n ety_txt)\n', (6842, 6920), True, 'import tkinter as tk\n'), ((6986, 6999), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (6994, 6999), True, 'import tkinter as tk\n'), ((7064, 7109), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Min TwoTheta:"""', 'font': 'SF'}), "(line, text='Min TwoTheta:', font=SF)\n", (7072, 7109), True, 'import tkinter as tk\n'), ((7155, 7244), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.twotheta_min', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.twotheta_min, font=TF, width=5, bg=ety, fg\n =ety_txt)\n', (7163, 7244), True, 'import tkinter as tk\n'), ((7309, 7398), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.twotheta_max', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.twotheta_max, font=TF, width=5, bg=ety, fg\n =ety_txt)\n', (7317, 7398), True, 'import tkinter as tk\n'), ((7440, 7485), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Max TwoTheta:"""', 'font': 'SF'}), "(line, text='Max TwoTheta:', font=SF)\n", (7448, 7485), True, 'import tkinter as tk\n'), ((7557, 7570), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (7565, 7570), True, 'import tkinter as tk\n'), ((7635, 7685), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""Powder peak width:"""', 'font': 'SF'}), "(line, text='Powder peak width:', font=SF)\n", (7643, 7685), True, 'import tkinter as tk\n'), ((7739, 7828), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.powder_width', 'font': 'TF', 'width': '(5)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, 
textvariable=self.powder_width, font=TF, width=5, bg=ety, fg\n =ety_txt)\n', (7747, 7828), True, 'import tkinter as tk\n'), ((7903, 7988), 'tkinter.Checkbutton', 'tk.Checkbutton', (['line'], {'text': '"""Powder average"""', 'variable': 'self.powderaverage', 'font': 'SF'}), "(line, text='Powder average', variable=self.powderaverage,\n font=SF)\n", (7917, 7988), True, 'import tkinter as tk\n'), ((8067, 8107), 'tkinter.LabelFrame', 'tk.LabelFrame', (['frame'], {'text': '"""Intensities"""'}), "(frame, text='Intensities')\n", (8080, 8107), True, 'import tkinter as tk\n'), ((8184, 8197), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (8192, 8197), True, 'import tkinter as tk\n'), ((8263, 8388), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""Display Intensities"""', 'font': 'BF', 'command': 'self.fun_intensities', 'bg': 'btn2', 'activebackground': 'btn_active'}), "(line, text='Display Intensities', font=BF, command=self.\n fun_intensities, bg=btn2, activebackground=btn_active)\n", (8272, 8388), True, 'import tkinter as tk\n'), ((8454, 8565), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""Plot Powder"""', 'font': 'BF', 'command': 'self.fun_powder', 'bg': 'btn', 'activebackground': 'btn_active'}), "(line, text='Plot Powder', font=BF, command=self.fun_powder, bg=\n btn, activebackground=btn_active)\n", (8463, 8565), True, 'import tkinter as tk\n'), ((8652, 8665), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (8660, 8665), True, 'import tkinter as tk\n'), ((8733, 8772), 'tkinter.LabelFrame', 'tk.LabelFrame', (['line'], {'text': '"""Quick Check"""'}), "(line, text='Quick Check')\n", (8746, 8772), True, 'import tkinter as tk\n'), ((8822, 8910), 'tkinter.Entry', 'tk.Entry', (['hklbox'], {'textvariable': 'self.hkl_check', 'font': 'TF', 'width': '(6)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hklbox, textvariable=self.hkl_check, font=TF, width=6, bg=ety, fg=\n ety_txt)\n', (8830, 8910), True, 'import tkinter as tk\n'), ((9049, 9114), 'tkinter.Label', 
'tk.Label', (['hklbox'], {'textvariable': 'self.hkl_result', 'font': 'TF', 'width': '(22)'}), '(hklbox, textvariable=self.hkl_result, font=TF, width=22)\n', (9057, 9114), True, 'import tkinter as tk\n'), ((9160, 9273), 'tkinter.Button', 'tk.Button', (['hklbox'], {'text': '"""Check HKL"""', 'font': 'BF', 'command': 'self.fun_hklcheck', 'bg': 'btn', 'activebackground': 'btn_active'}), "(hklbox, text='Check HKL', font=BF, command=self.fun_hklcheck, bg=\n btn, activebackground=btn_active)\n", (9169, 9273), True, 'import tkinter as tk\n'), ((9369, 9421), 'tkinter.LabelFrame', 'tk.LabelFrame', (['frame'], {'text': '"""Reciprocal Space Planes"""'}), "(frame, text='Reciprocal Space Planes')\n", (9382, 9421), True, 'import tkinter as tk\n'), ((9498, 9511), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (9506, 9511), True, 'import tkinter as tk\n'), ((9611, 9645), 'tkinter.Label', 'tk.Label', (['line'], {'text': '"""i:"""', 'font': 'SF'}), "(line, text='i:', font=SF)\n", (9619, 9645), True, 'import tkinter as tk\n'), ((9691, 9768), 'tkinter.Entry', 'tk.Entry', (['line'], {'textvariable': 'self.val_i', 'font': 'TF', 'width': '(3)', 'bg': 'ety', 'fg': 'ety_txt'}), '(line, textvariable=self.val_i, font=TF, width=3, bg=ety, fg=ety_txt)\n', (9699, 9768), True, 'import tkinter as tk\n'), ((9839, 9853), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (9847, 9853), True, 'import tkinter as tk\n'), ((9910, 10021), 'tkinter.Button', 'tk.Button', (['vframe'], {'text': '"""HKi"""', 'font': 'BF', 'command': 'self.fun_hki', 'width': '(5)', 'bg': 'btn', 'activebackground': 'btn_active'}), "(vframe, text='HKi', font=BF, command=self.fun_hki, width=5, bg=\n btn, activebackground=btn_active)\n", (9919, 10021), True, 'import tkinter as tk\n'), ((10050, 10161), 'tkinter.Button', 'tk.Button', (['vframe'], {'text': '"""HiL"""', 'font': 'BF', 'command': 'self.fun_hil', 'width': '(5)', 'bg': 'btn', 'activebackground': 'btn_active'}), "(vframe, text='HiL', font=BF, 
command=self.fun_hil, width=5, bg=\n btn, activebackground=btn_active)\n", (10059, 10161), True, 'import tkinter as tk\n'), ((10194, 10208), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (10202, 10208), True, 'import tkinter as tk\n'), ((10257, 10368), 'tkinter.Button', 'tk.Button', (['vframe'], {'text': '"""iKL"""', 'font': 'BF', 'command': 'self.fun_ikl', 'width': '(5)', 'bg': 'btn', 'activebackground': 'btn_active'}), "(vframe, text='iKL', font=BF, command=self.fun_ikl, width=5, bg=\n btn, activebackground=btn_active)\n", (10266, 10368), True, 'import tkinter as tk\n'), ((10397, 10508), 'tkinter.Button', 'tk.Button', (['vframe'], {'text': '"""HHi"""', 'font': 'BF', 'command': 'self.fun_hhi', 'width': '(5)', 'bg': 'btn', 'activebackground': 'btn_active'}), "(vframe, text='HHi', font=BF, command=self.fun_hhi, width=5, bg=\n btn, activebackground=btn_active)\n", (10406, 10508), True, 'import tkinter as tk\n'), ((18799, 18826), 'numpy.fromstring', 'np.fromstring', (['hkl'], {'sep': '""" """'}), "(hkl, sep=' ')\n", (18812, 18826), True, 'import numpy as np\n'), ((20568, 20578), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20576, 20578), True, 'import matplotlib.pyplot as plt\n'), ((20730, 20740), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20738, 20740), True, 'import matplotlib.pyplot as plt\n'), ((20892, 20902), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20900, 20902), True, 'import matplotlib.pyplot as plt\n'), ((21054, 21064), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (21062, 21064), True, 'import matplotlib.pyplot as plt\n'), ((21216, 21226), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (21224, 21226), True, 'import matplotlib.pyplot as plt\n'), ((21592, 21619), 'numpy.fromstring', 'np.fromstring', (['hkl'], {'sep': '""" """'}), "(hkl, sep=' ')\n", (21605, 21619), True, 'import numpy as np\n'), ((21865, 21892), 'numpy.fromstring', 'np.fromstring', (['azi'], {'sep': '""" """'}), "(azi, 
sep=' ')\n", (21878, 21892), True, 'import numpy as np\n'), ((23269, 23296), 'numpy.fromstring', 'np.fromstring', (['hkl'], {'sep': '""" """'}), "(hkl, sep=' ')\n", (23282, 23296), True, 'import numpy as np\n'), ((23542, 23569), 'numpy.fromstring', 'np.fromstring', (['azi'], {'sep': '""" """'}), "(azi, sep=' ')\n", (23555, 23569), True, 'import numpy as np\n'), ((10633, 10687), 'tkinter.LabelFrame', 'tk.LabelFrame', (['frame'], {'text': '"""X-Ray Magnetic Scattering"""'}), "(frame, text='X-Ray Magnetic Scattering')\n", (10646, 10687), True, 'import tkinter as tk\n'), ((10764, 10777), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (10772, 10777), True, 'import tkinter as tk\n'), ((10905, 10919), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (10913, 10919), True, 'import tkinter as tk\n'), ((10999, 11015), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (11007, 11015), True, 'import tkinter as tk\n'), ((11060, 11115), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '""" HKL:"""', 'font': 'SF', 'width': '(11)'}), "(hframe, text=' HKL:', font=SF, width=11)\n", (11068, 11115), True, 'import tkinter as tk\n'), ((11169, 11259), 'tkinter.Entry', 'tk.Entry', (['hframe'], {'textvariable': 'self.hkl_magnetic', 'font': 'TF', 'width': '(6)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hframe, textvariable=self.hkl_magnetic, font=TF, width=6, bg=ety,\n fg=ety_txt)\n', (11177, 11259), True, 'import tkinter as tk\n'), ((11415, 11431), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (11423, 11431), True, 'import tkinter as tk\n'), ((11476, 11531), 'tkinter.Label', 'tk.Label', (['vframe'], {'text': '"""Azim. Ref.:"""', 'font': 'SF', 'width': '(11)'}), "(vframe, text='Azim. 
Ref.:', font=SF, width=11)\n", (11484, 11531), True, 'import tkinter as tk\n'), ((11585, 11673), 'tkinter.Entry', 'tk.Entry', (['vframe'], {'textvariable': 'self.azim_zero', 'font': 'TF', 'width': '(6)', 'bg': 'ety', 'fg': 'ety_txt'}), '(vframe, textvariable=self.azim_zero, font=TF, width=6, bg=ety, fg=\n ety_txt)\n', (11593, 11673), True, 'import tkinter as tk\n'), ((11755, 11769), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (11763, 11769), True, 'import tkinter as tk\n'), ((11849, 11865), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (11857, 11865), True, 'import tkinter as tk\n'), ((11910, 11947), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '"""F0:"""', 'font': 'SF'}), "(hframe, text='F0:', font=SF)\n", (11918, 11947), True, 'import tkinter as tk\n'), ((12001, 12080), 'tkinter.Entry', 'tk.Entry', (['hframe'], {'textvariable': 'self.resF0', 'font': 'TF', 'width': '(3)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hframe, textvariable=self.resF0, font=TF, width=3, bg=ety, fg=ety_txt)\n', (12009, 12080), True, 'import tkinter as tk\n'), ((12138, 12154), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (12146, 12154), True, 'import tkinter as tk\n'), ((12199, 12236), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '"""F1:"""', 'font': 'SF'}), "(hframe, text='F1:', font=SF)\n", (12207, 12236), True, 'import tkinter as tk\n'), ((12290, 12369), 'tkinter.Entry', 'tk.Entry', (['hframe'], {'textvariable': 'self.resF1', 'font': 'TF', 'width': '(3)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hframe, textvariable=self.resF1, font=TF, width=3, bg=ety, fg=ety_txt)\n', (12298, 12369), True, 'import tkinter as tk\n'), ((12427, 12443), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (12435, 12443), True, 'import tkinter as tk\n'), ((12488, 12525), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '"""F2:"""', 'font': 'SF'}), "(hframe, text='F2:', font=SF)\n", (12496, 12525), True, 'import tkinter as tk\n'), ((12579, 12658), 
'tkinter.Entry', 'tk.Entry', (['hframe'], {'textvariable': 'self.resF2', 'font': 'TF', 'width': '(3)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hframe, textvariable=self.resF2, font=TF, width=3, bg=ety, fg=ety_txt)\n', (12587, 12658), True, 'import tkinter as tk\n'), ((12716, 12730), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (12724, 12730), True, 'import tkinter as tk\n'), ((12933, 12949), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (12941, 12949), True, 'import tkinter as tk\n'), ((12994, 13041), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '"""Polarisation:"""', 'font': 'SF'}), "(hframe, text='Polarisation:', font=SF)\n", (13002, 13041), True, 'import tkinter as tk\n'), ((13095, 13140), 'tkinter.OptionMenu', 'tk.OptionMenu', (['hframe', 'self.polval', '*poltypes'], {}), '(hframe, self.polval, *poltypes)\n', (13108, 13140), True, 'import tkinter as tk\n'), ((13350, 13366), 'tkinter.Frame', 'tk.Frame', (['vframe'], {}), '(vframe)\n', (13358, 13366), True, 'import tkinter as tk\n'), ((13443, 13512), 'tkinter.Checkbutton', 'tk.Checkbutton', (['hframe'], {'text': '"""Resonant"""', 'variable': 'self.isres', 'font': 'SF'}), "(hframe, text='Resonant', variable=self.isres, font=SF)\n", (13457, 13512), True, 'import tkinter as tk\n'), ((13592, 13639), 'tkinter.Label', 'tk.Label', (['hframe'], {'text': '"""psi:"""', 'font': 'SF', 'width': '(4)'}), "(hframe, text='psi:', font=SF, width=4)\n", (13600, 13639), True, 'import tkinter as tk\n'), ((13693, 13778), 'tkinter.Entry', 'tk.Entry', (['hframe'], {'textvariable': 'self.psival', 'font': 'TF', 'width': '(4)', 'bg': 'ety', 'fg': 'ety_txt'}), '(hframe, textvariable=self.psival, font=TF, width=4, bg=ety, fg=ety_txt\n )\n', (13701, 13778), True, 'import tkinter as tk\n'), ((13931, 13944), 'tkinter.Frame', 'tk.Frame', (['box'], {}), '(box)\n', (13939, 13944), True, 'import tkinter as tk\n'), ((14024, 14038), 'tkinter.Frame', 'tk.Frame', (['line'], {}), '(line)\n', (14032, 14038), True, 'import 
tkinter as tk\n'), ((14147, 14266), 'tkinter.Button', 'tk.Button', (['vframe'], {'text': '"""Calc. Mag. Inten."""', 'font': 'BF', 'command': 'self.fun_hklmag', 'bg': 'btn', 'activebackground': 'btn_active'}), "(vframe, text='Calc. Mag. Inten.', font=BF, command=self.\n fun_hklmag, bg=btn, activebackground=btn_active)\n", (14156, 14266), True, 'import tkinter as tk\n'), ((14381, 14445), 'tkinter.Label', 'tk.Label', (['vframe'], {'textvariable': 'self.magresult', 'font': 'SF', 'width': '(12)'}), '(vframe, textvariable=self.magresult, font=SF, width=12)\n', (14389, 14445), True, 'import tkinter as tk\n'), ((14540, 14671), 'tkinter.Button', 'tk.Button', (['line'], {'text': '"""Simulate\n Azimuth"""', 'font': 'BF', 'command': 'self.fun_azimuth', 'width': '(7)', 'bg': 'btn', 'activebackground': 'btn_active'}), '(line, text="""Simulate\n Azimuth""", font=BF, command=self.\n fun_azimuth, width=7, bg=btn, activebackground=btn_active)\n', (14549, 14671), True, 'import tkinter as tk\n'), ((24229, 24239), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (24237, 24239), True, 'import matplotlib.pyplot as plt\n'), ((24485, 24495), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (24493, 24495), True, 'import matplotlib.pyplot as plt\n')] |
import sys
import codecs
import jieba
'''
Train a language model: segment every article of the corpus with jieba.

Usage: python <script> <input_corpus> <output_file>
Each input line is treated as one article; its jieba segmentation is
written to the output file joined by single spaces.
'''
if __name__ == '__main__':
    input_file = sys.argv[1]
    output_file = sys.argv[2]
    # 'with' guarantees both handles are closed even if segmentation fails
    # (the original leaked them on any exception before close()).
    with codecs.open(input_file, 'r', encoding='utf-8') as input_reader, \
            codecs.open(output_file, 'w', encoding='utf-8') as out_writer:
        line_num = 1
        for line in input_reader:
            print('processing line: {l} article...'.format(l=line_num))
            seg_list = jieba.cut(line, cut_all=False)
            line_seg = ' '.join(seg_list)
            out_writer.writelines(line_seg)
            line_num += 1
    print('all done...')
| [
"codecs.open",
"jieba.cut"
] | [((164, 210), 'codecs.open', 'codecs.open', (['input_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(input_file, 'r', encoding='utf-8')\n", (175, 210), False, 'import codecs\n'), ((225, 272), 'codecs.open', 'codecs.open', (['output_file', '"""w"""'], {'encoding': '"""utf-8"""'}), "(output_file, 'w', encoding='utf-8')\n", (236, 272), False, 'import codecs\n'), ((409, 439), 'jieba.cut', 'jieba.cut', (['line'], {'cut_all': '(False)'}), '(line, cut_all=False)\n', (418, 439), False, 'import jieba\n')] |
# -*- coding: utf8 -*-
"""
SPARQL Wrapper Utils
@authors: U{<NAME><http://www.ivan-herman.net>}, U{<NAME><http://www.wikier.org>}, U{<NAME><http://www.dayures.net>}
@organization: U{World Wide Web Consortium<http://www.w3.org>} and U{Foundation CTIC<http://www.fundacionctic.org/>}.
@license: U{W3C SOFTWARE NOTICE AND LICENSE<href="http://www.w3.org/Consortium/Legal/copyright-software">}
"""
import warnings
def deprecated(func):
    """
    Decorator that marks *func* as deprecated: every call emits a
    DeprecationWarning (attributed to the caller via stacklevel=2) and then
    delegates to *func* unchanged.
    @see: http://code.activestate.com/recipes/391367/
    """
    # functools.wraps copies __name__, __doc__, __dict__ *and* __module__,
    # __qualname__, __wrapped__ -- the original manual copying missed the
    # latter three.
    from functools import wraps

    @wraps(func)
    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function %s." % func.__name__,
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)
    return newFunc
| [
"warnings.warn"
] | [((719, 831), 'warnings.warn', 'warnings.warn', (["('Call to deprecated function %s.' % func.__name__)"], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), "('Call to deprecated function %s.' % func.__name__, category=\n DeprecationWarning, stacklevel=2)\n", (732, 831), False, 'import warnings\n')] |
# Generated by Django 2.0.4 on 2018-05-28 06:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter ``instruments.Quantity`` to an IntegerField defaulting to 0."""

    # Must be applied after the migration that added the production field.
    dependencies = [
        ('instruments', '0006_instruments_production'),
    ]

    operations = [
        migrations.AlterField(
            model_name='instruments',
            name='Quantity',
            field=models.IntegerField(default=0),
        ),
    ]
| [
"django.db.models.IntegerField"
] | [((351, 381), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (370, 381), False, 'from django.db import migrations, models\n')] |
__author__ = '<NAME>'
from datetime import time, date, datetime
# Times of day that are rendered/parsed by name instead of as "HH:MM".
StandardEventTimes = [
    ('noon', time(12, 0, 0)),
    ('midnight', time(0, 0, 0)),
]
def time_string(a_time):
    """Return the standard-event name for *a_time* if one exists, else "HH:MM"."""
    matches = (name for name, moment in StandardEventTimes if moment == a_time)
    return next(matches, a_time.strftime("%H:%M"))
def date_string(a_date):
    """Render *a_date* as e.g. "05. January 2020"."""
    return format(a_date, "%d. %B %Y")
def get_time(a_string):
    """Parse *a_string* (an "HH:MM" string or a standard-event name) into a time.

    Bug fix: ``datetime.time`` has no ``strptime`` method, so the original
    ``time.strptime(...)`` raised AttributeError for every non-named input;
    parse through ``datetime.strptime`` and extract the time component.
    """
    for name, moment in StandardEventTimes:
        if a_string == name:
            return moment
    return datetime.strptime(a_string, "%H:%M").time()
def get_date(a_string):
    """Parse a "dd. Month yyyy" string back into a datetime.date."""
    parsed = datetime.strptime(a_string, "%d. %B %Y")
    return parsed.date()
"datetime.time.strptime",
"datetime.time",
"datetime.datetime.strptime"
] | [((126, 140), 'datetime.time', 'time', (['(12)', '(0)', '(0)'], {}), '(12, 0, 0)\n', (130, 140), False, 'from datetime import time, date, datetime\n'), ((184, 197), 'datetime.time', 'time', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (188, 197), False, 'from datetime import time, date, datetime\n'), ((570, 602), 'datetime.time.strptime', 'time.strptime', (['a_string', '"""%H:%M"""'], {}), "(a_string, '%H:%M')\n", (583, 602), False, 'from datetime import time, date, datetime\n'), ((649, 689), 'datetime.datetime.strptime', 'datetime.strptime', (['a_string', '"""%d. %B %Y"""'], {}), "(a_string, '%d. %B %Y')\n", (666, 689), False, 'from datetime import time, date, datetime\n')] |
from time import clock
import sys
sys.path.append('../../../')
print (sys.path)
from tspdb.src.data import generateHarmonics as gH
from tspdb.src.data import generateTrend as gT
import tspdb.src.data.generateARMA as gA
import numpy as np
from tspdb.src.hdf_util import write_data
import matplotlib.pyplot as plt
def armaDataTest(timeSteps):
    """Generate an ARMA-style series of length *timeSteps* via gA.generate.

    The AR/MA lag lists are currently empty (the commented values are the
    previously-used coefficients), so this presumably reduces to the pure
    noise process of gA.generate -- confirm against gA.generate.
    Returns (observedArray, meanArray).
    """
    arLags = []#[0.4, 0.3, 0.2]
    maLags = []#[0.5, 0.1]
    startingArray = np.zeros(np.max([len(arLags), len(maLags)])) # start with all 0's
    noiseMean = 0.0
    noiseSD = [1.0]
    (observedArray, meanArray, errorArray) = gA.generate(arLags, maLags, startingArray, timeSteps, noiseMean, noiseSD)
    return (observedArray, meanArray)
def trendDataTest(timeSteps):
    """Generate a trend series of length *timeSteps* via gT.generate.

    Only the linear trend (f1) is used; ``dampening``, ``f2`` and ``f3``
    are computed but unused leftovers from experimentation.
    """
    dampening = 2.0*float(1.0/timeSteps)
    power = 0.35
    displacement = -2.5
    f1 = gT.linearTrendFn
    data = gT.generate(f1, power=power, displacement=displacement, timeSteps=timeSteps)
    f2 = gT.logTrendFn
    f3 = gT.negExpTrendFn
    return data
def harmonicDataTest(timeSteps):
    """Generate a sum-of-harmonics series of length *timeSteps* via gH.generate."""
    sineCoeffs = [-2.0, 3.0]
    sinePeriods = [560.0, 30.0]
    cosineCoeffs = [-2.5]
    cosinePeriods = [16.0]
    data = gH.generate(sineCoeffs, sinePeriods, cosineCoeffs, cosinePeriods, timeSteps)
    #plt.plot(data)
    #plt.show()
    return data
# --- script: build a mixture series (harmonics + trend) with
# heteroscedastic noise, plot it, and persist it to HDF5 ---
timeSteps = 10**5 +10000
print('generating data..')
# NOTE(review): time.clock() was removed in Python 3.8 -- time.perf_counter()
# is the modern replacement.
dt = clock()
harmonicsTS = harmonicDataTest(timeSteps)
trendTS = trendDataTest(timeSteps)
(armaTS, armaMeanTS) = armaDataTest(timeSteps)
meanTS = harmonicsTS + trendTS #+ armaMeanTS
# combinedTS = harmonicsTS + trendTS + armaTS
# The per-step noise variance is the harmonic series shifted to be >= 0.
var = harmonicsTS
var = (var - min(var))
errorArray = np.random.normal(0, np.sqrt(var[:timeSteps]), timeSteps)
combinedTS = meanTS + errorArray
# max1 = np.nanmax(combinedTS)
# min1 = np.nanmin(combinedTS)
# max2 = np.nanmax(meanTS)
# min2 = np.nanmin(meanTS)
# max = np.max([max1, max2])
# min = np.min([min1, min2])
# combinedTS = tsUtils.normalize(combinedTS, max, min)
# meanTS = tsUtils.normalize(meanTS, max, min)
# p = 1
plt.plot(combinedTS, label = 'obs')
plt.plot(meanTS, label = 'mean')
plt.plot(var, label = 'var')
plt.show()
print('Data Generated in ', clock() - dt)
# Persist means, observations and the variance series to one HDF5 file
# ('a' appends to the file created by the first call).
write_data('MixtureTS_var2.h5', 'means', meanTS)
write_data('MixtureTS_var2.h5', 'obs', combinedTS,'a')
write_data('MixtureTS_var2.h5', 'var', var,'a')
# DF = pd.DataFrame()
# DF['means'] = meanTS
# DF['Obs'] = combinedTS
# DF['trainData'] = trainData
# DF.to_hdf('MixtureTS.h5','ts1')
| [
"numpy.sqrt",
"time.clock",
"tspdb.src.data.generateARMA.generate",
"tspdb.src.data.generateTrend.generate",
"matplotlib.pyplot.plot",
"tspdb.src.hdf_util.write_data",
"tspdb.src.data.generateHarmonics.generate",
"sys.path.append",
"matplotlib.pyplot.show"
] | [((36, 64), 'sys.path.append', 'sys.path.append', (['"""../../../"""'], {}), "('../../../')\n", (51, 64), False, 'import sys\n'), ((1375, 1382), 'time.clock', 'clock', ([], {}), '()\n', (1380, 1382), False, 'from time import clock\n'), ((2049, 2082), 'matplotlib.pyplot.plot', 'plt.plot', (['combinedTS'], {'label': '"""obs"""'}), "(combinedTS, label='obs')\n", (2057, 2082), True, 'import matplotlib.pyplot as plt\n'), ((2086, 2116), 'matplotlib.pyplot.plot', 'plt.plot', (['meanTS'], {'label': '"""mean"""'}), "(meanTS, label='mean')\n", (2094, 2116), True, 'import matplotlib.pyplot as plt\n'), ((2120, 2146), 'matplotlib.pyplot.plot', 'plt.plot', (['var'], {'label': '"""var"""'}), "(var, label='var')\n", (2128, 2146), True, 'import matplotlib.pyplot as plt\n'), ((2150, 2160), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2158, 2160), True, 'import matplotlib.pyplot as plt\n'), ((2207, 2255), 'tspdb.src.hdf_util.write_data', 'write_data', (['"""MixtureTS_var2.h5"""', '"""means"""', 'meanTS'], {}), "('MixtureTS_var2.h5', 'means', meanTS)\n", (2217, 2255), False, 'from tspdb.src.hdf_util import write_data\n'), ((2257, 2312), 'tspdb.src.hdf_util.write_data', 'write_data', (['"""MixtureTS_var2.h5"""', '"""obs"""', 'combinedTS', '"""a"""'], {}), "('MixtureTS_var2.h5', 'obs', combinedTS, 'a')\n", (2267, 2312), False, 'from tspdb.src.hdf_util import write_data\n'), ((2313, 2361), 'tspdb.src.hdf_util.write_data', 'write_data', (['"""MixtureTS_var2.h5"""', '"""var"""', 'var', '"""a"""'], {}), "('MixtureTS_var2.h5', 'var', var, 'a')\n", (2323, 2361), False, 'from tspdb.src.hdf_util import write_data\n'), ((590, 663), 'tspdb.src.data.generateARMA.generate', 'gA.generate', (['arLags', 'maLags', 'startingArray', 'timeSteps', 'noiseMean', 'noiseSD'], {}), '(arLags, maLags, startingArray, timeSteps, noiseMean, noiseSD)\n', (601, 663), True, 'import tspdb.src.data.generateARMA as gA\n'), ((860, 936), 'tspdb.src.data.generateTrend.generate', 'gT.generate', (['f1'], 
{'power': 'power', 'displacement': 'displacement', 'timeSteps': 'timeSteps'}), '(f1, power=power, displacement=displacement, timeSteps=timeSteps)\n', (871, 936), True, 'from tspdb.src.data import generateTrend as gT\n'), ((1173, 1249), 'tspdb.src.data.generateHarmonics.generate', 'gH.generate', (['sineCoeffs', 'sinePeriods', 'cosineCoeffs', 'cosinePeriods', 'timeSteps'], {}), '(sineCoeffs, sinePeriods, cosineCoeffs, cosinePeriods, timeSteps)\n', (1184, 1249), True, 'from tspdb.src.data import generateHarmonics as gH\n'), ((1682, 1706), 'numpy.sqrt', 'np.sqrt', (['var[:timeSteps]'], {}), '(var[:timeSteps])\n', (1689, 1706), True, 'import numpy as np\n'), ((2190, 2197), 'time.clock', 'clock', ([], {}), '()\n', (2195, 2197), False, 'from time import clock\n')] |
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class feedback_data(models.Model):
    """User feedback record: improvement suggestions and complaints."""
    # NOTE(review): both fields are non-nullable with no default, so both
    # must be supplied when creating a row.
    improvements=models.CharField(max_length=500)
    complain=models.CharField(max_length=500)
| [
"django.db.models.CharField"
] | [((147, 179), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (163, 179), False, 'from django.db import models\n'), ((190, 222), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (206, 222), False, 'from django.db import models\n')] |
"""
Some useful I/O functions
"""
import os
import pickle
import shutil
# get all directories in a specific directory
def get_directories(path):
    """Return the names (not full paths) of the immediate subdirectories of *path*."""
    def _is_dir(name):
        return os.path.isdir(os.path.join(path, name))
    return list(filter(_is_dir, os.listdir(path)))
# get all the files in a specific directory
# extension can be string or tuple of strings
def get_files(path, extension=None):
    """Return file names in *path*, optionally filtered by extension.

    *extension* may be a string or a tuple of strings; the match is done on
    the lower-cased file name, but the original casing is returned.
    """
    def _is_file(name):
        return os.path.isfile(os.path.join(path, name))
    names = list(filter(_is_file, os.listdir(path)))
    if extension is None:
        return names
    return [name for name in names if name.lower().endswith(extension)]
# get all files in a specific directory
def file_exists(path):
    """Return True when *path* exists on disk.

    Bug fix: the original returned ``not os.path.exists(path)`` -- the exact
    opposite of what the function name promises.
    """
    return os.path.exists(path)
# make directory
def makedir(path, replace_existing=False):
    """Create directory *path*.

    When it already exists: wipe and recreate it if *replace_existing* is
    True, otherwise leave it alone and print a warning.
    """
    if os.path.exists(path):
        if replace_existing:
            shutil.rmtree(path)
            os.makedirs(path)
        else:
            print("Beware .. path {} already exists".format(path))
    else:
        os.makedirs(path)
# extract relative path from a root-directory and an absolute path
def relative_path(root, path):
    """Return *path* rewritten relative to the *root* directory."""
    rel = os.path.relpath(path, root)
    return rel
# save pickle
def save_pickle(path, data):
    """Serialise *data* to *path* with pickle (binary mode)."""
    with open(path, "wb") as handle:
        pickle.dump(data, handle)
# load pickle
def load_pickle(path):
    """Deserialise and return the pickled object stored at *path*."""
    with open(path, "rb") as handle:
        return pickle.load(handle)
| [
"os.path.exists",
"os.listdir",
"pickle.dump",
"os.makedirs",
"pickle.load",
"os.path.join",
"shutil.rmtree",
"os.path.relpath"
] | [((1042, 1069), 'os.path.relpath', 'os.path.relpath', (['path', 'root'], {}), '(path, root)\n', (1057, 1069), False, 'import os\n'), ((635, 655), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (649, 655), False, 'import os\n'), ((729, 749), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (743, 749), False, 'import os\n'), ((759, 776), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (770, 776), False, 'import os\n'), ((1155, 1175), 'pickle.dump', 'pickle.dump', (['data', 'f'], {}), '(data, f)\n', (1166, 1175), False, 'import pickle\n'), ((1262, 1276), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1273, 1276), False, 'import pickle\n'), ((170, 186), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (180, 186), False, 'import os\n'), ((381, 397), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (391, 397), False, 'import os\n'), ((812, 831), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (825, 831), False, 'import shutil\n'), ((840, 857), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (851, 857), False, 'import os\n'), ((204, 225), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (216, 225), False, 'import os\n'), ((416, 437), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (428, 437), False, 'import os\n')] |
import os
import sys
import math
import numpy as np
import pandas as pd
# Make the working directory importable and load the train/test CSVs
# (paths are relative to the CWD; the files have no header row).
sys.path.append(os.getcwd())
pdata = pd.read_csv(os.getcwd() + '/Bayes/data/train_data.csv', header=None)
test_data = pd.read_csv(os.getcwd() + '/Bayes/data/test_data.csv', header=None)
# Column 0 is the class label (1/2/3); the remaining columns are features.
npdata = pd.DataFrame(pdata).values
final_test = pd.DataFrame(test_data).values
def get_params(data):
    """Per-feature Gaussian MLE parameters of *data* (label in column 0).

    Returns ``(mu, delta)``: the mean and the biased (population) variance
    of each feature column.
    Bug fix: the original centred the features with ``X -= mu`` where ``X``
    was a *view* of *data*, silently overwriting the caller's array.
    """
    features = data[:, 1:]
    mu = features.mean(axis=0)
    centered = features - mu  # new array; leaves *data* untouched
    delta = np.mean(centered ** 2, axis=0)
    return mu, delta
# Split the training rows by class label (column 0) and estimate the
# class priors from the empirical class frequencies.
w1 = np.array(list(filter(lambda x: x[0] == 1, npdata)))
w2 = np.array(list(filter(lambda x: x[0] == 2, npdata)))
w3 = np.array(list(filter(lambda x: x[0] == 3, npdata)))
super_p1 = len(w1) / len(npdata)
super_p2 = len(w2) / len(npdata)
super_p3 = len(w3) / len(npdata)
def calc_prob(x, mu, delta):
    """Row-wise product of per-feature Gaussian densities N(mu[i], delta[i]).

    Returns a numpy array with one likelihood per row of *x* (naive-Bayes
    independence assumption: the per-feature densities are multiplied).
    """
    def _row_density(row):
        density = 1
        for xi, mi, di in zip(row, mu, delta):
            density *= 1 / math.sqrt(2 * math.pi * di) * math.exp(-(xi - mi) ** 2 / (2 * di))
        return density
    return np.array([_row_density(row) for row in x])
# Score every test row under each class-conditional Gaussian model,
# weighted by the class prior.
a, b = get_params(w1)
x_prob1 = calc_prob(final_test[:, 1:], a, b) * super_p1
a, b = get_params(w2)
x_prob2 = calc_prob(final_test[:, 1:], a, b) * super_p2
a, b = get_params(w3)
x_prob3 = calc_prob(final_test[:, 1:], a, b) * super_p3
cnt = 0
# Bug fix: the original did ``res = final_test`` which merely aliases the
# array, so writing the prediction into res[i][0] overwrote the true label
# *before* it was compared -- the reported accuracy was always 1.0.
res = final_test.copy()
for i in range(len(x_prob3)):
    if x_prob1[i] == max(x_prob1[i], x_prob2[i], x_prob3[i]):
        res[i][0] = 1
        if final_test[i][0] == 1:
            cnt = cnt + 1
        continue
    if x_prob2[i] == max(x_prob1[i], x_prob2[i], x_prob3[i]):
        res[i][0] = 2
        if final_test[i][0] == 2:
            cnt = cnt + 1
        continue
    if x_prob3[i] == max(x_prob1[i], x_prob2[i], x_prob3[i]):
        res[i][0] = 3
        if final_test[i][0] == 3:
            cnt = cnt + 1
        continue
# Persist the predictions (label column replaced by the predicted class).
res = pd.DataFrame(res)
res.to_csv('test_prediction.csv', index=False, sep=',', header=None)
print ('prediction acc: ', cnt / len(final_test))
print ('prediction acc: ', cnt / len(final_test))
| [
"math.sqrt",
"os.getcwd",
"numpy.array",
"numpy.sum",
"pandas.DataFrame",
"math.exp"
] | [((1836, 1853), 'pandas.DataFrame', 'pd.DataFrame', (['res'], {}), '(res)\n', (1848, 1853), True, 'import pandas as pd\n'), ((88, 99), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (97, 99), False, 'import os\n'), ((269, 288), 'pandas.DataFrame', 'pd.DataFrame', (['pdata'], {}), '(pdata)\n', (281, 288), True, 'import pandas as pd\n'), ((309, 332), 'pandas.DataFrame', 'pd.DataFrame', (['test_data'], {}), '(test_data)\n', (321, 332), True, 'import pandas as pd\n'), ((1017, 1028), 'numpy.array', 'np.array', (['f'], {}), '(f)\n', (1025, 1028), True, 'import numpy as np\n'), ((122, 133), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (131, 133), False, 'import os\n'), ((203, 214), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (212, 214), False, 'import os\n'), ((438, 460), 'numpy.sum', 'np.sum', (['(X ** 2)'], {'axis': '(0)'}), '(X ** 2, axis=0)\n', (444, 460), True, 'import numpy as np\n'), ((935, 984), 'math.exp', 'math.exp', (['(-(row[i] - mu[i]) ** 2 / (2 * delta[i]))'], {}), '(-(row[i] - mu[i]) ** 2 / (2 * delta[i]))\n', (943, 984), False, 'import math\n'), ((899, 932), 'math.sqrt', 'math.sqrt', (['(2 * math.pi * delta[i])'], {}), '(2 * math.pi * delta[i])\n', (908, 932), False, 'import math\n')] |
import pandas as pd
import os
def load_data(dir_path, N=-1):
    """
    Read the ``.txt`` files in *dir_path* into a DataFrame.

    :param dir_path: path to the directory containing the text files
    :param N: maximum number of files to read; -1 (or any value <= 0) means all
    :return: pandas.DataFrame with columns ``id`` (file name without the
        ``.txt`` extension) and ``text`` (file contents)

    Bug fixes vs. the original: the loop broke on ``i > N`` so it read N+1
    entries (and counted non-``.txt`` entries against the limit), and a
    stray debug ``print(type(text))`` ran for every file.
    """
    data = []
    count = 0
    for filename in os.listdir(dir_path):
        if 0 < N <= count:
            break
        if filename.endswith(".txt"):
            path = os.path.join(dir_path, filename)
            with open(path) as f:
                data.append((filename[:-4], f.read()))
            count += 1
    return pd.DataFrame(data, columns=["id", "text"])
"pandas.DataFrame",
"os.listdir",
"os.path.join"
] | [((585, 627), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': "['id', 'text']"}), "(data, columns=['id', 'text'])\n", (597, 627), True, 'import pandas as pd\n'), ((271, 291), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (281, 291), False, 'import os\n'), ((397, 429), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (409, 429), False, 'import os\n')] |
#!/usr/bin/python3
from cpe import CPE
import vulners
import argparse
APIKey = "<KEY>"
"""
format:
{'cpe:/a:apache:tomcat:7.0.27': {
'total':15, # number of CVE concerning this CPE
'highest': 9.8, # highest CVSS score in all the CVEs
'remotelyExploitable': True # True if there is an available exploit for a network vulnerability
'cve': {
'CVE-2021-25329': {
'score': 4.4, # The CVSSv2 score
'vector': 'AV:L/AC:M/Au:N/C:P/I:P/A:P' # The full CVSSv2 vector
'exploit': False # True if exploits are available
}
}
}
"""
def cpe2cve(cpe, vulners_api):
    """Query the NVD vulnerabilities for each CPE object in *cpe*.

    Returns a dict keyed by CPE string with the shape documented in the
    module docstring; ``exploit`` is initialised to the string "Unknown"
    and only resolved to a bool later by cve2exploit().
    Exits the process (exit code 1) if the vulners API call fails --
    presumably a burned/rate-limited license key.
    """
    vulns = {}
    for c in cpe:
        vulns[c.cpe_str] = {"total":0, "cve":{}, "highest":0.0, "remotelyExploitable":False}
        try:
            res = vulners_api.cpeVulnerabilities(c.cpe_str)
        except Exception as e:
            print(f"[!] An error occured: maybe the license is burned")
            exit(1)
        cves = res.get('NVD')
        if cves:
            vulns[c.cpe_str]["total"] = len(cves)
            for cve in cves:
                vulns[c.cpe_str]["cve"][cve['id']] = {"score": cve['cvss']['score'],
                                                      "vector": cve['cvss']['vector'],
                                                      "exploit": "Unknown"}
                # track the worst CVSS score seen for this CPE
                vulns[c.cpe_str]['highest'] = max(vulns[c.cpe_str]['highest'], cve['cvss']['score'])
    return vulns
def cve2exploit(vulns, vulners_api):
    """Resolve the ``exploit`` flag of every CVE in *vulns* (mutated in place).

    A CVE is marked exploitable when vulners returns at least one bulletin
    of family 'exploit' for it; ``remotelyExploitable`` is set when such a
    CVE also has a network attack vector ('AV:N' in the CVSSv2 vector).
    Exits the process (exit code 1) on API failure.
    """
    for cpe in vulns:
        for cve,details in vulns[cpe]['cve'].items():
            try:
                exp = 'exploit' in [i['bulletinFamily'] for i in vulners_api.searchExploit(cve)]
            except Exception as e:
                print(f"[!] An error occured: maybe the license is burned")
                exit(1)
            details['exploit'] = exp
            remotely = 'AV:N' in details['vector']
            vulns[cpe]['remotelyExploitable'] |= exp and remotely
    return vulns
def export(vulns, output):
    """Write the vulnerability summary as CSV to *output*, or pretty-print
    it to stdout when *output* is falsy.

    One row per CVE; a CPE with no CVE gets a single mostly-empty row.
    Bug fix: the empty-CPE row previously emitted only 6 fields while the
    header declares 7.
    """
    header = "cpe,vulnerabilities,cve,score,remote,exploit,vector"
    lines = []
    for cpe, details in vulns.items():
        if details["total"] == 0:
            lines.append(f"{cpe},0/0,,,,,")
        for cpt, (cve, cve_info) in enumerate(details["cve"].items(), start=1):
            remote = 'AV:N' in cve_info['vector']
            lines.append(
                f"{cpe},{cpt}/{details['total']},{cve},{cve_info['score']},"
                f"{remote},{cve_info['exploit']},{cve_info['vector']}")
    if output:
        with open(output, "w") as f:
            f.write(header + '\n')
            for row in lines:
                f.write(row + '\n')
    else:
        print('-'*20)
        print(header)
        print(*lines, sep='\n')
        print('-'*20)
def main():
    """CLI entry point: parse args, validate CPEs, query vulners, export.

    Exits with code 1 on malformed CPE input, missing input options, or any
    vulners API failure.
    """
    global APIKey
    parser = argparse.ArgumentParser(description='Giving the CVEs that affect a given CPE', add_help=True)
    parser.add_argument('--cpe', '-c', action="store", default=None,
                        help="Give one CPE string to the tool")
    parser.add_argument('--file', '-f', action="store", default=None,
                        help="Import multiple CPE from a file (one per line)")
    parser.add_argument('--output', '-o', action="store", default=None,
                        help="Write results in an output file. The format is CSV")
    parser.add_argument('--noexploit', action="store_true", default=False,
                        help="Do not perform exploit verification (save many requests)")
    args = parser.parse_args()

    # Build the list of CPE objects; CPE() raising is treated as bad input.
    cpe = []
    print("[*] Check if CPE is well formed ...")
    try:
        if args.cpe:
            cpe.append(CPE(args.cpe))
        elif args.file:
            with open(args.file) as f:
                for line in f.readlines():
                    #remove space and newlines char from each line
                    l = line.lower().strip('\n\r')
                    cpe.append(CPE(l))
        else:
            print("[!] indicate at least a CPE (--cpe) or an input file with one CPE per line (--file)")
            exit(1)
    except Exception as e:
        print(f"[!] Bad CPE format: {e}")
        exit(1)
    print("[+] Valid CPE")
    print("[*] Connection to vulners API...")
    try:
        vulners_api = vulners.Vulners(api_key=APIKey)
    except Exception as e:
        print(f"[!] Problem during connection to vulners: {e}")
        exit(1)
    print("[+] Connection sucessful")
    print(f"[*] Searching vulnerabilities for the {len(cpe)} CPE given")
    vulns = cpe2cve(cpe, vulners_api)
    print("[+] Vulnerabilities computed")
    print("[*] Searching for available exploits (can take some time)")
    # Exploit lookup costs one API request per CVE, hence the opt-out flag.
    if not args.noexploit:
        vulns = cve2exploit(vulns, vulners_api)
    print(f"[*] Export to {args.output if args.output else 'stdout'}")
    export(vulns, args.output)
    print("[+] Export completed !")
if __name__ == '__main__':
    main()
| [
"cpe.CPE",
"vulners.Vulners",
"argparse.ArgumentParser"
] | [((2901, 2999), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Giving the CVEs that affect a given CPE"""', 'add_help': '(True)'}), "(description=\n 'Giving the CVEs that affect a given CPE', add_help=True)\n", (2924, 2999), False, 'import argparse\n'), ((4303, 4334), 'vulners.Vulners', 'vulners.Vulners', ([], {'api_key': 'APIKey'}), '(api_key=APIKey)\n', (4318, 4334), False, 'import vulners\n'), ((3697, 3710), 'cpe.CPE', 'CPE', (['args.cpe'], {}), '(args.cpe)\n', (3700, 3710), False, 'from cpe import CPE\n'), ((3967, 3973), 'cpe.CPE', 'CPE', (['l'], {}), '(l)\n', (3970, 3973), False, 'from cpe import CPE\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 19 18:43:25 2020
@author: white
"""
import functools
import operator
import ignored
def frequency(some_df, column):
    """Print the word frequencies of the text answers in ``some_df[column]``.

    Each answer is lower-cased and whitespace-tokenised; words returned by
    ignored.ignore_these_words() (presumably a stop-word list -- confirm
    against that module) are skipped. Results are printed sorted by count,
    least frequent first.
    """
    word_list_block = []
    answers = list(some_df[column])
    # tokenise each answer in place: string -> list of lower-cased words
    for i in range(len(answers)):
        answers[i] = answers[i].lower().split()
    for word in answers:
        # print(word)
        word_list_block.append(word)
    # flatten the list of token lists into one flat word list
    Words_list = functools.reduce(operator.iconcat, word_list_block, [])
    # return Words
    unique_words = {}
    prep = ignored.ignore_these_words()
    for W in Words_list:
        if W in prep:
            pass
        else:
            if W in unique_words:
                unique_words[W] += 1
            else:
                unique_words[W] = 1
    # sort by count ascending and print "word count" per line
    for key, value in sorted(unique_words.items(), key=operator.itemgetter(1)):
        print(key, value)
"functools.reduce",
"ignored.ignore_these_words",
"operator.itemgetter"
] | [((439, 494), 'functools.reduce', 'functools.reduce', (['operator.iconcat', 'word_list_block', '[]'], {}), '(operator.iconcat, word_list_block, [])\n', (455, 494), False, 'import functools\n'), ((562, 590), 'ignored.ignore_these_words', 'ignored.ignore_these_words', ([], {}), '()\n', (588, 590), False, 'import ignored\n'), ((864, 886), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (883, 886), False, 'import operator\n')] |
from ledgerblue.comm import getDongle
# NOTE(review): this script is Python 2 only -- raw_input() and
# str.decode('hex') do not exist in Python 3.
keyIndex = raw_input("Specify Key Index: ")
# pubKeyB = raw_input("Enter public key for Steem Account B: ")
pubKeyB = "<KEY>"
# Steem public keys are the "STM" prefix plus 50 base58 chars = 53 chars.
if pubKeyB.startswith("STM") == False:
    print("Error: Steem public keys must start with STM.")
    exit(1)
elif len(pubKeyB) != 53:
    print("Error: Invalid Steem public key.")
    exit(1)
messageToEncrypt = raw_input("Enter message to encrypt (up to 203 characters): ")
# Build the APDU: CLA/INS 0x8003, key index byte, P1/P2 0x0000, then payload.
# SECURITY(review): eval() on interactive input executes arbitrary code --
# int(keyIndex) would be the safe conversion here.
apdu = bytes("8003".decode('hex') + chr(eval(keyIndex)) + "0000".decode('hex') + pubKeyB + messageToEncrypt)
# print(apdu)
dongle = getDongle(True)
result = dongle.exchange(apdu)
| [
"ledgerblue.comm.getDongle"
] | [((574, 589), 'ledgerblue.comm.getDongle', 'getDongle', (['(True)'], {}), '(True)\n', (583, 589), False, 'from ledgerblue.comm import getDongle\n')] |
#!/usr/bin/env python
import tensorflow as tf
import numpy as np
import time
# shortcuts for the sparse-ops namespace and the v1 logging helper
tfs = tf.sparse
logger = tf.compat.v1.logging
# eager execution (older v1-style switch, kept for reference)
#tf.enable_eager_execution()
# cap both inter- and intra-op thread pools at 4 threads, then enable eager
config = tf.ConfigProto()
config.inter_op_parallelism_threads = 4
config.intra_op_parallelism_threads = 4
tf.compat.v1.enable_eager_execution(config=config)
class ValueModel(tf.keras.Model):
    """Small conv + dense network producing a scalar value in [-1, 1]."""

    def __init__(self,input_dim,hidden_dim=100):
        # NOTE(review): hidden_dim is accepted but unused -- the hidden
        # layer size is hard-coded to 100 below.
        super(ValueModel, self).__init__()
        self.cnn = tf.keras.layers.Conv2D(28,4,input_shape=(input_dim,input_dim))
        self.flatten = tf.keras.layers.Flatten()
        # dense hidden layer with a ReLU activation that encodes most of the information
        self.hidden_layer = tf.keras.layers.Dense(100,'relu',use_bias=False)
        # reduce to a single output with a tanh activation:
        # -1 <= tanh(x) <= 1 matches a reward system defined on [-1, 1]
        self.output_layer = tf.keras.layers.Dense(1,'tanh',use_bias=False)

    def call(self,input):
        """Forward pass: conv -> flatten -> dense(relu) -> dense(tanh)."""
        x = self.cnn(input)
        x = self.flatten(x)
        x = self.hidden_layer(x)
        x = self.output_layer(x)
        return x
def main():
    """Benchmark each sparse-op helper over a range of matrix ranks.

    Runs one sanity forward pass of ValueModel, then times every function
    in *funcs* at each rank and collects (rank, min, mean, sigma) tuples
    per function name in *data*.
    """
    logger.set_verbosity(tf.logging.INFO)

    ranks = [2 ** 7, 2 ** 8, 2 ** 9, 2 ** 10, 2 ** 11, 2 ** 12]
    funcs = [ss_add, sd_add, ss_matmul, sd_matmul]

    dim = 100
    model = ValueModel(dim)
    test = np.random.randn(50, dim, dim, 1)
    result = model(test)
    logger.info('result = %s', result)

    data = {}
    for func in funcs:
        logger.info(func.__name__)
        func_data = []
        for rank in ranks:
            # use t_-prefixed names to avoid shadowing the builtins min/max
            t_min, t_mean, t_sigma = timer(func, 100, rank)
            logger.info("  \t%10d\t%6.3f\t%6.3f\t%6.3f", rank, t_min, t_mean, t_sigma)
            # bug fix: the original appended the *builtin* max instead of
            # the measured standard deviation
            func_data.append((rank, t_min, t_mean, t_sigma))
        data[func.__name__] = func_data
def timer(func, tests, *argv):
    """Call ``func(*argv)`` *tests* times and return (min, mean, sigma) of
    the durations it returns.

    *func* is expected to return a single number (a timing in seconds).
    Bug fix: the original computed sigma as sqrt((sum2 - mean**2)/n); the
    population standard deviation is sqrt(sum2/n - mean**2).
    """
    total = 0.0
    total_sq = 0.0
    count = 0
    best = 999999999.0
    for _ in range(tests):
        duration = func(*argv)
        total += duration
        total_sq += duration ** 2
        count += 1
        if duration < best:
            best = duration
    mean = total / count
    # clamp at 0 so float round-off can never feed sqrt a negative value
    variance = max(total_sq / count - mean ** 2, 0.0)
    sigma = np.sqrt(variance)
    return best, mean, sigma
####
# tfs.add with sparse + sparse
def ss_add(rank=1000,dim=2,sparsity_mean=0.5,sparsity_sigma=10):
# number of points to fill in the matrix
a_np = int(np.random.normal(sparsity_mean * rank,sparsity_sigma))
if a_np <= 0:
raise Exception(f'produced sparse tensor with no entries, settings:\n rank={rank}\n' +
f' dim={dim}\n sparsity_mean={sparsity_mean} sparsity_sigma={sparsity_sigma}')
logger.debug('a_np = %s',a_np)
a = tfs.SparseTensor(indices=np.random.randint(0,rank,(a_np,dim)),
values=np.random.randn(a_np),
dense_shape=[rank] * dim)
b = tfs.SparseTensor(indices=np.random.randint(0,rank,(a_np,dim)),
values=np.random.randn(a_np),
dense_shape=[rank] * dim)
start = time.time()
c = tfs.add(a,b)
end = time.time()
return end - start
####
# tfs.add with sparse + dense
def sd_add(rank=1000, dim=2, sparsity_mean=0.5, sparsity_sigma=10):
    """Time ``tfs.add`` on a random sparse tensor plus a dense one."""
    # Number of points to fill in the matrix, drawn from a normal distribution.
    a_np = int(np.random.normal(sparsity_mean * rank, sparsity_sigma))
    if a_np <= 0:
        raise Exception(f'produced sparse tensor with no entries, settings:\n rank={rank}\n' +
                        f' dim={dim}\n sparsity_mean={sparsity_mean} sparsity_sigma={sparsity_sigma}')
    logger.debug('a_np = %s', a_np)
    a = tfs.SparseTensor(indices=np.random.randint(0, rank, (a_np, dim)),
                        values=np.random.randn(a_np),
                        dense_shape=[rank] * dim)
    # Dense operand with the same overall shape as the sparse one.
    b = np.reshape(np.random.randn(rank ** dim), [rank] * dim)
    start = time.time()
    c = tfs.add(a, b)
    return time.time() - start
####
# elementwise product of two densified sparse tensors
# NOTE(review): despite the name, this benchmarks to_dense + elementwise `*`,
# not a matrix multiplication — confirm intent.
def ss_matmul(rank=1000, dim=2, sparsity_mean=0.5, sparsity_sigma=10):
    """Time densifying two random sparse tensors and multiplying them."""
    # Number of points to fill in the matrix, drawn from a normal distribution.
    a_np = int(np.random.normal(sparsity_mean * rank, sparsity_sigma))
    if a_np <= 0:
        raise Exception(f'produced sparse tensor with no entries, settings:\n rank={rank}\n' +
                        f' dim={dim}\n sparsity_mean={sparsity_mean} sparsity_sigma={sparsity_sigma}')
    logger.debug('a_np = %s', a_np)

    def random_sparse():
        return tfs.SparseTensor(indices=np.random.randint(0, rank, (a_np, dim)),
                                values=np.random.randn(a_np),
                                dense_shape=[rank] * dim)

    a = random_sparse()
    b = random_sparse()
    start = time.time()
    c = tfs.to_dense(a, 0., validate_indices=False) * tfs.to_dense(b, 0., validate_indices=False)
    return time.time() - start
####
# tfs.sparse_dense_matmul with sparse x dense
def sd_matmul(rank=1000, dim=2, sparsity_mean=0.5, sparsity_sigma=10):
    """Time ``tfs.sparse_dense_matmul`` on a random sparse and dense tensor."""
    # Number of points to fill in the matrix, drawn from a normal distribution.
    a_np = int(np.random.normal(sparsity_mean * rank, sparsity_sigma))
    if a_np <= 0:
        raise Exception(f'produced sparse tensor with no entries, settings:\n rank={rank}\n' +
                        f' dim={dim}\n sparsity_mean={sparsity_mean} sparsity_sigma={sparsity_sigma}')
    logger.debug('a_np = %s', a_np)
    a = tfs.SparseTensor(indices=np.random.randint(0, rank, (a_np, dim)),
                        values=np.random.randn(a_np),
                        dense_shape=[rank] * dim)
    b = np.reshape(np.random.randn(rank ** dim), [rank] * dim)
    start = time.time()
    c = tfs.sparse_dense_matmul(a, b)
    return time.time() - start
if __name__ == "__main__":
main()
| [
"numpy.random.normal",
"numpy.sqrt",
"numpy.reshape",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.Flatten",
"numpy.random.randint",
"tensorflow.keras.layers.Dense",
"tensorflow.compat.v1.enable_eager_execution",
"time.time",
"tensorflow.ConfigProto",
"numpy.random.randn"
] | [((192, 208), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (206, 208), True, 'import tensorflow as tf\n'), ((289, 339), 'tensorflow.compat.v1.enable_eager_execution', 'tf.compat.v1.enable_eager_execution', ([], {'config': 'config'}), '(config=config)\n', (324, 339), True, 'import tensorflow as tf\n'), ((1486, 1518), 'numpy.random.randn', 'np.random.randn', (['(50)', 'dim', 'dim', '(1)'], {}), '(50, dim, dim, 1)\n', (1501, 1518), True, 'import numpy as np\n'), ((2243, 2280), 'numpy.sqrt', 'np.sqrt', (['(1.0 / n * (sum2 - mean ** 2))'], {}), '(1.0 / n * (sum2 - mean ** 2))\n', (2250, 2280), True, 'import numpy as np\n'), ((3129, 3140), 'time.time', 'time.time', ([], {}), '()\n', (3138, 3140), False, 'import time\n'), ((3170, 3181), 'time.time', 'time.time', ([], {}), '()\n', (3179, 3181), False, 'import time\n'), ((3847, 3875), 'numpy.random.randn', 'np.random.randn', (['(rank ** dim)'], {}), '(rank ** dim)\n', (3862, 3875), True, 'import numpy as np\n'), ((3883, 3910), 'numpy.reshape', 'np.reshape', (['b', '([rank] * dim)'], {}), '(b, [rank] * dim)\n', (3893, 3910), True, 'import numpy as np\n'), ((3922, 3933), 'time.time', 'time.time', ([], {}), '()\n', (3931, 3933), False, 'import time\n'), ((3963, 3974), 'time.time', 'time.time', ([], {}), '()\n', (3972, 3974), False, 'import time\n'), ((4822, 4833), 'time.time', 'time.time', ([], {}), '()\n', (4831, 4833), False, 'import time\n'), ((4936, 4947), 'time.time', 'time.time', ([], {}), '()\n', (4945, 4947), False, 'import time\n'), ((5616, 5644), 'numpy.random.randn', 'np.random.randn', (['(rank ** dim)'], {}), '(rank ** dim)\n', (5631, 5644), True, 'import numpy as np\n'), ((5652, 5679), 'numpy.reshape', 'np.reshape', (['b', '([rank] * dim)'], {}), '(b, [rank] * dim)\n', (5662, 5679), True, 'import numpy as np\n'), ((5691, 5702), 'time.time', 'time.time', ([], {}), '()\n', (5700, 5702), False, 'import time\n'), ((5748, 5759), 'time.time', 'time.time', ([], {}), '()\n', (5757, 5759), False, 
'import time\n'), ((483, 548), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', (['(28)', '(4)'], {'input_shape': '(input_dim, input_dim)'}), '(28, 4, input_shape=(input_dim, input_dim))\n', (505, 548), True, 'import tensorflow as tf\n'), ((567, 592), 'tensorflow.keras.layers.Flatten', 'tf.keras.layers.Flatten', ([], {}), '()\n', (590, 592), True, 'import tensorflow as tf\n'), ((722, 772), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(100)', '"""relu"""'], {'use_bias': '(False)'}), "(100, 'relu', use_bias=False)\n", (743, 772), True, 'import tensorflow as tf\n'), ((968, 1016), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(1)', '"""tanh"""'], {'use_bias': '(False)'}), "(1, 'tanh', use_bias=False)\n", (989, 1016), True, 'import tensorflow as tf\n'), ((2470, 2524), 'numpy.random.normal', 'np.random.normal', (['(sparsity_mean * rank)', 'sparsity_sigma'], {}), '(sparsity_mean * rank, sparsity_sigma)\n', (2486, 2524), True, 'import numpy as np\n'), ((3366, 3420), 'numpy.random.normal', 'np.random.normal', (['(sparsity_mean * rank)', 'sparsity_sigma'], {}), '(sparsity_mean * rank, sparsity_sigma)\n', (3382, 3420), True, 'import numpy as np\n'), ((4162, 4216), 'numpy.random.normal', 'np.random.normal', (['(sparsity_mean * rank)', 'sparsity_sigma'], {}), '(sparsity_mean * rank, sparsity_sigma)\n', (4178, 4216), True, 'import numpy as np\n'), ((5135, 5189), 'numpy.random.normal', 'np.random.normal', (['(sparsity_mean * rank)', 'sparsity_sigma'], {}), '(sparsity_mean * rank, sparsity_sigma)\n', (5151, 5189), True, 'import numpy as np\n'), ((2802, 2841), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, rank, (a_np, dim))\n', (2819, 2841), True, 'import numpy as np\n'), ((2871, 2892), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (2886, 2892), True, 'import numpy as np\n'), ((2976, 3015), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, 
rank, (a_np, dim))\n', (2993, 3015), True, 'import numpy as np\n'), ((3045, 3066), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (3060, 3066), True, 'import numpy as np\n'), ((3698, 3737), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, rank, (a_np, dim))\n', (3715, 3737), True, 'import numpy as np\n'), ((3767, 3788), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (3782, 3788), True, 'import numpy as np\n'), ((4494, 4533), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, rank, (a_np, dim))\n', (4511, 4533), True, 'import numpy as np\n'), ((4563, 4584), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (4578, 4584), True, 'import numpy as np\n'), ((4668, 4707), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, rank, (a_np, dim))\n', (4685, 4707), True, 'import numpy as np\n'), ((4737, 4758), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (4752, 4758), True, 'import numpy as np\n'), ((5467, 5506), 'numpy.random.randint', 'np.random.randint', (['(0)', 'rank', '(a_np, dim)'], {}), '(0, rank, (a_np, dim))\n', (5484, 5506), True, 'import numpy as np\n'), ((5536, 5557), 'numpy.random.randn', 'np.random.randn', (['a_np'], {}), '(a_np)\n', (5551, 5557), True, 'import numpy as np\n')] |
import PIL
from skimage.io import imread
import numpy as np
# https://stackoverflow.com/questions/27026866/convert-an-image-to-2d-array-in-python
# Load the image and append each pixel's (row, col) indices as two extra
# channels, so every entry carries its own coordinates.
im = imread("snowboarder.jpg")
coord_planes = np.dstack(np.indices(im.shape[:2]))
data = np.concatenate((im, coord_planes), axis=-1)
new_data = data[:, :, :]  # full slice: same contents as `data`
print(new_data)
# np.savetxt("somefile.txt", new_data.reshape((4,5,10)), newline="\n")
| [
"skimage.io.imread",
"numpy.indices",
"numpy.concatenate"
] | [((153, 178), 'skimage.io.imread', 'imread', (['"""snowboarder.jpg"""'], {}), "('snowboarder.jpg')\n", (159, 178), False, 'from skimage.io import imread\n'), ((232, 270), 'numpy.concatenate', 'np.concatenate', (['(im, indices)'], {'axis': '(-1)'}), '((im, indices), axis=-1)\n', (246, 270), True, 'import numpy as np\n'), ((199, 223), 'numpy.indices', 'np.indices', (['im.shape[:2]'], {}), '(im.shape[:2])\n', (209, 223), True, 'import numpy as np\n')] |
import asyncio
import logging
from collections.abc import AsyncIterator, Iterator
import aiobotocore.session
import aiohttp
import pytest
from aiobotocore.client import AioBaseClient
from async_timeout import timeout
from docker import DockerClient
from docker.errors import NotFound as ContainerNotFound
from docker.models.containers import Container
from yarl import URL
from platform_buckets_api.providers import BMCWrapper
# Module-level logger shared by the minio fixtures below.
logger = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def minio_container_image() -> str:
return "minio/minio:RELEASE.2021-12-10T23-03-39Z"
@pytest.fixture(scope="session")
def minio_container_name() -> str:
return "platform-buckets-api-minio"
def _create_url(container: Container, in_docker: bool) -> URL:
    """Build the minio endpoint URL for *container*.

    Inside docker we can reach the container's IP on the exposed port
    directly; from the host we go through the randomly published port.
    """
    exposed_port = 9000
    if in_docker:
        host = container.attrs["NetworkSettings"]["IPAddress"]
        port = exposed_port
    else:
        host = "0.0.0.0"
        port = container.ports[f"{exposed_port}/tcp"][0]["HostPort"]
    return URL(f"http://{host}:{port}")
@pytest.fixture(scope="session")
def _minio_server(
docker_client: DockerClient,
in_docker: bool,
reuse_docker: bool,
minio_container_image: str,
minio_container_name: str,
) -> Iterator[URL]:
try:
container = docker_client.containers.get(minio_container_name)
if reuse_docker:
yield _create_url(container, in_docker)
return
else:
container.remove(force=True)
except ContainerNotFound:
pass
# `run` performs implicit `pull`
container = docker_client.containers.run(
image=minio_container_image,
name=minio_container_name,
publish_all_ports=True,
stdout=False,
stderr=False,
detach=True,
command=["server", "/data"],
environment={
"MINIO_ROOT_USER": "access_key",
"MINIO_ROOT_PASSWORD": "<PASSWORD>",
"MINIO_REGION_NAME": "region-1",
"MINIO_STORAGE_CLASS_STANDARD": "EC:4",
},
)
container.reload()
yield _create_url(container, in_docker)
if not reuse_docker:
container.remove(force=True)
async def wait_for_minio_server(
    url: URL, timeout_s: float = 300, interval_s: float = 1
) -> None:
    """Poll the minio health endpoint until it responds or *timeout_s* expires.

    Fails the test (via ``pytest.fail``) with the last connection error when
    the server never becomes reachable.
    """
    last_exc = None

    async def _probe() -> bool:
        # One health-check attempt; remembers the failure for the final report.
        nonlocal last_exc
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(f"{url}/minio/health/live"):
                    return True
        except (AssertionError, OSError, aiohttp.ClientError) as exc:
            last_exc = exc
            return False

    try:
        async with timeout(timeout_s):
            while not await _probe():
                logger.debug(f"waiting for {url}: {last_exc}")
                await asyncio.sleep(interval_s)
    except asyncio.TimeoutError:
        pytest.fail(f"failed to connect to {url}: {last_exc}")
@pytest.fixture()
async def minio_server(_minio_server: URL) -> AsyncIterator[URL]:
    """Minio URL, yielded only once the server answers health checks."""
    await wait_for_minio_server(_minio_server)
    yield _minio_server
@pytest.fixture()
async def minio_s3(minio_server: URL) -> AsyncIterator[AioBaseClient]:
    """S3 client for the test minio server, with all buckets wiped before and after use."""
    session = aiobotocore.session.get_session()
    async def _drop_buckets(s3: AioBaseClient) -> None:
        # Buckets must be emptied of objects before they can be deleted.
        # NOTE(review): list_objects_v2 returns at most 1000 keys per call;
        # presumably test buckets stay well below that — confirm if tests grow.
        for bucket in (await s3.list_buckets())["Buckets"]:
            bucket_name = bucket["Name"]
            for obj in (await s3.list_objects_v2(Bucket=bucket_name)).get(
                "Contents", []
            ):
                obj_key = obj["Key"]
                await s3.delete_object(Bucket=bucket_name, Key=obj_key)
            await s3.delete_bucket(Bucket=bucket_name)
    async with session.create_client(
        "s3",
        endpoint_url=str(minio_server),
        aws_access_key_id="access_key",
        aws_secret_access_key="secret_key",
        region_name="region-1",
    ) as s3_client:
        # Clean slate on entry and a tidy server on exit.
        await _drop_buckets(s3_client)
        yield s3_client
        await _drop_buckets(s3_client)
@pytest.fixture()
async def minio_sts(minio_server: URL) -> AsyncIterator[AioBaseClient]:
    """STS client pointed at the test minio server."""
    session = aiobotocore.session.get_session()
    client_ctx = session.create_client(
        "sts",
        endpoint_url=str(minio_server),
        aws_access_key_id="access_key",
        aws_secret_access_key="secret_key",
        region_name="region-1",
    )
    async with client_ctx as s3_client:
        yield s3_client
@pytest.fixture()
async def bmc_wrapper(minio_server: URL) -> AsyncIterator[BMCWrapper]:
    """BMC admin wrapper with every minio user removed before and after use."""
    async def _drop_users(mc: BMCWrapper) -> None:
        # Delete all existing users so each test starts from a clean slate.
        listing = await mc.admin_user_list()
        for user in listing.content:
            await mc.admin_user_remove(username=user["accessKey"])
    async with BMCWrapper(minio_server, "access_key", "secret_key") as wrapper:
        await _drop_users(wrapper)
        yield wrapper
        await _drop_users(wrapper)
| [
"logging.getLogger",
"aiohttp.ClientSession",
"platform_buckets_api.providers.BMCWrapper",
"async_timeout.timeout",
"pytest.fail",
"asyncio.sleep",
"pytest.fixture",
"yarl.URL"
] | [((439, 466), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (456, 466), False, 'import logging\n'), ((470, 501), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (484, 501), False, 'import pytest\n'), ((595, 626), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (609, 626), False, 'import pytest\n'), ((1031, 1062), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1045, 1062), False, 'import pytest\n'), ((2889, 2905), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2903, 2905), False, 'import pytest\n'), ((3046, 3062), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3060, 3062), False, 'import pytest\n'), ((3959, 3975), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3973, 3975), False, 'import pytest\n'), ((4353, 4369), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (4367, 4369), False, 'import pytest\n'), ((999, 1027), 'yarl.URL', 'URL', (['f"""http://{host}:{port}"""'], {}), "(f'http://{host}:{port}')\n", (1002, 1027), False, 'from yarl import URL\n'), ((4633, 4685), 'platform_buckets_api.providers.BMCWrapper', 'BMCWrapper', (['minio_server', '"""access_key"""', '"""secret_key"""'], {}), "(minio_server, 'access_key', 'secret_key')\n", (4643, 4685), False, 'from platform_buckets_api.providers import BMCWrapper\n'), ((2323, 2341), 'async_timeout.timeout', 'timeout', (['timeout_s'], {}), '(timeout_s)\n', (2330, 2341), False, 'from async_timeout import timeout\n'), ((2831, 2885), 'pytest.fail', 'pytest.fail', (['f"""failed to connect to {url}: {last_exc}"""'], {}), "(f'failed to connect to {url}: {last_exc}')\n", (2842, 2885), False, 'import pytest\n'), ((2764, 2789), 'asyncio.sleep', 'asyncio.sleep', (['interval_s'], {}), '(interval_s)\n', (2777, 2789), False, 'import asyncio\n'), ((2419, 2442), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (2440, 
2442), False, 'import aiohttp\n')] |
"""Setup file to install ddsrouter_yaml_validator module."""
from setuptools import setup
package_name = 'ddsrouter_yaml_validator'

# Files installed alongside the package: ament resource index entry, the
# package manifest, the JSON schema and an example configuration.
_data_files = [
    ('share/ament_index/resource_index/packages',
        ['resource/' + package_name]),
    ('share/' + package_name, ['package.xml']),
    ('share/' + package_name, [package_name + '/ddsrouter_config_schema.json']),
    ('share/' + package_name, [package_name + '/full_example.yaml']),
]

setup(
    name=package_name,
    version='0.3.0',
    packages=[package_name],
    data_files=_data_files,
    install_requires=['setuptools'],
    zip_safe=True,
    maintainer='eprosima',
    maintainer_email='<EMAIL>',
    description='Tool used for validating DDS-Router configuration files',
    license='Apache License, Version 2.0',
    tests_require=['pytest'],
    test_suite='tests',
    entry_points={
        'console_scripts': [
            'ddsrouter_yaml_validator = ddsrouter_yaml_validator.ddsrouter_yaml_validator:main',
        ],
    },
)
| [
"setuptools.setup"
] | [((135, 904), 'setuptools.setup', 'setup', ([], {'name': 'package_name', 'version': '"""0.3.0"""', 'packages': '[package_name]', 'data_files': "[('share/ament_index/resource_index/packages', ['resource/' + package_name]\n ), ('share/' + package_name, ['package.xml']), ('share/' + package_name,\n [package_name + '/ddsrouter_config_schema.json']), ('share/' +\n package_name, [package_name + '/full_example.yaml'])]", 'install_requires': "['setuptools']", 'zip_safe': '(True)', 'maintainer': '"""eprosima"""', 'maintainer_email': '"""<EMAIL>"""', 'description': '"""Tool used for validating DDS-Router configuration files"""', 'license': '"""Apache License, Version 2.0"""', 'tests_require': "['pytest']", 'test_suite': '"""tests"""', 'entry_points': "{'console_scripts': [\n 'ddsrouter_yaml_validator = ddsrouter_yaml_validator.ddsrouter_yaml_validator:main'\n ]}"}), "(name=package_name, version='0.3.0', packages=[package_name],\n data_files=[('share/ament_index/resource_index/packages', ['resource/' +\n package_name]), ('share/' + package_name, ['package.xml']), ('share/' +\n package_name, [package_name + '/ddsrouter_config_schema.json']), (\n 'share/' + package_name, [package_name + '/full_example.yaml'])],\n install_requires=['setuptools'], zip_safe=True, maintainer='eprosima',\n maintainer_email='<EMAIL>', description=\n 'Tool used for validating DDS-Router configuration files', license=\n 'Apache License, Version 2.0', tests_require=['pytest'], test_suite=\n 'tests', entry_points={'console_scripts': [\n 'ddsrouter_yaml_validator = ddsrouter_yaml_validator.ddsrouter_yaml_validator:main'\n ]})\n", (140, 904), False, 'from setuptools import setup\n')] |
# Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""
Gets the current Facebook Python SDK version.
"""
import os
import re
def get_version(package_init_filename=None):
    """Return the package version string parsed from an ``__init__.py``.

    Args:
        package_init_filename: optional path to the ``__init__.py`` to scan;
            defaults to ``../__init__.py`` relative to this file (the
            original, backward-compatible behavior).

    Raises:
        ValueError: if no ``__version__ = '...'`` line is found.
    """
    if package_init_filename is None:
        this_dir = os.path.dirname(__file__)
        package_init_filename = os.path.join(this_dir, '../__init__.py')
    with open(package_init_filename, 'r') as handle:
        file_content = handle.read()
    match = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
        file_content, re.MULTILINE
    )
    # BUG FIX: the original called .group(1) on the search result directly,
    # so a missing version line raised AttributeError before the intended
    # ValueError could ever fire.
    if match is None:
        raise ValueError('Cannot find version information')
    return match.group(1)
| [
"os.path.dirname",
"os.path.join",
"re.search"
] | [((1155, 1180), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1170, 1180), False, 'import os\n'), ((1209, 1249), 'os.path.join', 'os.path.join', (['this_dir', '"""../__init__.py"""'], {}), "(this_dir, '../__init__.py')\n", (1221, 1249), False, 'import os\n'), ((1378, 1469), 're.search', 're.search', (['"""^__version__\\\\s*=\\\\s*[\\\\\'"]([^\\\\\'"]*)[\\\\\'"]"""', 'file_content', 're.MULTILINE'], {}), '(\'^__version__\\\\s*=\\\\s*[\\\\\\\'"]([^\\\\\\\'"]*)[\\\\\\\'"]\', file_content,\n re.MULTILINE)\n', (1387, 1469), False, 'import re\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Girder plugin framework and tests adapted from Kitware Inc. source and
# documentation by the Imaging and Visualization Group, Advanced Biomedical
# Computational Science, Frederick National Laboratory for Cancer Research.
#
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import os.path
import uuid
from girder.constants import AccessType
from girder.models.model_base import AccessControlledModel
from girder.models.file import File
from girder.models.setting import Setting
from girder_jobs.models.job import Job
from girder_worker.girder_plugin import utils
from ..constants import PluginSettings
from girder_worker_utils.transforms.girder_io import GirderFileId, GirderUploadToItem
from histogram.histogram import histogram as histogramExecutor
class Histogram(AccessControlledModel):
    """Access-controlled model tracking histogram computations for items.

    The histogram data itself lives in a girder file (``fileId``); this model
    stores the computation parameters and links them to the owning item.
    """

    def initialize(self):
        self.name = 'histogram'
        self.ensureIndices(['itemId', 'fakeId', 'fileId'])
        self.exposeFields(AccessType.READ, (
            '_id',
            'itemId',  # computed histogram of this item
            'bins',
            'label',
            'bitmask',
            'fakeId',
            'fileId',  # file containing computed histogram
        ))

    def remove(self, histogram, **kwargs):
        """Remove a histogram document and, unless ``keepFile`` is set, its data file."""
        if not kwargs.get('keepFile'):
            fileId = histogram.get('fileId')
            if fileId:
                file_ = File().load(fileId, force=True)
                if file_:
                    File().remove(file_)
        return super(Histogram, self).remove(histogram, **kwargs)

    def createHistogramJob(self, item, file_, user=None, token=None,
                           notify=False, bins=None, label=False, bitmask=False):
        """Schedule a worker task computing a histogram of ``file_``.

        Creates a pending histogram document correlated with the task via a
        random ``fakeId`` (the resulting file id is only known once the task
        uploads its output back to ``item``).

        Raises:
            ValueError: if ``file_`` does not belong to ``item``.
        """
        if bins is None:
            bins = Setting().get(PluginSettings.DEFAULT_BINS)
        if file_['itemId'] != item['_id']:
            raise ValueError('The file must be in the item.')
        girder_job_title = 'Histogram computation for item %s' % item['_id']
        girder_job_type = 'histogram'
        # fakeId ties the pending document to the upload reference below.
        fakeId = uuid.uuid4().hex
        other_fields = {
            'meta': {
                'creator': 'histogram',
                'task': 'createHistogram',
                'fakeId': fakeId,
            }
        }
        reference = json.dumps({'isHistogram': True, 'fakeId': fakeId})
        histogramExecutor.delay(
            GirderFileId(str(file_['_id'])), label, bins, bitmask,
            girder_job_title=girder_job_title, girder_job_type=girder_job_type,
            girder_job_other_fields=other_fields,
            girder_result_hooks=[GirderUploadToItem(str(item['_id']), delete_file=True,
                                 upload_kwargs={'reference': reference})])
        histogram = {
            'expected': True,
            # BUG FIX: honor the caller-supplied ``notify`` flag; it was
            # hard-coded to True while the parameter was silently ignored.
            'notify': notify,
            'itemId': item['_id'],
            'bins': bins,
            'label': label,
            'bitmask': bitmask,
            'fakeId': fakeId,
        }
        self.save(histogram)
        return histogram

    def validate(self, histogram):
        """Histogram documents are stored as-is; no validation needed."""
        return histogram
| [
"json.dumps",
"girder.models.file.File",
"girder.models.setting.Setting",
"uuid.uuid4"
] | [((2991, 3042), 'json.dumps', 'json.dumps', (["{'isHistogram': True, 'fakeId': fakeId}"], {}), "({'isHistogram': True, 'fakeId': fakeId})\n", (3001, 3042), False, 'import json\n'), ((2765, 2777), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2775, 2777), False, 'import uuid\n'), ((2485, 2494), 'girder.models.setting.Setting', 'Setting', ([], {}), '()\n', (2492, 2494), False, 'from girder.models.setting import Setting\n'), ((2125, 2131), 'girder.models.file.File', 'File', ([], {}), '()\n', (2129, 2131), False, 'from girder.models.file import File\n'), ((2203, 2209), 'girder.models.file.File', 'File', ([], {}), '()\n', (2207, 2209), False, 'from girder.models.file import File\n')] |
from flask import Flask, request, redirect, url_for, render_template
import requests
import json
from .spotify import get_token, authorize, get_audio_features, get_user, get_user_playlists
from .analysis import analyse_playlist, group_by_day
def create_app(test_config=None):
    """Build the Flask app with all Spotify-analysis routes registered.

    Note: each inner function name doubles as the Flask endpoint name used
    by ``url_for`` below, so they must not be renamed.
    """
    app = Flask(__name__)

    @app.route('/')
    def index():
        # A Spotify access token is passed around via the query string.
        token = request.args.get('token')
        if token:
            # we have a token: show the user's playlists
            user_id = get_user(token).json().get('id')
            playlists = get_user_playlists(token, user_id).json().get('items')
            return render_template('index.html', playlists=json.dumps(playlists))
        else:
            # get a token
            return redirect(url_for('authorization'))

    @app.route('/analyse')
    def analyse():
        token = request.args.get('token')
        # NOTE(review): `id` shadows the builtin; kept as-is since it is a
        # query-parameter name mirrored into the local.
        id = request.args.get('id')
        if token:
            # r is (dates, valences) for the playlist — assumed from usage.
            r = analyse_playlist(token, id)
            # remove time from date (keep the YYYY-MM-DD prefix only)
            dates = [x[:10] for x in r[0]]
            dates, valences = group_by_day(dates, r[1])
            return render_template('analyse.html', labels=json.dumps(dates), data=valences)
        else:
            return redirect(url_for('authorization'))

    @app.route('/authorization')
    def authorization():
        # Bounce the user to Spotify's OAuth consent page.
        return redirect(authorize())

    @app.route('/token')
    def token():
        # OAuth callback: exchange the authorization code for a token.
        code = request.args.get('code')
        if code:
            token = get_token(code)
        else:
            return redirect(url_for('authorization'))
        return redirect(url_for('index', token=token))

    return app
| [
"flask.request.args.get",
"flask.url_for",
"json.dumps",
"flask.Flask"
] | [((288, 303), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (293, 303), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((358, 383), 'flask.request.args.get', 'request.args.get', (['"""token"""'], {}), "('token')\n", (374, 383), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((805, 830), 'flask.request.args.get', 'request.args.get', (['"""token"""'], {}), "('token')\n", (821, 830), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((844, 866), 'flask.request.args.get', 'request.args.get', (['"""id"""'], {}), "('id')\n", (860, 866), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((1381, 1405), 'flask.request.args.get', 'request.args.get', (['"""code"""'], {}), "('code')\n", (1397, 1405), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((1551, 1580), 'flask.url_for', 'url_for', (['"""index"""'], {'token': 'token'}), "('index', token=token)\n", (1558, 1580), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((716, 740), 'flask.url_for', 'url_for', (['"""authorization"""'], {}), "('authorization')\n", (723, 740), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((1201, 1225), 'flask.url_for', 'url_for', (['"""authorization"""'], {}), "('authorization')\n", (1208, 1225), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((1501, 1525), 'flask.url_for', 'url_for', (['"""authorization"""'], {}), "('authorization')\n", (1508, 1525), False, 'from flask import Flask, request, redirect, url_for, render_template\n'), ((625, 646), 'json.dumps', 'json.dumps', (['playlists'], {}), '(playlists)\n', (635, 646), False, 'import json\n'), ((1125, 1142), 'json.dumps', 'json.dumps', (['dates'], {}), '(dates)\n', (1135, 1142), False, 'import json\n')] |
from pywaterkotte.ecotouch import (Ecotouch, EcotouchTag, InvalidResponseException, StatusException)
import responses
import pytest
from datetime import datetime
HOSTNAME = 'hostname'


@pytest.fixture
def wp_instance():
    """Fresh Ecotouch client pointed at the test hostname."""
    heat_pump = Ecotouch(HOSTNAME)
    return heat_pump
# Helper functions
def prepare_response(action, body):
    """Register a canned GET response for the given CGI *action*."""
    url = 'http://%s/cgi/%s' % (HOSTNAME, action)
    responses.add(responses.GET, url, body=body)
@responses.activate
def test_login_invalid_response(wp_instance):
    """login() must reject a response it cannot parse."""
    prepare_response('login', 'invalid')
    # Idiom fix: dropped the unused ``as e_info`` binding.
    with pytest.raises(InvalidResponseException):
        wp_instance.login()
@responses.activate
def test_login_relogin(wp_instance):
    """A re-login-attempt status must raise StatusException."""
    prepare_response('login', '#E_RE-LOGIN_ATTEMPT')
    # Idiom fix: dropped the unused ``as e_info`` binding.
    with pytest.raises(StatusException):
        wp_instance.login()
@responses.activate
def test_login_success(wp_instance):
    """A valid login response containing a token must be accepted silently."""
    prepare_response('login', '1\n#S_OK\nIDALToken=<PASSWORD>')
    wp_instance.login()
@responses.activate
def test_read_tag(wp_instance):
    """A raw outside-temperature value of 86 decodes to 8.6 degrees."""
    prepare_response('readTags', '#A1\tS_OK\n192\t86\n')
    outside_temp = wp_instance.read_value(EcotouchTag.TEMPERATURE_OUTSIDE)
    assert outside_temp == 8.6
@responses.activate
def test_read_bitfield(wp_instance):
    """Each state bit of the 170 (0b10101010) bitfield decodes correctly."""
    prepare_response('readTags', '#I51\tS_OK\n192\t170\n')
    # Idiom fix (PEP 8 / E712): compare booleans with ``is``, not ``== True``.
    assert wp_instance.read_value(EcotouchTag.STATE_COMPRESSOR) is True
    assert wp_instance.read_value(EcotouchTag.STATE_SOURCEPUMP) is False
    assert wp_instance.read_value(EcotouchTag.STATE_EXTERNAL_HEATER) is True
    assert wp_instance.read_value(EcotouchTag.STATE_HEATINGPUMP) is True
@responses.activate
def test_write(wp_instance):
    """Writing a plain integer tag issues exactly one request."""
    prepare_response('writeTags', '#I263\tS_OK\n192\t5\n')
    wp_instance.write_value(EcotouchTag.ADAPT_HEATING, 6)
    assert len(responses.calls) == 1
@responses.activate
def test_write_date(wp_instance):
    """Writing a datetime tag issues one request per date component (5 total)."""
    prepare_response('writeTags', '#I263\tS_OK\n192\t5\n')
    holiday_start = datetime(2019, 3, 2, 11, 0)
    wp_instance.write_value(EcotouchTag.HOLIDAY_START_TIME, holiday_start)
    assert len(responses.calls) == 5
@responses.activate
def test_read_date(wp_instance):
    """Five component tags are combined into a single datetime value."""
    component_responses = [
        '#I1250\tS_OK\n192\t18\n',
        '#I1251\tS_OK\n192\t2\n',
        '#I1252\tS_OK\n192\t1\n',
        '#I1253\tS_OK\n192\t3\n',
        '#I1254\tS_OK\n192\t19\n',
    ]
    prepare_response('readTags', ''.join(component_responses))
    result = wp_instance.read_value(EcotouchTag.HOLIDAY_START_TIME)
    assert isinstance(result, datetime)
    assert result == datetime(2019, 3, 1, 18, 2)
@responses.activate
def test_read_multiple_tags(wp_instance):
    """read_values() maps every requested tag to its decoded temperature."""
    prepare_response('readTags', ''.join([
        '#A1\tS_OK\n192\t84\n',
        '#A2\tS_OK\n192\t87\n',
        '#A3\tS_OK\n192\t92\n',
        '#A4\tS_OK\n192\t95\n',
        '#A5\tS_OK\n192\t57\n']))
    result = wp_instance.read_values([
        EcotouchTag.TEMPERATURE_OUTSIDE,
        EcotouchTag.TEMPERATURE_OUTSIDE_1H,
        EcotouchTag.TEMPERATURE_OUTSIDE_24H,
        EcotouchTag.TEMPERATURE_SOURCE_IN,
        EcotouchTag.TEMPERATURE_SOURCE_OUT])
    assert isinstance(result, dict)
    expected = {
        EcotouchTag.TEMPERATURE_OUTSIDE: 8.4,
        EcotouchTag.TEMPERATURE_OUTSIDE_1H: 8.7,
        EcotouchTag.TEMPERATURE_OUTSIDE_24H: 9.2,
        EcotouchTag.TEMPERATURE_SOURCE_IN: 9.5,
        EcotouchTag.TEMPERATURE_SOURCE_OUT: 5.7,
    }
    for tag, value in expected.items():
        assert result[tag] == value
| [
"datetime.datetime",
"responses.add",
"pywaterkotte.ecotouch.Ecotouch",
"pytest.raises"
] | [((232, 250), 'pywaterkotte.ecotouch.Ecotouch', 'Ecotouch', (['HOSTNAME'], {}), '(HOSTNAME)\n', (240, 250), False, 'from pywaterkotte.ecotouch import Ecotouch, EcotouchTag, InvalidResponseException, StatusException\n'), ((312, 397), 'responses.add', 'responses.add', (['responses.GET', "('http://%s/cgi/%s' % (HOSTNAME, action))"], {'body': 'body'}), "(responses.GET, 'http://%s/cgi/%s' % (HOSTNAME, action), body=body\n )\n", (325, 397), False, 'import responses\n'), ((551, 590), 'pytest.raises', 'pytest.raises', (['InvalidResponseException'], {}), '(InvalidResponseException)\n', (564, 590), False, 'import pytest\n'), ((750, 780), 'pytest.raises', 'pytest.raises', (['StatusException'], {}), '(StatusException)\n', (763, 780), False, 'import pytest\n'), ((1944, 1971), 'datetime.datetime', 'datetime', (['(2019)', '(3)', '(2)', '(11)', '(0)'], {}), '(2019, 3, 2, 11, 0)\n', (1952, 1971), False, 'from datetime import datetime\n'), ((2402, 2429), 'datetime.datetime', 'datetime', (['(2019)', '(3)', '(1)', '(18)', '(2)'], {}), '(2019, 3, 1, 18, 2)\n', (2410, 2429), False, 'from datetime import datetime\n')] |
from . import views
from django.urls import path
from django.contrib import admin
# URL configuration: each entry maps a request path to a view callable.
urlpatterns=[
    path("admin/", admin.site.urls),          # built-in Django admin site
    path("",views.index, name="index"),       # site root -> views.index
    path("maps", views.base, name="base"),    # "maps" -> views.base
    path("profile", views.profile, name="profile"),
    path("add_todo",views.add_todo, name="add_todo"),
    # todo_id is captured from the URL and passed to the view.
    path("delete_todo/<int:todo_id>",views.delete_todo, name="delete_todo"),
]
| [
"django.urls.path"
] | [((109, 140), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (113, 140), False, 'from django.urls import path\n'), ((147, 182), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (151, 182), False, 'from django.urls import path\n'), ((188, 225), 'django.urls.path', 'path', (['"""maps"""', 'views.base'], {'name': '"""base"""'}), "('maps', views.base, name='base')\n", (192, 225), False, 'from django.urls import path\n'), ((232, 278), 'django.urls.path', 'path', (['"""profile"""', 'views.profile'], {'name': '"""profile"""'}), "('profile', views.profile, name='profile')\n", (236, 278), False, 'from django.urls import path\n'), ((285, 334), 'django.urls.path', 'path', (['"""add_todo"""', 'views.add_todo'], {'name': '"""add_todo"""'}), "('add_todo', views.add_todo, name='add_todo')\n", (289, 334), False, 'from django.urls import path\n'), ((340, 412), 'django.urls.path', 'path', (['"""delete_todo/<int:todo_id>"""', 'views.delete_todo'], {'name': '"""delete_todo"""'}), "('delete_todo/<int:todo_id>', views.delete_todo, name='delete_todo')\n", (344, 412), False, 'from django.urls import path\n')] |
import re
import bs4
from LimeSoup.lime_soup import Soup, RuleIngredient
from LimeSoup.parser.elsevier_xml import (
resolve_elsevier_entities, extract_ce_text, find_non_empty_children,
node_named, extract_ce_para, extract_ce_section, extract_ce_abstract,
extract_ce_title, remove_consecutive_whitespaces)
__author__ = '<NAME>, <NAME>'
__maintainer__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.3.2-xml'
__all__ = ['ElsevierXMLSoup']
class ElsevierParseXML(RuleIngredient):
    """Pipeline step: entity-resolve raw Elsevier XML and parse it into a soup."""

    @staticmethod
    def _parse(xml_str):
        # Replace Elsevier-specific XML entities before handing the document
        # to the lxml-based XML parser.
        resolved = resolve_elsevier_entities(xml_str)
        return bs4.BeautifulSoup(resolved, 'lxml-xml')
class ElsevierReadMetaData(RuleIngredient):
    """Pipeline step: pull journal, DOI, title and keywords out of the soup."""

    @staticmethod
    def get_text_or_none(soup, name, handler=None):
        """Return the (optionally handler-processed) text of the first child
        named *name*, or None when *soup* or the child is missing."""
        if soup is None:
            return None
        node = soup.find(name=name)
        if node is None:
            return None
        if handler is not None:
            return handler(node)
        return node.get_text().strip()

    @staticmethod
    def _parse(soup):
        read = ElsevierReadMetaData.get_text_or_none

        # Journal name: prefer the xocs source title, fall back to PRISM.
        journal_name = read(soup, 'xocs:srctitle') or read(soup, 'prism:publicationName')
        doi = read(soup, 'xocs:doi')

        # https://www.elsevier.com/__data/assets/pdf_file/0003/58872/ja5_tagbytag5_v1.9.5.pdf
        # Elsevier XML definition pp. 46
        head_node = soup.find('head')
        title = read(head_node, 'ce:title', extract_ce_title) or read(soup, 'dc:title')

        keywords = []
        if head_node is not None:
            # Elsevier XML definition pp. 366
            for node in head_node.find_all('ce:keyword'):
                text_node = node.find('ce:text')
                if text_node is not None:
                    raw_text = extract_ce_text(text_node)
                    cleaned = remove_consecutive_whitespaces(raw_text, keep_newline=False)
                    keywords.append(cleaned.strip())
        if not keywords:
            # Fall back to Dublin Core subjects when no ce:keyword was found.
            for subject in soup.find_all('dcterms:subject'):
                keywords.append(subject.get_text().strip())

        metadata = {
            'Journal': journal_name,
            'DOI': doi,
            'Title': title,
            'Keywords': keywords,
        }
        return soup, metadata
class ElsevierCollect(RuleIngredient):
    """Pipeline step: collect abstract and body paragraphs into 'Sections'."""

    @staticmethod
    def _parse(args):
        soup, obj = args
        paragraphs = []

        # Keep only abstract nodes whose (punctuation-stripped) name reads
        # "abstract"/"abstracts".
        for abstract_node in soup.find_all('ce:abstract'):
            abstract = extract_ce_abstract(abstract_node)
            normalized_name = re.sub(r'[^\w]', '', abstract['name'])
            if re.match(r'abstracts?', normalized_name, re.IGNORECASE):
                paragraphs.append(abstract)

        sections = soup.find('ce:sections')
        if sections is not None:
            for child in find_non_empty_children(sections):
                if node_named(child, 'ce:para'):
                    paragraphs.extend(extract_ce_para(child).split('\n'))
                elif node_named(child, 'ce:section'):
                    paragraphs.append(extract_ce_section(child))

        obj['Sections'] = paragraphs
        return obj
# Assemble the Elsevier XML pipeline: parse, read metadata, collect sections.
ElsevierXMLSoup = Soup(parser_version=__version__)
ElsevierXMLSoup.add_ingredient(ElsevierParseXML())
ElsevierXMLSoup.add_ingredient(ElsevierReadMetaData())
ElsevierXMLSoup.add_ingredient(ElsevierCollect())
| [
"LimeSoup.parser.elsevier_xml.extract_ce_section",
"LimeSoup.parser.elsevier_xml.node_named",
"LimeSoup.parser.elsevier_xml.resolve_elsevier_entities",
"re.match",
"LimeSoup.parser.elsevier_xml.extract_ce_para",
"bs4.BeautifulSoup",
"LimeSoup.parser.elsevier_xml.find_non_empty_children",
"LimeSoup.par... | [((3398, 3430), 'LimeSoup.lime_soup.Soup', 'Soup', ([], {'parser_version': '__version__'}), '(parser_version=__version__)\n', (3402, 3430), False, 'from LimeSoup.lime_soup import Soup, RuleIngredient\n'), ((557, 591), 'LimeSoup.parser.elsevier_xml.resolve_elsevier_entities', 'resolve_elsevier_entities', (['xml_str'], {}), '(xml_str)\n', (582, 591), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((607, 645), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['xml_str', '"""lxml-xml"""'], {}), "(xml_str, 'lxml-xml')\n", (624, 645), False, 'import bs4\n'), ((2715, 2740), 'LimeSoup.parser.elsevier_xml.extract_ce_abstract', 'extract_ce_abstract', (['node'], {}), '(node)\n', (2734, 2740), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((2771, 2819), 're.sub', 're.sub', (['"""[^\\\\w]"""', '""""""', "abstract_paragraph['name']"], {}), "('[^\\\\w]', '', abstract_paragraph['name'])\n", (2777, 2819), False, 'import re\n'), ((2835, 2889), 're.match', 're.match', (['"""abstracts?"""', 'normalized_name', 're.IGNORECASE'], {}), "('abstracts?', normalized_name, re.IGNORECASE)\n", (2843, 2889), False, 'import re\n'), ((3048, 3081), 'LimeSoup.parser.elsevier_xml.find_non_empty_children', 'find_non_empty_children', (['sections'], {}), '(sections)\n', (3071, 3081), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((3102, 3129), 'LimeSoup.parser.elsevier_xml.node_named', 'node_named', (['node', 
'"""ce:para"""'], {}), "(node, 'ce:para')\n", (3112, 3129), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((3225, 3255), 'LimeSoup.parser.elsevier_xml.node_named', 'node_named', (['node', '"""ce:section"""'], {}), "(node, 'ce:section')\n", (3235, 3255), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((3295, 3319), 'LimeSoup.parser.elsevier_xml.extract_ce_section', 'extract_ce_section', (['node'], {}), '(node)\n', (3313, 3319), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((2016, 2042), 'LimeSoup.parser.elsevier_xml.extract_ce_text', 'extract_ce_text', (['text_node'], {}), '(text_node)\n', (2031, 2042), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n'), ((3169, 3190), 'LimeSoup.parser.elsevier_xml.extract_ce_para', 'extract_ce_para', (['node'], {}), '(node)\n', (3184, 3190), False, 'from LimeSoup.parser.elsevier_xml import resolve_elsevier_entities, extract_ce_text, find_non_empty_children, node_named, extract_ce_para, extract_ce_section, extract_ce_abstract, extract_ce_title, remove_consecutive_whitespaces\n')] |
import os
from pathlib import Path
import click
import toml
def get_config(app_name: str, config_name: str, default_config: str):
    """
    Return the configuration file content for *app_name*.

    Sources are consulted in the following order:
    1. <APP_NAME>_CONFIG_PATH environment variable, if set
    2. '/etc/<app_name>/<config_name>'
    3. the per-user config dir reported by ``click.get_app_dir()``

    If none of the sources exist, *default_config* is written to source 3
    and returned.

    :param app_name: Application name used for the configuration directory name
    :param config_name: Configuration file name
    :param default_config: Default configuration as TOML-formatted string
    """
    env_var = "{}_CONFIG_PATH".format(app_name.upper().replace('-', '_'))
    override_path = os.environ.get(env_var)
    if override_path:
        return Path(override_path).read_text()

    etc_path = Path("/etc") / app_name / config_name
    if etc_path.is_file():
        return etc_path.read_text()

    user_path = Path(click.get_app_dir(app_name)) / config_name
    if user_path.is_file():
        return user_path.read_text()

    # No config found anywhere: persist the default so later runs see it.
    user_path.parent.mkdir(exist_ok=True, parents=True)
    user_path.write_text(default_config)
    return default_config
# Default configuration written to the user's config directory on first run.
# NOTE: the '#' lines below are part of the TOML payload, not Python comments,
# and must stay inside the string.
DEFAULT_CONFIG = f"""
[logging]
# different logging levels:
# 50 = critical
# 40 = error
# 30 = warning
# 20 = info
# 10 = debug
level=10
[mets]
# Organization name used in PREMIS events
organization_name='ORGANIZATION NAME HERE'
# Contract ID used for DPRES REST API and in PREMIS events
contract_id='12345678-f00d-d00f-a4b7-010a184befdd'
[sign]
# Path to the key used to sign the METS
key_path='{Path(__file__).parent / "data" / "test_rsa_keys.crt"}'
[ssh]
host=''
port='22'
username=''
private_key=''
home_path=''
[museumplus]
# MuseumPlus instance URL ending with '/ria-ws/application'
url=''
# Template ID used for generating the LIDO XML report
lido_report_id='45005'
# Field used for storing the preservation history for an object
# Needs to have the 'Clob' data type
object_preservation_field_name=''
object_preservation_field_type='dataField'
# Whether to update MuseumPlus log field with preservation events
add_log_entries=true
username=''
password=''
[dpres]
# Virtualenv settings for dpres-siptools.
# These allow dpres-siptools to be installed separately
# from passari.
use_virtualenv=false
virtualenv_path=''
"""
# Parsed configuration: user/system config if present, otherwise the default
# above (which get_config() also writes to disk on first use).
CONFIG = toml.loads(
    get_config("passari", "config.toml", DEFAULT_CONFIG)
)
# Convenience constants read from the parsed configuration.
ORGANIZATION_NAME = CONFIG["mets"]["organization_name"]
CONTRACT_ID = CONFIG["mets"]["contract_id"]
SIGN_KEY_PATH = CONFIG["sign"]["key_path"]
MUSEUMPLUS_URL = CONFIG["museumplus"]["url"]
LIDO_REPORT_ID = CONFIG["museumplus"]["lido_report_id"]
| [
"click.get_app_dir",
"os.environ.get",
"pathlib.Path"
] | [((812, 836), 'os.environ.get', 'os.environ.get', (['env_name'], {}), '(env_name)\n', (826, 836), False, 'import os\n'), ((911, 923), 'pathlib.Path', 'Path', (['"""/etc"""'], {}), "('/etc')\n", (915, 923), False, 'from pathlib import Path\n'), ((1041, 1068), 'click.get_app_dir', 'click.get_app_dir', (['app_name'], {}), '(app_name)\n', (1058, 1068), False, 'import click\n'), ((853, 879), 'pathlib.Path', 'Path', (['os.environ[env_name]'], {}), '(os.environ[env_name])\n', (857, 879), False, 'from pathlib import Path\n'), ((1697, 1711), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1701, 1711), False, 'from pathlib import Path\n')] |
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import logging
import configparser
import time
import json
import concurrent.futures
# Module-level logger.
logger = logging.getLogger(__name__)
# Runtime settings (API endpoint, API key, thread count) come from config.ini.
config = configparser.ConfigParser()
config.read('config.ini')
crxcavator_api = config['crxcavator']['api']
MAX_THREADS = int(config['crxcavator']['threads']) # Get max number of threads for multi-threading
class CrXcavator(object):
    """Risk report for one Chrome extension version as reported by CrXcavator."""

    def __init__(self, extension_id, version, name):
        self.id = extension_id
        self.version = version
        self.name = name
        # Risk sections are filled in later by parse_risk_data(); None means
        # the section was absent from the report.
        self.risk_csp = None
        self.risk_external_javascript = None
        self.risk_external_calls = None
        self.risk_score = 0
        self.entry_points = None
        self.dangerous_functions = None
        self.chrome_link = "https://chrome.google.com/webstore/detail/{0}".format(extension_id)
        self.crxcavator_link = "https://crxcavator.io/report/{0}/{1}".format(extension_id, version)

    def print(self):
        """Dump the report to stdout in a human-readable form."""
        print('ID: %s' % self.id)
        print('Version: %s' % self.version)
        print('Score: %d' % self.risk_score)
        print('Link: %s' % self.chrome_link)
        print('CrXcavator Link: %s' % self.crxcavator_link)
        # Each optional section is printed as pretty-printed JSON when present.
        optional_sections = (
            ('CSP', self.risk_csp),
            ('External JavaScript', self.risk_external_javascript),
            ('External Calls', self.risk_external_calls),
            ('Dangerous Functions', self.dangerous_functions),
            ('Entry Points', self.entry_points),
        )
        for label, payload in optional_sections:
            if payload is not None:
                print('%s: \n%s' % (label, json.dumps(payload, indent=2)))
        print()
# Generate session with max of 3 retries and interval of 1 second
def session_generator():
    """Build a requests session preconfigured with the API key header and
    a retry policy (3 connect retries, 0.5s backoff factor)."""
    session = requests.Session()
    session.headers.update({
        'API-Key': config['crxcavator']['key'],
        'Content-Type': 'application/json',
    })
    retry_policy = Retry(connect=3, backoff_factor=0.5)
    adapter = HTTPAdapter(max_retries=retry_policy)
    for scheme in ('http://', 'https://'):
        session.mount(scheme, adapter)
    return session
# Build a CrXcavator object from the risk data of a crxcavator report.
def parse_risk_data(extension_id, version, data):
    """Translate one crxcavator report payload into a CrXcavator instance.

    Optional sections are copied verbatim when present; the total risk score
    sums the 'total' of every risk category except the aggregate/webstore/
    metadata entries.
    """
    risk = CrXcavator(extension_id, version, data['webstore']['name'])
    optional_fields = (
        ('csp', 'risk_csp'),
        ('extjs', 'risk_external_javascript'),
        ('extcalls', 'risk_external_calls'),
        ('entrypoints', 'entry_points'),
        ('dangerousfunctions', 'dangerous_functions'),
    )
    for key, attr in optional_fields:
        if key in data:
            setattr(risk, attr, data[key])
    if 'risk' in data:
        skipped = ('total', 'webstore', 'metadata')
        for category, details in data['risk'].items():
            if category not in skipped:
                risk.risk_score += int(details['total'])
    return risk
# Get risk data for a particular extension and their version
def get_extension_risk(extension_id, version):
    """Fetch the crxcavator report for one extension version.

    Returns a CrXcavator object, or None when no report could be fetched.
    Retries recursively on JSON decode errors and on 5xx/429 responses
    (after a 60s sleep).
    """
    risk_obj = None
    session = session_generator()
    resp = session.get("%s/report/%s/%s" % (crxcavator_api, extension_id, version))
    if resp.ok:
        try:
            response = resp.json()
        except json.decoder.JSONDecodeError:
            # Truncated/garbled body: retry the whole request.
            logger.warning('JSON Decode Error. Retrying for extension %s version %s' % (extension_id, version))
            risk_obj = get_extension_risk(extension_id, version)
            return risk_obj
        if response is None:
            # API answered with a JSON null body -> no report available.
            logger.info('Failed to fetch report on %s version %s' % (extension_id, version))
        else:
            if 'version' in response:
                if response['version'] is not None:
                    risk_obj = parse_risk_data(extension_id, response['version'], response['data'])
            else:
                # Unexpected payload shape: surface it for debugging.
                print(json.dumps(response, indent=4))
    elif 600 > resp.status_code >= 500 or resp.status_code == 429:
        # Server error or rate limit: back off for a minute, then retry.
        logger.warning("Exceed rate limit.")
        time.sleep(60)
        # TO DO:
        # Check header to see if spits out retry.
        # print(resp.header)
        risk_obj = get_extension_risk(extension_id, version)
    else:
        logger.error('ERROR %s: %s' % (resp.status_code, resp.text))
        logger.error('Unable to get risk data on extension %s of version %s' % (extension_id, version))
    return risk_obj
# Submit an extension to get it scanned by crxcavator. This would also be useful to classify the extensions to the
# below categories
def submit_extension(extension_id: str) -> dict:
    """Submit one extension ID for scanning and classify the outcome.

    Returns a dict with keys:
      id                 -- the submitted extension ID
      version            -- scanned version ('' if the extension is too big),
                            or None when no scan happened
      extension          -- True when crxcavator accepted it as an extension
      not_free           -- True for paid extensions
      run_again          -- True when the submission should be retried later
      removed_from_store -- True when the extension left the Chrome store

    Retries recursively on JSON decode errors and 429 responses (60s sleep).
    """
    submit_results = {}
    submit_results['id'] = extension_id
    submit_results['version'] = None
    submit_results['extension'] = False
    submit_results['not_free'] = False
    submit_results['run_again'] = False
    submit_results['removed_from_store'] = False
    data = {'extension_id': extension_id}
    session = session_generator()
    resp = session.post("%s/submit" % crxcavator_api, json=data)
    if resp.ok:
        try:
            response = resp.json()
        except json.decoder.JSONDecodeError:
            # Garbled body: retry the whole submission.
            logger.warning('JSON Decode Error. Retrying for extension %s' % extension_id)
            submit_results = submit_extension(extension_id)
            return submit_results
        if 'error' not in response:
            if "no longer in Chrome" in response['message']:
                submit_results['removed_from_store'] = True
            else:
                submit_results['version'] = response['version']
                submit_results['extension'] = True
        else:
            # Classify the error message into the result flags.
            if "not free" in response['error']:
                submit_results['not_free'] = True
            elif "Error retrieving extension from webstore" in response['error']:
                submit_results['run_again'] = True
            elif "Theme" in response['error']:
                submit_results['extension'] = False
            elif 'Error extension is too big' in response['error']:
                submit_results['version'] = ""
                submit_results['extension'] = True
            else:
                logger.error('Extension %s: %s' % (extension_id, response['error']))
    elif resp.status_code == 429:
        # Rate limited: back off for a minute, then retry.
        logger.warning("Exceed rate limit.")
        time.sleep(60)
        # TO DO:
        # Check header to see if spits out retry.
        # print(resp.header)
        submit_results = submit_extension(extension_id)
    elif 600 > resp.status_code >= 500:
        # Server-side problem: wait, then let the caller try again.
        time.sleep(90)
        logger.error('Server not responsive for extension %s. Trying Again' % extension_id)
        submit_results['run_again'] = True
    else:
        logger.error('ERROR %s: %s' % (resp.status_code, resp.text))
    return submit_results
# Get risk data on multiple versions of the same chrome extension
def fetch_risk_details(extension_id, versions):
    """Fetch crxcavator reports for several versions of one extension
    concurrently; versions without a report are silently dropped."""
    risk_objects = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
        futures = [
            executor.submit(get_extension_risk, extension_id, version)
            for version in versions
        ]
        for completed in concurrent.futures.as_completed(futures):
            report = completed.result()
            if report is not None:
                risk_objects.append(report)
    return risk_objects
| [
"logging.getLogger",
"requests.Session",
"configparser.ConfigParser",
"requests.adapters.HTTPAdapter",
"json.dumps",
"time.sleep",
"requests.packages.urllib3.util.retry.Retry"
] | [((209, 236), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (226, 236), False, 'import logging\n'), ((247, 274), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (272, 274), False, 'import configparser\n'), ((2071, 2089), 'requests.Session', 'requests.Session', ([], {}), '()\n', (2087, 2089), False, 'import requests\n'), ((2207, 2243), 'requests.packages.urllib3.util.retry.Retry', 'Retry', ([], {'connect': '(3)', 'backoff_factor': '(0.5)'}), '(connect=3, backoff_factor=0.5)\n', (2212, 2243), False, 'from requests.packages.urllib3.util.retry import Retry\n'), ((2258, 2288), 'requests.adapters.HTTPAdapter', 'HTTPAdapter', ([], {'max_retries': 'retry'}), '(max_retries=retry)\n', (2269, 2288), False, 'from requests.adapters import HTTPAdapter\n'), ((4370, 4384), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (4380, 4384), False, 'import time\n'), ((6617, 6631), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (6627, 6631), False, 'import time\n'), ((6832, 6846), 'time.sleep', 'time.sleep', (['(90)'], {}), '(90)\n', (6842, 6846), False, 'import time\n'), ((1347, 1382), 'json.dumps', 'json.dumps', (['self.risk_csp'], {'indent': '(2)'}), '(self.risk_csp, indent=2)\n', (1357, 1382), False, 'import json\n'), ((1486, 1537), 'json.dumps', 'json.dumps', (['self.risk_external_javascript'], {'indent': '(2)'}), '(self.risk_external_javascript, indent=2)\n', (1496, 1537), False, 'import json\n'), ((1631, 1677), 'json.dumps', 'json.dumps', (['self.risk_external_calls'], {'indent': '(2)'}), '(self.risk_external_calls, indent=2)\n', (1641, 1677), False, 'import json\n'), ((1776, 1822), 'json.dumps', 'json.dumps', (['self.dangerous_functions'], {'indent': '(2)'}), '(self.dangerous_functions, indent=2)\n', (1786, 1822), False, 'import json\n'), ((1907, 1946), 'json.dumps', 'json.dumps', (['self.entry_points'], {'indent': '(2)'}), '(self.entry_points, indent=2)\n', (1917, 1946), False, 'import json\n'), ((4217, 
4247), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)'}), '(response, indent=4)\n', (4227, 4247), False, 'import json\n')] |
# =================================================================
# Copyright (C) 2021-2021 52°North Spatial Information Research GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Convenience script to create resource entries for each datacube product to be included in pygeoapi's config.yml
# =================================================================
import logging
import os
from pathlib import Path
import yaml
import argparse
from odcprovider.connector import OdcConnector
from odcprovider.utils import convert_datacube_bbox_to_wgs84
from datacube.utils.geometry import BoundingBox
from datacube.model import DatasetType
# ``logging.config`` is a submodule and is NOT imported by a bare
# ``import logging``; without this import, logging.config.dictConfig() below
# raises AttributeError, which the broad except silently turns into a fallback
# to basicConfig even when a valid logging.yaml exists.
import logging.config

# Load the logging configuration from logging.yaml next to this file; fall
# back to basicConfig(level=DEBUG) when the file is missing or malformed.
logging_config_file = Path(Path(__file__).parent, 'logging.yaml')
level = logging.DEBUG
if os.path.exists(logging_config_file):
    with open(logging_config_file, 'rt') as file:
        try:
            config = yaml.safe_load(file.read())
            logging.config.dictConfig(config)
        except Exception as e:
            print(e)
            print('Error while loading logging configuration from file "{}". Using defaults'
                  .format(logging_config_file))
            logging.basicConfig(level=level)
else:
    print('Logging file configuration does not exist: "{}". Using defaults.'.format(logging_config_file))
    logging.basicConfig(level=level)

LOGGER = logging.getLogger(__name__)
# ToDo: improve formatting of created config.yaml
def parse_parameter() -> argparse.Namespace:
    """Parse command line arguments.

    Returns a namespace with ``infile`` (optional config to merge),
    ``outfile`` (output yaml path) and ``exclude_products`` (always a list,
    possibly empty).
    """
    parser = argparse.ArgumentParser(
        description='Create resource entries for pygeoapi configuration. If infile is '
                    'provided, resource entries will be inserted there and written to outfile.')
    parser.add_argument('--infile', '-i',
                        help='File name of the config yaml that should be merged.')
    parser.add_argument('--outfile', '-o',
                        default='config_auto.yml',
                        help='Output yaml file name (default: config_auto.yml)')
    parser.add_argument('--exclude-products',
                        help='Comma separated list of product names to exclude')
    args = parser.parse_args()
    if args.exclude_products:
        args.exclude_products = [s.strip() for s in args.exclude_products.split(",")]
    else:
        # Normalise the argparse default (None) to an empty list so membership
        # tests in main() ("name in args.exclude_products") cannot raise
        # "TypeError: argument of type 'NoneType' is not iterable" when the
        # option is omitted.
        args.exclude_products = []
    LOGGER.info("""
    Start creating pygeoapi config
    ==============================
    - empty values are allowed
    infile           : {}
    outfile          : {}
    exclude products : {}""".format(args.infile, args.outfile, args.exclude_products))
    return args
def _create_resource_from_odc_product(product: DatasetType, bbox: BoundingBox, format_set: set) -> dict:
    """
    Create resource from Open Data Cube product

    :param product: ODC product, datacube.model.DatasetType
    :param bbox: bbox in WGS84!!!
    :param format_set: set of format strings (e.g. 'GeoTIFF' or 'netCDF')
    :return: dict
    """
    left, bottom, right, top = bbox

    # Prefer the format recorded on the product itself, then an unambiguous
    # dataset format, and fall back to GeoTIFF otherwise.
    explicit_format = product.fields['format']
    if explicit_format is not None:
        format_name = explicit_format
    elif len(format_set) == 1:
        format_name = next(iter(format_set))
    else:
        format_name = 'GeoTIFF'

    links = []
    if 'links' in product.metadata_doc.keys():
        for link in product.metadata_doc.get('links'):
            links.append({
                key: link.get(key)
                for key in ('type', 'rel', 'title', 'href', 'hreflang')
            })

    metadata_doc = product.metadata_doc
    keywords = metadata_doc.get('keywords') if 'keywords' in metadata_doc.keys() else []

    return {
        'type': 'collection',
        'title': product.name,
        'description': product.definition['description'],
        'keywords': keywords,
        'links': links,
        'extents': {
            'spatial': {
                'bbox': [left, bottom, right, top],
                'crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84'
            }
        },
        'providers': [{
            'type': 'coverage',
            'name': 'odcprovider.OpenDataCubeCoveragesProvider',
            'data': product.name,
            'format': {
                'name': format_name,
                'mimetype': 'application/{}'.format(format_name.lower())
            }
        }],
    }
def _merge_config(infile, data):
    """
    Insert auto-created resource entries into given config file if given

    :param infile: file name of a pygeoapi yml config file
    :param data: dict of resource entries
    :return: merged dict of resource entries
    """
    # Use a distinct name for the file handle instead of shadowing the
    # ``infile`` parameter (the original rebound it inside ``with``).
    with open(infile, 'r') as config_file:
        merged = yaml.load(config_file, Loader=yaml.FullLoader)
    for resource_entry in data['resources']:
        merged['resources'].update({resource_entry: data['resources'][resource_entry]})
    return merged
def main():
    """Create one pygeoapi resource entry per (non-excluded) ODC product and
    write the resulting configuration to args.outfile."""
    args = parse_parameter()
    # NOTE(review): args.exclude_products is None when --exclude-products is
    # omitted, so the membership test below raises TypeError — see
    # parse_parameter().
    # Create collection for each datacube product that is not excluded
    dc = OdcConnector()
    data = {'resources': {}}
    products = dc.list_product_names()
    LOGGER.info("Start processing {} products in ODC instance".format(len(products)))
    idx = 1
    for dc_product_name in products:
        LOGGER.info("[{}/{}] Processing product '{}'".format(idx, len(products), dc_product_name))
        if dc_product_name in args.exclude_products:
            LOGGER.info("[{}/{}] Product '{}' is list of products to exclude, hence skipping it"
                        .format(idx, len(products), dc_product_name))
        else:
            LOGGER.info("[{}/{}] Including product '{}'".format(idx, len(products), dc_product_name))
            dc_product = dc.get_product_by_id(dc_product_name)
            # Collect the formats of all datasets belonging to this product.
            format_set = set()
            for dataset in dc.get_datasets_for_product(dc_product.name):
                format_set.add(dataset.format)
            # Make sure bbox is in WGS84; reproject only when the product has
            # exactly one CRS.
            if len(dc.get_crs_set(dc_product.name)) == 1:
                bbox = convert_datacube_bbox_to_wgs84(dc.bbox_of_product(dc_product.name),
                                                       str(dc.get_crs_set(dc_product.name).pop()))
            else:
                bbox = dc.bbox_of_product(dc_product.name)
            data['resources'][dc_product.name] = _create_resource_from_odc_product(dc_product, bbox, format_set)
        idx = idx + 1
    LOGGER.info("Finished processing {} products".format(len(products)))
    # Write to yaml file, merge with provided config yaml if given
    with open(args.outfile, 'w') as outfile:
        if args.infile is not None:
            data = _merge_config(args.infile, data)
        LOGGER.debug("Writing configuration to file '{}':\n{}\n".format(outfile.name, data))
        yaml.dump(data, outfile, default_flow_style=False, sort_keys=False)
    LOGGER.info("Finished processing ODC products")
if __name__ == "__main__":
    main()
| [
"logging.getLogger",
"os.path.exists",
"logging.basicConfig",
"odcprovider.connector.OdcConnector",
"argparse.ArgumentParser",
"pathlib.Path",
"yaml.dump",
"logging.config.dictConfig",
"yaml.load"
] | [((1229, 1264), 'os.path.exists', 'os.path.exists', (['logging_config_file'], {}), '(logging_config_file)\n', (1243, 1264), False, 'import os\n'), ((1821, 1848), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1838, 1848), False, 'import logging\n'), ((1778, 1810), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'level'}), '(level=level)\n', (1797, 1810), False, 'import logging\n'), ((1960, 2147), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Create resource entries for pygeoapi configuration. If infile is provided, resource entries will be inserted there and written to outfile."""'}), "(description=\n 'Create resource entries for pygeoapi configuration. If infile is provided, resource entries will be inserted there and written to outfile.'\n )\n", (1983, 2147), False, 'import argparse\n'), ((5460, 5474), 'odcprovider.connector.OdcConnector', 'OdcConnector', ([], {}), '()\n', (5472, 5474), False, 'from odcprovider.connector import OdcConnector\n'), ((1165, 1179), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1169, 1179), False, 'from pathlib import Path\n'), ((5129, 5170), 'yaml.load', 'yaml.load', (['infile'], {'Loader': 'yaml.FullLoader'}), '(infile, Loader=yaml.FullLoader)\n', (5138, 5170), False, 'import yaml\n'), ((7206, 7273), 'yaml.dump', 'yaml.dump', (['data', 'outfile'], {'default_flow_style': '(False)', 'sort_keys': '(False)'}), '(data, outfile, default_flow_style=False, sort_keys=False)\n', (7215, 7273), False, 'import yaml\n'), ((1390, 1423), 'logging.config.dictConfig', 'logging.config.dictConfig', (['config'], {}), '(config)\n', (1415, 1423), False, 'import logging\n'), ((1629, 1661), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'level'}), '(level=level)\n', (1648, 1661), False, 'import logging\n')] |
"""Convert a CP trace to NCP or VIP.
blaze run convert_traces -- \
--tracefile=german_partial_prior.npz \
--model=german_credit_lognormalcentered \
--vip_json=german_credit_lognormalcentered_data/cVIP_exp_tied.json
"""
from absl import app
from absl import flags
import io
import json
import os
import numpy as np
import tensorflow as tf
from tensorflow_probability import edward2 as ed
import models as models
# Command-line flags (absl): input trace file, cVIP results JSON, model name,
# dataset name.
flags.DEFINE_string('tracefile', default='', help='')
flags.DEFINE_string('vip_json', default='', help='')
flags.DEFINE_string('model', default='', help='')
flags.DEFINE_string('dataset', default='', help='')
# Parsed flag values; populated when app.run() parses argv.
FLAGS = flags.FLAGS
def main(_):
  """Convert a CP trace to NCP or cVIP and save it next to the input file."""
  model_config = models.get_model_by_name(FLAGS.model, dataset=FLAGS.dataset)
  if FLAGS.vip_json:
    # cVIP: load the learned reparameterisation produced by a previous VI run.
    if tf.io.gfile.exists(FLAGS.vip_json):
      with tf.io.gfile.GFile(FLAGS.vip_json, 'r') as f:
        prev_results = json.load(f)
    else:
      raise Exception('Run VI first to find initial step sizes')
    vip_reparam = prev_results['learned_reparam']
    new_method = 'cVIP'
    to_noncentered = model_config.make_to_partially_noncentered(**vip_reparam)
  else:
    new_method = 'NCP'
    to_noncentered = model_config.to_noncentered
  with tf.io.gfile.GFile(FLAGS.tracefile) as f:
    traces = dict(np.load(f))
  # Get ordered list of latent variable names for this model.
  with ed.tape() as model_tape:
    model_config.model(*model_config.model_args)
  param_names = [
      k for k in list(model_tape.keys()) if k not in model_config.observed_data
  ]
  traces_as_list = [traces[k] for k in param_names]
  initial_shape = traces_as_list[0].shape[:2]  # [num_results x num_chains]
  # Flatten the leading (results, chains) dims so the transform maps over
  # individual draws.
  flattened_traces = [np.reshape(v, [-1] + list(v.shape[2:]))
                      for v in traces_as_list]
  transformed_traces = tf.vectorized_map(to_noncentered, flattened_traces)
  # Restore the original (results, chains) leading dims per parameter.
  unflattened_traces = {k: tf.reshape(v, initial_shape + v.shape[1:])
                        for (k, v) in zip(param_names, transformed_traces)}
  with tf.compat.v1.Session() as sess:
    unflattened_traces_ = sess.run(unflattened_traces)
  # Write e.g. 'trace_NCP.npz' / 'trace_cVIP.npz' next to the input file.
  np_path = FLAGS.tracefile[:-4] + '_{}.npz'.format(new_method)
  with tf.io.gfile.GFile(np_path, 'wb') as out_f:
    io_buffer = io.BytesIO()
    np.savez(io_buffer, **unflattened_traces_)
    out_f.write(io_buffer.getvalue())
out_f.write(io_buffer.getvalue())
if __name__ == '__main__':
  # absl's app.run() requires the main callable as its first argument;
  # calling it with no arguments raises a TypeError before main ever runs.
  app.run(main)
| [
"tensorflow_probability.edward2.tape",
"numpy.savez",
"tensorflow.io.gfile.GFile",
"tensorflow.compat.v1.Session",
"io.BytesIO",
"absl.app.run",
"tensorflow.vectorized_map",
"tensorflow.reshape",
"json.load",
"models.get_model_by_name",
"numpy.load",
"absl.flags.DEFINE_string",
"tensorflow.i... | [((421, 474), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""tracefile"""'], {'default': '""""""', 'help': '""""""'}), "('tracefile', default='', help='')\n", (440, 474), False, 'from absl import flags\n'), ((475, 527), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""vip_json"""'], {'default': '""""""', 'help': '""""""'}), "('vip_json', default='', help='')\n", (494, 527), False, 'from absl import flags\n'), ((528, 577), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model"""'], {'default': '""""""', 'help': '""""""'}), "('model', default='', help='')\n", (547, 577), False, 'from absl import flags\n'), ((578, 629), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""dataset"""'], {'default': '""""""', 'help': '""""""'}), "('dataset', default='', help='')\n", (597, 629), False, 'from absl import flags\n'), ((682, 742), 'models.get_model_by_name', 'models.get_model_by_name', (['FLAGS.model'], {'dataset': 'FLAGS.dataset'}), '(FLAGS.model, dataset=FLAGS.dataset)\n', (706, 742), True, 'import models as models\n'), ((1793, 1844), 'tensorflow.vectorized_map', 'tf.vectorized_map', (['to_noncentered', 'flattened_traces'], {}), '(to_noncentered, flattened_traces)\n', (1810, 1844), True, 'import tensorflow as tf\n'), ((2345, 2354), 'absl.app.run', 'app.run', ([], {}), '()\n', (2352, 2354), False, 'from absl import app\n'), ((772, 806), 'tensorflow.io.gfile.exists', 'tf.io.gfile.exists', (['FLAGS.vip_json'], {}), '(FLAGS.vip_json)\n', (790, 806), True, 'import tensorflow as tf\n'), ((1216, 1250), 'tensorflow.io.gfile.GFile', 'tf.io.gfile.GFile', (['FLAGS.tracefile'], {}), '(FLAGS.tracefile)\n', (1233, 1250), True, 'import tensorflow as tf\n'), ((1357, 1366), 'tensorflow_probability.edward2.tape', 'ed.tape', ([], {}), '()\n', (1364, 1366), True, 'from tensorflow_probability import edward2 as ed\n'), ((1872, 1914), 'tensorflow.reshape', 'tf.reshape', (['v', '(initial_shape + v.shape[1:])'], {}), '(v, initial_shape + 
v.shape[1:])\n', (1882, 1914), True, 'import tensorflow as tf\n'), ((1999, 2021), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (2019, 2021), True, 'import tensorflow as tf\n'), ((2158, 2190), 'tensorflow.io.gfile.GFile', 'tf.io.gfile.GFile', (['np_path', '"""wb"""'], {}), "(np_path, 'wb')\n", (2175, 2190), True, 'import tensorflow as tf\n'), ((2217, 2229), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2227, 2229), False, 'import io\n'), ((2234, 2276), 'numpy.savez', 'np.savez', (['io_buffer'], {}), '(io_buffer, **unflattened_traces_)\n', (2242, 2276), True, 'import numpy as np\n'), ((1275, 1285), 'numpy.load', 'np.load', (['f'], {}), '(f)\n', (1282, 1285), True, 'import numpy as np\n'), ((819, 857), 'tensorflow.io.gfile.GFile', 'tf.io.gfile.GFile', (['FLAGS.vip_json', '"""r"""'], {}), "(FLAGS.vip_json, 'r')\n", (836, 857), True, 'import tensorflow as tf\n'), ((887, 899), 'json.load', 'json.load', (['f'], {}), '(f)\n', (896, 899), False, 'import json\n')] |
import cv2
import numpy as np
def ColorAzul():
    """Return a 500x500 BGR image filled with pure blue.

    OpenCV uses BGR channel order, so blue is channel 0.
    """
    screen = np.zeros([500, 500, 3], dtype=np.uint8)
    # The array is already zero-filled; only the blue channel needs setting.
    # The original allocated three extra 500x500 float temporaries, two of
    # them multiplied by zero just to rewrite zeros.
    screen[:, :, 0] = 255
    return screen
def ColorRojo():
    """Return a 500x500 BGR image filled with pure red.

    OpenCV uses BGR channel order, so red is channel 2.
    """
    screen = np.zeros([500, 500, 3], dtype=np.uint8)
    # The array is already zero-filled; only the red channel needs setting.
    # Avoids the original's three extra 500x500 float temporaries.
    screen[:, :, 2] = 255
    return screen
fondo_base = True  # True -> blue background, False -> red background


def back(*args):
    """Button callback: toggle the background-colour flag.

    ``cv2.createButton`` invokes the callback with arguments we do not
    use, hence the ``*args`` signature.
    """
    global fondo_base
    # The original spelled this out as an if/else followed by a dead
    # ``pass``; a simple boolean negation is equivalent.
    fondo_base = not fondo_base
# GUI setup: one window plus a toggle button wired to back().
# NOTE(review): cv2.createButton requires an OpenCV build with Qt support.
cv2.namedWindow("Frame")
cv2.createButton("Cahnge Color", back)
# Render loop: redraw the background each frame until 'q' is pressed.
while True:
    fondo = ColorAzul() if fondo_base else ColorRojo()
    cv2.imshow('Frame', fondo)
    key = cv2.waitKey(1)
    if key == ord('q'):
        break
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"numpy.ones",
"cv2.createButton",
"cv2.imshow",
"numpy.zeros",
"cv2.destroyAllWindows",
"cv2.waitKey",
"cv2.namedWindow"
] | [((708, 732), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Frame"""'], {}), "('Frame')\n", (723, 732), False, 'import cv2\n'), ((733, 771), 'cv2.createButton', 'cv2.createButton', (['"""Cahnge Color"""', 'back'], {}), "('Cahnge Color', back)\n", (749, 771), False, 'import cv2\n'), ((937, 951), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (948, 951), False, 'import cv2\n'), ((952, 975), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (973, 975), False, 'import cv2\n'), ((68, 107), 'numpy.zeros', 'np.zeros', (['[500, 500, 3]'], {'dtype': 'np.uint8'}), '([500, 500, 3], dtype=np.uint8)\n', (76, 107), True, 'import numpy as np\n'), ((330, 369), 'numpy.zeros', 'np.zeros', (['[500, 500, 3]'], {'dtype': 'np.uint8'}), '([500, 500, 3], dtype=np.uint8)\n', (338, 369), True, 'import numpy as np\n'), ((845, 871), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'fondo'], {}), "('Frame', fondo)\n", (855, 871), False, 'import cv2\n'), ((883, 897), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (894, 897), False, 'import cv2\n'), ((137, 156), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (144, 156), True, 'import numpy as np\n'), ((191, 210), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (198, 210), True, 'import numpy as np\n'), ((243, 262), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (250, 262), True, 'import numpy as np\n'), ((399, 418), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (406, 418), True, 'import numpy as np\n'), ((451, 470), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (458, 470), True, 'import numpy as np\n'), ((503, 522), 'numpy.ones', 'np.ones', (['[500, 500]'], {}), '([500, 500])\n', (510, 522), True, 'import numpy as np\n')] |
import datetime
import math
import os
from entomb import (
constants,
exceptions,
utilities,
)
@utilities.hide_cursor()
def produce_report(path: str, include_git: bool) -> None:
    """Print a report.
    Parameters
    ----------
    path : str
        An absolute path.
    include_git: bool
        Whether to include git files and directories.
    Returns
    -------
    None
    Raises
    ------
    AssertionError
        If the path does not exist.
    """
    # Parameter check.
    # NOTE: assert statements are stripped under ``python -O``.
    assert os.path.exists(path)
    # Set up.
    directory_count = 0
    immutable_file_count = 0
    inaccessible_file_count = 0
    link_count = 0
    mutable_file_count = 0
    # Print the operation.
    print("Produce report")
    print()
    # If the path is not a directory, print an abbreviated report then return.
    if not os.path.isdir(path):
        _print_abbreviated_report(path)
        return
    # Print the progress header and set up the progress bar.
    utilities.print_header("Progress")
    total_file_paths = utilities.count_file_paths(path, include_git)
    start_time = datetime.datetime.now()
    utilities.print_progress_bar(start_time, 0, total_file_paths)
    # Walk the tree.
    for root_dir, dirnames, filenames in os.walk(path):
        # Exclude git files and directories if directed.
        # In-place slice assignment is required so os.walk actually prunes
        # the .git directories from its traversal.
        if not include_git:
            dirnames[:] = [d for d in dirnames if d != ".git"]
        # Count the directory.
        directory_count += 1
        # Examine each file path.
        for filename in filenames:
            file_path = os.path.join(root_dir, filename)
            # Count the link.
            if os.path.islink(file_path):
                link_count += 1
            # Count the file.
            else:
                try:
                    if utilities.file_is_immutable(file_path):
                        immutable_file_count += 1
                    else:
                        mutable_file_count += 1
                except exceptions.GetAttributeError:
                    inaccessible_file_count += 1
            # Update the progress bar.
            total_count = (
                immutable_file_count
                + inaccessible_file_count
                + link_count
                + mutable_file_count
            )
            utilities.print_progress_bar(
                start_time,
                total_count,
                total_file_paths,
            )
    print()
    print()
    _print_full_report(
        directory_count,
        link_count,
        immutable_file_count,
        inaccessible_file_count,
        mutable_file_count,
    )
def _print_abbreviated_report(path):
    """Print a one-line report for a single file or link.

    Parameters
    ----------
    path : str
        An absolute path that is not a directory.

    Returns
    -------
    None

    Raises
    ------
    AssertionError
        If the path is a directory or does not exist.
    """
    assert not os.path.isdir(path)
    assert os.path.exists(path)
    utilities.print_header("Report")
    # Decide on the message first, then print it once at the end.
    if os.path.islink(path):
        message = "A link has no immutable attribute"
    else:
        try:
            immutable = utilities.file_is_immutable(path)
            message = "File is immutable" if immutable else "File is mutable"
        except exceptions.GetAttributeError:
            message = "Immutable attribute could not be accessed"
    print(message)
    print()
def _print_full_report(directory_count, link_count, immutable_file_count,
                       inaccessible_file_count, mutable_file_count):
    """Print the final summary table for a directory tree scan.

    Parameters
    ----------
    directory_count : int
        The number of directories counted (including the root).
    link_count : int
        The number of links counted.
    immutable_file_count : int
        The number of immutable files counted.
    inaccessible_file_count : int
        The number of files whose immutability attribute could not be read.
    mutable_file_count : int
        The number of mutable files counted.

    Returns
    -------
    None
    """
    # The root itself is not a sub-directory.
    subdirectory_count = directory_count - 1
    total_file_count = (
        immutable_file_count + inaccessible_file_count + mutable_file_count
    )
    # Keep the exact floor(proportion * 100) arithmetic: rounding via
    # floor on the float proportion is the established display behaviour.
    if total_file_count:
        entombed_proportion = immutable_file_count / total_file_count
        entombed_percentage = "{}%".format(math.floor(entombed_proportion * 100))
    else:
        entombed_percentage = "n/a"
    _print_report_line("Report")
    _print_report_line("Entombed", entombed_percentage)
    # Rows that are only shown when their count is non-zero.
    for label, count in (
        ("Mutable files", mutable_file_count),
        ("Inaccessible files", inaccessible_file_count),
    ):
        if count:
            _print_report_line(label, _stringify_int(count))
    _print_report_line("Total files", _stringify_int(total_file_count))
    for label, count in (
        ("Links", link_count),
        ("Sub-directories", subdirectory_count),
    ):
        if count:
            _print_report_line(label, _stringify_int(count))
    print()
def _print_report_line(label, value=None):
    """Print one report row followed by a separator rule.

    Parameters
    ----------
    label : str
        The label printed on the left.
    value : str, optional
        When given, right-justified to fill the table width.

    Returns
    -------
    None
    """
    if value is not None:
        # One column is consumed by the space print() inserts between args.
        padding = constants.TABLE_WIDTH - len(label) - 1
        print(label, value.rjust(padding))
    else:
        print(label)
    print("-" * constants.TABLE_WIDTH)
def _stringify_int(integer):
"""Convert an integer into a string formatted with thousand separators.
Parameters
----------
integer : int
The integer.
Returns
-------
str
The integer turned into a string.
"""
return "{:,}".format(integer)
| [
"entomb.utilities.file_is_immutable",
"os.path.exists",
"math.floor",
"os.path.join",
"entomb.utilities.print_header",
"entomb.utilities.hide_cursor",
"datetime.datetime.now",
"entomb.utilities.print_progress_bar",
"os.path.isdir",
"entomb.utilities.count_file_paths",
"os.path.islink",
"os.wal... | [((111, 134), 'entomb.utilities.hide_cursor', 'utilities.hide_cursor', ([], {}), '()\n', (132, 134), False, 'from entomb import constants, exceptions, utilities\n'), ((500, 520), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (514, 520), False, 'import os\n'), ((968, 1002), 'entomb.utilities.print_header', 'utilities.print_header', (['"""Progress"""'], {}), "('Progress')\n", (990, 1002), False, 'from entomb import constants, exceptions, utilities\n'), ((1026, 1071), 'entomb.utilities.count_file_paths', 'utilities.count_file_paths', (['path', 'include_git'], {}), '(path, include_git)\n', (1052, 1071), False, 'from entomb import constants, exceptions, utilities\n'), ((1089, 1112), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1110, 1112), False, 'import datetime\n'), ((1117, 1178), 'entomb.utilities.print_progress_bar', 'utilities.print_progress_bar', (['start_time', '(0)', 'total_file_paths'], {}), '(start_time, 0, total_file_paths)\n', (1145, 1178), False, 'from entomb import constants, exceptions, utilities\n'), ((1242, 1255), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1249, 1255), False, 'import os\n'), ((3002, 3022), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3016, 3022), False, 'import os\n'), ((3028, 3060), 'entomb.utilities.print_header', 'utilities.print_header', (['"""Report"""'], {}), "('Report')\n", (3050, 3060), False, 'from entomb import constants, exceptions, utilities\n'), ((3069, 3089), 'os.path.islink', 'os.path.islink', (['path'], {}), '(path)\n', (3083, 3089), False, 'import os\n'), ((826, 845), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (839, 845), False, 'import os\n'), ((2971, 2990), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2984, 2990), False, 'import os\n'), ((4435, 4472), 'math.floor', 'math.floor', (['(entombed_proportion * 100)'], {}), '(entombed_proportion * 100)\n', (4445, 4472), False, 'import math\n'), ((1561, 
1593), 'os.path.join', 'os.path.join', (['root_dir', 'filename'], {}), '(root_dir, filename)\n', (1573, 1593), False, 'import os\n'), ((1640, 1665), 'os.path.islink', 'os.path.islink', (['file_path'], {}), '(file_path)\n', (1654, 1665), False, 'import os\n'), ((2297, 2368), 'entomb.utilities.print_progress_bar', 'utilities.print_progress_bar', (['start_time', 'total_count', 'total_file_paths'], {}), '(start_time, total_count, total_file_paths)\n', (2325, 2368), False, 'from entomb import constants, exceptions, utilities\n'), ((3180, 3213), 'entomb.utilities.file_is_immutable', 'utilities.file_is_immutable', (['path'], {}), '(path)\n', (3207, 3213), False, 'from entomb import constants, exceptions, utilities\n'), ((1792, 1830), 'entomb.utilities.file_is_immutable', 'utilities.file_is_immutable', (['file_path'], {}), '(file_path)\n', (1819, 1830), False, 'from entomb import constants, exceptions, utilities\n')] |
import os
import re
from typing import Dict, Iterable, Tuple
from .gh import Developer, GitHub, Repository, TrendingSince, BASE_URL
from .tg import Telegram
def format_message(
    repos: Dict[TrendingSince, Iterable[Tuple[Repository, Iterable[str]]]],
    developers: Dict[TrendingSince, Iterable[Developer]],
) -> str:
    """Build the Telegram MarkdownV2 body listing trending repos/developers.

    Empty sections (and empty per-period groups) are omitted; an empty
    string is returned when there is nothing to report.
    """
    # MarkdownV2 requires these punctuation characters to be escaped.
    pattern = re.compile(r"([_*\[\]()~`>#+=|{}.!-])")

    def escape(text):
        return pattern.sub(r"\\\1", text)

    out = []
    if any(repos.values()):
        out.append("*Trending Repos*")
        for since, entries in repos.items():
            if not entries:
                continue
            out.append(f"_{since.value.capitalize()}_")
            for repo, users in entries:
                out.append(
                    f"\\- [{escape(repo.url)}]({BASE_URL}/{repo.url}) "
                    f"\\| {', '.join(users)}"
                )
            out.append("")
    if any(developers.values()):
        out.append("*Trending Developers*")
        for since, entries in developers.items():
            if not entries:
                continue
            out.append(f"_{since.value.capitalize()}_")
            for dev in entries:
                out.append(
                    f"\\- [{escape(dev.name)}]({BASE_URL}/{dev.username}) "
                    f"\\| [{escape(dev.repo)}]({BASE_URL}/{dev.repo})"
                )
            out.append("")
    return "\n".join(out)
def run_main(gh_token: str, tg_token: str, tg_chat: str):
    """Fetch GitHub trending data and post a summary to a Telegram chat.

    Nothing is sent when the formatted message is empty.
    """
    gh_client = GitHub(gh_token)
    tg_client = Telegram(tg_token)
    body = format_message(
        gh_client.get_trending_repos(), gh_client.get_trending_developers()
    )
    if body:
        tg_client.send_message(tg_chat, body)
if __name__ == "__main__":
    # Credentials/config come from the environment. int(None) raises
    # TypeError when TG_CHAT is unset, failing fast before any API call.
    gh_token = os.getenv("GH_TOKEN")
    tg_token = os.getenv("TG_TOKEN")
    tg_chat = int(os.getenv("TG_CHAT"))
    run_main(gh_token, tg_token, tg_chat)
| [
"re.sub",
"os.getenv"
] | [((1799, 1820), 'os.getenv', 'os.getenv', (['"""GH_TOKEN"""'], {}), "('GH_TOKEN')\n", (1808, 1820), False, 'import os\n'), ((1836, 1857), 'os.getenv', 'os.getenv', (['"""TG_TOKEN"""'], {}), "('TG_TOKEN')\n", (1845, 1857), False, 'import os\n'), ((361, 414), 're.sub', 're.sub', (['"""([_*\\\\[\\\\]()~`>#+=|{}.!-])"""', '"""\\\\\\\\\\\\1"""', 'text'], {}), "('([_*\\\\[\\\\]()~`>#+=|{}.!-])', '\\\\\\\\\\\\1', text)\n", (367, 414), False, 'import re\n'), ((1876, 1896), 'os.getenv', 'os.getenv', (['"""TG_CHAT"""'], {}), "('TG_CHAT')\n", (1885, 1896), False, 'import os\n')] |
import os
from torch import nn
from torch import optim
from robustcode.models.modules.neural_model_base import NeuralModelBase
from robustcode.models.robust.adversary.adversary import AdversarialMode
from robustcode.models.robust.adversary.adversary import AdversaryAccuracyStats
from robustcode.models.robust.adversary.adversary import AdversaryBatchIter
from robustcode.models.robust.adversary.adversary import RenameAdversary
from robustcode.models.robust.adversary.adversary import SubtreeAdversary
from robustcode.models.robust.adversary.rules import AdversarialNodeReplacement
from robustcode.models.robust.adversary.rules import NodeValueIndex
from robustcode.models.robust.adversary.rules import NodeValueIndexStr
from robustcode.models.robust.adversary.tree_rules import AdversarialSubtreeReplacement
from robustcode.models.robust.adversary.tree_rules import ExpressionGenerator
from robustcode.models.robust.dataset import Dataset
from robustcode.models.robust.dataset_util import dataset_to_trees
from robustcode.models.robust.dataset_util import dataset_to_trees_num
from robustcode.util.misc import Logger
def checkpoint_name(args, model_id):
    """Return the checkpoint file name for *model_id* (``args`` is unused,
    kept for signature compatibility with callers)."""
    return f"{model_id}.pt"
def checkpoint_dir(args):
    """Return (creating it if needed) the checkpoint directory for *args*.

    The directory name encodes the adversarial setting, whether values
    are included, and — when non-zero — the window size:
    ``<save_dir>/<tag>/adv<A>_val<V>[_window<W>]``.
    """
    name = "adv{}_val{}".format(args.adversarial, args.include_values)
    if args.window_size != 0:
        name += "_window{}".format(args.window_size)
    # BUG FIX: the original bound this path to a local named
    # ``checkpoint_dir``, shadowing the function itself.
    path = os.path.join(args.save_dir, args.tag, name)
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(path, exist_ok=True)
    return path
def load_model(model: NeuralModelBase, args, model_id):
    """Restore *model*'s weights from its checkpoint file.

    Returns True when a checkpoint existed and was loaded, False otherwise.
    Note that checkpoint_dir() creates the directory as a side effect.
    """
    import torch

    path = os.path.join(checkpoint_dir(args), checkpoint_name(args, model_id))
    print("checkpoint_file", path)
    if not os.path.exists(path):
        return False
    Logger.debug("Loading model from {}".format(path))
    model.load_state_dict(torch.load(path))
    return True
def save_model(model: NeuralModelBase, args, model_id):
    """Serialise *model*'s state dict to its checkpoint file."""
    import torch

    path = os.path.join(checkpoint_dir(args), checkpoint_name(args, model_id))
    Logger.debug("Saving model to {}".format(path))
    torch.save(model.state_dict(), path)
def make_adversary(dataset: Dataset, make_iter):
    """Construct the rename adversary and the subtree adversary.

    Parses string and numeric trees for all three dataset splits, builds
    value indices from the training split only, and derives replacement
    rules over the combined trees.
    """
    Logger.start_scope("Parsing Trees")
    train_str = dataset_to_trees(dataset.dtrain, dataset.ID)
    valid_str = dataset_to_trees(dataset.dvalid, dataset.ID)
    test_str = dataset_to_trees(dataset.dtest, dataset.ID)
    all_trees_str = dict(train_str)
    all_trees_str.update(valid_str)
    all_trees_str.update(test_str)
    train_num = dataset_to_trees_num(dataset.dtrain)
    valid_num = dataset_to_trees_num(dataset.dvalid)
    test_num = dataset_to_trees_num(dataset.dtest)
    all_trees_num = dict(train_num)
    all_trees_num.update(valid_num)
    all_trees_num.update(test_num)
    Logger.end_scope()

    Logger.start_scope("Indexing Trees")
    value_index = NodeValueIndex(dataset, train_num)
    expr_gen = ExpressionGenerator(NodeValueIndexStr(dataset, train_str))
    rename_rules = AdversarialNodeReplacement(
        value_index, dataset.fixed_value_offset
    ).make_rules(dataset, all_trees_str, all_trees_num)
    rename_adversary = RenameAdversary(rename_rules, dataset)
    Logger.end_scope()

    subtree_rules = AdversarialSubtreeReplacement(expr_gen).make_rules(
        dataset, all_trees_str, all_trees_num
    )
    subtree_adversary = SubtreeAdversary(
        subtree_rules, dataset, all_trees_str, make_iter
    )
    return rename_adversary, subtree_adversary
def train_base_model(
    model: NeuralModelBase,
    dataset: Dataset,
    num_epochs,
    train_iter,
    valid_iter,
    lr=0.001,
    verbose=True,
):
    """Train *model* with Adam and per-sample cross-entropy.

    Parameters
    ----------
    model : the model to train; ``opt`` and ``loss_function`` are attached
        to it as attributes for later use.
    dataset : provides the TARGET field used for accuracy computation.
    num_epochs : number of training epochs.
    train_iter : training batch iterator.
    valid_iter : one validation iterator or a list of them.
    lr : Adam learning rate.
    verbose : forwarded to validation accuracy reporting.

    Returns
    -------
    (train_prec, valid_prec) : precision on the training data and on the
        last validation iterator of the last epoch (``valid_prec`` is
        None when no epochs ran).
    """
    valid_iters = valid_iter if isinstance(valid_iter, list) else [valid_iter]
    Logger.start_scope("Training Model")
    opt = optim.Adam(model.parameters(), lr=lr)
    model.opt = opt
    loss_function = nn.CrossEntropyLoss(reduction="none")
    model.loss_function = loss_function
    valid_prec = None
    for epoch in range(num_epochs):
        Logger.start_scope("Epoch {}".format(epoch))
        model.fit(train_iter, opt, loss_function, mask_field="mask_valid")
        # FIX: the original loop variable shadowed the ``valid_iter``
        # parameter; use a distinct name to keep the parameter intact.
        for v_iter in valid_iters:
            valid_stats = model.accuracy(v_iter, dataset.TARGET, verbose=verbose)
            valid_prec = valid_stats["mask_valid_noreject_acc"]
            Logger.debug(f"valid_prec: {valid_prec}")
        Logger.end_scope()
    train_stats = model.accuracy(train_iter, dataset.TARGET, verbose=False)
    train_prec = train_stats["mask_valid_noreject_acc"]
    Logger.debug(f"train_prec: {train_prec}, valid_prec: {valid_prec}")
    Logger.end_scope()
    return train_prec, valid_prec
def eval_adversarial(
    model: NeuralModelBase,
    it,
    rename_adversary: RenameAdversary,
    subtree_adversary: SubtreeAdversary,
    n_renames=20,
    n_subtree_renames=50,
    adv_mode=AdversarialMode.RANDOM,
    threshold=0.5,
    out_file=None,
    approximate=False,
) -> AdversaryAccuracyStats:
    """Evaluate *model* under rename and/or subtree adversarial attacks.

    Builds one or more adversarial batch iterators — subtree replacements
    only (when ``n_renames == 0``), renames only, or subtree replacements
    stacked on top of renames — and delegates the accuracy computation to
    ``rename_adversary.adversarial_accuracy``.
    """
    Logger.debug(
        "Eval Adversarial [n_renames={}, n_subtree={}, mode={}]".format(
            n_renames, n_subtree_renames, adv_mode
        )
    )
    iterators = []
    if n_renames == 0:
        # Subtree-only evaluation; the code only supports it in RANDOM mode.
        assert adv_mode == AdversarialMode.RANDOM
        iterators.append(
            AdversaryBatchIter(
                subtree_adversary, model, it, num_samples=n_subtree_renames
            )
        )
    else:
        if n_renames > 0:
            iterators.append(
                AdversaryBatchIter(
                    rename_adversary,
                    model,
                    it,
                    num_samples=n_renames,
                    adv_mode=adv_mode,
                )
            )
        if n_subtree_renames > 0:
            # NOTE(review): this inner ``n_renames == 0`` branch is
            # unreachable — the outer ``else`` already implies
            # ``n_renames != 0``. Kept as-is to preserve behaviour.
            if n_renames == 0:
                iterators.append(
                    AdversaryBatchIter(
                        subtree_adversary, model, it, num_samples=n_subtree_renames
                    )
                )
            else:
                # Stack subtree replacements on top of the rename adversary.
                iterators.append(
                    AdversaryBatchIter(
                        subtree_adversary,
                        model,
                        AdversaryBatchIter(
                            rename_adversary,
                            model,
                            it,
                            num_samples=n_subtree_renames,
                            adv_mode=adv_mode,
                        ),
                    )
                )
    return rename_adversary.adversarial_accuracy(
        model,
        it,
        iterators,
        threshold=threshold,
        out_file=out_file,
        verbose=True,
        approximate=approximate,
    )
| [
"os.path.exists",
"robustcode.models.robust.dataset_util.dataset_to_trees",
"torch.nn.CrossEntropyLoss",
"os.makedirs",
"robustcode.util.misc.Logger.end_scope",
"torch.load",
"os.path.join",
"robustcode.models.robust.adversary.rules.NodeValueIndex",
"robustcode.models.robust.dataset_util.dataset_to_... | [((1399, 1442), 'os.path.join', 'os.path.join', (['args.save_dir', 'args.tag', 'name'], {}), '(args.save_dir, args.tag, name)\n', (1411, 1442), False, 'import os\n'), ((1917, 1944), 'torch.load', 'torch.load', (['checkpoint_file'], {}), '(checkpoint_file)\n', (1927, 1944), False, 'import torch\n'), ((2343, 2378), 'robustcode.util.misc.Logger.start_scope', 'Logger.start_scope', (['"""Parsing Trees"""'], {}), "('Parsing Trees')\n", (2361, 2378), False, 'from robustcode.util.misc import Logger\n'), ((2401, 2445), 'robustcode.models.robust.dataset_util.dataset_to_trees', 'dataset_to_trees', (['dataset.dtrain', 'dataset.ID'], {}), '(dataset.dtrain, dataset.ID)\n', (2417, 2445), False, 'from robustcode.models.robust.dataset_util import dataset_to_trees\n'), ((2468, 2512), 'robustcode.models.robust.dataset_util.dataset_to_trees', 'dataset_to_trees', (['dataset.dvalid', 'dataset.ID'], {}), '(dataset.dvalid, dataset.ID)\n', (2484, 2512), False, 'from robustcode.models.robust.dataset_util import dataset_to_trees\n'), ((2534, 2577), 'robustcode.models.robust.dataset_util.dataset_to_trees', 'dataset_to_trees', (['dataset.dtest', 'dataset.ID'], {}), '(dataset.dtest, dataset.ID)\n', (2550, 2577), False, 'from robustcode.models.robust.dataset_util import dataset_to_trees\n'), ((2674, 2710), 'robustcode.models.robust.dataset_util.dataset_to_trees_num', 'dataset_to_trees_num', (['dataset.dtrain'], {}), '(dataset.dtrain)\n', (2694, 2710), False, 'from robustcode.models.robust.dataset_util import dataset_to_trees_num\n'), ((2733, 2769), 'robustcode.models.robust.dataset_util.dataset_to_trees_num', 'dataset_to_trees_num', (['dataset.dvalid'], {}), '(dataset.dvalid)\n', (2753, 2769), False, 'from robustcode.models.robust.dataset_util import dataset_to_trees_num\n'), ((2791, 2826), 'robustcode.models.robust.dataset_util.dataset_to_trees_num', 'dataset_to_trees_num', (['dataset.dtest'], {}), '(dataset.dtest)\n', (2811, 2826), False, 
'from robustcode.models.robust.dataset_util import dataset_to_trees_num\n'), ((2904, 2922), 'robustcode.util.misc.Logger.end_scope', 'Logger.end_scope', ([], {}), '()\n', (2920, 2922), False, 'from robustcode.util.misc import Logger\n'), ((2928, 2964), 'robustcode.util.misc.Logger.start_scope', 'Logger.start_scope', (['"""Indexing Trees"""'], {}), "('Indexing Trees')\n", (2946, 2964), False, 'from robustcode.util.misc import Logger\n'), ((2983, 3023), 'robustcode.models.robust.adversary.rules.NodeValueIndex', 'NodeValueIndex', (['dataset', 'trees_train_num'], {}), '(dataset, trees_train_num)\n', (2997, 3023), False, 'from robustcode.models.robust.adversary.rules import NodeValueIndex\n'), ((3046, 3089), 'robustcode.models.robust.adversary.rules.NodeValueIndexStr', 'NodeValueIndexStr', (['dataset', 'trees_train_str'], {}), '(dataset, trees_train_str)\n', (3063, 3089), False, 'from robustcode.models.robust.adversary.rules import NodeValueIndexStr\n'), ((3105, 3141), 'robustcode.models.robust.adversary.tree_rules.ExpressionGenerator', 'ExpressionGenerator', (['value_index_str'], {}), '(value_index_str)\n', (3124, 3141), False, 'from robustcode.models.robust.adversary.tree_rules import ExpressionGenerator\n'), ((3166, 3233), 'robustcode.models.robust.adversary.rules.AdversarialNodeReplacement', 'AdversarialNodeReplacement', (['value_index', 'dataset.fixed_value_offset'], {}), '(value_index, dataset.fixed_value_offset)\n', (3192, 3233), False, 'from robustcode.models.robust.adversary.rules import AdversarialNodeReplacement\n'), ((3341, 3378), 'robustcode.models.robust.adversary.adversary.RenameAdversary', 'RenameAdversary', (['rules_index', 'dataset'], {}), '(rules_index, dataset)\n', (3356, 3378), False, 'from robustcode.models.robust.adversary.adversary import RenameAdversary\n'), ((3383, 3401), 'robustcode.util.misc.Logger.end_scope', 'Logger.end_scope', ([], {}), '()\n', (3399, 3401), False, 'from robustcode.util.misc import Logger\n'), ((3429, 3468), 
'robustcode.models.robust.adversary.tree_rules.AdversarialSubtreeReplacement', 'AdversarialSubtreeReplacement', (['expr_gen'], {}), '(expr_gen)\n', (3458, 3468), False, 'from robustcode.models.robust.adversary.tree_rules import AdversarialSubtreeReplacement\n'), ((3575, 3637), 'robustcode.models.robust.adversary.adversary.SubtreeAdversary', 'SubtreeAdversary', (['subtree_rules', 'dataset', 'trees_str', 'make_iter'], {}), '(subtree_rules, dataset, trees_str, make_iter)\n', (3591, 3637), False, 'from robustcode.models.robust.adversary.adversary import SubtreeAdversary\n'), ((3923, 3959), 'robustcode.util.misc.Logger.start_scope', 'Logger.start_scope', (['"""Training Model"""'], {}), "('Training Model')\n", (3941, 3959), False, 'from robustcode.util.misc import Logger\n'), ((4048, 4085), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'reduction': '"""none"""'}), "(reduction='none')\n", (4067, 4085), False, 'from torch import nn\n'), ((4739, 4806), 'robustcode.util.misc.Logger.debug', 'Logger.debug', (['f"""train_prec: {train_prec}, valid_prec: {valid_prec}"""'], {}), "(f'train_prec: {train_prec}, valid_prec: {valid_prec}')\n", (4751, 4806), False, 'from robustcode.util.misc import Logger\n'), ((4811, 4829), 'robustcode.util.misc.Logger.end_scope', 'Logger.end_scope', ([], {}), '()\n', (4827, 4829), False, 'from robustcode.util.misc import Logger\n'), ((1454, 1484), 'os.path.exists', 'os.path.exists', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (1468, 1484), False, 'import os\n'), ((1494, 1521), 'os.makedirs', 'os.makedirs', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (1505, 1521), False, 'import os\n'), ((1785, 1816), 'os.path.exists', 'os.path.exists', (['checkpoint_file'], {}), '(checkpoint_file)\n', (1799, 1816), False, 'import os\n'), ((4583, 4601), 'robustcode.util.misc.Logger.end_scope', 'Logger.end_scope', ([], {}), '()\n', (4599, 4601), False, 'from robustcode.util.misc import Logger\n'), ((4533, 4574), 'robustcode.util.misc.Logger.debug', 
'Logger.debug', (['f"""valid_prec: {valid_prec}"""'], {}), "(f'valid_prec: {valid_prec}')\n", (4545, 4574), False, 'from robustcode.util.misc import Logger\n'), ((5463, 5542), 'robustcode.models.robust.adversary.adversary.AdversaryBatchIter', 'AdversaryBatchIter', (['subtree_adversary', 'model', 'it'], {'num_samples': 'n_subtree_renames'}), '(subtree_adversary, model, it, num_samples=n_subtree_renames)\n', (5481, 5542), False, 'from robustcode.models.robust.adversary.adversary import AdversaryBatchIter\n'), ((5665, 5758), 'robustcode.models.robust.adversary.adversary.AdversaryBatchIter', 'AdversaryBatchIter', (['rename_adversary', 'model', 'it'], {'num_samples': 'n_renames', 'adv_mode': 'adv_mode'}), '(rename_adversary, model, it, num_samples=n_renames,\n adv_mode=adv_mode)\n', (5683, 5758), False, 'from robustcode.models.robust.adversary.adversary import AdversaryBatchIter\n'), ((6008, 6087), 'robustcode.models.robust.adversary.adversary.AdversaryBatchIter', 'AdversaryBatchIter', (['subtree_adversary', 'model', 'it'], {'num_samples': 'n_subtree_renames'}), '(subtree_adversary, model, it, num_samples=n_subtree_renames)\n', (6026, 6087), False, 'from robustcode.models.robust.adversary.adversary import AdversaryBatchIter\n'), ((6342, 6444), 'robustcode.models.robust.adversary.adversary.AdversaryBatchIter', 'AdversaryBatchIter', (['rename_adversary', 'model', 'it'], {'num_samples': 'n_subtree_renames', 'adv_mode': 'adv_mode'}), '(rename_adversary, model, it, num_samples=\n n_subtree_renames, adv_mode=adv_mode)\n', (6360, 6444), False, 'from robustcode.models.robust.adversary.adversary import AdversaryBatchIter\n')] |
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.core.validators import MaxValueValidator, MinValueValidator
from django.shortcuts import get_object_or_404
from itertools import chain
User = settings.AUTH_USER_MODEL
from taggit.managers import TaggableManager
# Create your models here.
class RentalQuerySet(models.query.QuerySet):
    """Custom queryset for Rental with free-text search helpers."""

    def search(self, query):
        """Match *query* against title, author, description, location, tags."""
        lookup = (
            Q(title__icontains=query)
            | Q(author__username__icontains=query)
            | Q(description__icontains=query)
            | Q(location__icontains=query)
            | Q(tags__name__icontains=query)
        )
        return self.filter(lookup).distinct()

    def custom_search(self, query):
        """Search with title matches first, then matches on other fields."""
        by_title = self.filter(title__icontains=query)
        by_rest = self.filter(
            Q(author__username__icontains=query)
            | Q(description__icontains=query)
            | Q(location__icontains=query)
            | Q(tags__name__icontains=query)
        ).distinct()
        # De-duplicate across the two querysets; result order is unspecified.
        return list(set(chain(by_title, by_rest)))
class RentalManager(models.Manager):
    """Manager exposing search and toggle helpers for the Rental model."""

    def get_queryset(self):
        """Return a RentalQuerySet bound to this model and database."""
        return RentalQuerySet(self.model, using=self._db)

    def search(self, query):
        """Full-text search used by the index view."""
        return self.get_queryset().search(query)

    def custom_search(self, query):
        """Title-first search used by the index view."""
        return self.get_queryset().custom_search(query)

    def toggle_intrested(self, pk, user):
        """Toggle *user*'s interest in the rental with primary key *pk*.

        Returns True when the user was added to the interested set, False
        when they were removed or when they are the rental's author.
        """
        rental = get_object_or_404(Rental, pk=pk)
        if user in rental.intrested.all():
            rental.intrested.remove(user)
            added = False
        elif user.username == rental.author.username:
            # The author cannot mark their own rental as interesting.
            added = False
        else:
            rental.intrested.add(user)
            added = True
        # FIX: removed leftover debug statement ``print("func", rental)``.
        return added

    def toggle_occupied(self, pk):
        """Flip the ``occupied`` flag of the rental with primary key *pk*."""
        rental = Rental.objects.get(pk=pk)
        rental.occupied = not rental.occupied
        return rental.save()
class Rental(models.Model):
    """A rental listing: author, price, location, photo, interest tracking."""

    author = models.ForeignKey(User)
    title = models.CharField(max_length=128)
    description = models.TextField(max_length=4096, blank=True, null=True)
    created_date = models.DateTimeField(default=timezone.now)
    rent = models.BigIntegerField(default=0)
    photo = models.FileField(upload_to="photos/", blank=True, null=True)
    location = models.CharField(max_length=256, blank=True, null=True)
    rating = models.FloatField(default=0)
    # Users who marked this rental as interesting.
    intrested = models.ManyToManyField(
        User, related_name="intrested_rentals", blank=True
    )
    lat = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
    lng = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
    occupied = models.BooleanField(default=False)
    tags = TaggableManager()
    objects = RentalManager()

    def __str__(self):
        """Return the title truncated to 25 characters.

        BUG FIX: the original returned None for titles longer than 25
        characters, which raises TypeError — __str__ must return a str.
        """
        return self.title[:25]

    def get_description(self):
        """Return the description shortened to 40 characters with ellipsis."""
        if len(self.description) > 40:
            return self.description[:40] + " ..."
        return self.description

    def get_title(self):
        """Return the title shortened to 50 characters with ellipsis."""
        if len(self.title) > 50:
            return self.title[:50] + " ..."
        return self.title

    def get_absolute_url(self):
        """Return the canonical URL for this rental, e.g. /rentals/<pk>/."""
        return reverse("rentals:detail", kwargs={"pk": self.pk})

    def is_intrested(self, user):
        """Return True if *user* has marked this rental as interesting."""
        return user in self.intrested.all()

    def can_review(self, user):
        """Return True unless *user* has already commented on this rental."""
        for comment in self.comments.all():
            if user.username == comment.author.username:
                return False
        return True
class Comment(models.Model):
    """A star-rated review attached to a Rental via a ForeignKey."""

    rental = models.ForeignKey("Rental", related_name="comments")
    author = models.ForeignKey(User)
    text = models.TextField(max_length=1024)
    # Star rating constrained to the inclusive range 1..5.
    stars = models.IntegerField(
        validators=[MaxValueValidator(5), MinValueValidator(1)]
    )
    created_date = models.DateTimeField(default=timezone.now)

    def __str__(self):
        """Return the comment text."""
        return self.text

    def stars_range(self):
        """Return [0, 1, ..., stars-1] for template star rendering."""
        return list(range(self.stars))
| [
"itertools.chain",
"django.core.validators.MinValueValidator",
"django.db.models.FloatField",
"django.core.validators.MaxValueValidator",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.shortcuts.get_object_or_404",
"django.db.models.ManyToManyField",
"django.db.models.FileField... | [((2642, 2665), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (2659, 2665), False, 'from django.db import models\n'), ((2678, 2710), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (2694, 2710), False, 'from django.db import models\n'), ((2729, 2785), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(4096)', 'blank': '(True)', 'null': '(True)'}), '(max_length=4096, blank=True, null=True)\n', (2745, 2785), False, 'from django.db import models\n'), ((2805, 2847), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (2825, 2847), False, 'from django.db import models\n'), ((2859, 2892), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2881, 2892), False, 'from django.db import models\n'), ((2905, 2965), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""photos/"""', 'blank': '(True)', 'null': '(True)'}), "(upload_to='photos/', blank=True, null=True)\n", (2921, 2965), False, 'from django.db import models\n'), ((2981, 3036), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'null': '(True)'}), '(max_length=256, blank=True, null=True)\n', (2997, 3036), False, 'from django.db import models\n'), ((3050, 3078), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)'}), '(default=0)\n', (3067, 3078), False, 'from django.db import models\n'), ((3095, 3169), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'related_name': '"""intrested_rentals"""', 'blank': '(True)'}), "(User, related_name='intrested_rentals', blank=True)\n", (3117, 3169), False, 'from django.db import models\n'), ((3194, 3268), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(9)', 'decimal_places': '(6)', 
'blank': '(True)', 'null': '(True)'}), '(max_digits=9, decimal_places=6, blank=True, null=True)\n', (3213, 3268), False, 'from django.db import models\n'), ((3279, 3353), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(9)', 'decimal_places': '(6)', 'blank': '(True)', 'null': '(True)'}), '(max_digits=9, decimal_places=6, blank=True, null=True)\n', (3298, 3353), False, 'from django.db import models\n'), ((3369, 3403), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (3388, 3403), False, 'from django.db import models\n'), ((3416, 3433), 'taggit.managers.TaggableManager', 'TaggableManager', ([], {}), '()\n', (3431, 3433), False, 'from taggit.managers import TaggableManager\n'), ((4745, 4797), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Rental"""'], {'related_name': '"""comments"""'}), "('Rental', related_name='comments')\n", (4762, 4797), False, 'from django.db import models\n'), ((4811, 4834), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (4828, 4834), False, 'from django.db import models\n'), ((4846, 4879), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1024)'}), '(max_length=1024)\n', (4862, 4879), False, 'from django.db import models\n'), ((4988, 5030), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (5008, 5030), False, 'from django.db import models\n'), ((1958, 1990), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Rental'], {'pk': 'pk'}), '(Rental, pk=pk)\n', (1975, 1990), False, 'from django.shortcuts import get_object_or_404\n'), ((4159, 4208), 'django.core.urlresolvers.reverse', 'reverse', (['"""rentals:detail"""'], {'kwargs': "{'pk': self.pk}"}), "('rentals:detail', kwargs={'pk': self.pk})\n", (4166, 4208), False, 'from django.core.urlresolvers import reverse\n'), ((4924, 4944), 
'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(5)'], {}), '(5)\n', (4941, 4944), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((4946, 4966), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(1)'], {}), '(1)\n', (4963, 4966), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n'), ((1348, 1363), 'itertools.chain', 'chain', (['qs1', 'qs2'], {}), '(qs1, qs2)\n', (1353, 1363), False, 'from itertools import chain\n'), ((948, 978), 'django.db.models.Q', 'Q', ([], {'tags__name__icontains': 'query'}), '(tags__name__icontains=query)\n', (949, 978), False, 'from django.db.models import Q\n'), ((1266, 1296), 'django.db.models.Q', 'Q', ([], {'tags__name__icontains': 'query'}), '(tags__name__icontains=query)\n', (1267, 1296), False, 'from django.db.models import Q\n'), ((905, 933), 'django.db.models.Q', 'Q', ([], {'location__icontains': 'query'}), '(location__icontains=query)\n', (906, 933), False, 'from django.db.models import Q\n'), ((1223, 1251), 'django.db.models.Q', 'Q', ([], {'location__icontains': 'query'}), '(location__icontains=query)\n', (1224, 1251), False, 'from django.db.models import Q\n'), ((859, 890), 'django.db.models.Q', 'Q', ([], {'description__icontains': 'query'}), '(description__icontains=query)\n', (860, 890), False, 'from django.db.models import Q\n'), ((1126, 1162), 'django.db.models.Q', 'Q', ([], {'author__username__icontains': 'query'}), '(author__username__icontains=query)\n', (1127, 1162), False, 'from django.db.models import Q\n'), ((1177, 1208), 'django.db.models.Q', 'Q', ([], {'description__icontains': 'query'}), '(description__icontains=query)\n', (1178, 1208), False, 'from django.db.models import Q\n'), ((768, 793), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'query'}), '(title__icontains=query)\n', (769, 793), False, 'from django.db.models import Q\n'), ((808, 844), 'django.db.models.Q', 'Q', ([], 
{'author__username__icontains': 'query'}), '(author__username__icontains=query)\n', (809, 844), False, 'from django.db.models import Q\n')] |
import torch
from time import time
from IPython import display
import numpy as np
import matplotlib.pyplot as plt
import random
import torch.utils.data as Data
from torch.nn import init
import torch.nn as nn
# 3.3.1 生成数据集
# 我们生成与上一节中相同的数据集。其中features是训练数据特征,labels是标签
num_inputs = 2 #x1,x2 有几个x
num_examples = 1000
true_w = [2, -3.4]
true_b = 4.2
features = torch.tensor(np.random.normal(0, 1, (num_examples, num_inputs)), dtype=torch.float)
labels = true_w[0] * features[:, 0] + true_w[1] * features[:, 1] + true_b
labels += torch.tensor(np.random.normal(0, 0.01, size=labels.size()), dtype=torch.float)
# 3.3.2 读取数据
# PyTorch提供了data包来读取数据。由于data常用作变量名,我们将导入的data模块用Data代替。
# 在每一次迭代中,我们将随机读取包含10个数据样本的小批量。
batch_size = 10
# 将训练数据的特征和标签组合
dataset = Data.TensorDataset(features, labels)
# 随机读取小批量
data_iter = Data.DataLoader(dataset, batch_size, shuffle=True)
# 这里data_iter的使用跟上一节中的一样。让我们读取并打印第一个小批量数据样本。
for X, y in data_iter:
print(X, y)
break
#
# 3.3.3 定义模型
# 在上一节从零开始的实现中,我们需要定义模型参数,并使用它们一步步描述模型是怎样计算的。
# 当模型结构变得更复杂时,这些步骤将变得更繁琐。
# 其实,PyTorch提供了大量预定义的层,这使我们只需关注使用哪些层来构造模型。
# 下面将介绍如何使用PyTorch更简洁地定义线性回归。
#
# 首先,导入torch.nn模块。
# 实际上,“nn”是neural networks(神经网络)的缩写。
# 顾名思义,该模块定义了大量神经网络的层。
# 之前我们已经用过了autograd,而nn就是利用autograd来定义模型。
# nn的核心数据结构是Module,它是一个抽象概念,既可以表示神经网络中的某个层(layer),也可以表示一个包含很多层的神经网络。
# 在实际使用中,最常见的做法是继承nn.Module,撰写自己的网络/层。一个nn.Module实例应该包含一些层以及返回输出的前向传播(forward)方法。
# 下面先来看看如何用nn.Module实现一个线性回归模型。
class LinearNet(nn.Module):
def __init__(self, n_feature):
super(LinearNet, self).__init__()
self.linear = nn.Linear(n_feature, 1)
# forward 定义前向传播
def forward(self, x):
y = self.linear(x)
return y
net = LinearNet(num_inputs)
print(net) # 使用print可以打印出网络的结构
# 事实上我们还可以用nn.Sequential来更加方便地搭建网络,Sequential是一个有序的容器,网络层将按照在传入Sequential的顺序依次被添加到计算图中。
# 写法一
net = nn.Sequential(
nn.Linear(num_inputs, 1)#in_features = 2, out_features = 1 输入为2维,输出为1维
# 此处还可以传入其他层
)
# 写法二
net = nn.Sequential()
net.add_module('linear', nn.Linear(num_inputs, 1))
# net.add_module ......
# 写法三
from collections import OrderedDict
net = nn.Sequential(OrderedDict([
('linear', nn.Linear(num_inputs, 1))
# ......
]))
print(net)
print(net[0])
#
# 可以通过net.parameters()来查看模型所有的可学习参数,此函数将返回一个生成器。
for param in net.parameters():
print(param)
# 回顾图3.1中线性回归在神经网络图中的表示。
# 作为一个单层神经网络,线性回归输出层中的神经元和输入层中各个输入完全连接。
# 因此,线性回归的输出层又叫全连接层。
# 注意:torch.nn仅支持输入一个batch的样本不支持单个样本输入,如果只有单个样本,可使用input.unsqueeze(0)来添加一维。
# 3.3.4 初始化模型参数
# 在使用net前,我们需要初始化模型参数,如线性回归模型中的权重和偏差。
# PyTorch在init模块中提供了多种参数初始化方法。
# 这里的init是initializer的缩写形式。
# 我们通过init.normal_将权重参数每个元素初始化为随机采样于均值为0、标准差为0.01的正态分布。偏差会初始化为零。
init.normal_(net[0].weight, mean=0, std=0.01)
init.constant_(net[0].bias, val=0) # 也可以直接修改bias的data: net[0].bias.data.fill_(0)
# 注:如果这里的net是用3.3.3节一开始的代码自定义的,那么上面代码会报错,net[0].weight应改为net.linear.weight,bias亦然。因为net[0]这样根据下标访问子模块的写法只有当net是个ModuleList或者Sequential实例时才可以,详见4.1节。
# 3.3.5 定义损失函数
# PyTorch在nn模块中提供了各种损失函数,这些损失函数可看作是一种特殊的层,PyTorch也将这些损失函数实现为nn.Module的子类。
# 我们现在使用它提供的均方误差损失作为模型的损失函数。
loss = nn.MSELoss()
# 3.3.6 定义优化算法
# 同样,我们也无须自己实现小批量随机梯度下降算法。
# torch.optim模块提供了很多常用的优化算法比如SGD、Adam和RMSProp等。
# 下面我们创建一个用于优化net所有参数的优化器实例,并指定学习率为0.03的小批量随机梯度下降(SGD)为优化算法。
import torch.optim as optim
optimizer = optim.SGD(net.parameters(), lr=0.03)
print(optimizer)
# 我们还可以为不同子网络设置不同的学习率,这在finetune时经常用到。例:
#
# optimizer = optim.SGD([
# # 如果对某个参数不指定学习率,就使用最外层的默认学习率
# {'params': net.subnet1.parameters()}, # lr=0.03
# {'params': net.subnet2.parameters(), 'lr': 0.01}], lr=0.03)
# 有时候我们不想让学习率固定成一个常数,那如何调整学习率呢?
# 主要有两种做法。一种是修改optimizer.param_groups中对应的学习率,
# 另一种是更简单也是较为推荐的做法——新建优化器,由于optimizer十分轻量级,构建开销很小,故而可以构建新的optimizer。
# 但是后者对于使用动量的优化器(如Adam),会丢失动量等状态信息,可能会造成损失函数的收敛出现震荡等情况。
# 调整学习率
for param_group in optimizer.param_groups:
param_group['lr'] *= 0.1 # 学习率为之前的0.1倍
# 3.3.7 训练模型
# 在使用Gluon训练模型时,我们通过调用optim实例的step函数来迭代模型参数。
# 按照小批量随机梯度下降的定义,我们在step函数中指明批量大小,从而对批量中样本梯度求平均。
num_epochs = 3
for epoch in range(1, num_epochs + 1):
for X, y in data_iter:
output = net(X)
l = loss(output, y.view(-1, 1))
optimizer.zero_grad() # 梯度清零,等价于net.zero_grad()
l.backward()
optimizer.step()
print('epoch %d, loss: %f' % (epoch, l.item()))
# 下面我们分别比较学到的模型参数和真实的模型参数。
# 我们从net获得需要的层,并访问其权重(weight)和偏差(bias)。学到的参数和真实的参数很接近。
dense = net[0]
print(true_w, dense.weight)
print(true_b, dense.bias)
| [
"numpy.random.normal",
"torch.nn.init.constant_",
"torch.nn.Sequential",
"torch.utils.data.TensorDataset",
"torch.nn.MSELoss",
"torch.nn.Linear",
"torch.utils.data.DataLoader",
"torch.nn.init.normal_"
] | [((751, 787), 'torch.utils.data.TensorDataset', 'Data.TensorDataset', (['features', 'labels'], {}), '(features, labels)\n', (769, 787), True, 'import torch.utils.data as Data\n'), ((810, 860), 'torch.utils.data.DataLoader', 'Data.DataLoader', (['dataset', 'batch_size'], {'shuffle': '(True)'}), '(dataset, batch_size, shuffle=True)\n', (825, 860), True, 'import torch.utils.data as Data\n'), ((1954, 1969), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (1967, 1969), True, 'import torch.nn as nn\n'), ((2650, 2695), 'torch.nn.init.normal_', 'init.normal_', (['net[0].weight'], {'mean': '(0)', 'std': '(0.01)'}), '(net[0].weight, mean=0, std=0.01)\n', (2662, 2695), False, 'from torch.nn import init\n'), ((2696, 2730), 'torch.nn.init.constant_', 'init.constant_', (['net[0].bias'], {'val': '(0)'}), '(net[0].bias, val=0)\n', (2710, 2730), False, 'from torch.nn import init\n'), ((3054, 3066), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (3064, 3066), True, 'import torch.nn as nn\n'), ((371, 421), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', '(num_examples, num_inputs)'], {}), '(0, 1, (num_examples, num_inputs))\n', (387, 421), True, 'import numpy as np\n'), ((1851, 1875), 'torch.nn.Linear', 'nn.Linear', (['num_inputs', '(1)'], {}), '(num_inputs, 1)\n', (1860, 1875), True, 'import torch.nn as nn\n'), ((1995, 2019), 'torch.nn.Linear', 'nn.Linear', (['num_inputs', '(1)'], {}), '(num_inputs, 1)\n', (2004, 2019), True, 'import torch.nn as nn\n'), ((1552, 1575), 'torch.nn.Linear', 'nn.Linear', (['n_feature', '(1)'], {}), '(n_feature, 1)\n', (1561, 1575), True, 'import torch.nn as nn\n'), ((2138, 2162), 'torch.nn.Linear', 'nn.Linear', (['num_inputs', '(1)'], {}), '(num_inputs, 1)\n', (2147, 2162), True, 'import torch.nn as nn\n')] |
from django.db import models
from multi_tenant_users.users import AbstractUserMixin, TenantUser
from tenant_schemas.models import TenantMixin
class Tenant(TenantMixin):
name = models.CharField(max_length=100)
class User(TenantUser, AbstractUserMixin):
def __init__(self, *args, **kwargs):
# is_staff and is_superuser may be provided by some of Django's
# built-in components when creating objects. They're not valid __init__
# keyword args for this model because they're not model fields, so they
# need to be removed.
kwargs.pop('is_staff', None)
kwargs.pop('is_superuser', None)
super(User, self).__init__(*args, **kwargs)
@property
def is_staff(self):
"""
Designates whether the user can log into this tenant's admin site.
"""
try:
return self.tenant_permissions.is_staff
except self.PermissionsModel.DoesNotExist:
return False
@is_staff.setter
def is_staff(self, value):
try:
self.tenant_permissions.is_staff = value
self.tenant_permissions.is_staff.save()
except self.PermissionsModel.DoesNotExist:
if self.id:
permissions = self.PermissionsModel(
user_id=self.id,
is_staff=value,
)
permissions.save()
def get_short_name(self):
return self.first_name or self.username
| [
"django.db.models.CharField"
] | [((182, 214), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (198, 214), False, 'from django.db import models\n')] |
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.cache import cache_page
from .forms import CommentForm, PostForm
from .models import Follow, Group, Post, User
@cache_page(20, key_prefix="index_page")
def index(request):
post_list = Post.objects.order_by('-pub_date').all()
paginator = Paginator(post_list, 10)
page_number = request.GET.get('page')
page = paginator.get_page(page_number)
return render(
request,
'index.html',
{'page': page, 'paginator': paginator}
)
def group_posts(request, slug):
group = get_object_or_404(Group, slug=slug)
post_list = group.posts.all()
paginator = Paginator(post_list, 10)
page_number = request.GET.get('page')
page = paginator.get_page(page_number)
return render(request, "group.html", {
"group": group,
'page': page,
'paginator': paginator
})
@login_required
def new_post(request):
form = PostForm(request.POST or None)
if form.is_valid():
instance = form.save(commit=False)
instance.author = request.user
instance.save()
return redirect('index')
return render(request, 'new.html', {'form': form})
def profile(request, username):
author = get_object_or_404(User, username=username)
following = False
if request.user.is_authenticated:
following = Follow.objects.filter(user=request.user.id,
author=author).exists()
post_list = author.author_posts.all()
count_posts = post_list.count()
paginator = Paginator(post_list, 10)
page_number = request.GET.get('page')
page = paginator.get_page(page_number)
return render(request, 'profile.html', {
"author": author,
'page': page,
'paginator': paginator,
'count_posts': count_posts,
'profile': author,
"following": following}
)
def post_view(request, username, post_id):
author = get_object_or_404(User, username=username)
post = get_object_or_404(Post, pk=post_id, author__username=username)
count_posts = author.author_posts.count()
form = CommentForm(request.POST)
comments = post.comments.all()
return render(request, "post_view.html", {
"author": author,
"username": username,
"post": post,
"count_posts": count_posts,
"form": form,
"comments": comments, })
@login_required
def post_edit(request, username, post_id):
profile = get_object_or_404(User, username=username)
post = get_object_or_404(Post, pk=post_id, author=profile)
if request.user != profile:
return redirect('post', username=username, post_id=post_id)
form = PostForm(request.POST or None,
files=request.FILES or None, instance=post)
if request.method == 'POST':
if form.is_valid():
form.save()
return redirect(
"post_view",
username=request.user.username,
post_id=post_id
)
return render(
request, 'new.html', {'form': form, 'post': post},
)
def page_not_found(request, exception):
# Переменная exception содержит отладочную информацию,
# выводить её в шаблон пользователской страницы 404 мы не станем
return render(
request,
"misc/404.html",
{"path": request.path},
status=404
)
def server_error(request):
return render(request, "misc/500.html", status=500)
@login_required
def add_comment(request, username, post_id):
post = get_object_or_404(Post, pk=post_id, author__username=username)
form = CommentForm(request.POST or None)
if not form.is_valid():
return render(request, "comments.html",
{"form": form, "post": post})
comment = form.save(commit=False)
comment.author = request.user
comment.post = post
form.save()
return redirect("post_view", username, post_id)
@login_required
def follow_index(request):
post_list = Post.objects.filter(author__following__user=request.user)
paginator = Paginator(post_list, 10)
page_number = request.GET.get('page')
page = paginator.get_page(page_number)
return render(
request,
'follow.html',
{
'page': page,
'paginator': paginator
}
)
@login_required
def profile_follow(request, username):
author = get_object_or_404(User, username=username)
if request.user != author:
Follow.objects.get_or_create(user=request.user, author=author)
return redirect('profile', username=username)
@login_required
def profile_unfollow(request, username):
author = get_object_or_404(User, username=username)
Follow.objects.filter(user=request.user, author=author).delete()
return redirect("profile", username=username)
| [
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect",
"django.views.decorators.cache.cache_page",
"django.core.paginator.Paginator"
] | [((310, 349), 'django.views.decorators.cache.cache_page', 'cache_page', (['(20)'], {'key_prefix': '"""index_page"""'}), "(20, key_prefix='index_page')\n", (320, 349), False, 'from django.views.decorators.cache import cache_page\n'), ((443, 467), 'django.core.paginator.Paginator', 'Paginator', (['post_list', '(10)'], {}), '(post_list, 10)\n', (452, 467), False, 'from django.core.paginator import Paginator\n'), ((565, 634), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'page': page, 'paginator': paginator}"], {}), "(request, 'index.html', {'page': page, 'paginator': paginator})\n", (571, 634), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((711, 746), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Group'], {'slug': 'slug'}), '(Group, slug=slug)\n', (728, 746), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((798, 822), 'django.core.paginator.Paginator', 'Paginator', (['post_list', '(10)'], {}), '(post_list, 10)\n', (807, 822), False, 'from django.core.paginator import Paginator\n'), ((919, 1008), 'django.shortcuts.render', 'render', (['request', '"""group.html"""', "{'group': group, 'page': page, 'paginator': paginator}"], {}), "(request, 'group.html', {'group': group, 'page': page, 'paginator':\n paginator})\n", (925, 1008), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1295, 1338), 'django.shortcuts.render', 'render', (['request', '"""new.html"""', "{'form': form}"], {}), "(request, 'new.html', {'form': form})\n", (1301, 1338), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1386, 1428), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (1403, 1428), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1713, 1737), 'django.core.paginator.Paginator', 'Paginator', (['post_list', '(10)'], {}), 
'(post_list, 10)\n', (1722, 1737), False, 'from django.core.paginator import Paginator\n'), ((1835, 2003), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'author': author, 'page': page, 'paginator': paginator, 'count_posts':\n count_posts, 'profile': author, 'following': following}"], {}), "(request, 'profile.html', {'author': author, 'page': page,\n 'paginator': paginator, 'count_posts': count_posts, 'profile': author,\n 'following': following})\n", (1841, 2003), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2108, 2150), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (2125, 2150), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2162, 2224), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'pk': 'post_id', 'author__username': 'username'}), '(Post, pk=post_id, author__username=username)\n', (2179, 2224), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2356, 2517), 'django.shortcuts.render', 'render', (['request', '"""post_view.html"""', "{'author': author, 'username': username, 'post': post, 'count_posts':\n count_posts, 'form': form, 'comments': comments}"], {}), "(request, 'post_view.html', {'author': author, 'username': username,\n 'post': post, 'count_posts': count_posts, 'form': form, 'comments':\n comments})\n", (2362, 2517), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2636, 2678), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (2653, 2678), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2690, 2741), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'pk': 'post_id', 'author': 'profile'}), '(Post, pk=post_id, author=profile)\n', (2707, 2741), False, 'from 
django.shortcuts import get_object_or_404, redirect, render\n'), ((3202, 3259), 'django.shortcuts.render', 'render', (['request', '"""new.html"""', "{'form': form, 'post': post}"], {}), "(request, 'new.html', {'form': form, 'post': post})\n", (3208, 3259), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3456, 3524), 'django.shortcuts.render', 'render', (['request', '"""misc/404.html"""', "{'path': request.path}"], {'status': '(404)'}), "(request, 'misc/404.html', {'path': request.path}, status=404)\n", (3462, 3524), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3603, 3647), 'django.shortcuts.render', 'render', (['request', '"""misc/500.html"""'], {'status': '(500)'}), "(request, 'misc/500.html', status=500)\n", (3609, 3647), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3722, 3784), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'pk': 'post_id', 'author__username': 'username'}), '(Post, pk=post_id, author__username=username)\n', (3739, 3784), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4081, 4121), 'django.shortcuts.redirect', 'redirect', (['"""post_view"""', 'username', 'post_id'], {}), "('post_view', username, post_id)\n", (4089, 4121), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4257, 4281), 'django.core.paginator.Paginator', 'Paginator', (['post_list', '(10)'], {}), '(post_list, 10)\n', (4266, 4281), False, 'from django.core.paginator import Paginator\n'), ((4378, 4448), 'django.shortcuts.render', 'render', (['request', '"""follow.html"""', "{'page': page, 'paginator': paginator}"], {}), "(request, 'follow.html', {'page': page, 'paginator': paginator})\n", (4384, 4448), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4583, 4625), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, 
username=username)\n', (4600, 4625), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4739, 4777), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (4747, 4777), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4850, 4892), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (4867, 4892), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4973, 5011), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (4981, 5011), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1265, 1282), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1273, 1282), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2789, 2841), 'django.shortcuts.redirect', 'redirect', (['"""post"""'], {'username': 'username', 'post_id': 'post_id'}), "('post', username=username, post_id=post_id)\n", (2797, 2841), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3873, 3935), 'django.shortcuts.render', 'render', (['request', '"""comments.html"""', "{'form': form, 'post': post}"], {}), "(request, 'comments.html', {'form': form, 'post': post})\n", (3879, 3935), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3053, 3123), 'django.shortcuts.redirect', 'redirect', (['"""post_view"""'], {'username': 'request.user.username', 'post_id': 'post_id'}), "('post_view', username=request.user.username, post_id=post_id)\n", (3061, 3123), False, 'from django.shortcuts import get_object_or_404, redirect, render\n')] |
"""
Feature: Addition
In order to avoid silly mistakes
As a math idiot
I want to be told the sum of two numbers
Scenario: Add two numbers
Given I have powered calculator on
When I enter "50" into the calculator
And I enter "70" into the calculator
And I press add
Then the result should be "120" on the screen
Scenario: Subsequent additions
Given I have powered calculator on
When I enter "50" into the calculator
And I enter "70" into the calculator
And I press add
And I enter "20" into the calculator
And I press add
Then the result should be "140" on the screen
Scenario: Add two numbers - table
Given I have powered calculator on
When I enter "<num1>" into the calculator
And I enter "<num2>" into the calculator
And I press add
Then the result should be "<result>" on the screen
| num1 | num2 | result |
| 2 | 3 | 5 |
| 4 | 5 | 9 |
"""
import unittest
from calculator import Calculator
from morelia import verify
class CalculatorDocstringTestCase(unittest.TestCase):
"""Calculator acceptance test case."""
def test_addition(self):
"""Addition feature."""
verify(__doc__, self, scenario="Subsequent additions")
def setUp(self):
self.calculator = Calculator()
def step_I_have_powered_calculator_on(self):
r"I have powered calculator on"
self.calculator.on()
def step_I_enter_a_number_into_the_calculator(self, number):
r'I enter "(.+)" into the calculator' # match by regexp
self.calculator.push(int(number))
def step_I_press_add(self): # matched by method name
self.calculator.add()
def step_the_result_should_be_on_the_screen(self, number):
r'the result should be "{number}" on the screen' # match by format-like string
self.assertEqual(int(number), self.calculator.get_result())
if __name__ == "__main__": # pragma: nobranch
unittest.main()
| [
"unittest.main",
"morelia.verify",
"calculator.Calculator"
] | [((1989, 2004), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2002, 2004), False, 'import unittest\n'), ((1219, 1273), 'morelia.verify', 'verify', (['__doc__', 'self'], {'scenario': '"""Subsequent additions"""'}), "(__doc__, self, scenario='Subsequent additions')\n", (1225, 1273), False, 'from morelia import verify\n'), ((1322, 1334), 'calculator.Calculator', 'Calculator', ([], {}), '()\n', (1332, 1334), False, 'from calculator import Calculator\n')] |
import unittest
from datetime import datetime, timedelta
from redis import Redis
from extensions import *
redis = Redis()
class EventBitmapTestCase(unittest.TestCase):
def setUp(self):
self.EA = EventAnalytics('test')
def _tearDown(self):
self.EA.delete_all_events('test')
def test_create_event(self):
create_event(1, 'test')
create_event(2, 'test')
d = datetime.today() - timedelta(days=4)
create_event(3, 'test', target_time=d)
d = datetime.today() - timedelta(days=3)
create_event(4, 'test', target_time=d)
def _test_all_events(self):
EA = EventAnalytics('test')
EA.fetch_daily(last=7)
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"datetime.datetime.today",
"datetime.timedelta",
"redis.Redis"
] | [((117, 124), 'redis.Redis', 'Redis', ([], {}), '()\n', (122, 124), False, 'from redis import Redis\n'), ((724, 739), 'unittest.main', 'unittest.main', ([], {}), '()\n', (737, 739), False, 'import unittest\n'), ((412, 428), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (426, 428), False, 'from datetime import datetime, timedelta\n'), ((431, 448), 'datetime.timedelta', 'timedelta', ([], {'days': '(4)'}), '(days=4)\n', (440, 448), False, 'from datetime import datetime, timedelta\n'), ((508, 524), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (522, 524), False, 'from datetime import datetime, timedelta\n'), ((527, 544), 'datetime.timedelta', 'timedelta', ([], {'days': '(3)'}), '(days=3)\n', (536, 544), False, 'from datetime import datetime, timedelta\n')] |
"""
tt URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# Uncomment next two lines to enable admin:
from django.contrib import admin
from django.urls import path, include
from users import views as user_views
from django.contrib.auth import views as auth_views
from upload import views as upload_views
from django.conf import settings
from django.conf.urls.static import static
from ecommerce import views as ecommerce_views
from ecommerce.views import ItemDeleteView, UserItemListView
urlpatterns = [
# Uncomment the next line to enable the admin:
path('admin/', admin.site.urls),
path('', include('blog.urls')),
path('register/', user_views.register, name='register'),
path('login/',auth_views.LoginView.as_view(template_name='users/login.html'),name='login'),
path('logout/',auth_views.LogoutView.as_view(template_name='users/logout.html') ,name='logout'),
path('profile/', user_views.profile, name='profile'),
path('book/',upload_views.book_list,name='book_list'),
path('book/upload',upload_views.upload_book,name='upload_book'),
path('item/',ecommerce_views.item_list,name='item_list'),
path('products/<int:pk>', ecommerce_views.ItemDetailView.as_view(template_name='product.html'),name='product'),
path('item/upload',ecommerce_views.upload_item,name='upload_item'),
path('item/<int:pk>/delete/',ItemDeleteView.as_view(template_name='item_confirm_delete.html'),name='item-delete'),
path('add_to_cart/<int:pk>/',ecommerce_views.add_to_cart,name='add_to_cart'),
path('remove_from_cart/<int:pk>/',ecommerce_views.remove_from_cart,name='remove_from_cart'),
path('order-summary/',ecommerce_views.OrderSummaryView.as_view(),name='order-summary'),
path('item/<int:pk>/comment/', ecommerce_views.add_comment_to_item, name='add_comment_to_item'),
path('item/<int:pk>',UserItemListView.as_view(),name='user-items'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"ecommerce.views.ItemDetailView.as_view",
"ecommerce.views.ItemDeleteView.as_view",
"django.urls.include",
"ecommerce.views.UserItemListView.as_view",
"ecommerce.views.OrderSummaryView.as_view",
"django.contrib.auth.views.LogoutView.as_view",
"django.conf.urls.static.static",
"django.contrib.auth.view... | [((1133, 1164), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (1137, 1164), False, 'from django.urls import path, include\n'), ((1206, 1261), 'django.urls.path', 'path', (['"""register/"""', 'user_views.register'], {'name': '"""register"""'}), "('register/', user_views.register, name='register')\n", (1210, 1261), False, 'from django.urls import path, include\n'), ((1464, 1516), 'django.urls.path', 'path', (['"""profile/"""', 'user_views.profile'], {'name': '"""profile"""'}), "('profile/', user_views.profile, name='profile')\n", (1468, 1516), False, 'from django.urls import path, include\n'), ((1522, 1577), 'django.urls.path', 'path', (['"""book/"""', 'upload_views.book_list'], {'name': '"""book_list"""'}), "('book/', upload_views.book_list, name='book_list')\n", (1526, 1577), False, 'from django.urls import path, include\n'), ((1581, 1646), 'django.urls.path', 'path', (['"""book/upload"""', 'upload_views.upload_book'], {'name': '"""upload_book"""'}), "('book/upload', upload_views.upload_book, name='upload_book')\n", (1585, 1646), False, 'from django.urls import path, include\n'), ((1650, 1708), 'django.urls.path', 'path', (['"""item/"""', 'ecommerce_views.item_list'], {'name': '"""item_list"""'}), "('item/', ecommerce_views.item_list, name='item_list')\n", (1654, 1708), False, 'from django.urls import path, include\n'), ((1828, 1896), 'django.urls.path', 'path', (['"""item/upload"""', 'ecommerce_views.upload_item'], {'name': '"""upload_item"""'}), "('item/upload', ecommerce_views.upload_item, name='upload_item')\n", (1832, 1896), False, 'from django.urls import path, include\n'), ((2018, 2096), 'django.urls.path', 'path', (['"""add_to_cart/<int:pk>/"""', 'ecommerce_views.add_to_cart'], {'name': '"""add_to_cart"""'}), "('add_to_cart/<int:pk>/', ecommerce_views.add_to_cart, name='add_to_cart')\n", (2022, 2096), False, 'from django.urls import path, include\n'), ((2099, 2197), 
'django.urls.path', 'path', (['"""remove_from_cart/<int:pk>/"""', 'ecommerce_views.remove_from_cart'], {'name': '"""remove_from_cart"""'}), "('remove_from_cart/<int:pk>/', ecommerce_views.remove_from_cart, name=\n 'remove_from_cart')\n", (2103, 2197), False, 'from django.urls import path, include\n'), ((2287, 2387), 'django.urls.path', 'path', (['"""item/<int:pk>/comment/"""', 'ecommerce_views.add_comment_to_item'], {'name': '"""add_comment_to_item"""'}), "('item/<int:pk>/comment/', ecommerce_views.add_comment_to_item, name=\n 'add_comment_to_item')\n", (2291, 2387), False, 'from django.urls import path, include\n'), ((2498, 2559), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (2504, 2559), False, 'from django.conf.urls.static import static\n'), ((1179, 1199), 'django.urls.include', 'include', (['"""blog.urls"""'], {}), "('blog.urls')\n", (1186, 1199), False, 'from django.urls import path, include\n'), ((1281, 1343), 'django.contrib.auth.views.LoginView.as_view', 'auth_views.LoginView.as_view', ([], {'template_name': '"""users/login.html"""'}), "(template_name='users/login.html')\n", (1309, 1343), True, 'from django.contrib.auth import views as auth_views\n'), ((1378, 1442), 'django.contrib.auth.views.LogoutView.as_view', 'auth_views.LogoutView.as_view', ([], {'template_name': '"""users/logout.html"""'}), "(template_name='users/logout.html')\n", (1407, 1442), True, 'from django.contrib.auth import views as auth_views\n'), ((1738, 1806), 'ecommerce.views.ItemDetailView.as_view', 'ecommerce_views.ItemDetailView.as_view', ([], {'template_name': '"""product.html"""'}), "(template_name='product.html')\n", (1776, 1806), True, 'from ecommerce import views as ecommerce_views\n'), ((1929, 1993), 'ecommerce.views.ItemDeleteView.as_view', 'ItemDeleteView.as_view', ([], {'template_name': '"""item_confirm_delete.html"""'}), 
"(template_name='item_confirm_delete.html')\n", (1951, 1993), False, 'from ecommerce.views import ItemDeleteView, UserItemListView\n'), ((2217, 2259), 'ecommerce.views.OrderSummaryView.as_view', 'ecommerce_views.OrderSummaryView.as_view', ([], {}), '()\n', (2257, 2259), True, 'from ecommerce import views as ecommerce_views\n'), ((2409, 2435), 'ecommerce.views.UserItemListView.as_view', 'UserItemListView.as_view', ([], {}), '()\n', (2433, 2435), False, 'from ecommerce.views import ItemDeleteView, UserItemListView\n')] |
import pandas as pd
from autosktime.data import DatasetProperties
from sktime.forecasting.base import ForecastingHorizon
from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter
from autosktime.constants import IGNORES_EXOGENOUS_X, HANDLES_UNIVARIATE, HANDLES_MISSING, HANDLES_MULTIVARIATE, \
SUPPORTED_INDEX_TYPES
from autosktime.pipeline.components.base import AutoSktimePredictor, COMPONENT_PROPERTIES
class NaiveForecasterComponent(AutoSktimePredictor):
    """Wraps sktime's ``NaiveForecaster`` as an auto-sktime predictor component.

    Parameters
    ----------
    sp : int
        Seasonal periodicity used by the forecaster.
    strategy : str
        Forecasting strategy; one of ``'last'``, ``'mean'`` or ``'drift'``.
    random_state
        Unused (the naive forecaster is deterministic); accepted and stored
        for interface compatibility with the other components.
    """

    def __init__(
            self,
            sp: int = 1,
            strategy: str = 'last',
            random_state=None
    ):
        self.sp = sp
        self.strategy = strategy
        # Fix: previously the argument was accepted but discarded, which breaks
        # sklearn-style get_params/set_params round-trips (clone/introspection).
        self.random_state = random_state

    def fit(self, y, X: pd.DataFrame = None, fh: ForecastingHorizon = None):
        """Fit a ``NaiveForecaster`` on ``y`` and return ``self``."""
        from sktime.forecasting.naive import NaiveForecaster

        self.estimator = NaiveForecaster(
            sp=self.sp,
            strategy=self.strategy,
        )

        self.estimator.fit(y, X=X, fh=fh)
        return self

    def predict(self, fh: ForecastingHorizon = None, X: pd.DataFrame = None):
        """Predict for ``fh``; any provided ``X`` is dropped (see comment)."""
        # Naive forecaster can not handle X
        prediction = super().predict(fh, X=None)

        if self.sp > 1 and fh is not None and self.estimator.fh[0] == fh.to_pandas()[0]:
            # NaiveForecaster uses the last self.sp terms for forecasting. In case that the training
            # data are predicted again, the first self.sp terms are missing and set to nan
            prediction = prediction.backfill()

        return prediction

    @staticmethod
    def get_properties(dataset_properties: DatasetProperties = None) -> COMPONENT_PROPERTIES:
        """Declare which dataset characteristics this component supports."""
        return {
            HANDLES_UNIVARIATE: True,
            HANDLES_MULTIVARIATE: True,
            IGNORES_EXOGENOUS_X: True,
            HANDLES_MISSING: True,
            SUPPORTED_INDEX_TYPES: [pd.RangeIndex, pd.DatetimeIndex, pd.PeriodIndex]
        }

    @staticmethod
    def get_hyperparameter_search_space(dataset_properties: DatasetProperties = None) -> ConfigurationSpace:
        """Build the ConfigSpace search space over strategy and seasonality."""
        strategy = CategoricalHyperparameter('strategy', ['last', 'mean', 'drift'])
        sp = CategoricalHyperparameter('sp', [1, 2, 4, 7, 12])

        cs = ConfigurationSpace()
        cs.add_hyperparameters([strategy, sp])
        return cs
| [
"ConfigSpace.ConfigurationSpace",
"sktime.forecasting.naive.NaiveForecaster",
"ConfigSpace.CategoricalHyperparameter"
] | [((832, 883), 'sktime.forecasting.naive.NaiveForecaster', 'NaiveForecaster', ([], {'sp': 'self.sp', 'strategy': 'self.strategy'}), '(sp=self.sp, strategy=self.strategy)\n', (847, 883), False, 'from sktime.forecasting.naive import NaiveForecaster\n'), ((2033, 2097), 'ConfigSpace.CategoricalHyperparameter', 'CategoricalHyperparameter', (['"""strategy"""', "['last', 'mean', 'drift']"], {}), "('strategy', ['last', 'mean', 'drift'])\n", (2058, 2097), False, 'from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter\n'), ((2111, 2160), 'ConfigSpace.CategoricalHyperparameter', 'CategoricalHyperparameter', (['"""sp"""', '[1, 2, 4, 7, 12]'], {}), "('sp', [1, 2, 4, 7, 12])\n", (2136, 2160), False, 'from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter\n'), ((2175, 2195), 'ConfigSpace.ConfigurationSpace', 'ConfigurationSpace', ([], {}), '()\n', (2193, 2195), False, 'from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter\n')] |
import datetime
import unittest
from flask import Blueprint, request, jsonify
from freezegun import freeze_time
from mock import Mock, patch
import jwt
from requests.exceptions import HTTPError
from shared_helpers import services
from testing import TrottoTestCase, LIVE_APP_HOST
class TestFunctions(unittest.TestCase):
    """Unit tests for the service-to-service helpers in
    ``shared_helpers.services``: internal-token creation, authenticated GET
    requests, and validation of incoming internal tokens."""

    @patch('shared_helpers.services.get_service_config', return_value={'signing_secret': 'so_secret'})
    def test__create_internal_token(self, mock_get_service_config):
        """Tokens carry the payload plus a 30-second expiry."""
        now = datetime.datetime.now(datetime.timezone.utc)

        with freeze_time(now):
            token = services._create_internal_token('my_service', {'id': 1})

        self.assertEqual({'exp': int(now.timestamp()) + 30,
                          'id': 1},
                         jwt.decode(token, 'so_secret', algorithms=['HS256']))

        # Past the 30-second window the token must no longer decode.
        with freeze_time(now + datetime.timedelta(seconds=40)):
            with self.assertRaises(jwt.exceptions.ExpiredSignatureError):
                jwt.decode(token, 'so_secret', algorithms=['HS256'])

        mock_get_service_config.assert_called_once_with('my_service')

    @patch('shared_helpers.services.requests.get')
    @patch('shared_helpers.services._create_internal_token', return_value='internal_token')
    @patch('shared_helpers.services.get_service_config', return_value={'base_url': 'https://trot.to'})
    def test_get__basic(self, mock_get_service_config, mock_create_internal_token, mock_requests_get):
        """A GET builds the full URL and sends the signed token header."""
        mock_response = Mock()
        mock_response.json.return_value = {'id': 1}
        mock_requests_get.return_value = mock_response

        self.assertEqual({'id': 1},
                         services.get('my_service', 'api/users'))

        mock_get_service_config.assert_called_once_with('my_service')
        mock_create_internal_token.assert_called_once_with('my_service', {'url': 'https://trot.to/api/users'})
        mock_requests_get.assert_called_once_with('https://trot.to/api/users',
                                                  headers={'X-Token': 'internal_token'})

    @patch('shared_helpers.services.requests.get')
    @patch('shared_helpers.services._create_internal_token', return_value='internal_token')
    @patch('shared_helpers.services.get_service_config', return_value={'base_url': 'https://trot.to/'})
    def test_get__trailing_and_leading_slashes(self,
                                               mock_get_service_config, mock_create_internal_token, mock_requests_get):
        """Duplicate slashes between base URL and path are collapsed."""
        mock_response = Mock()
        mock_response.json.return_value = {'id': 1}
        mock_requests_get.return_value = mock_response

        self.assertEqual({'id': 1},
                         services.get('my_service', '/api/users'))

        mock_get_service_config.assert_called_once_with('my_service')
        mock_create_internal_token.assert_called_once_with('my_service', {'url': 'https://trot.to/api/users'})
        mock_requests_get.assert_called_once_with('https://trot.to/api/users',
                                                  headers={'X-Token': 'internal_token'})

    @patch('shared_helpers.services.requests.get')
    @patch('shared_helpers.services._create_internal_token', return_value='internal_token')
    @patch('shared_helpers.services.get_service_config', return_value={'base_url': 'https://trot.to'})
    def test_get__http_error(self, mock_get_service_config, mock_create_internal_token, mock_requests_get):
        """HTTP errors from the downstream service propagate to the caller."""
        mock_response = Mock()
        mock_response.raise_for_status.side_effect = HTTPError
        mock_requests_get.return_value = mock_response

        with self.assertRaises(HTTPError):
            services.get('my_service', 'api/users')

        mock_get_service_config.assert_called_once_with('my_service')
        mock_create_internal_token.assert_called_once_with('my_service', {'url': 'https://trot.to/api/users'})
        mock_requests_get.assert_called_once_with('https://trot.to/api/users',
                                                  headers={'X-Token': 'internal_token'})

    def test_validate_internal_request__no_token(self):
        """Requests without the X-Token header are rejected."""
        mock_request = Mock()
        mock_request.headers = {}

        with self.assertRaises(services.InvalidInternalToken) as cm:
            services.validate_internal_request(mock_request)

        self.assertEqual('no token',
                         str(cm.exception))

    @patch('shared_helpers.services.get_config_by_key_path', return_value='so_secret')
    def test_validate_internal_request__invalid_signature__wrong_secret(self, mock_get_config_by_key_path):
        """A token signed with the wrong secret is rejected."""
        token = jwt.encode({'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=30),
                            'url': 'https://trot.to/api/users'},
                           'a_secret',
                           algorithm='HS256')

        mock_request = Mock()
        mock_request.headers = {'X-Token': token}
        mock_request.url = 'https://trot.to/api/users'

        with self.assertRaises(services.InvalidInternalToken) as cm:
            services.validate_internal_request(mock_request)

        self.assertEqual('invalid signature',
                         str(cm.exception))

        mock_get_config_by_key_path.assert_called_once_with(['signing_secret'])

    @patch('shared_helpers.services.get_config_by_key_path', return_value='so_secret')
    def test_validate_internal_request__invalid_signature__no_exp(self, mock_get_config_by_key_path):
        """Tokens must carry an ``exp`` claim."""
        token = jwt.encode({'url': 'https://trot.to/api/users'},
                           'so_secret',
                           algorithm='HS256')

        mock_request = Mock()
        mock_request.headers = {'X-Token': token}
        mock_request.url = 'https://trot.to/api/users'

        with self.assertRaises(services.InvalidInternalToken) as cm:
            services.validate_internal_request(mock_request)

        self.assertEqual('missing exp',
                         str(cm.exception))

        mock_get_config_by_key_path.assert_called_once_with(['signing_secret'])

    @patch('shared_helpers.services.get_config_by_key_path', return_value='so_secret')
    def test_validate_internal_request__expired_token(self, mock_get_config_by_key_path):
        """Expired tokens are rejected."""
        token = jwt.encode({'exp': datetime.datetime.utcnow() - datetime.timedelta(seconds=1),
                            'url': 'https://trot.to/api/users'},
                           'so_secret',
                           algorithm='HS256')

        mock_request = Mock()
        mock_request.headers = {'X-Token': token}
        mock_request.url = 'https://trot.to/api/users'

        with self.assertRaises(services.InvalidInternalToken) as cm:
            services.validate_internal_request(mock_request)

        self.assertEqual('expired',
                         str(cm.exception))

        mock_get_config_by_key_path.assert_called_once_with(['signing_secret'])

    @patch('shared_helpers.services.get_config_by_key_path', return_value='so_secret')
    def test_validate_internal_request__mismatched_url(self, mock_get_config_by_key_path):
        """The token is bound to a URL; a different request URL is rejected."""
        token = jwt.encode({'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=30),
                            'url': 'https://trot.to/api/users/1'},
                           'so_secret',
                           algorithm='HS256')

        mock_request = Mock()
        mock_request.headers = {'X-Token': token}
        mock_request.url = 'https://trot.to/api/users'

        with self.assertRaises(services.InvalidInternalToken) as cm:
            services.validate_internal_request(mock_request)

        self.assertEqual('mismatched URL',
                         str(cm.exception))

        mock_get_config_by_key_path.assert_called_once_with(['signing_secret'])

    @patch('shared_helpers.services.get_config_by_key_path', return_value='so_secret')
    def test_validate_internal_request__valid_token(self, mock_get_config_by_key_path):
        """A well-formed, unexpired, URL-matching token validates as True."""
        token = jwt.encode({'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=30),
                            'url': 'https://trot.to/api/users'},
                           'so_secret',
                           algorithm='HS256')

        mock_request = Mock()
        mock_request.headers = {'X-Token': token}
        mock_request.url = 'https://trot.to/api/users'

        self.assertEqual(True,
                         services.validate_internal_request(mock_request))

        mock_get_config_by_key_path.assert_called_once_with(['signing_secret'])
# Minimal Flask blueprint exposing an internal-only endpoint; mounted by the
# live-app integration tests below.
routes = Blueprint('test', __name__)


@routes.route('/_/api/users', methods=['GET'])
def get_users():
    # Reject the request unless it carries a valid internal service token.
    services.validate_internal_request(request)

    return jsonify([{'id': 1}])
class TestIntegration(TrottoTestCase):
    """End-to-end tests exercising ``services.get`` against a live app
    instance serving the ``routes`` blueprint."""

    blueprints_under_test = [routes]

    start_live_app = True
    live_app_config = {'sessions_secret': 'a_sessions_secret',
                       'signing_secret': 'so_secret',
                       'postgres': {'url': 'postgresql://admin:testing@/testing_trotto_core'}}

    @patch('shared_helpers.config.get_config', return_value={'services': {'my_service': {'signing_secret': 'so_secret',
                                                                                        'base_url': LIVE_APP_HOST}}})
    def test_internal_request__real_handler__valid_token(self, _):
        # Matching signing secrets: the request authenticates end-to-end.
        self.assertEqual([{'id': 1}],
                         services.get('my_service', '/_/api/users'))

    @patch('shared_helpers.config.get_config', return_value={'services': {'my_service': {'signing_secret': 'a_secret',
                                                                                        'base_url': LIVE_APP_HOST}}})
    def test_internal_request__real_handler__invalid_token(self, _):
        # Mismatched signing secrets: the handler fails, surfacing as a 500.
        with self.assertRaises(HTTPError) as cm:
            # Fix: this call was previously wrapped in an assertEqual whose
            # comparison could never run (the call is expected to raise), so
            # the dead assertion has been removed.
            services.get('my_service', '/_/api/users')

        self.assertEqual(500,
                         cm.exception.response.status_code)
| [
"jwt.decode",
"mock.patch",
"shared_helpers.services.validate_internal_request",
"datetime.datetime.utcnow",
"mock.Mock",
"shared_helpers.services.get",
"datetime.timedelta",
"datetime.datetime.now",
"shared_helpers.services._create_internal_token",
"freezegun.freeze_time",
"flask.Blueprint",
... | [((8030, 8057), 'flask.Blueprint', 'Blueprint', (['"""test"""', '__name__'], {}), "('test', __name__)\n", (8039, 8057), False, 'from flask import Blueprint, request, jsonify\n'), ((328, 430), 'mock.patch', 'patch', (['"""shared_helpers.services.get_service_config"""'], {'return_value': "{'signing_secret': 'so_secret'}"}), "('shared_helpers.services.get_service_config', return_value={\n 'signing_secret': 'so_secret'})\n", (333, 430), False, 'from mock import Mock, patch\n'), ((1077, 1122), 'mock.patch', 'patch', (['"""shared_helpers.services.requests.get"""'], {}), "('shared_helpers.services.requests.get')\n", (1082, 1122), False, 'from mock import Mock, patch\n'), ((1126, 1217), 'mock.patch', 'patch', (['"""shared_helpers.services._create_internal_token"""'], {'return_value': '"""internal_token"""'}), "('shared_helpers.services._create_internal_token', return_value=\n 'internal_token')\n", (1131, 1217), False, 'from mock import Mock, patch\n'), ((1216, 1318), 'mock.patch', 'patch', (['"""shared_helpers.services.get_service_config"""'], {'return_value': "{'base_url': 'https://trot.to'}"}), "('shared_helpers.services.get_service_config', return_value={\n 'base_url': 'https://trot.to'})\n", (1221, 1318), False, 'from mock import Mock, patch\n'), ((1975, 2020), 'mock.patch', 'patch', (['"""shared_helpers.services.requests.get"""'], {}), "('shared_helpers.services.requests.get')\n", (1980, 2020), False, 'from mock import Mock, patch\n'), ((2024, 2115), 'mock.patch', 'patch', (['"""shared_helpers.services._create_internal_token"""'], {'return_value': '"""internal_token"""'}), "('shared_helpers.services._create_internal_token', return_value=\n 'internal_token')\n", (2029, 2115), False, 'from mock import Mock, patch\n'), ((2114, 2217), 'mock.patch', 'patch', (['"""shared_helpers.services.get_service_config"""'], {'return_value': "{'base_url': 'https://trot.to/'}"}), "('shared_helpers.services.get_service_config', return_value={\n 'base_url': 'https://trot.to/'})\n", 
(2119, 2217), False, 'from mock import Mock, patch\n'), ((2943, 2988), 'mock.patch', 'patch', (['"""shared_helpers.services.requests.get"""'], {}), "('shared_helpers.services.requests.get')\n", (2948, 2988), False, 'from mock import Mock, patch\n'), ((2992, 3083), 'mock.patch', 'patch', (['"""shared_helpers.services._create_internal_token"""'], {'return_value': '"""internal_token"""'}), "('shared_helpers.services._create_internal_token', return_value=\n 'internal_token')\n", (2997, 3083), False, 'from mock import Mock, patch\n'), ((3082, 3184), 'mock.patch', 'patch', (['"""shared_helpers.services.get_service_config"""'], {'return_value': "{'base_url': 'https://trot.to'}"}), "('shared_helpers.services.get_service_config', return_value={\n 'base_url': 'https://trot.to'})\n", (3087, 3184), False, 'from mock import Mock, patch\n'), ((4155, 4241), 'mock.patch', 'patch', (['"""shared_helpers.services.get_config_by_key_path"""'], {'return_value': '"""so_secret"""'}), "('shared_helpers.services.get_config_by_key_path', return_value=\n 'so_secret')\n", (4160, 4241), False, 'from mock import Mock, patch\n'), ((4983, 5069), 'mock.patch', 'patch', (['"""shared_helpers.services.get_config_by_key_path"""'], {'return_value': '"""so_secret"""'}), "('shared_helpers.services.get_config_by_key_path', return_value=\n 'so_secret')\n", (4988, 5069), False, 'from mock import Mock, patch\n'), ((5708, 5794), 'mock.patch', 'patch', (['"""shared_helpers.services.get_config_by_key_path"""'], {'return_value': '"""so_secret"""'}), "('shared_helpers.services.get_config_by_key_path', return_value=\n 'so_secret')\n", (5713, 5794), False, 'from mock import Mock, patch\n'), ((6508, 6594), 'mock.patch', 'patch', (['"""shared_helpers.services.get_config_by_key_path"""'], {'return_value': '"""so_secret"""'}), "('shared_helpers.services.get_config_by_key_path', return_value=\n 'so_secret')\n", (6513, 6594), False, 'from mock import Mock, patch\n'), ((7319, 7405), 'mock.patch', 'patch', 
(['"""shared_helpers.services.get_config_by_key_path"""'], {'return_value': '"""so_secret"""'}), "('shared_helpers.services.get_config_by_key_path', return_value=\n 'so_secret')\n", (7324, 7405), False, 'from mock import Mock, patch\n'), ((8126, 8169), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['request'], {}), '(request)\n', (8160, 8169), False, 'from shared_helpers import services\n'), ((8180, 8200), 'flask.jsonify', 'jsonify', (["[{'id': 1}]"], {}), "([{'id': 1}])\n", (8187, 8200), False, 'from flask import Blueprint, request, jsonify\n'), ((8512, 8661), 'mock.patch', 'patch', (['"""shared_helpers.config.get_config"""'], {'return_value': "{'services': {'my_service': {'signing_secret': 'so_secret', 'base_url':\n LIVE_APP_HOST}}}"}), "('shared_helpers.config.get_config', return_value={'services': {\n 'my_service': {'signing_secret': 'so_secret', 'base_url': LIVE_APP_HOST}}})\n", (8517, 8661), False, 'from mock import Mock, patch\n'), ((8912, 9060), 'mock.patch', 'patch', (['"""shared_helpers.config.get_config"""'], {'return_value': "{'services': {'my_service': {'signing_secret': 'a_secret', 'base_url':\n LIVE_APP_HOST}}}"}), "('shared_helpers.config.get_config', return_value={'services': {\n 'my_service': {'signing_secret': 'a_secret', 'base_url': LIVE_APP_HOST}}})\n", (8917, 9060), False, 'from mock import Mock, patch\n'), ((502, 546), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (523, 546), False, 'import datetime\n'), ((1435, 1441), 'mock.Mock', 'Mock', ([], {}), '()\n', (1439, 1441), False, 'from mock import Mock, patch\n'), ((2402, 2408), 'mock.Mock', 'Mock', ([], {}), '()\n', (2406, 2408), False, 'from mock import Mock, patch\n'), ((3306, 3312), 'mock.Mock', 'Mock', ([], {}), '()\n', (3310, 3312), False, 'from mock import Mock, patch\n'), ((3918, 3924), 'mock.Mock', 'Mock', ([], {}), '()\n', (3922, 3924), False, 'from mock import Mock, 
patch\n'), ((4593, 4599), 'mock.Mock', 'Mock', ([], {}), '()\n', (4597, 4599), False, 'from mock import Mock, patch\n'), ((5177, 5262), 'jwt.encode', 'jwt.encode', (["{'url': 'https://trot.to/api/users'}", '"""so_secret"""'], {'algorithm': '"""HS256"""'}), "({'url': 'https://trot.to/api/users'}, 'so_secret', algorithm='HS256'\n )\n", (5187, 5262), False, 'import jwt\n'), ((5324, 5330), 'mock.Mock', 'Mock', ([], {}), '()\n', (5328, 5330), False, 'from mock import Mock, patch\n'), ((6128, 6134), 'mock.Mock', 'Mock', ([], {}), '()\n', (6132, 6134), False, 'from mock import Mock, patch\n'), ((6932, 6938), 'mock.Mock', 'Mock', ([], {}), '()\n', (6936, 6938), False, 'from mock import Mock, patch\n'), ((7738, 7744), 'mock.Mock', 'Mock', ([], {}), '()\n', (7742, 7744), False, 'from mock import Mock, patch\n'), ((557, 573), 'freezegun.freeze_time', 'freeze_time', (['now'], {}), '(now)\n', (568, 573), False, 'from freezegun import freeze_time\n'), ((589, 645), 'shared_helpers.services._create_internal_token', 'services._create_internal_token', (['"""my_service"""', "{'id': 1}"], {}), "('my_service', {'id': 1})\n", (620, 645), False, 'from shared_helpers import services\n'), ((1596, 1635), 'shared_helpers.services.get', 'services.get', (['"""my_service"""', '"""api/users"""'], {}), "('my_service', 'api/users')\n", (1608, 1635), False, 'from shared_helpers import services\n'), ((2563, 2603), 'shared_helpers.services.get', 'services.get', (['"""my_service"""', '"""/api/users"""'], {}), "('my_service', '/api/users')\n", (2575, 2603), False, 'from shared_helpers import services\n'), ((3470, 3509), 'shared_helpers.services.get', 'services.get', (['"""my_service"""', '"""api/users"""'], {}), "('my_service', 'api/users')\n", (3482, 3509), False, 'from shared_helpers import services\n'), ((4028, 4076), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (4062, 4076), False, 'from shared_helpers import 
services\n'), ((4770, 4818), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (4804, 4818), False, 'from shared_helpers import services\n'), ((5501, 5549), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (5535, 5549), False, 'from shared_helpers import services\n'), ((6305, 6353), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (6339, 6353), False, 'from shared_helpers import services\n'), ((7109, 7157), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (7143, 7157), False, 'from shared_helpers import services\n'), ((7892, 7940), 'shared_helpers.services.validate_internal_request', 'services.validate_internal_request', (['mock_request'], {}), '(mock_request)\n', (7926, 7940), False, 'from shared_helpers import services\n'), ((8864, 8906), 'shared_helpers.services.get', 'services.get', (['"""my_service"""', '"""/_/api/users"""'], {}), "('my_service', '/_/api/users')\n", (8876, 8906), False, 'from shared_helpers import services\n'), ((762, 814), 'jwt.decode', 'jwt.decode', (['token', '"""so_secret"""'], {'algorithms': "['HS256']"}), "(token, 'so_secret', algorithms=['HS256'])\n", (772, 814), False, 'import jwt\n'), ((953, 1005), 'jwt.decode', 'jwt.decode', (['token', '"""so_secret"""'], {'algorithms': "['HS256']"}), "(token, 'so_secret', algorithms=['HS256'])\n", (963, 1005), False, 'import jwt\n'), ((9314, 9356), 'shared_helpers.services.get', 'services.get', (['"""my_service"""', '"""/_/api/users"""'], {}), "('my_service', '/_/api/users')\n", (9326, 9356), False, 'from shared_helpers import services\n'), ((844, 874), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(40)'}), '(seconds=40)\n', (862, 874), False, 
'import datetime\n'), ((4374, 4400), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4398, 4400), False, 'import datetime\n'), ((4403, 4433), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (4421, 4433), False, 'import datetime\n'), ((5909, 5935), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5933, 5935), False, 'import datetime\n'), ((5938, 5967), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (5956, 5967), False, 'import datetime\n'), ((6710, 6736), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6734, 6736), False, 'import datetime\n'), ((6739, 6769), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (6757, 6769), False, 'import datetime\n'), ((7518, 7544), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (7542, 7544), False, 'import datetime\n'), ((7547, 7577), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (7565, 7577), False, 'import datetime\n')] |
"""renamed video field
Revision ID: 4d378428eac7
Revises: aea30a52<PASSWORD>d
Create Date: 2018-07-29 19:03:47.869795
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4d378428eac7'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: enforce uniqueness on question.order_pos and
    rename video.url to video.s3_key."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Replace the plain index on question.order_pos with a named unique
    # constraint.
    op.drop_index('ix_question_order_pos', table_name='question')
    op.create_unique_constraint("question_order_pos_key", 'question', ['order_pos'])
    # "Rename" video.url -> video.s3_key: add the new column and its unique
    # index first, then drop the old index and column.
    op.add_column('video', sa.Column('s3_key', sa.String(length=140), nullable=True))
    op.create_index(op.f('ix_video_s3_key'), 'video', ['s3_key'], unique=True)
    op.drop_index('ix_video_url', table_name='video')
    op.drop_column('video', 'url')
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: restore video.url and the plain index on
    question.order_pos (mirror image of upgrade())."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore video.url and its unique index, then drop s3_key.
    op.add_column('video', sa.Column('url', sa.VARCHAR(length=140), autoincrement=False, nullable=True))
    op.create_index('ix_video_url', 'video', ['url'], unique=True)
    op.drop_index(op.f('ix_video_s3_key'), table_name='video')
    op.drop_column('video', 's3_key')
    # Swap the unique constraint back for the original plain unique index.
    op.drop_constraint("question_order_pos_key", 'question', type_='unique')
    op.create_index('ix_question_order_pos', 'question', ['order_pos'], unique=True)
    # ### end Alembic commands ###
| [
"alembic.op.drop_constraint",
"alembic.op.f",
"alembic.op.drop_column",
"sqlalchemy.VARCHAR",
"sqlalchemy.String",
"alembic.op.drop_index",
"alembic.op.create_index",
"alembic.op.create_unique_constraint"
] | [((390, 451), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_question_order_pos"""'], {'table_name': '"""question"""'}), "('ix_question_order_pos', table_name='question')\n", (403, 451), False, 'from alembic import op\n'), ((456, 541), 'alembic.op.create_unique_constraint', 'op.create_unique_constraint', (['"""question_order_pos_key"""', '"""question"""', "['order_pos']"], {}), "('question_order_pos_key', 'question', ['order_pos']\n )\n", (483, 541), False, 'from alembic import op\n'), ((706, 755), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_video_url"""'], {'table_name': '"""video"""'}), "('ix_video_url', table_name='video')\n", (719, 755), False, 'from alembic import op\n'), ((760, 790), 'alembic.op.drop_column', 'op.drop_column', (['"""video"""', '"""url"""'], {}), "('video', 'url')\n", (774, 790), False, 'from alembic import op\n'), ((1020, 1082), 'alembic.op.create_index', 'op.create_index', (['"""ix_video_url"""', '"""video"""', "['url']"], {'unique': '(True)'}), "('ix_video_url', 'video', ['url'], unique=True)\n", (1035, 1082), False, 'from alembic import op\n'), ((1150, 1183), 'alembic.op.drop_column', 'op.drop_column', (['"""video"""', '"""s3_key"""'], {}), "('video', 's3_key')\n", (1164, 1183), False, 'from alembic import op\n'), ((1188, 1260), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""question_order_pos_key"""', '"""question"""'], {'type_': '"""unique"""'}), "('question_order_pos_key', 'question', type_='unique')\n", (1206, 1260), False, 'from alembic import op\n'), ((1265, 1350), 'alembic.op.create_index', 'op.create_index', (['"""ix_question_order_pos"""', '"""question"""', "['order_pos']"], {'unique': '(True)'}), "('ix_question_order_pos', 'question', ['order_pos'], unique=True\n )\n", (1280, 1350), False, 'from alembic import op\n'), ((643, 666), 'alembic.op.f', 'op.f', (['"""ix_video_s3_key"""'], {}), "('ix_video_s3_key')\n", (647, 666), False, 'from alembic import op\n'), ((1101, 1124), 'alembic.op.f', 'op.f', 
(['"""ix_video_s3_key"""'], {}), "('ix_video_s3_key')\n", (1105, 1124), False, 'from alembic import op\n'), ((584, 605), 'sqlalchemy.String', 'sa.String', ([], {'length': '(140)'}), '(length=140)\n', (593, 605), True, 'import sqlalchemy as sa\n'), ((955, 977), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(140)'}), '(length=140)\n', (965, 977), True, 'import sqlalchemy as sa\n')] |
from setuptools import Extension, setup
# C++ extension module. `depends` lists headers whose changes should trigger
# a rebuild of the extension.
ext = Extension(
    name='vkjson',
    sources=[
        './src/commands.cpp',
        './src/commands/create/create_instance.cpp',
        './src/commands/custom/export.cpp',
        './src/commands/custom/import.cpp',
        './src/parse.cpp',
        './src/run_scope.cpp',
        './src/vkjson.cpp',
    ],
    depends=[
        # Fix: these entries previously read '.src/...' (missing slash), so
        # they pointed at nonexistent paths and header edits never triggered
        # a rebuild.
        './src/allocator.hpp',
        './src/cache_command.hpp',
        './src/cache_scope.hpp',
        './src/commands.hpp',
        './src/common.hpp',
        './src/parse.hpp',
        './src/run_scope.hpp',
        './src/symbols.hpp',
        './src/variable.hpp',
        './include/vulkan/vk_icd.h',
        './include/vulkan/vk_layer.h',
        './include/vulkan/vk_platform.h',
        './include/vulkan/vk_sdk_platform.h',
        './include/vulkan/vulkan_android.h',
        './include/vulkan/vulkan_core.h',
        './include/vulkan/vulkan_fuchsia.h',
        './include/vulkan/vulkan_ggp.h',
        './include/vulkan/vulkan_ios.h',
        './include/vulkan/vulkan_macos.h',
        './include/vulkan/vulkan_metal.h',
        './include/vulkan/vulkan_vi.h',
        './include/vulkan/vulkan_wayland.h',
        './include/vulkan/vulkan_win32.h',
        './include/vulkan/vulkan_xcb.h',
        './include/vulkan/vulkan_xlib_xrandr.h',
        './include/vulkan/vulkan_xlib.h',
        './include/vulkan/vulkan.h',
    ],
    include_dirs=['include', 'src'],
    extra_compile_args=['-fpermissive'],
    libraries=['vulkan-1'],
)
# Register both the compiled extension and the pure-Python wrapper module.
setup(
    name='vkjson',
    version='0.1.0',
    ext_modules=[ext],
    py_modules=['_vkjson'],
)
| [
"setuptools.Extension",
"setuptools.setup"
] | [((47, 1269), 'setuptools.Extension', 'Extension', ([], {'name': '"""vkjson"""', 'sources': "['./src/commands.cpp', './src/commands/create/create_instance.cpp',\n './src/commands/custom/export.cpp', './src/commands/custom/import.cpp',\n './src/parse.cpp', './src/run_scope.cpp', './src/vkjson.cpp']", 'depends': "['.src/allocator.hpp', '.src/cache_command.hpp', '.src/cache_scope.hpp',\n '.src/commands.hpp', '.src/common.hpp', '.src/parse.hpp',\n '.src/run_scope.hpp', '.src/symbols.hpp', '.src/variable.hpp',\n './include/vulkan/vk_icd.h', './include/vulkan/vk_layer.h',\n './include/vulkan/vk_platform.h', './include/vulkan/vk_sdk_platform.h',\n './include/vulkan/vulkan_android.h', './include/vulkan/vulkan_core.h',\n './include/vulkan/vulkan_fuchsia.h', './include/vulkan/vulkan_ggp.h',\n './include/vulkan/vulkan_ios.h', './include/vulkan/vulkan_macos.h',\n './include/vulkan/vulkan_metal.h', './include/vulkan/vulkan_vi.h',\n './include/vulkan/vulkan_wayland.h', './include/vulkan/vulkan_win32.h',\n './include/vulkan/vulkan_xcb.h',\n './include/vulkan/vulkan_xlib_xrandr.h',\n './include/vulkan/vulkan_xlib.h', './include/vulkan/vulkan.h']", 'include_dirs': "['include', 'src']", 'extra_compile_args': "['-fpermissive']", 'libraries': "['vulkan-1']"}), "(name='vkjson', sources=['./src/commands.cpp',\n './src/commands/create/create_instance.cpp',\n './src/commands/custom/export.cpp', './src/commands/custom/import.cpp',\n './src/parse.cpp', './src/run_scope.cpp', './src/vkjson.cpp'], depends=\n ['.src/allocator.hpp', '.src/cache_command.hpp', '.src/cache_scope.hpp',\n '.src/commands.hpp', '.src/common.hpp', '.src/parse.hpp',\n '.src/run_scope.hpp', '.src/symbols.hpp', '.src/variable.hpp',\n './include/vulkan/vk_icd.h', './include/vulkan/vk_layer.h',\n './include/vulkan/vk_platform.h', './include/vulkan/vk_sdk_platform.h',\n './include/vulkan/vulkan_android.h', './include/vulkan/vulkan_core.h',\n './include/vulkan/vulkan_fuchsia.h', './include/vulkan/vulkan_ggp.h',\n 
'./include/vulkan/vulkan_ios.h', './include/vulkan/vulkan_macos.h',\n './include/vulkan/vulkan_metal.h', './include/vulkan/vulkan_vi.h',\n './include/vulkan/vulkan_wayland.h', './include/vulkan/vulkan_win32.h',\n './include/vulkan/vulkan_xcb.h',\n './include/vulkan/vulkan_xlib_xrandr.h',\n './include/vulkan/vulkan_xlib.h', './include/vulkan/vulkan.h'],\n include_dirs=['include', 'src'], extra_compile_args=['-fpermissive'],\n libraries=['vulkan-1'])\n", (56, 1269), False, 'from setuptools import Extension, setup\n'), ((1511, 1596), 'setuptools.setup', 'setup', ([], {'name': '"""vkjson"""', 'version': '"""0.1.0"""', 'ext_modules': '[ext]', 'py_modules': "['_vkjson']"}), "(name='vkjson', version='0.1.0', ext_modules=[ext], py_modules=['_vkjson']\n )\n", (1516, 1596), False, 'from setuptools import Extension, setup\n')] |
import glob
from xml.etree import ElementTree as et
import cipush
def get_coverage_from_cobertura_xml(tree):
    """Return the overall line-coverage rate from a Cobertura XML tree."""
    coverage_el = tree.getroot()
    # Cobertura reports always have a <coverage> root element.
    assert coverage_el.tag == 'coverage'
    return float(coverage_el.attrib['line-rate'])
def get_num_tests_from_junit_xml(tree):
    """Return the total number of tests recorded in a JUnit XML tree.

    Handles both a bare ``<testsuite>`` root and a wrapper root (e.g.
    ``<testsuites>``) containing several ``<testsuite>`` children.
    """
    root = tree.getroot()
    if root.tag == 'testsuite':
        return int(root.attrib['tests'])
    return sum(int(suite.attrib['tests']) for suite in root.findall('testsuite'))
def get_duration_from_junit_xml(tree):
    """Return the total runtime in seconds recorded in a JUnit XML tree.

    Handles both a bare ``<testsuite>`` root and a wrapper root containing
    several ``<testsuite>`` children; returns 0.0 when there are none.
    """
    root = tree.getroot()
    if root.tag == 'testsuite':
        return float(root.attrib['time'])
    # Start the sum at 0.0 so an empty report yields a float, as before.
    return sum((float(suite.attrib['time']) for suite in root.findall('testsuite')), 0.0)
def aggregate_from_files(aggregate_function, path_pattern):
    """Sum ``aggregate_function`` over every XML file matching ``path_pattern``.

    Raises ``cipush.CiPushException`` when the pattern matches nothing.
    """
    matched = glob.glob(path_pattern)
    if not matched:
        raise cipush.CiPushException('no junit xml files matched with pattern: {0}'.format(path_pattern))
    total = 0
    for filepath in matched:
        total += aggregate_function(et.parse(filepath))
    return total
def coverage_from_path(path_pattern):
    """Extract the line-coverage rate from the single Cobertura XML file
    matching ``path_pattern``; raise if the match is not exactly one file."""
    matched = glob.glob(path_pattern)
    if len(matched) != 1:
        raise cipush.CiPushException(
            'coverage can only be extracted from a single cobertura '
            'formated xml file - current pattern:{0} '
            'matches more then one file'.format(path_pattern)
        )
    only_file = matched[0]
    return get_coverage_from_cobertura_xml(et.parse(only_file))
| [
"xml.etree.ElementTree.parse",
"glob.glob"
] | [((819, 842), 'glob.glob', 'glob.glob', (['path_pattern'], {}), '(path_pattern)\n', (828, 842), False, 'import glob\n'), ((1087, 1110), 'glob.glob', 'glob.glob', (['path_pattern'], {}), '(path_pattern)\n', (1096, 1110), False, 'import glob\n'), ((1437, 1452), 'xml.etree.ElementTree.parse', 'et.parse', (['file_'], {}), '(file_)\n', (1445, 1452), True, 'from xml.etree import ElementTree as et\n'), ((1007, 1018), 'xml.etree.ElementTree.parse', 'et.parse', (['p'], {}), '(p)\n', (1015, 1018), True, 'from xml.etree import ElementTree as et\n')] |
from tableschema import Table
# Data from WEB, schema from MEMORY
SOURCE = 'https://raw.githubusercontent.com/frictionlessdata/tableschema-py/master/data/data_infer.csv'
SCHEMA = {'fields': [{'name': 'id', 'type': 'integer'}, {'name': 'age', 'type': 'integer'}, {'name': 'name', 'type': 'string'}] }
# If schema is not passed it will be inferred
table = Table(SOURCE, schema=SCHEMA)
rows = table.iter()
while True:
try:
print(next(rows))
except StopIteration:
break
except Exception as exception:
print(exception)
| [
"tableschema.Table"
] | [((356, 384), 'tableschema.Table', 'Table', (['SOURCE'], {'schema': 'SCHEMA'}), '(SOURCE, schema=SCHEMA)\n', (361, 384), False, 'from tableschema import Table\n')] |
import os
import shutil
import subprocess
import hashlib
def hash_kwargs(kwargs):
kwargs = tuple(sorted(list(kwargs.items())))
kwargs_hash = hashlib.sha224(repr(kwargs)).hexdigest()
return kwargs_hash
class Sentinal(object):
def __init__(self, sentinal_filename):
self.sentinal_filename = sentinal_filename
@property
def unfinished(self):
if os.path.exists(self.sentinal_filename):
print ('sentinal file ' + self.sentinal_filename + ' exists')
return False
return True
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
with open(self.sentinal_filename, 'w') as sentinal_file:
pass
class SentinalFactory(object):
def __init__(self, filename_prefix, kwargs):
self.filename_prefix = filename_prefix
self.kwargs_hash = hash_kwargs(kwargs)
def __call__(self, name):
return Sentinal(self.filename_prefix + name + '_' + self.kwargs_hash)
def makedirs(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != 17:
raise
def rmtree(directory):
try:
shutil.rmtree(directory)
except OSError as e:
if e.errno != 2:
raise
def remove(filename):
try:
os.remove(filename)
except OSError as e:
if e.errno != 2:
raise
def symlink(filename, link_name=None, link_directory=None):
if link_name is None:
link_name = os.path.basename(filename)
if link_directory is None:
link_directory = os.getcwd()
link_filename = os.path.join(link_directory, link_name)
remove(link_filename)
filename = os.path.abspath(filename)
os.symlink(filename, link_filename)
return link_filename
class CurrentDirectory(object):
def __init__(self, directory):
self.directory = directory
def __enter__(self):
self.prev_directory = os.getcwd()
makedirs(self.directory)
os.chdir(self.directory)
def __exit__(self, *args):
os.chdir(self.prev_directory)
def wget_file(url, filename):
makedirs(os.path.dirname(filename))
subprocess.check_call(['wget', '--no-check-certificate', url, '-O', filename])
def wget_file_gunzip(url, filename):
makedirs(os.path.dirname(filename))
subprocess.check_call(['wget', '--no-check-certificate', url, '-O', filename+'.gz'])
remove(filename)
subprocess.check_call(['gunzip', filename+'.gz'])
class SafeWriteFile(object):
def __init__(self, filename):
self.filename = filename
self.temp_filename = filename + '.tmp'
def __enter__(self):
return self.temp_filename
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
os.rename(self.temp_filename, self.filename)
class InvalidInitParam(Exception):
pass
| [
"os.path.exists",
"os.makedirs",
"subprocess.check_call",
"os.rename",
"os.path.join",
"os.symlink",
"os.getcwd",
"os.chdir",
"os.path.dirname",
"os.path.basename",
"shutil.rmtree",
"os.path.abspath",
"os.remove"
] | [((1672, 1711), 'os.path.join', 'os.path.join', (['link_directory', 'link_name'], {}), '(link_directory, link_name)\n', (1684, 1711), False, 'import os\n'), ((1753, 1778), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (1768, 1778), False, 'import os\n'), ((1783, 1818), 'os.symlink', 'os.symlink', (['filename', 'link_filename'], {}), '(filename, link_filename)\n', (1793, 1818), False, 'import os\n'), ((2226, 2304), 'subprocess.check_call', 'subprocess.check_call', (["['wget', '--no-check-certificate', url, '-O', filename]"], {}), "(['wget', '--no-check-certificate', url, '-O', filename])\n", (2247, 2304), False, 'import subprocess\n'), ((2388, 2479), 'subprocess.check_call', 'subprocess.check_call', (["['wget', '--no-check-certificate', url, '-O', filename + '.gz']"], {}), "(['wget', '--no-check-certificate', url, '-O', \n filename + '.gz'])\n", (2409, 2479), False, 'import subprocess\n'), ((2498, 2549), 'subprocess.check_call', 'subprocess.check_call', (["['gunzip', filename + '.gz']"], {}), "(['gunzip', filename + '.gz'])\n", (2519, 2549), False, 'import subprocess\n'), ((386, 424), 'os.path.exists', 'os.path.exists', (['self.sentinal_filename'], {}), '(self.sentinal_filename)\n', (400, 424), False, 'import os\n'), ((1093, 1115), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (1104, 1115), False, 'import os\n'), ((1227, 1251), 'shutil.rmtree', 'shutil.rmtree', (['directory'], {}), '(directory)\n', (1240, 1251), False, 'import shutil\n'), ((1361, 1380), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (1370, 1380), False, 'import os\n'), ((1557, 1583), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1573, 1583), False, 'import os\n'), ((1640, 1651), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1649, 1651), False, 'import os\n'), ((2003, 2014), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2012, 2014), False, 'import os\n'), ((2056, 2080), 'os.chdir', 'os.chdir', 
(['self.directory'], {}), '(self.directory)\n', (2064, 2080), False, 'import os\n'), ((2120, 2149), 'os.chdir', 'os.chdir', (['self.prev_directory'], {}), '(self.prev_directory)\n', (2128, 2149), False, 'import os\n'), ((2195, 2220), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2210, 2220), False, 'import os\n'), ((2357, 2382), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2372, 2382), False, 'import os\n'), ((2849, 2893), 'os.rename', 'os.rename', (['self.temp_filename', 'self.filename'], {}), '(self.temp_filename, self.filename)\n', (2858, 2893), False, 'import os\n')] |
import numpy as np
from netCDF4 import Dataset
from datetime import datetime
from datetime import timedelta
import os
import sys
TOP = "/data_ballantine02/miyoshi-t/honda/SCALE-LETKF/BAIU2018_5.3.6"
stime = datetime( 2018, 6, 30, 0, 0, 0 )
vtime = datetime( 2018, 7, 6, 0, 0, 0 )
adt = timedelta( hours=24 )
m = 1
INFO = {"TOP": TOP, }
def get_lonlat( INFO, stime=datetime(2018,7,1), ):
mem = str(1).zfill(4)
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
with Dataset( fn, "r", format="NETCDF4") as nc:
lon = nc.variables["lon"][:]
lat = nc.variables["lat"][:]
return( lon, lat )
def get_arain( INFO, stime=datetime(2018,7,1), vtime=datetime(2018,7,1),
adt=timedelta(hours=24), m=1 ):
mem = str(m).zfill(4)
if m == 0:
mem = "mean"
fn = os.path.join( INFO["TOP"], stime.strftime('%Y%m%d%H%M%S'), "fcst_sno_np00001",
mem, "p_history.pe000000.nc" )
print( fn )
ft_max = ( vtime - stime ).total_seconds()
ft_min = ( vtime - adt - stime ).total_seconds()
with Dataset( fn, "r", format="NETCDF4") as nc:
fts = nc.variables["time"][:]
# print("time", fts/3600)
idx_s = np.abs( ( fts - ft_min ) ).argmin()
idx_e = np.abs( ( fts - ft_max ) ).argmin()
#
rain = np.sum( nc.variables["PREC"][idx_s+1:idx_e+1,:,:], axis=0 )*21600
print( rain.shape )
# print( ft_max, ft_min, idx_s, idx_e )
# print( stime + timedelta( seconds=fts[idx_s+1]),
# stime + timedelta( seconds=fts[idx_e+1]) )
# print( fts[idx_s:idx_e]/3600)
return( rain )
rain = get_arain( INFO, stime=stime, vtime=vtime, adt=adt, m=1 )
lon2d, lat2d = get_lonlat( INFO, stime=stime )
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import matplotlib.ticker as mticker
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import cartopy.feature as cfeature
fig = plt.figure(figsize=(10, 10))
lons = 110
lone = 160
lats = 10
late = 55
#central_longitude = 135.0
#central_latitude = 35.0
#ax1 = fig.add_subplot(1,1,1, projection=ccrs.LambertConformal( central_longitude=central_longitude,
# central_latitude=central_latitude,
# ))
#ax1 = fig.add_subplot(1,1,1, projection=ccrs.Mercator( central_longitude=central_longitude,
# min_latitude=min_latitude,
# max_latitude=max_latitude,
# latitude_true_scale=latitude_true_scale,
#ax1 = plt.subplot(2, 2, 1, projection=ccrs.Mercator( central_longitude=180.0, ))
#ax1 = fig.add_subplot(1,1,1, projection=ccrs.PlateCarree(central_longitude=180))
ax1 = fig.add_subplot(1,1,1, projection=ccrs.PlateCarree(central_longitude=180))
ax1.set_extent( [lons, lone, lats, late ])
#ax1.coastlines()
ax1.add_feature(cfeature.COASTLINE, linewidth=0.8)
dlon, dlat = 5, 5
#gl = ax1.gridlines(crs=ccrs.PlateCarree())
#gl.xlocator = mticker.FixedLocator(np.arange( lons, lone+dlon, dlon))
#gl.ylocator = mticker.FixedLocator(np.arange( lats, late+dlat, dlat))
xticks_lab = np.arange( lons, lone+dlon, dlon)
yticks_lab = np.arange( lats, late+dlat, dlat)
ax1.set_xticks(xticks_lab, crs=ccrs.PlateCarree())
ax1.set_yticks(yticks_lab, crs=ccrs.PlateCarree())
gl = ax1.gridlines( crs=ccrs.PlateCarree(), \
linewidth=0.5, linestyle='--', color='k', alpha=0.8)
ax1.xaxis.set_major_formatter(LongitudeFormatter(zero_direction_label=True))
ax1.yaxis.set_major_formatter(LatitudeFormatter())
SHADE = ax1.contourf( lon2d, lat2d, rain,
transform=ccrs.PlateCarree(), )
#ax1.set_xlimit( lons, lone )
#ax1.set_ylimit( lats, late )
plt.show()
sys.exit()
fig, ((ax1)) = plt.subplots(1, 1, figsize=( 8,7.))
fig.subplots_adjust( left=0.04, bottom=0.04, right=0.92, top=0.91,
wspace=0.15, hspace=0.3)
plt.show()
| [
"datetime.datetime",
"numpy.abs",
"netCDF4.Dataset",
"cartopy.crs.PlateCarree",
"numpy.sum",
"matplotlib.pyplot.figure",
"cartopy.mpl.ticker.LatitudeFormatter",
"cartopy.mpl.ticker.LongitudeFormatter",
"sys.exit",
"datetime.timedelta",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotli... | [((211, 241), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(30)', '(0)', '(0)', '(0)'], {}), '(2018, 6, 30, 0, 0, 0)\n', (219, 241), False, 'from datetime import datetime\n'), ((252, 281), 'datetime.datetime', 'datetime', (['(2018)', '(7)', '(6)', '(0)', '(0)', '(0)'], {}), '(2018, 7, 6, 0, 0, 0)\n', (260, 281), False, 'from datetime import datetime\n'), ((291, 310), 'datetime.timedelta', 'timedelta', ([], {'hours': '(24)'}), '(hours=24)\n', (300, 310), False, 'from datetime import timedelta\n'), ((2021, 2049), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (2031, 2049), True, 'import matplotlib.pyplot as plt\n'), ((3333, 3367), 'numpy.arange', 'np.arange', (['lons', '(lone + dlon)', 'dlon'], {}), '(lons, lone + dlon, dlon)\n', (3342, 3367), True, 'import numpy as np\n'), ((3381, 3415), 'numpy.arange', 'np.arange', (['lats', '(late + dlat)', 'dlat'], {}), '(lats, late + dlat, dlat)\n', (3390, 3415), True, 'import numpy as np\n'), ((3929, 3939), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3937, 3939), True, 'import matplotlib.pyplot as plt\n'), ((3941, 3951), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3949, 3951), False, 'import sys\n'), ((3968, 4004), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(8, 7.0)'}), '(1, 1, figsize=(8, 7.0))\n', (3980, 4004), True, 'import matplotlib.pyplot as plt\n'), ((4119, 4129), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4127, 4129), True, 'import matplotlib.pyplot as plt\n'), ((373, 393), 'datetime.datetime', 'datetime', (['(2018)', '(7)', '(1)'], {}), '(2018, 7, 1)\n', (381, 393), False, 'from datetime import datetime\n'), ((737, 757), 'datetime.datetime', 'datetime', (['(2018)', '(7)', '(1)'], {}), '(2018, 7, 1)\n', (745, 757), False, 'from datetime import datetime\n'), ((763, 783), 'datetime.datetime', 'datetime', (['(2018)', '(7)', '(1)'], {}), '(2018, 7, 1)\n', (771, 783), False, 'from datetime 
import datetime\n'), ((803, 822), 'datetime.timedelta', 'timedelta', ([], {'hours': '(24)'}), '(hours=24)\n', (812, 822), False, 'from datetime import timedelta\n'), ((3671, 3716), 'cartopy.mpl.ticker.LongitudeFormatter', 'LongitudeFormatter', ([], {'zero_direction_label': '(True)'}), '(zero_direction_label=True)\n', (3689, 3716), False, 'from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\n'), ((3748, 3767), 'cartopy.mpl.ticker.LatitudeFormatter', 'LatitudeFormatter', ([], {}), '()\n', (3765, 3767), False, 'from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\n'), ((569, 603), 'netCDF4.Dataset', 'Dataset', (['fn', '"""r"""'], {'format': '"""NETCDF4"""'}), "(fn, 'r', format='NETCDF4')\n", (576, 603), False, 'from netCDF4 import Dataset\n'), ((1161, 1195), 'netCDF4.Dataset', 'Dataset', (['fn', '"""r"""'], {'format': '"""NETCDF4"""'}), "(fn, 'r', format='NETCDF4')\n", (1168, 1195), False, 'from netCDF4 import Dataset\n'), ((2958, 2997), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {'central_longitude': '(180)'}), '(central_longitude=180)\n', (2974, 2997), True, 'import cartopy.crs as ccrs\n'), ((3448, 3466), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (3464, 3466), True, 'import cartopy.crs as ccrs\n'), ((3500, 3518), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (3516, 3518), True, 'import cartopy.crs as ccrs\n'), ((3545, 3563), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (3561, 3563), True, 'import cartopy.crs as ccrs\n'), ((3845, 3863), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (3861, 3863), True, 'import cartopy.crs as ccrs\n'), ((1401, 1464), 'numpy.sum', 'np.sum', (["nc.variables['PREC'][idx_s + 1:idx_e + 1, :, :]"], {'axis': '(0)'}), "(nc.variables['PREC'][idx_s + 1:idx_e + 1, :, :], axis=0)\n", (1407, 1464), True, 'import numpy as np\n'), ((1289, 1309), 'numpy.abs', 'np.abs', (['(fts - ft_min)'], {}), '(fts - ft_min)\n', (1295, 1309), 
True, 'import numpy as np\n'), ((1340, 1360), 'numpy.abs', 'np.abs', (['(fts - ft_max)'], {}), '(fts - ft_max)\n', (1346, 1360), True, 'import numpy as np\n')] |
'''
TShock, a server mod for Terraria
Copyright (C) 2011-2016 <NAME> (fka. The TShock Team)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import requests
import json
import sys
import os
import subprocess
import base64
import urllib
create_release_url = 'https://api.github.com/repos/NyxStudios/TShock/releases'
config_doc_get_url = 'https://tshock.atlassian.net/wiki/rest/api/content/%s?expand=body.storage,version,ancestors'
config_doc_put_url = 'https://tshock.atlassian.net/wiki/rest/api/content/%s'
conversion_page_url = 'https://tshock.atlassian.net/wiki/rest/api/contentbody/convert/storage'
config_desc_page = "3047451"
ssc_desc_page = "39845891"
permissions_desc_page = "3047433"
rest_desc_page = "40632322"
def convert_view_to_storage(page):
print("Converting " + str(page['id']))
confluence_header = {"Content-Type":"application/json"}
r = requests.post(conversion_page_url, auth=(os.environ["bamboo_confluence_username"], os.environ["bamboo_confluence_password"]), headers=confluence_header, data=json.dumps(page['body']['storage']), verify=True)
page['body']['storage'] = json.loads(r.text)
return page
def get_confluence_page(id):
print("Fetching page " + str(id))
confluence_header = {"Content-Type":"application/json"}
r = requests.get(config_doc_get_url % id, auth=(os.environ["bamboo_confluence_username"], os.environ["bamboo_confluence_password"]), headers=confluence_header, verify=True)
page = json.loads(r.text)
return page
def put_confluence_page(page):
print("Storing page " + str(page['id']))
confluence_header = {"Content-Type":"application/json"}
page['version']['number'] = page['version']['number'] + 1
page = convert_view_to_storage(page)
r = requests.put(config_doc_put_url % page['id'], auth=(os.environ["bamboo_confluence_username"], os.environ["bamboo_confluence_password"]), headers=confluence_header, data=json.dumps(page), verify=True)
page = json.loads(r.text)
return page
def update_confluence_page(id, content):
page = get_confluence_page(id)
page['body']['storage']['value'] = content
page['body']['storage']['representation'] = 'wiki'
put_confluence_page(page)
def read_and_update_config_on_confluence(id, file):
#Read the Config
config = ""
with open(file, "r") as f:
line = f.readline()
while (line is not ""):
config = config + line
line = f.readline()
#update confluence page
config = config.replace("{", "\{")
config = config.replace("}", "\}")
config = config.replace("[", "\[")
config = config.replace("]", "\]")
config = config.replace("\t", " ")
update_confluence_page(id, config)
#Load variables from ENV, which are put there by the bamboo build.
branch = os.environ["GIT_BRANCH"]
tag_name = os.environ["bamboo_tag_name"]
name = os.environ["bamboo_release_name"]
body = os.environ["bamboo_release_body"]
token = os.environ["bamboo_github_oauth_password"]
#build release file name using the tag, stripping the 'v' off the front ie 'v.1.2.3' => '.1.2.3' resulting in a file called 'tshock.1.2.3.zip'
release_name = 'tshock_' + tag_name[1:] + '.zip'
#invoke the mv command on the artifact from bamboo to the new name above
subprocess.call('mv tshock_release.zip ' + release_name, shell=True)
#construct the payload for the post request to github to create the release.
data = {'tag_name':tag_name, 'target_commitish':branch, 'name':name, 'body':body, 'draft':False, 'prerelease':False}
#headers for the post request with our oauth token, allowing us write access
create_headers = {'Content-Type': 'application/json', 'Authorization': 'token ' + token}
#payload is a json string, not a strong typed json object
json_data = json.dumps(data)
#make the post request, creating a release
r = requests.post(create_release_url, data=json_data, headers=create_headers)
#parse the response into an object
json_response = json.loads(r.text)
#extract the relevant information from the object needed to attach a binary to the release created previously
release_id = json_response['id']
upload_url = json_response['upload_url'].rsplit('{')[0]
#construct the post url using the release name, as that is required by the api
upload_url = upload_url + '?name=' + release_name
#headers for the post request, need to specify that our file is a zip, and how large it is
upload_headers = {'Authorization': 'token ' + token, 'Content-Type':'application/zip', 'Content-Length':str(os.path.getsize(release_name))}
#upload the binary, resulting in a complete binary
r = requests.post(upload_url, data=open(release_name, 'rb'), headers = upload_headers, verify=False)
# read_and_update_config_on_confluence(config_desc_page, "ConfigDescriptions.txt")
# read_and_update_config_on_confluence(ssc_desc_page, "ServerSideConfigDescriptions.txt")
# read_and_update_config_on_confluence(permissions_desc_page, "PermissionsDescriptions.txt")
# read_and_update_config_on_confluence(rest_desc_page, "RestDescriptions.txt") | [
"json.loads",
"requests.post",
"os.path.getsize",
"json.dumps",
"requests.get",
"subprocess.call"
] | [((3851, 3919), 'subprocess.call', 'subprocess.call', (["('mv tshock_release.zip ' + release_name)"], {'shell': '(True)'}), "('mv tshock_release.zip ' + release_name, shell=True)\n", (3866, 3919), False, 'import subprocess\n'), ((4351, 4367), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4361, 4367), False, 'import json\n'), ((4416, 4489), 'requests.post', 'requests.post', (['create_release_url'], {'data': 'json_data', 'headers': 'create_headers'}), '(create_release_url, data=json_data, headers=create_headers)\n', (4429, 4489), False, 'import requests\n'), ((4541, 4559), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (4551, 4559), False, 'import json\n'), ((1678, 1696), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (1688, 1696), False, 'import json\n'), ((1849, 2027), 'requests.get', 'requests.get', (['(config_doc_get_url % id)'], {'auth': "(os.environ['bamboo_confluence_username'], os.environ[\n 'bamboo_confluence_password'])", 'headers': 'confluence_header', 'verify': '(True)'}), "(config_doc_get_url % id, auth=(os.environ[\n 'bamboo_confluence_username'], os.environ['bamboo_confluence_password']\n ), headers=confluence_header, verify=True)\n", (1861, 2027), False, 'import requests\n'), ((2029, 2047), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (2039, 2047), False, 'import json\n'), ((2523, 2541), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (2533, 2541), False, 'import json\n'), ((5090, 5119), 'os.path.getsize', 'os.path.getsize', (['release_name'], {}), '(release_name)\n', (5105, 5119), False, 'import os\n'), ((1598, 1633), 'json.dumps', 'json.dumps', (["page['body']['storage']"], {}), "(page['body']['storage'])\n", (1608, 1633), False, 'import json\n'), ((2481, 2497), 'json.dumps', 'json.dumps', (['page'], {}), '(page)\n', (2491, 2497), False, 'import json\n')] |
#
# Copyright (c) 2011 <NAME> <EMAIL>
#
# test_cleanup_qs.py 14-Apr-2011
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import unittest
from spyder.core.settings import Settings
from spyder.processor.cleanupquery import CleanupQueryString
class CleanupQueryStringTest(unittest.TestCase):
def test_that_cleaning_qs_works(self):
s = Settings()
c = CleanupQueryString(s)
self.assertEqual("http://tesT.com/t.html?p=a",
c._cleanup_query_string("http://tesT.com/t.html?p=a#top"))
self.assertEqual("http://test.com/t.html",
c._cleanup_query_string("http://test.com/t.html?#top"))
self.assertEqual("http://test.com/t.html?test=a",
c._cleanup_query_string("http://test.com/t.html?test=a&"))
| [
"spyder.processor.cleanupquery.CleanupQueryString",
"spyder.core.settings.Settings"
] | [((848, 858), 'spyder.core.settings.Settings', 'Settings', ([], {}), '()\n', (856, 858), False, 'from spyder.core.settings import Settings\n'), ((871, 892), 'spyder.processor.cleanupquery.CleanupQueryString', 'CleanupQueryString', (['s'], {}), '(s)\n', (889, 892), False, 'from spyder.processor.cleanupquery import CleanupQueryString\n')] |
from functools import reduce
from math import pi
from neuron import h
def _new_property(obj_hierarchy, attr_name):
"""
Returns a new property, mapping attr_name to obj_hierarchy.attr_name.
For example, suppose that an object of class A has an attribute b which
itself has an attribute c which itself has an attribute d. Then placing
e = _new_property('b.c', 'd')
in the class definition of A makes A.e an alias for A.b.c.d
"""
def set(self, value):
obj = reduce(getattr, [self] + obj_hierarchy.split('.'))
setattr(obj, attr_name, value)
def get(self):
obj = reduce(getattr, [self] + obj_hierarchy.split('.'))
return getattr(obj, attr_name)
return property(fset=set, fget=get)
class eLIFNeuron:
def __init__(self, params=None):
self._sec = h.Section()
self._seg = self._sec(0.5)
self._sec.L = 1000.
self._seg.diam = 100. / pi
self._model = h.eLIF(self._seg)
if params is not None:
for k, v in params.items():
setattr(self, k, v)
g_L = _new_property('_model', 'g_L')
V_reset = _new_property('_model', 'V_reset')
t_ref = _new_property('_model', 't_ref')
E_0 = _new_property('_model', 'E_0')
E_u = _new_property('_model', 'E_u')
E_f = _new_property('_model', 'E_f')
E_d = _new_property('_model', 'E_d')
epsilon = _new_property('_model', 'epsilon')
epsilon_0 = _new_property('_model', 'epsilon_0')
alpha = _new_property('_model', 'alpha')
epsilon_c = _new_property('_model', 'epsilon_c')
delta = _new_property('_model', 'delta')
tau_e = _new_property('_model', 'tau_e')
@property
def V_th(self):
return self._model.V_th
@V_th.setter
def V_th(self, value):
self._model.V_th = value
@property
def C_m(self):
return self._seg.cm
@C_m.setter
def C_m(self, value):
self._seg.cm = value
class mAdExpNeuron:
def __init__(self, params=None):
self._sec = h.Section()
self._seg = self._sec(0.5)
self._sec.L = 1000.
self._seg.diam = 100. / pi
self._model = h.mAdExp(self._seg)
if params is not None:
for k, v in params.items():
setattr(self, k, v)
g_L = _new_property('_model', 'g_L')
V_reset = _new_property('_model', 'V_reset')
t_ref = _new_property('_model', 't_ref')
b = _new_property('_model', 'b')
a = _new_property('_model', 'a')
tau_w = _new_property('_model', 'tau_w')
Delta_T = _new_property('_model', 'Delta_T')
V_peak = _new_property('_model', 'V_peak')
E_0 = _new_property('_model', 'E_0')
E_u = _new_property('_model', 'E_u')
E_f = _new_property('_model', 'E_f')
E_d = _new_property('_model', 'E_d')
epsilon_0 = _new_property('_model', 'epsilon_0')
alpha = _new_property('_model', 'alpha')
epsilon_c = _new_property('_model', 'epsilon_c')
delta = _new_property('_model', 'delta')
gamma = _new_property('_model', 'gamma')
tau_e = _new_property('_model', 'tau_e')
@property
def V_th(self):
return self._model.V_th
@V_th.setter
def V_th(self, value):
self._model.V_th = value
self._model.V_spike = value + 5*self.Delta_T
@property
def C_m(self):
return self._seg.cm
@C_m.setter
def C_m(self, value):
self._seg.cm = value
| [
"neuron.h.Section",
"neuron.h.eLIF",
"neuron.h.mAdExp"
] | [((833, 844), 'neuron.h.Section', 'h.Section', ([], {}), '()\n', (842, 844), False, 'from neuron import h\n'), ((966, 983), 'neuron.h.eLIF', 'h.eLIF', (['self._seg'], {}), '(self._seg)\n', (972, 983), False, 'from neuron import h\n'), ((2043, 2054), 'neuron.h.Section', 'h.Section', ([], {}), '()\n', (2052, 2054), False, 'from neuron import h\n'), ((2176, 2195), 'neuron.h.mAdExp', 'h.mAdExp', (['self._seg'], {}), '(self._seg)\n', (2184, 2195), False, 'from neuron import h\n')] |
import os
from os import path
from glob import glob
import json
import re
import pickle
import argparse
import numpy as np
from scipy import ndimage, misc
from config import cfg
from parse_ingredients import preprocess_ingredients
def load_recipe(filename):
"""Load a single recipe file
"""
with open(filename, 'r') as f:
recipes = json.load(f)
print('Loaded {:,} recipes from {}'.format(len(recipes), filename))
return recipes
def clean_recipe_ingredients(recipes):
"""Clean and parse recipe ingedients
"""
recipes_clean = {}
for key, value in recipes.items():
if "ingredients" not in value.keys():
continue
value['ingredients_clean'] = preprocess_ingredients(value['ingredients'])
recipes_clean[key] = value
return recipes_clean
def load_recipes():
"""Load all raw recipes and combine to single dataset (json format)
"""
recipes = {}
print(path.join(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json'))
for filename in glob(path.join(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json')):
print (filename)
recipes.update(load_recipe(filename))
print('Loaded {:,} recipes in total'.format(len(recipes)))
return clean_recipe_ingredients(recipes)
| [
"json.load",
"parse_ingredients.preprocess_ingredients",
"os.path.join"
] | [((338, 350), 'json.load', 'json.load', (['f'], {}), '(f)\n', (347, 350), False, 'import json\n'), ((662, 706), 'parse_ingredients.preprocess_ingredients', 'preprocess_ingredients', (["value['ingredients']"], {}), "(value['ingredients'])\n", (684, 706), False, 'from parse_ingredients import preprocess_ingredients\n'), ((874, 927), 'os.path.join', 'path.join', (['cfg.DATA.RAW_DATA_DIR', '"""recipes_raw*.json"""'], {}), "(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json')\n", (883, 927), False, 'from os import path\n'), ((951, 1004), 'os.path.join', 'path.join', (['cfg.DATA.RAW_DATA_DIR', '"""recipes_raw*.json"""'], {}), "(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json')\n", (960, 1004), False, 'from os import path\n')] |
#!/usr/bin/env python3
import argparse
import Bio.Seq
import Bio.SeqIO
import Bio.SeqRecord
import datetime
import gemmi
import glob
import gzip
import modules.create_mr_set.utils.models as models
import os
import modules.create_mr_set.utils.pdbtools as pdbtools
import random
import modules.create_mr_set.utils.rcsb as rcsb
import sys
import modules.create_mr_set.tasks.tasks as tasks
import urllib.request
import modules.create_mr_set.utils.utils as utils
import uuid
import xml.etree.ElementTree as ET
## GET SEQUENCES
def download_sequences(structures):
print("Downloading sequences ...")
ids = [s.id for s in structures.values()]
url = "https://www.rcsb.org/pdb/download/downloadFastaFiles.do"
url += "?structureIdList=%s" % ",".join(ids)
url += "&compressionType=uncompressed"
urllib.request.urlretrieve(url, "sequences.fasta")
def extract_sequences(structures):
print("Extracting sequences ...")
for record in Bio.SeqIO.parse("sequences.fasta", "fasta"):
structure_id = record.id[:4]
# print(structure_id)
chain_id = record.id[5:6]
# print(chain_id)
if structure_id in structures:
structure = structures[structure_id]
if chain_id in structure.chains:
chain = structure.chains[chain_id]
chain.add_metadata("seq", str(record.seq))
chain.add_metadata("length", len(chain.metadata["seq"]))
def write_sequence(structure, path):
records = []
for chain in structure.chains.values():
record = Bio.SeqRecord.SeqRecord(Bio.Seq.Seq(chain.metadata["seq"]),
id="%s:%s" % (structure.id, chain.id), description="")
records.append(record)
Bio.SeqIO.write(records, path, "fasta")
return structure
def write_deposited_sequence(key, structure, args):
structure = write_sequence(structure, structure.path("deposited.fasta"))
return key, structure
def remove_duplicate_chains(key, structure, args):
seq_copies_dict = {}
for chain_id, chain in sorted(structure.chains.items()):
if chain.metadata["seq"] not in seq_copies_dict:
seq_copies_dict[chain.metadata["seq"]] = 1
else:
seq_copies_dict[chain.metadata["seq"]] += 1
del structure.chains[chain_id]
chain.remove_directory()
for chain in structure.chains.values():
chain.add_metadata("copies", seq_copies_dict[chain.metadata["seq"]])
return key, structure
def write_unique_sequence(key, structure, args):
structure = write_sequence(structure, structure.path("unique.fasta"))
return key, structure
def get_sequences(structures, args):
utils.print_section_title("Getting Full Sequences")
download_sequences(structures)
extract_sequences(structures)
utils.parallel("Writing deposited sequences", write_deposited_sequence, structures, args, args.jobs)
utils.parallel("Removing duplicate chains", remove_duplicate_chains, structures, args, args.jobs)
utils.parallel("Writing unique sequences", write_unique_sequence, structures, args, args.jobs)
print("")
| [
"modules.create_mr_set.utils.utils.parallel",
"modules.create_mr_set.utils.utils.print_section_title"
] | [((2528, 2579), 'modules.create_mr_set.utils.utils.print_section_title', 'utils.print_section_title', (['"""Getting Full Sequences"""'], {}), "('Getting Full Sequences')\n", (2553, 2579), True, 'import modules.create_mr_set.utils.utils as utils\n'), ((2647, 2751), 'modules.create_mr_set.utils.utils.parallel', 'utils.parallel', (['"""Writing deposited sequences"""', 'write_deposited_sequence', 'structures', 'args', 'args.jobs'], {}), "('Writing deposited sequences', write_deposited_sequence,\n structures, args, args.jobs)\n", (2661, 2751), True, 'import modules.create_mr_set.utils.utils as utils\n'), ((2750, 2851), 'modules.create_mr_set.utils.utils.parallel', 'utils.parallel', (['"""Removing duplicate chains"""', 'remove_duplicate_chains', 'structures', 'args', 'args.jobs'], {}), "('Removing duplicate chains', remove_duplicate_chains,\n structures, args, args.jobs)\n", (2764, 2851), True, 'import modules.create_mr_set.utils.utils as utils\n'), ((2850, 2948), 'modules.create_mr_set.utils.utils.parallel', 'utils.parallel', (['"""Writing unique sequences"""', 'write_unique_sequence', 'structures', 'args', 'args.jobs'], {}), "('Writing unique sequences', write_unique_sequence,\n structures, args, args.jobs)\n", (2864, 2948), True, 'import modules.create_mr_set.utils.utils as utils\n')] |
import os
import pymongo
import argparse
import pandas as pd
from stages.first.first import first
def main():
    """Repeatedly pick an unprocessed antigen-antibody pair from MongoDB and run Rosetta on it.

    Command line:
        --p  path to the Rosetta installation
        --n  number of models (loop iterations) to run
    """
    # Folder where the generated complexes will be stored.
    results_directory = os.path.join(os.getcwd(), './results')
    # SECURITY: credentials are hard-coded in the connection string; they
    # should be read from the environment or a secrets store instead.
    myclient = pymongo.MongoClient(
        "mongodb+srv://claudio:<EMAIL>/myFirstDatabase?retryWrites=true&w=majority")
    mydb = myclient["complex-rosetta"]
    mycol = mydb["complexs"]
    if not os.path.isdir(results_directory):
        os.mkdir(results_directory)
    # Dataset summarising every antigen-antibody interaction.
    df = pd.read_csv(os.path.join(
        os.getcwd(), "./datasets/antigen_antibody_interactions.csv"))
    parser = argparse.ArgumentParser(description="Rosetta")
    parser.add_argument("--p", help="Path of rosetta")
    parser.add_argument("--n", help="Number of models")
    args = parser.parse_args()
    rosetta_path = args.p
    rosetta_models = args.n
    for _ in range(int(rosetta_models)):
        # Draw one random document that has not been processed yet.
        element = mycol.aggregate([
            {
                "$match": {
                    "executed": False,
                }
            },
            {"$sample": {"size": 1}}
        ])
        antigen = ""
        antibody = ""
        # Renamed from `i`, which shadowed the outer loop index.
        for doc in element:
            antigen = doc["antigen"]
            antibody = doc["antibody"]
        # Compute the row mask once instead of four times.
        pair_mask = (df["antibody"] == antibody) & (df["antigen"] == antigen)
        antibody_df = df["antibody"][pair_mask].iloc[0]
        antigen_df = df["antigen"][pair_mask].iloc[0]
        antigen_pdb_df = df["antigen_pdb"][pair_mask].iloc[0]
        antigen_chain_df = df["chain"][pair_mask].iloc[0]
        result = first(antibody=antibody_df, antigen=antigen_df, antigen_pdb=antigen_pdb_df,
                       antigen_chain=antigen_chain_df, rosetta_path=rosetta_path)
        # Kept as `== False` on purpose: `first` may return other falsy values
        # that should NOT be treated as a failure -- confirm before changing.
        if result == False:
            mycol.update_one({
                "antigen": antigen,
                "antibody": antibody
            }, {
                "$set": {
                    "executed": True,
                    "success": False
                }
            })
            continue
        mycol.update_one({
            "antigen": antigen,
            "antibody": antibody
        }, {
            "$set": {
                "executed": True,
                "success": True
            }
        })


if __name__ == "__main__":
    main()
| [
"argparse.ArgumentParser",
"os.getcwd",
"os.path.isdir",
"os.mkdir",
"stages.first.first.first",
"pymongo.MongoClient"
] | [((253, 359), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb+srv://claudio:<EMAIL>/myFirstDatabase?retryWrites=true&w=majority"""'], {}), "(\n 'mongodb+srv://claudio:<EMAIL>/myFirstDatabase?retryWrites=true&w=majority'\n )\n", (272, 359), False, 'import pymongo\n'), ((713, 759), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Rosetta"""'}), "(description='Rosetta')\n", (736, 759), False, 'import argparse\n'), ((211, 222), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (220, 222), False, 'import os\n'), ((441, 473), 'os.path.isdir', 'os.path.isdir', (['results_directory'], {}), '(results_directory)\n', (454, 473), False, 'import os\n'), ((483, 510), 'os.mkdir', 'os.mkdir', (['results_directory'], {}), '(results_directory)\n', (491, 510), False, 'import os\n'), ((1834, 1972), 'stages.first.first.first', 'first', ([], {'antibody': 'antibody_df', 'antigen': 'antigen_df', 'antigen_pdb': 'antigen_pdb_df', 'antigen_chain': 'antigen_chain_df', 'rosetta_path': 'rosetta_path'}), '(antibody=antibody_df, antigen=antigen_df, antigen_pdb=antigen_pdb_df,\n antigen_chain=antigen_chain_df, rosetta_path=rosetta_path)\n', (1839, 1972), False, 'from stages.first.first import first\n'), ((637, 648), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (646, 648), False, 'import os\n')] |
# ---
# jupyter:
# jupytext:
# formats: ipynb,py
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.9.1+dev
# kernelspec:
# display_name: Python [conda env:annorxiver]
# language: python
# name: conda-env-annorxiver-py
# ---
# # Re-Run Analyses with Polka et. al. Subset
# This notebook was created in response to Polka et al. Group's inquiry on training a logistic regression model on preprints posted recently rather than preprints from 2019 and below.
# Overall their subset can be separated with a few features.
# +
from pathlib import Path
import sys
from gensim.models import Word2Vec
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import pandas as pd
import plotnine as p9
import requests
from scipy.spatial.distance import cdist
from scipy.stats import linregress
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LogisticRegressionCV, LogisticRegression
from sklearn.preprocessing import StandardScaler
from sklearn.tree import DecisionTreeClassifier, export_graphviz
import spacy
import tqdm
from annorxiver_modules.document_helper import generate_doc_vector
mpl.rcParams["figure.dpi"] = 250
# -
# # Random BioRxiv Sample
# Load the manually-annotated preprint pairs used elsewhere in the analysis.
manual_papers_df = pd.read_csv(str(Path("output/all_pairs_2021-02-11.csv")))
manual_papers_df.head().T
# Ask the bioRxiv API how many preprints were posted in the window.
api_url = "https://api.biorxiv.org/details/biorxiv/2020-01-01/2020-04-30"
response = requests.get(api_url)
content = response.json()
total_papers = content["messages"][0]["total"]
total_papers
# Fixed seed so the random 100-paper sample is reproducible.
np.random.seed(100)
selected_biorxiv_papers = np.random.randint(0, total_papers, 100)
selected_biorxiv_papers.sort()
selected_biorxiv_papers
# The API pages results 100 at a time; group the sampled indices by page cursor.
paper_cursor = {}
for paper in selected_biorxiv_papers:
    cursor = int(np.ceil(int(paper / 100)))
    if cursor not in paper_cursor:
        paper_cursor[cursor] = []
    paper_cursor[cursor].append(paper)
paper_cursor
published_doi_map = []
for paper in tqdm.tqdm(paper_cursor):
    api_url = f"https://api.biorxiv.org/details/biorxiv/2020-01-01/2020-04-30/{paper}"
    response = requests.get(api_url)
    content = response.json()
    collection = content["collection"]
    for paper_idx in paper_cursor[paper]:
        # Resolve the sampled index to a DOI, then fetch its latest version.
        user_doi = collection[paper_idx % 100]["doi"]
        file_name = user_doi.split("/")[-1]
        api_url = f"https://api.biorxiv.org/details/biorxiv/{user_doi}"
        response = requests.get(api_url)
        content = response.json()
        latest_paper = content["collection"][-1]
        version_count = len(content["collection"])
        doc_url = "http://biorxiv.org/content"
        file_url = f"{doc_url}/early/{latest_paper['date'].replace('-', '/')}/{file_name}.source.xml"
        response = requests.get(file_url)
        # Persist the source XML for the downstream embedding step.
        with open(
            f"output/biorxiv_xml_files_recent/{file_name}_v{version_count}.xml", "wb"
        ) as outfile:
            outfile.write(response.content)
# # Document Embeddings
# ## Convert New biorxiv subset
biorxiv_documents = [
    Path(x.name) for x in list(Path("output/biorxiv_xml_files_recent").rglob("*xml"))
]
# XPath selecting abstract and body text used for document-vector generation.
biorxiv_xpath_str = "//abstract/p|//abstract/title|//body/sec//p|//body/sec//title"
word_model = Word2Vec.load(
    str(Path("../word_vector_experiment/output/word2vec_models/300/biorxiv_300.model"))
)
# One 300-dimensional document vector per downloaded XML file.
biorxiv_document_map = {
    document: generate_doc_vector(
        word_model,
        document_path=str(Path("output/biorxiv_xml_files_recent") / document),
        xpath=biorxiv_xpath_str,
    )
    for document in tqdm.tqdm_notebook(biorxiv_documents)
}
# +
biorxiv_vec_df = (
    pd.DataFrame.from_dict(biorxiv_document_map, orient="index")
    .rename(columns={col: f"feat_{col}" for col in range(int(300))})
    .rename_axis("document")
    .reset_index()
)
biorxiv_vec_df.to_csv(
    "output/random_recent_biorxiv_subset_embeddings.tsv", sep="\t", index=False
)
biorxiv_vec_df.head().T
# -
# ## Load the Documents
polka_preprints_df = pd.read_csv("output/polka_et_al_biorxiv_embeddings.tsv", sep="\t")
polka_preprints_df.head()
pca_components = pd.read_csv(
    Path("../pca_association_experiment/output/word_pca_similarity/pca_components.tsv"),
    sep="\t",
)
pca_components.head()
# ## PCA Components
# This section aims to see which principal components have a high association with Polka et al's subset. Furthermore, we also aim to see if we can use linear models to explain which PCs affect preprint prediction.
# Cosine similarity between each Polka preprint vector and each PC.
document_pca_sim = 1 - cdist(
    polka_preprints_df.drop("document", axis=1).values, pca_components.values, "cosine"
)
print(document_pca_sim.shape)
document_pca_sim
document_to_pca_map = {
    document: document_pca_sim[idx, :]
    for idx, document in enumerate(polka_preprints_df.document.tolist())
}
polka_pca_sim_df = (
    pd.DataFrame.from_dict(document_to_pca_map, orient="index")
    .rename(index=str, columns={col: f"pc{col+1}" for col in range(int(300))})
    .reset_index()
    .rename(index=str, columns={"index": "document"})
)
# polka_pca_sim_df.to_csv("output/polka_pca_enrichment.tsv", sep="\t")
polka_pca_sim_df = polka_pca_sim_df.assign(label="polka")
polka_pca_sim_df.head()
# Same similarity computation for the random bioRxiv sample.
document_pca_sim = 1 - cdist(
    biorxiv_vec_df.drop("document", axis=1).values,
    pca_components.values,
    "cosine",
)
print(document_pca_sim.shape)
document_pca_sim
document_to_pca_map = {
    document: document_pca_sim[idx, :]
    for idx, document in enumerate(biorxiv_vec_df.document.tolist())
}
biorxiv_pca_sim_df = (
    pd.DataFrame.from_dict(document_to_pca_map, orient="index")
    .rename(index=str, columns={col: f"pc{col+1}" for col in range(int(300))})
    .reset_index()
    .rename(index=str, columns={"index": "document"})
    .assign(label="biorxiv")
)
biorxiv_pca_sim_df.head()
# ## PC Regression
# ### Logistic Regression
# Goal here is to determine if we can figure out which PCs separate the bioRxiv subset from Polka et al.'s subset. Given that their dataset is only 60 papers we downsampled our dataset to contain only 60 papers.
dataset_df = biorxiv_pca_sim_df.append(polka_pca_sim_df)
dataset_df.head()
# L1-penalised logistic regression over the first 50 PCs; C chosen by 10-fold CV.
model = LogisticRegressionCV(
    cv=10, Cs=100, max_iter=1000, penalty="l1", solver="liblinear"
)
model.fit(
    StandardScaler().fit_transform(dataset_df[[f"pc{idx+1}" for idx in range(50)]]),
    dataset_df["label"],
)
# NOTE(review): float equality against model.C_ — works because C_ is taken
# verbatim from Cs_, but an index lookup would be safer; confirm before changing.
best_result = list(filter(lambda x: x[1] == model.C_, enumerate(model.Cs_)))[0]
print(best_result)
print("Best CV Fold")
print(model.scores_["polka"][:, best_result[0]])
model.scores_["polka"][:, best_result[0]].mean()
model_weights_df = pd.DataFrame.from_dict(
    {
        "weight": model.coef_[0],
        "pc": list(range(1, 51)),
    }
)
model_weights_df["pc"] = pd.Categorical(model_weights_df["pc"])
model_weights_df.head()
g = (
    p9.ggplot(model_weights_df, p9.aes(x="pc", y="weight"))
    + p9.geom_col(position=p9.position_dodge(width=5), fill="#253494")
    + p9.coord_flip()
    + p9.scale_x_discrete(limits=list(sorted(range(1, 51), reverse=True)))
    + p9.theme_seaborn(context="paper", style="ticks", font_scale=1.1, font="Arial")
    + p9.theme(figure_size=(10, 8))
    + p9.labs(
        title="Regression Model Weights", x="Princpial Component", y="Model Weight"
    )
)
# g.save("output/figures/pca_log_regression_weights.svg")
# g.save("output/figures/pca_log_regression_weights.png", dpi=250)
print(g)
fold_features = model.coefs_paths_["polka"].transpose(1, 0, 2)
model_performance_df = pd.DataFrame.from_dict(
    {
        "feat_num": ((fold_features.astype(bool).sum(axis=1)) > 0).sum(axis=1),
        "C": model.Cs_,
        "score": model.scores_["polka"].mean(axis=0),
    }
)
model_performance_df.head()
# +
# Twin-axis plot: surviving feature count and CV accuracy as a function of C.
fig, ax1 = plt.subplots()
ax1.set_xscale("log")
ax2 = plt.twinx()
ax1.plot(
    model_performance_df.C.tolist(),
    model_performance_df.feat_num.tolist(),
    label="Features",
    marker=".",
)
ax1.set_ylabel("# of Features")
ax1.set_xlabel("Inverse Regularization (C)")
ax1.legend(loc=0)
ax2.plot(
    model_performance_df.C.tolist(),
    model_performance_df.score.tolist(),
    label="Score",
    marker=".",
    color="green",
)
ax2.set_ylabel("Score (Accuracy %)")
ax2.legend(loc=4)
# plt.savefig("output/preprint_classifier_results.png")
# -
plot_path = list(
    zip(
        model.Cs_,
        model.scores_["polka"].transpose(),
        model.coefs_paths_["polka"].transpose(1, 0, 2),
    )
)
data_records = []
# Refit at a narrow band of C values (indices 33:40) to record which PCs
# survive the L1 penalty at each regularisation strength.
for cs in plot_path[33:40]:
    model = LogisticRegression(C=cs[0], max_iter=1000, penalty="l1", solver="liblinear")
    model.fit(
        StandardScaler().fit_transform(dataset_df[[f"pc{idx+1}" for idx in range(50)]]),
        dataset_df["label"],
    )
    data_records.append(
        {
            "C": cs[0],
            "PCs": ",".join(map(str, model.coef_.nonzero()[1] + 1)),
            "feat_num": len(model.coef_.nonzero()[1]),
            "accuracy": cs[1].mean(),
        }
    )
model_coefs_df = pd.DataFrame.from_records(data_records)
model_coefs_df
| [
"pandas.read_csv",
"plotnine.coord_flip",
"plotnine.aes",
"plotnine.position_dodge",
"sklearn.linear_model.LogisticRegressionCV",
"pathlib.Path",
"matplotlib.pyplot.twinx",
"pandas.DataFrame.from_dict",
"pandas.Categorical",
"numpy.random.seed",
"tqdm.tqdm_notebook",
"requests.get",
"plotnin... | [((1494, 1515), 'requests.get', 'requests.get', (['api_url'], {}), '(api_url)\n', (1506, 1515), False, 'import requests\n'), ((1603, 1622), 'numpy.random.seed', 'np.random.seed', (['(100)'], {}), '(100)\n', (1617, 1622), True, 'import numpy as np\n'), ((1649, 1688), 'numpy.random.randint', 'np.random.randint', (['(0)', 'total_papers', '(100)'], {}), '(0, total_papers, 100)\n', (1666, 1688), True, 'import numpy as np\n'), ((2003, 2026), 'tqdm.tqdm', 'tqdm.tqdm', (['paper_cursor'], {}), '(paper_cursor)\n', (2012, 2026), False, 'import tqdm\n'), ((3996, 4062), 'pandas.read_csv', 'pd.read_csv', (['"""output/polka_et_al_biorxiv_embeddings.tsv"""'], {'sep': '"""\t"""'}), "('output/polka_et_al_biorxiv_embeddings.tsv', sep='\\t')\n", (4007, 4062), True, 'import pandas as pd\n'), ((6133, 6222), 'sklearn.linear_model.LogisticRegressionCV', 'LogisticRegressionCV', ([], {'cv': '(10)', 'Cs': '(100)', 'max_iter': '(1000)', 'penalty': '"""l1"""', 'solver': '"""liblinear"""'}), "(cv=10, Cs=100, max_iter=1000, penalty='l1', solver=\n 'liblinear')\n", (6153, 6222), False, 'from sklearn.linear_model import LogisticRegressionCV, LogisticRegression\n'), ((6719, 6757), 'pandas.Categorical', 'pd.Categorical', (["model_weights_df['pc']"], {}), "(model_weights_df['pc'])\n", (6733, 6757), True, 'import pandas as pd\n'), ((7706, 7720), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7718, 7720), True, 'import matplotlib.pyplot as plt\n'), ((7749, 7760), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (7758, 7760), True, 'import matplotlib.pyplot as plt\n'), ((8933, 8972), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['data_records'], {}), '(data_records)\n', (8958, 8972), True, 'import pandas as pd\n'), ((2130, 2151), 'requests.get', 'requests.get', (['api_url'], {}), '(api_url)\n', (2142, 2151), False, 'import requests\n'), ((3060, 3072), 'pathlib.Path', 'Path', (['x.name'], {}), '(x.name)\n', (3064, 3072), False, 'from pathlib 
import Path\n'), ((4124, 4217), 'pathlib.Path', 'Path', (['"""../pca_association_experiment/output/word_pca_similarity/pca_components.tsv"""'], {}), "(\n '../pca_association_experiment/output/word_pca_similarity/pca_components.tsv'\n )\n", (4128, 4217), False, 'from pathlib import Path\n'), ((7144, 7233), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Regression Model Weights"""', 'x': '"""Princpial Component"""', 'y': '"""Model Weight"""'}), "(title='Regression Model Weights', x='Princpial Component', y=\n 'Model Weight')\n", (7151, 7233), True, 'import plotnine as p9\n'), ((8462, 8538), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'C': 'cs[0]', 'max_iter': '(1000)', 'penalty': '"""l1"""', 'solver': '"""liblinear"""'}), "(C=cs[0], max_iter=1000, penalty='l1', solver='liblinear')\n", (8480, 8538), False, 'from sklearn.linear_model import LogisticRegressionCV, LogisticRegression\n'), ((1340, 1379), 'pathlib.Path', 'Path', (['"""output/all_pairs_2021-02-11.csv"""'], {}), "('output/all_pairs_2021-02-11.csv')\n", (1344, 1379), False, 'from pathlib import Path\n'), ((2454, 2475), 'requests.get', 'requests.get', (['api_url'], {}), '(api_url)\n', (2466, 2475), False, 'import requests\n'), ((2780, 2802), 'requests.get', 'requests.get', (['file_url'], {}), '(file_url)\n', (2792, 2802), False, 'import requests\n'), ((3265, 3343), 'pathlib.Path', 'Path', (['"""../word_vector_experiment/output/word2vec_models/300/biorxiv_300.model"""'], {}), "('../word_vector_experiment/output/word2vec_models/300/biorxiv_300.model')\n", (3269, 3343), False, 'from pathlib import Path\n'), ((3566, 3603), 'tqdm.tqdm_notebook', 'tqdm.tqdm_notebook', (['biorxiv_documents'], {}), '(biorxiv_documents)\n', (3584, 3603), False, 'import tqdm\n'), ((7108, 7137), 'plotnine.theme', 'p9.theme', ([], {'figure_size': '(10, 8)'}), '(figure_size=(10, 8))\n', (7116, 7137), True, 'import plotnine as p9\n'), ((6239, 6255), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], 
{}), '()\n', (6253, 6255), False, 'from sklearn.preprocessing import StandardScaler\n'), ((7023, 7101), 'plotnine.theme_seaborn', 'p9.theme_seaborn', ([], {'context': '"""paper"""', 'style': '"""ticks"""', 'font_scale': '(1.1)', 'font': '"""Arial"""'}), "(context='paper', style='ticks', font_scale=1.1, font='Arial')\n", (7039, 7101), True, 'import plotnine as p9\n'), ((8562, 8578), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (8576, 8578), False, 'from sklearn.preprocessing import StandardScaler\n'), ((3087, 3126), 'pathlib.Path', 'Path', (['"""output/biorxiv_xml_files_recent"""'], {}), "('output/biorxiv_xml_files_recent')\n", (3091, 3126), False, 'from pathlib import Path\n'), ((3454, 3493), 'pathlib.Path', 'Path', (['"""output/biorxiv_xml_files_recent"""'], {}), "('output/biorxiv_xml_files_recent')\n", (3458, 3493), False, 'from pathlib import Path\n'), ((6926, 6941), 'plotnine.coord_flip', 'p9.coord_flip', ([], {}), '()\n', (6939, 6941), True, 'import plotnine as p9\n'), ((3634, 3694), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['biorxiv_document_map'], {'orient': '"""index"""'}), "(biorxiv_document_map, orient='index')\n", (3656, 3694), True, 'import pandas as pd\n'), ((4816, 4875), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['document_to_pca_map'], {'orient': '"""index"""'}), "(document_to_pca_map, orient='index')\n", (4838, 4875), True, 'import pandas as pd\n'), ((6821, 6847), 'plotnine.aes', 'p9.aes', ([], {'x': '"""pc"""', 'y': '"""weight"""'}), "(x='pc', y='weight')\n", (6827, 6847), True, 'import plotnine as p9\n'), ((5519, 5578), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['document_to_pca_map'], {'orient': '"""index"""'}), "(document_to_pca_map, orient='index')\n", (5541, 5578), True, 'import pandas as pd\n'), ((6876, 6902), 'plotnine.position_dodge', 'p9.position_dodge', ([], {'width': '(5)'}), '(width=5)\n', (6893, 6902), True, 'import plotnine as p9\n')] |
"""
Problem 55: Lychrel numbers
https://projecteuler.net/problem=55
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic.
Not all numbers produce palindromes so quickly. For example,
349 + 943 = 1292,
1292 + 2921 = 4213
4213 + 3124 = 7337
That is, 349 took three iterations to arrive at a palindrome.
Although no one has proved it yet, it is thought that some numbers, like 196,
never produce a palindrome. A number that never forms a palindrome through the reverse and
add process is called a Lychrel number.
Due to the theoretical nature of these numbers, and for the purpose of this problem,
we shall assume that a number is Lychrel until proven otherwise.
In addition you are given that for every number below ten-thousand, it will either
(i) become a palindrome in less than fifty iterations, or,
(ii) no one, with all the computing power that exists,
has managed so far to map it to a palindrome.
In fact, 10677 is the first number to be shown to require over fifty iterations
before producing a palindrome: 4668731596684224866951378664 (53 iterations, 28-digits).
Surprisingly, there are palindromic numbers that are themselves Lychrel numbers;
the first example is 4994.
How many Lychrel numbers are there below ten-thousand?
"""
from typing import Optional, Tuple
from src.common.calculations import calculate_large_sum
from src.common.palindromes import is_palindromic_number
def get_number_iterations_to_palindrome(
        number: int,
        max_iterations: int = 50
) -> Optional[Tuple[int, str]]:
    """
    Repeatedly add `number` to its digit-reversal until a palindrome appears.
    Returns a tuple `(<number_of_iterations>, <result_palindrome_number>)`
    on success, or `None` when no palindrome shows up within `max_iterations`
    reverse-and-add steps.
    """
    current = str(number)
    for iteration in range(1, max_iterations + 1):
        current = calculate_large_sum((current, current[::-1]))
        if is_palindromic_number(current):
            return iteration, current
    return None
def get_number_of_lychrel_numbers(threshold: int, max_iterations: int = 50) -> int:
    """Get the number of Lychrel numbers below `threshold`."""
    return sum(
        1 for candidate in range(1, threshold)
        if get_number_iterations_to_palindrome(candidate, max_iterations) is None
    )
def main() -> None:
    """Report how many Lychrel numbers exist below ten thousand."""
    threshold = 10_000
    number_of_lychrel_numbers = get_number_of_lychrel_numbers(threshold, max_iterations=50)
    print(f'There are {number_of_lychrel_numbers} Lychrel numbers below {threshold:,}.')


if __name__ == '__main__':
    main()
| [
"src.common.calculations.calculate_large_sum",
"src.common.palindromes.is_palindromic_number"
] | [((1984, 2041), 'src.common.calculations.calculate_large_sum', 'calculate_large_sum', (['(number_string, number_string[::-1])'], {}), '((number_string, number_string[::-1]))\n', (2003, 2041), False, 'from src.common.calculations import calculate_large_sum\n'), ((2053, 2089), 'src.common.palindromes.is_palindromic_number', 'is_palindromic_number', (['number_string'], {}), '(number_string)\n', (2074, 2089), False, 'from src.common.palindromes import is_palindromic_number\n')] |
from common.config import get_env_variable_value, get_secret_value
class ProductionConfig:
    """Production application configuration - pulled from task environment."""
    # Plain (non-secret) environment variables; all are required at startup,
    # so import of this module fails fast when one is missing.
    CLIENT_ID = get_env_variable_value('CLIENT_ID', is_mandatory=True)
    QUEUE_URL = get_env_variable_value('CLIENT_SQS_URL', is_mandatory=True)
    SFTP_HOST = get_env_variable_value('SFTP_HOST', is_mandatory=True)
    SFTP_PORT = get_env_variable_value('SFTP_PORT', is_mandatory=True)
    # Credentials come from the secret store rather than plain env variables.
    SFTP_USERNAME = get_secret_value('SFTP_USER', is_mandatory=True)
    SFTP_PASSWORD = get_secret_value('SFTP_PASSWORD', is_mandatory=True)
    # Local scratch path; presumably a staging file for transfers — confirm with consumer code.
    TEMP_FILE_PATH = '/tmp/sftp_tmp.json'
| [
"common.config.get_secret_value",
"common.config.get_env_variable_value"
] | [((188, 242), 'common.config.get_env_variable_value', 'get_env_variable_value', (['"""CLIENT_ID"""'], {'is_mandatory': '(True)'}), "('CLIENT_ID', is_mandatory=True)\n", (210, 242), False, 'from common.config import get_env_variable_value, get_secret_value\n'), ((259, 318), 'common.config.get_env_variable_value', 'get_env_variable_value', (['"""CLIENT_SQS_URL"""'], {'is_mandatory': '(True)'}), "('CLIENT_SQS_URL', is_mandatory=True)\n", (281, 318), False, 'from common.config import get_env_variable_value, get_secret_value\n'), ((336, 390), 'common.config.get_env_variable_value', 'get_env_variable_value', (['"""SFTP_HOST"""'], {'is_mandatory': '(True)'}), "('SFTP_HOST', is_mandatory=True)\n", (358, 390), False, 'from common.config import get_env_variable_value, get_secret_value\n'), ((407, 461), 'common.config.get_env_variable_value', 'get_env_variable_value', (['"""SFTP_PORT"""'], {'is_mandatory': '(True)'}), "('SFTP_PORT', is_mandatory=True)\n", (429, 461), False, 'from common.config import get_env_variable_value, get_secret_value\n'), ((482, 530), 'common.config.get_secret_value', 'get_secret_value', (['"""SFTP_USER"""'], {'is_mandatory': '(True)'}), "('SFTP_USER', is_mandatory=True)\n", (498, 530), False, 'from common.config import get_env_variable_value, get_secret_value\n'), ((551, 603), 'common.config.get_secret_value', 'get_secret_value', (['"""SFTP_PASSWORD"""'], {'is_mandatory': '(True)'}), "('SFTP_PASSWORD', is_mandatory=True)\n", (567, 603), False, 'from common.config import get_env_variable_value, get_secret_value\n')] |
from utils import initMysql, getByAid
# Start and end AID of the crawl range (end is exclusive, per range()).
startAid = 1190000016801859
endAid = 1190000016900000
connect = initMysql()
for aid in range(startAid, endAid):
    getByAid(aid, connect)
| [
"utils.getByAid",
"utils.initMysql"
] | [((118, 129), 'utils.initMysql', 'initMysql', ([], {}), '()\n', (127, 129), False, 'from utils import initMysql, getByAid\n'), ((171, 193), 'utils.getByAid', 'getByAid', (['aid', 'connect'], {}), '(aid, connect)\n', (179, 193), False, 'from utils import initMysql, getByAid\n')] |
""" Calorizator parser application. """
import argparse
from calorizator_parser.parser import main as parser_main
def run() -> None:
    """Command-line entry point: parse arguments and hand off to the parser."""
    arg_parser = argparse.ArgumentParser(
        prog="calorizator_parser",
        description="Parser for calorizator site",
    )
    arg_parser.add_argument(
        "--output", type=str, required=True, help="name of the output file"
    )
    parser_main(arg_parser.parse_args())


if __name__ == "__main__":
    run()
| [
"calorizator_parser.parser.main",
"argparse.ArgumentParser"
] | [((179, 277), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""calorizator_parser"""', 'description': '"""Parser for calorizator site"""'}), "(prog='calorizator_parser', description=\n 'Parser for calorizator site')\n", (202, 277), False, 'import argparse\n'), ((403, 420), 'calorizator_parser.parser.main', 'parser_main', (['args'], {}), '(args)\n', (414, 420), True, 'from calorizator_parser.parser import main as parser_main\n')] |
import os
import json
import base64
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import models
# Database engine configured from the environment (fails at import when unset).
engine = create_engine(os.environ['SQLALCHEMY_DATABASE_URL'])
# Session factory bound to that engine; explicit commit/flush required.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def parse_block(block, chain_id):
    """Convert one raw block dict into a ``models.Block`` instance."""
    height = block['height']
    block_id = ':'.join([chain_id, str(height)])
    record = {
        'chain_id': chain_id,
        'height': height,
        'id': block_id,
    }
    txs = block['txs_results']
    if txs:
        record['transactions'] = parse_txs(txs, block_id)
    # begin-block events come first; end-block events are appended when present.
    events = block['begin_block_events']
    end_events = block['end_block_events']
    if end_events:
        events.extend(end_events)
    record['events'] = parse_events(events, block_id)
    return models.Block(**record)
def parse_txs(txs, block_id):
    """Parse every raw transaction dict in ``txs`` for the given block id."""
    return [__parse_tx(raw_tx, block_id) for raw_tx in txs]
def __parse_tx(tx, block_id):
    """Build a models.Transaction from one raw tx dict; success == has events."""
    parsed = {
        'gas_wanted': int(tx['gasWanted']),
        'gas_used': int(tx['gasUsed']),
        'log': tx['log'],
    }
    tx_events = tx['events']
    if tx_events:
        parsed['success'] = True
        parsed['events'] = parse_events(tx_events, block_id)
    else:
        parsed['success'] = False
    return models.Transaction(**parsed)
def parse_events(events_list, block_id):
    """Parse each raw event dict into a models.Event for ``block_id``."""
    return [__parse_event(raw_event, block_id) for raw_event in events_list]
def __parse_event(event, block_id):
    """Build a models.Event from one raw event dict."""
    return models.Event(
        e_type=event['type'],
        attributes=__decode_attributes(event['attributes']),
        block_id=block_id,
    )
def __decode_attribute_dict(attr_dict):
    """Decode one base64 key/value attribute pair into a models.Attribute.

    A missing or undecodable field falls back to the literal string 'None'
    instead of aborting the whole block parse.
    """
    out = {}
    try:
        out['a_key'] = str(base64.b64decode(attr_dict['key']), 'utf-8')
    # Narrowed from a bare `except:` which also swallowed KeyboardInterrupt /
    # SystemExit. binascii.Error and UnicodeDecodeError are ValueError subclasses.
    except (KeyError, TypeError, ValueError):
        out['a_key'] = 'None'
    try:
        out['a_value'] = str(base64.b64decode(attr_dict['value']), 'utf-8')
    except (KeyError, TypeError, ValueError):
        out['a_value'] = 'None'
    return models.Attribute(**out)
def __decode_attributes(attributes):
    """Decode every raw attribute dict in ``attributes``."""
    return [__decode_attribute_dict(raw_attr) for raw_attr in attributes]
| [
"sqlalchemy.orm.sessionmaker",
"models.Event",
"models.Attribute",
"sqlalchemy.create_engine",
"base64.b64decode",
"models.Transaction",
"models.Block"
] | [((140, 192), 'sqlalchemy.create_engine', 'create_engine', (["os.environ['SQLALCHEMY_DATABASE_URL']"], {}), "(os.environ['SQLALCHEMY_DATABASE_URL'])\n", (153, 192), False, 'from sqlalchemy import create_engine\n'), ((208, 268), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'autocommit': '(False)', 'autoflush': '(False)', 'bind': 'engine'}), '(autocommit=False, autoflush=False, bind=engine)\n', (220, 268), False, 'from sqlalchemy.orm import sessionmaker\n'), ((752, 771), 'models.Block', 'models.Block', ([], {}), '(**out)\n', (764, 771), False, 'import models\n'), ((1188, 1213), 'models.Transaction', 'models.Transaction', ([], {}), '(**out)\n', (1206, 1213), False, 'import models\n'), ((1522, 1541), 'models.Event', 'models.Event', ([], {}), '(**out)\n', (1534, 1541), False, 'import models\n'), ((1859, 1882), 'models.Attribute', 'models.Attribute', ([], {}), '(**out)\n', (1875, 1882), False, 'import models\n'), ((1632, 1666), 'base64.b64decode', 'base64.b64decode', (["attr_dict['key']"], {}), "(attr_dict['key'])\n", (1648, 1666), False, 'import base64\n'), ((1757, 1793), 'base64.b64decode', 'base64.b64decode', (["attr_dict['value']"], {}), "(attr_dict['value'])\n", (1773, 1793), False, 'import base64\n')] |
# Car Pooling
'''
You are driving a vehicle that has capacity empty seats initially available for passengers. The vehicle only drives east (ie. it cannot turn around and drive west.)
Given a list of trips, trip[i] = [num_passengers, start_location, end_location] contains information about the i-th trip: the number of passengers that must be picked up, and the locations to pick them up and drop them off. The locations are given as the number of kilometers due east from your vehicle's initial location.
Return true if and only if it is possible to pick up and drop off all passengers for all the given trips.
Example 1:
Input: trips = [[2,1,5],[3,3,7]], capacity = 4
Output: false
Example 2:
Input: trips = [[2,1,5],[3,3,7]], capacity = 5
Output: true
Example 3:
Input: trips = [[2,1,5],[3,5,7]], capacity = 3
Output: true
Example 4:
Input: trips = [[3,2,7],[3,7,9],[8,3,9]], capacity = 11
Output: true
Constraints:
trips.length <= 1000
trips[i].length == 3
1 <= trips[i][0] <= 100
0 <= trips[i][1] < trips[i][2] <= 1000
1 <= capacity <= 100000
Hide Hint #1
Sort the pickup and dropoff events by location, then process them in order.
'''
class Solution0:
    '''
    Sweep-line approach: turn every trip into a pickup and a drop-off event,
    sort events by location (drop-offs first on ties, since their deltas are
    negative), and track the running passenger count.
    '''
    def carPooling(self, trips: List[List[int]], capacity: int) -> bool:
        events = []
        for passengers, start, end in trips:
            events.append([start, passengers])
            events.append([end, -passengers])
        occupied = 0
        for _, delta in sorted(events):
            occupied += delta
            if occupied > capacity:
                return False
        return True
import numpy as np
class Solution:
    def carPooling(self, trips: List[List[int]], capacity: int) -> bool:
        """Return True iff every trip can be served without exceeding capacity.

        Uses a difference array over stop locations — O(n + max_stop) — instead
        of the original approach that added each trip's passenger count to every
        kilometre it covers (O(n * trip_length)). Also handles an empty trip
        list gracefully instead of raising on max() of an empty sequence.
        """
        if not trips:
            return True
        end = max(trip[2] for trip in trips)
        # delta[k] = net change in passengers at location k.
        delta = [0] * (end + 1)
        for passengers, start, stop in trips:
            delta[start] += passengers
            delta[stop] -= passengers
        onboard = 0
        for change in delta:
            onboard += change
            if onboard > capacity:
                return False
        return True
"numpy.array"
] | [((1856, 1871), 'numpy.array', 'np.array', (['trips'], {}), '(trips)\n', (1864, 1871), True, 'import numpy as np\n')] |
import requests
import json
import logging
import time
API_URL = "https://api.coindesk.com/v1/bpi/currentprice.json"
def main():
    """Poll the CoinDesk API once, build the DB record, then wait a minute."""
    response = GetAPIResponse(API_URL)
    # GetAPIResponse returns False on any failure; the original passed that
    # straight into getDBObject, which raised TypeError. Skip the bad cycle.
    if response:
        # NOTE(review): the record is built but discarded — confirm a write
        # step was intended here.
        getDBObject(response)
    time.sleep(60)
def getDBObject(response):
    """Map a CoinDesk current-price payload to the flat record stored in the DB."""
    usd = response["bpi"]["USD"]
    return {
        "code": usd["code"],
        "time": response["time"]["updatedISO"],
        "currRate": usd["rate_float"],
        "currRateString": usd["rate"],
    }
def GetAPIResponse(url):
    """Fetch ``url`` and return the parsed JSON body, or False on any failure."""
    try:
        response = requests.get(url)
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt)
    # to the base class of all requests errors.
    except requests.RequestException:
        logging.error('Error while getting url.')
        return False
    if response is None:  # `is None`, not `== None` (identity check)
        logging.error('GetAPIResponse: No Request')
        return False
    if response.status_code != 200:
        logging.error('GetAPIResponse: Status code not 200')
        return False
    return response.json()


if __name__ == '__main__':
    main()
| [
"requests.get",
"logging.error",
"time.sleep"
] | [((202, 216), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (212, 216), False, 'import time\n'), ((529, 546), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (541, 546), False, 'import requests\n'), ((665, 708), 'logging.error', 'logging.error', (['"""GetAPIResponse: No Request"""'], {}), "('GetAPIResponse: No Request')\n", (678, 708), False, 'import logging\n'), ((776, 828), 'logging.error', 'logging.error', (['"""GetAPIResponse: Status code not 200"""'], {}), "('GetAPIResponse: Status code not 200')\n", (789, 828), False, 'import logging\n'), ((567, 608), 'logging.error', 'logging.error', (['"""Error while getting url."""'], {}), "('Error while getting url.')\n", (580, 608), False, 'import logging\n')] |
import datetime
import unittest
from unittest import mock
import py_client.algorithm_interface_test.test_helper.SessionMockFactory as SessionMockFactory
from py_client.aidm import UpdateStopTimesTrainPathNode, AlgorithmTrain, AlgorithmTrainPathNode, StopStatus, \
UpdateRunTimesTrainPathSegment
from py_client.algorithm_interface import algorithm_interface_factory
from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase
class TestUpdateTrajectory(unittest.TestCase):
    """Unit tests for the update-trajectory endpoints of the algorithm interface.

    ``requests.Session`` is patched with a mock session that records the last
    PUT request and body and always answers with a canned AlgorithmTrain
    payload, so both the outgoing request and the parsed response can be
    asserted without a real server.
    """

    class UpdateTrajectoryTestMockSession(SessionMockTestBase):
        # Fake HTTP session: captures the PUT call and returns a fixed
        # two-node train (node ids 1332 and 1696) with status 200.
        def put(self, request, json):
            self._last_body = json
            self._last_request = request
            json_string = ("{ \n"
                           "  \"id\": 2060,\n"
                           "  \"code\": \"TestUpdateTrajectory\","
                           "  \"trainPathNodes\": [\n"
                           "    {\n"
                           "      \"id\": 1332,\n"
                           "      \"sectionTrackId\": null,\n"
                           "      \"nodeId\": 18,\n"
                           "      \"nodeTrackId\": null,\n"
                           "      \"FormationId\": 1187,\n"
                           "      \"arrivalTime\": \"2003-05-01T00:04:00\",\n"
                           "      \"departureTime\": \"2003-05-01T00:05:30\",\n"
                           "      \"minimumRunTime\": null,\n"
                           "      \"minimumStopTime\": \"P0D\",\n"
                           "      \"stopStatus\": \"operationalStop\",\n"
                           "      \"sequenceNumber\": 0\n"
                           "    },\n"
                           "    {\n"
                           "      \"id\": 1696,\n"
                           "      \"sectionTrackId\": 1172,\n"
                           "      \"nodeId\": 10,\n"
                           "      \"nodeTrackId\": null,\n"
                           "      \"FormationId\": null,\n"
                           "      \"arrivalTime\": \"2003-05-01T00:10:30\",\n"
                           "      \"departureTime\": \"2003-05-01T00:10:30\",\n"
                           "      \"minimumRunTime\": \"PT5M\",\n"
                           "      \"minimumStopTime\": \"P0D\",\n"
                           "      \"stopStatus\": \"commercialStop\",\n"
                           "      \"sequenceNumber\": 1\n"
                           "    }\n"
                           "  ],\n"
                           "  \"debugString\": \"Mocked RVZH_1_1_J03 tt_(G)\"\n"
                           "}")
            return SessionMockFactory.create_response_mock(json_string, 200)

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def setUp(self, mocked_get_obj):
        # Fresh interface per test, wired to the mocked session.
        self.interface_to_viriato = algorithm_interface_factory.create(get_api_url())

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def test_update_trajectory_request(self, mocked_get_obj):
        # Verifies the URL and JSON body sent for a stop-times update.
        train_id = 2060
        update_train_stop_time_node = UpdateStopTimesTrainPathNode(train_path_node_id=1332,
                                                             arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
                                                             departure_time=datetime.datetime(2003, 5, 1, 0, 5),
                                                             stop_status=StopStatus.operational_stop,
                                                             minimum_stop_time=datetime.timedelta(seconds=30))
        self.interface_to_viriato.update_train_trajectory_stop_times(train_id, update_train_stop_time_node)
        session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
        self.assertEqual(session_obj.last_request,
                         get_api_url() + '/trains/2060/train-path-nodes:update-trajectory-stop-times')
        self.assertDictEqual(session_obj.last_body,
                             dict(trainPathNodeId=1332,
                                  arrivalTime="2003-05-01T00:04:00",
                                  departureTime="2003-05-01T00:05:00",
                                  minimumStopTime="PT30S",
                                  stopStatus="operationalStop"))

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def test_update_trajectory_response(self, mocked_get_obj):
        # Verifies the canned JSON response is parsed into an AlgorithmTrain.
        train_id = 2060
        update_train_stop_time_node = UpdateStopTimesTrainPathNode(train_path_node_id=1332,
                                                             arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
                                                             departure_time=datetime.datetime(2003, 5, 1, 0, 5),
                                                             stop_status=StopStatus.operational_stop,
                                                             minimum_stop_time=datetime.timedelta(seconds=30))
        updated_algorithm_train = self.interface_to_viriato.update_train_trajectory_stop_times(train_id, update_train_stop_time_node)
        self.assertIsInstance(updated_algorithm_train, AlgorithmTrain)
        self.assertEqual(updated_algorithm_train.debug_string, 'Mocked RVZH_1_1_J03 tt_(G)')
        self.assertEqual(updated_algorithm_train.code, "TestUpdateTrajectory")
        self.assertEqual(updated_algorithm_train.id, 2060)
        self.assertIsInstance(updated_algorithm_train.train_path_nodes, list)
        self.assertIsInstance(updated_algorithm_train.train_path_nodes[0], AlgorithmTrainPathNode)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].id, 1332)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].section_track_id, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].node_track_id, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].formation_id, 1187)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].arrival_time, datetime.datetime(2003, 5, 1, 0, 4))
        self.assertEqual(
            updated_algorithm_train.train_path_nodes[0].departure_time,
            datetime.datetime(2003, 5, 1, 0, 5, 30))
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_run_time, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_stop_time, datetime.timedelta(0))
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].stop_status, StopStatus.operational_stop)

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def test_update_trajectory_request_with_update_train_path_segment(self, mocked_get_obj):
        # Run-times update with an explicit minimum run time ("PT2M").
        train_id = 20610
        update_train_path_segment = UpdateRunTimesTrainPathSegment(
            to_train_path_node_id=1332,
            to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
            from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
            minimum_run_time=datetime.timedelta(seconds=120))
        self.interface_to_viriato.update_train_trajectory_run_times(train_id, update_train_path_segment)
        session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
        self.assertEqual(session_obj.last_request,
                         get_api_url() + '/trains/20610/train-path-nodes:update-trajectory-run-times')
        self.assertDictEqual(session_obj.last_body,
                             dict(toTrainPathNodeId=1332,
                                  toNodeArrivalTime="2003-05-01T00:04:00",
                                  fromNodeDepartureTime="2003-05-01T00:05:00",
                                  minimumRunTime="PT2M"))

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def test_update_trajectory_request_with_update_train_path_segment_minimum_run_time_none(self, mocked_get_obj):
        # Run-times update where the minimum run time is omitted (None).
        train_id = 2062
        update_train_path_segment = UpdateRunTimesTrainPathSegment(
            to_train_path_node_id=1332,
            to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
            from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
            minimum_run_time=None)
        self.interface_to_viriato.update_train_trajectory_run_times(train_id, update_train_path_segment)
        session_obj = self.interface_to_viriato._AlgorithmInterface__communication_layer.currentSession
        self.assertEqual(session_obj.last_request,
                         get_api_url() + '/trains/2062/train-path-nodes:update-trajectory-run-times')
        self.assertDictEqual(session_obj.last_body,
                             dict(toTrainPathNodeId=1332,
                                  toNodeArrivalTime="2003-05-01T00:04:00",
                                  fromNodeDepartureTime="2003-05-01T00:05:00",
                                  minimumRunTime=None))

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def test_update_trajectory_response_with_update_train_path_segment(self, mocked_get_obj):
        # Response parsing for the run-times variant of the endpoint.
        train_id = 2060
        update_train_path_segment = UpdateRunTimesTrainPathSegment(
            to_train_path_node_id=1332,
            to_node_arrival_time=datetime.datetime(2003, 5, 1, 0, 4),
            from_node_departure_time=datetime.datetime(2003, 5, 1, 0, 5),
            minimum_run_time=None)
        updated_algorithm_train = self.interface_to_viriato.update_train_trajectory_run_times(
            train_id,
            update_train_path_segment)
        self.assertIsInstance(updated_algorithm_train, AlgorithmTrain)
        self.assertEqual(updated_algorithm_train.debug_string, 'Mocked RVZH_1_1_J03 tt_(G)')
        self.assertEqual(updated_algorithm_train.code, "TestUpdateTrajectory")
        self.assertEqual(updated_algorithm_train.id, 2060)
        self.assertIsInstance(updated_algorithm_train.train_path_nodes, list)
        self.assertIsInstance(updated_algorithm_train.train_path_nodes[0], AlgorithmTrainPathNode)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].id, 1332)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].section_track_id, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].node_track_id, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].formation_id, 1187)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].arrival_time, datetime.datetime(2003, 5, 1, 0, 4))
        self.assertEqual(
            updated_algorithm_train.train_path_nodes[0].departure_time,
            datetime.datetime(2003, 5, 1, 0, 5, 30))
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_run_time, None)
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].minimum_stop_time, datetime.timedelta(0))
        self.assertEqual(updated_algorithm_train.train_path_nodes[0].stop_status, StopStatus.operational_stop)

    @mock.patch('requests.Session', side_effect=UpdateTrajectoryTestMockSession)
    def tearDown(self, mocked_get_obj) -> None:
        # Close the interface so its session/resources are released.
        self.interface_to_viriato.__exit__(None, None, None)
| [
"datetime.datetime",
"datetime.timedelta",
"unittest.mock.patch",
"py_client.algorithm_interface_test.test_helper.SessionMockFactory.create_response_mock",
"py_client.algorithm_interface_test.test_helper.SessionMockTestBase.get_api_url"
] | [((2769, 2844), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (2779, 2844), False, 'from unittest import mock\n'), ((2978, 3053), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (2988, 3053), False, 'from unittest import mock\n'), ((4460, 4535), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (4470, 4535), False, 'from unittest import mock\n'), ((6763, 6838), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (6773, 6838), False, 'from unittest import mock\n'), ((7984, 8059), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (7994, 8059), False, 'from unittest import mock\n'), ((9196, 9271), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (9206, 9271), False, 'from unittest import mock\n'), ((11288, 11363), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {'side_effect': 'UpdateTrajectoryTestMockSession'}), "('requests.Session', side_effect=UpdateTrajectoryTestMockSession)\n", (11298, 11363), False, 'from unittest import mock\n'), ((2703, 2760), 'py_client.algorithm_interface_test.test_helper.SessionMockFactory.create_response_mock', 'SessionMockFactory.create_response_mock', (['json_string', '(200)'], {}), 
'(json_string, 200)\n', (2742, 2760), True, 'import py_client.algorithm_interface_test.test_helper.SessionMockFactory as SessionMockFactory\n'), ((2955, 2968), 'py_client.algorithm_interface_test.test_helper.SessionMockTestBase.get_api_url', 'get_api_url', ([], {}), '()\n', (2966, 2968), False, 'from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase\n'), ((6246, 6281), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (6263, 6281), False, 'import datetime\n'), ((6396, 6435), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)', '(30)'], {}), '(2003, 5, 1, 0, 5, 30)\n', (6413, 6435), False, 'import datetime\n'), ((6620, 6641), 'datetime.timedelta', 'datetime.timedelta', (['(0)'], {}), '(0)\n', (6638, 6641), False, 'import datetime\n'), ((10771, 10806), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (10788, 10806), False, 'import datetime\n'), ((10921, 10960), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)', '(30)'], {}), '(2003, 5, 1, 0, 5, 30)\n', (10938, 10960), False, 'import datetime\n'), ((11145, 11166), 'datetime.timedelta', 'datetime.timedelta', (['(0)'], {}), '(0)\n', (11163, 11166), False, 'import datetime\n'), ((3316, 3351), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (3333, 3351), False, 'import datetime\n'), ((3436, 3471), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)'], {}), '(2003, 5, 1, 0, 5)\n', (3453, 3471), False, 'import datetime\n'), ((3668, 3698), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (3686, 3698), False, 'import datetime\n'), ((3996, 4009), 'py_client.algorithm_interface_test.test_helper.SessionMockTestBase.get_api_url', 'get_api_url', ([], {}), '()\n', 
(4007, 4009), False, 'from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase\n'), ((4799, 4834), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (4816, 4834), False, 'import datetime\n'), ((4919, 4954), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)'], {}), '(2003, 5, 1, 0, 5)\n', (4936, 4954), False, 'import datetime\n'), ((5151, 5181), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (5169, 5181), False, 'import datetime\n'), ((7103, 7138), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (7120, 7138), False, 'import datetime\n'), ((7178, 7213), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)'], {}), '(2003, 5, 1, 0, 5)\n', (7195, 7213), False, 'import datetime\n'), ((7245, 7276), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(120)'}), '(seconds=120)\n', (7263, 7276), False, 'import datetime\n'), ((7571, 7584), 'py_client.algorithm_interface_test.test_helper.SessionMockTestBase.get_api_url', 'get_api_url', ([], {}), '()\n', (7582, 7584), False, 'from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase\n'), ((8345, 8380), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (8362, 8380), False, 'import datetime\n'), ((8420, 8455), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)'], {}), '(2003, 5, 1, 0, 5)\n', (8437, 8455), False, 'import datetime\n'), ((8786, 8799), 'py_client.algorithm_interface_test.test_helper.SessionMockTestBase.get_api_url', 'get_api_url', ([], {}), '()\n', (8797, 8799), False, 'from py_client.algorithm_interface_test.test_helper.SessionMockTestBase import get_api_url, SessionMockTestBase\n'), 
((9536, 9571), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(4)'], {}), '(2003, 5, 1, 0, 4)\n', (9553, 9571), False, 'import datetime\n'), ((9611, 9646), 'datetime.datetime', 'datetime.datetime', (['(2003)', '(5)', '(1)', '(0)', '(5)'], {}), '(2003, 5, 1, 0, 5)\n', (9628, 9646), False, 'import datetime\n')] |
# Copyright 2018 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from neutronclient.common import extension
from neutronclient.common import utils
from neutronclient.i18n import _
def _format_physnets(l2bridge):
    """Render the bridge's physnets, one JSON document per line ('' if absent)."""
    try:
        entries = l2bridge['physnets']
        return '\n'.join(jsonutils.dumps(entry) for entry in entries)
    except (TypeError, KeyError):
        # No usable 'physnets' entry -> empty cell in the listing.
        return ''
class NuageL2Bridge(extension.NeutronClientExtension):
    """Client-side description of the nuage_l2bridge API resource."""

    resource = 'nuage_l2bridge'
    resource_plural = resource + 's'
    object_path = '/nuage-l2bridges'
    resource_path = '/nuage-l2bridges/%s'
    versions = ['2.0']
    allow_names = True
class NuageL2BridgeCreate(extension.ClientExtensionCreate, NuageL2Bridge):
    """Create Nuage L2Bridge"""

    shell_command = 'nuage-l2bridge-create'

    def add_known_arguments(self, parser):
        """Register the CLI arguments accepted by the create command."""
        parser.add_argument(
            'name',
            metavar='name',
            help=_('Name of the Nuage L2Bridge.'))
        parser.add_argument(
            '--physnet', metavar='physnet_name=PHYSNET,'
                                 'segmentation_id=SEGMENTATIONID,'
                                 'segmentation_type=SEGMENTATIONTYPE',
            action='append',
            type=utils.str2dict_type(
                required_keys=['physnet_name', 'segmentation_id',
                               'segmentation_type']),
            help=_('Desired physnet and segmentation id for this l2bridge: '
                   'physnet_name=<name>,segmentation_id=<segmentation_id>, '
                   'segmentation_type=<segmentation_type>. '
                   'You can repeat this option.'))
        return parser

    def args2body(self, args):
        """Translate parsed CLI arguments into the API request body."""
        body = {}
        if args.name:
            body['name'] = args.name
        if args.physnet:
            # argparse gathered these with action='append'.
            body['physnets'] = list(args.physnet)
        return {'nuage_l2bridge': body}
class NuageL2BridgeUpdate(extension.ClientExtensionUpdate, NuageL2Bridge):
    """Update Nuage L2Bridge"""

    shell_command = 'nuage-l2bridge-update'

    def add_known_arguments(self, parser):
        """Register the CLI arguments accepted by the update command."""
        parser.add_argument(
            '--name',
            metavar='name',
            help=_('Name of the Nuage L2Bridge.'))
        parser.add_argument(
            '--physnet', metavar='physnet_name=PHYSNET,'
                                 'segmentation_id=SEGMENTATIONID,'
                                 'segmentation_type=SEGMENTATIONTYPE',
            action='append',
            type=utils.str2dict_type(
                required_keys=['physnet_name', 'segmentation_id',
                               'segmentation_type']),
            help=_('Desired physnet and segmentation id for this l2bridge: '
                   'physnet_name=<name>,segmentation_id=<segmentation_id>, '
                   'segmentation_type=<segmentation_type>. '
                   'You can repeat this option.'))
        return parser

    def args2body(self, args):
        """Translate parsed CLI arguments into the API request body."""
        body = {}
        if args.name:
            body['name'] = args.name
        if args.physnet:
            # argparse gathered these with action='append'.
            body['physnets'] = list(args.physnet)
        return {'nuage_l2bridge': body}
class NuageL2BridgeList(extension.ClientExtensionList, NuageL2Bridge):
    """List nuage L2Bridges."""
    shell_command = 'nuage-l2bridge-list'
    # Columns shown by default; 'physnets' is rendered one JSON object per line.
    list_columns = ['id', 'name', 'nuage_subnet_id', 'physnets']
    _formatters = {'physnets': _format_physnets}
    pagination_support = True
    sorting_support = True
class NuageL2BridgeShow(extension.ClientExtensionShow,
                        NuageL2Bridge):
    """Show a given Nuage L2bridge."""
    shell_command = 'nuage-l2bridge-show'
# NOTE(review): class name breaks PascalCase ('delete' lowercase); kept as-is
# because renaming could break external references to this extension class.
class NuageL2Bridgedelete(extension.ClientExtensionDelete, NuageL2Bridge):
    """Delete a given Nuage L2bridge"""
    shell_command = 'nuage-l2bridge-delete'
| [
"oslo_serialization.jsonutils.dumps",
"neutronclient.i18n._",
"neutronclient.common.utils.str2dict_type"
] | [((819, 843), 'oslo_serialization.jsonutils.dumps', 'jsonutils.dumps', (['physnet'], {}), '(physnet)\n', (834, 843), False, 'from oslo_serialization import jsonutils\n'), ((1504, 1536), 'neutronclient.i18n._', '_', (['"""Name of the Nuage L2Bridge."""'], {}), "('Name of the Nuage L2Bridge.')\n", (1505, 1536), False, 'from neutronclient.i18n import _\n'), ((1808, 1903), 'neutronclient.common.utils.str2dict_type', 'utils.str2dict_type', ([], {'required_keys': "['physnet_name', 'segmentation_id', 'segmentation_type']"}), "(required_keys=['physnet_name', 'segmentation_id',\n 'segmentation_type'])\n", (1827, 1903), False, 'from neutronclient.common import utils\n'), ((1966, 2152), 'neutronclient.i18n._', '_', (['"""Desired physnet and segmentation id for this l2bridge: physnet_name=<name>,segmentation_id=<segmentation_id>, segmentation_type=<segmentation_type>. You can repeat this option."""'], {}), "('Desired physnet and segmentation id for this l2bridge: physnet_name=<name>,segmentation_id=<segmentation_id>, segmentation_type=<segmentation_type>. You can repeat this option.'\n )\n", (1967, 2152), False, 'from neutronclient.i18n import _\n'), ((2853, 2885), 'neutronclient.i18n._', '_', (['"""Name of the Nuage L2Bridge."""'], {}), "('Name of the Nuage L2Bridge.')\n", (2854, 2885), False, 'from neutronclient.i18n import _\n'), ((3157, 3252), 'neutronclient.common.utils.str2dict_type', 'utils.str2dict_type', ([], {'required_keys': "['physnet_name', 'segmentation_id', 'segmentation_type']"}), "(required_keys=['physnet_name', 'segmentation_id',\n 'segmentation_type'])\n", (3176, 3252), False, 'from neutronclient.common import utils\n'), ((3315, 3501), 'neutronclient.i18n._', '_', (['"""Desired physnet and segmentation id for this l2bridge: physnet_name=<name>,segmentation_id=<segmentation_id>, segmentation_type=<segmentation_type>. 
You can repeat this option."""'], {}), "('Desired physnet and segmentation id for this l2bridge: physnet_name=<name>,segmentation_id=<segmentation_id>, segmentation_type=<segmentation_type>. You can repeat this option.'\n )\n", (3316, 3501), False, 'from neutronclient.i18n import _\n')] |
from math import trunc
# Read a number and show its integer portion (math.trunc drops the fraction).
valor = float(input('Digite um numero inteiro: '))
parte_inteira = trunc(valor)
print('O valor digitado foi {} e a sua porção inteira é {}'.format(valor, parte_inteira))
'''
int (numero) => tambem funciona para estrair um inteiro
'''
| [
"math.trunc"
] | [((154, 167), 'math.trunc', 'trunc', (['numero'], {}), '(numero)\n', (159, 167), False, 'from math import trunc\n')] |
import numpy as np
import sys,os
##################################### INPUT ############################################
realizations = 2000
########################################################################################
root1 = '/simons/scratch/fvillaescusa/pdf_information/Snapshots/latin_hypercube'
root2 = '/simons/scratch/fvillaescusa/pdf_information/Linear_Pk/latin_hypercube'

# Loop over all realizations, copying each one's CAMB parameter file and the
# z=0 linear matter P(k) into the Linear_Pk tree.
for i in range(realizations):  # was `xrange`, which does not exist in Python 3
    folder_in  = '%s/%d'%(root1,i)
    folder_out = '%s/%d'%(root2,i)
    if not(os.path.exists(folder_out)): os.system('mkdir %s'%folder_out)
    os.system('cp %s/CAMB.params %s/'%(folder_in, folder_out))
    os.system('cp %s/ICs/Pk_mm_z=0.000.txt %s/'%(folder_in, folder_out))
| [
"os.system",
"os.path.exists"
] | [((612, 672), 'os.system', 'os.system', (["('cp %s/CAMB.params %s/' % (folder_in, folder_out))"], {}), "('cp %s/CAMB.params %s/' % (folder_in, folder_out))\n", (621, 672), False, 'import sys, os\n'), ((675, 745), 'os.system', 'os.system', (["('cp %s/ICs/Pk_mm_z=0.000.txt %s/' % (folder_in, folder_out))"], {}), "('cp %s/ICs/Pk_mm_z=0.000.txt %s/' % (folder_in, folder_out))\n", (684, 745), False, 'import sys, os\n'), ((544, 570), 'os.path.exists', 'os.path.exists', (['folder_out'], {}), '(folder_out)\n', (558, 570), False, 'import sys, os\n'), ((574, 608), 'os.system', 'os.system', (["('mkdir %s' % folder_out)"], {}), "('mkdir %s' % folder_out)\n", (583, 608), False, 'import sys, os\n')] |
# Generated by Django 2.2.6 on 2019-10-10 19:01
from uuid import uuid4
from django.db import migrations
def apply_migration(apps, migration):
    """Apply App Token data migration"""
    group_model = apps.get_model('auth', 'Group')
    group_model.objects.create(name='App Token')
def revert_migration(apps, migration):
    """Revert App Token data migration"""
    group_model = apps.get_model('auth', 'Group')
    group_model.objects.filter(name='App Token').delete()
class Migration(migrations.Migration):
    # Data migration: seeds the "App Token" auth group (fully reversible).

    dependencies = [
        ('user', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(apply_migration, revert_migration),
    ]
"django.db.migrations.RunPython"
] | [((612, 667), 'django.db.migrations.RunPython', 'migrations.RunPython', (['apply_migration', 'revert_migration'], {}), '(apply_migration, revert_migration)\n', (632, 667), False, 'from django.db import migrations\n')] |
from app import schemas
import pytest
def test_get_all_products(authorized_client, test_products):
    """Authenticated listing returns every seeded product."""
    response = authorized_client.get("/products/")
    assert len(response.json()) == len(test_products)
    assert response.status_code == 200
def test_unathorized_get_all_products(client, test_products):
    """Listing without credentials is rejected with 401."""
    response = client.get("/products/")
    assert response.status_code == 401
def test_unathorized_get_one_products(client, test_products):
    """Fetching a single product without credentials is rejected with 401."""
    response = client.get(f"/products/{test_products[0].id}")
    assert response.status_code == 401
def test_get_one_product_not_exixst(authorized_client, test_products):
    """Fetching an unknown id yields 404 with a descriptive detail message."""
    response = authorized_client.get("/products/456478955")
    assert response.status_code == 404
    assert response.json().get("detail") == "product with id: 456478955 was not found"
def test_get_one_product(authorized_client, test_products):
    """Fetching by id returns the matching product."""
    expected = test_products[0]
    response = authorized_client.get(f"/products/{expected.id}")
    fetched = schemas.ProductOut(**response.json())
    assert fetched.id == expected.id
    assert fetched.product_name == expected.product_name
@pytest.mark.parametrize(
    "product_name, quantity_init, quantity_left, status_code",
    [
        (None, None, 33, 422),
        ("chambre", 23, 44, 201),
    ],
)
def test_create_product(authorized_client, test_user, test_products,
                        product_name, quantity_init, quantity_left, status_code):
    """Creation validates its payload: 422 on bad input, 201 on success."""
    payload = {
        "product_name": product_name,
        "quantity_init": quantity_init,
        "quantity_left": quantity_left,
    }
    response = authorized_client.post("/products/", json=payload)
    assert response.status_code == status_code
def test_unathorized_create_products(client, test_user):
    """Creating a product without credentials is rejected with 401."""
    payload = {"product_name": "product_name", "quantity_init": 23, "quantity_left": 23}
    response = client.post("/products/", json=payload)
    assert response.status_code == 401
def test_unathorized_delete_products(client, test_user, test_products):
    """Deleting without credentials is rejected with 401."""
    response = client.delete(f"/products/{test_products[0].id}")
    assert response.status_code == 401
def test_authorize_delete_products(authorized_client, test_user, test_products):
    """An authenticated delete of an existing product returns 204."""
    response = authorized_client.delete(f"/products/{test_products[0].id}")
    assert response.status_code == 204
def test_authorize_delete_non_existing(authorized_client, test_user, test_products):
    """Deleting an unknown id yields 404 with a descriptive detail message."""
    response = authorized_client.delete(f"/products/3456")
    assert response.status_code == 404
    assert response.json().get('detail') == "product with id: 3456 does not exist"
def test_update_product(authorized_client, test_products, test_user):
    """A full update replaces the product's fields and returns the new state."""
    data = {
        "product_name": "savon",
        "quantity_init": 32,
        "quantity_left": 452,
        "id": test_products[0].id,
    }
    res = authorized_client.put(f"/products/{test_products[0].id}", json=data)
    updated_product = schemas.ProductOut(**res.json())
    assert res.status_code == 200
    assert updated_product.product_name == data["product_name"]
    # Removed a leftover debug print(res.json()) that cluttered test output.
    assert updated_product.quantity_init == data["quantity_init"]
| [
"pytest.mark.parametrize"
] | [((1035, 1177), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_name, quantity_init, quantity_left, status_code"""', "[(None, None, 33, 422), ('chambre', 23, 44, 201)]"], {}), "(\n 'product_name, quantity_init, quantity_left, status_code', [(None, None,\n 33, 422), ('chambre', 23, 44, 201)])\n", (1058, 1177), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
import math
import pandas as pd
import xml.etree.ElementTree as ET
import matplotlib.pyplot as plt
import os.path as osp
from PIL import Image
import numpy as np
plt.rcParams['font.sans-serif'] = ['SimHei'] # so Chinese axis labels render correctly
plt.rcParams['font.family']='sans-serif'
plt.rcParams['figure.figsize'] = (20.0, 20.0)
# Dataset config: (subdir, file) pairs of .lst annotation lists, the image
# root they are relative to, and the class names to histogram.
dataset=dict(
    ann_file=(
        ('', 'has_people_phone_rand3w_train2.lst'),
        ('', 'jianhang_0412_RMB_rand3w_train2.lst'),
        ('', 'money_phone_20210710_rand2w_2w_train.lst'),
        ('','colloect_phone_money_20210708_train.lst'),
    ),
    img_prefix='/mnt/datadisk0/jingzhudata/phone_money/',
    classes= ('phone', 'money')
)
# Target canvas (height, width) used to rescale boxes before binning.
ch, cw = 576, 960
# Load the annotation lists and plot box aspect-ratio / size histograms.
class PlotRatio(object):
    """Histograms ground-truth box aspect ratios, sizes, and class counts."""
    def __init__(self, **kwargs):
        super(PlotRatio, self).__init__()
        # Root directory that all .lst entries are relative to.
        self.img_prefix = dataset['img_prefix']
    def plot_ratio(self, ann_file, classes):
        """Load annotation from XML style ann_file.

        Args:
            ann_file (str): Path of XML file.

        Returns:
            list[dict]: Annotation info from XML file.
        """
        # print(ann_file, "....debug")
        assert isinstance(ann_file, (list, tuple)), "ann_file must be list or tuple in DGVOCDataset"
        # Histogram bins: 10 aspect-ratio buckets, 11 sqrt(area) buckets,
        # and one counter per class.
        count_wh = [0]*10
        count_squares = [0]*11
        num_class = [0]*2
        for (year, name) in ann_file:
            rootpath = osp.join(self.img_prefix, year)
            for img_id, line in enumerate(open(osp.join(rootpath, name))):
                # Each .lst line is "<img_path> <xml_path>", space- or
                # semicolon-separated; lines without an .xml are skipped.
                if ';' not in line:
                    split_item = line.strip().split()
                else:
                    split_item = line.strip().split(';')
                if len(split_item) != 2:
                    img_path = split_item[0]
                    xml_path = None
                else:
                    img_path, xml_path = split_item
                    if '.xml' != xml_path[-4:]: xml_path = None
                if xml_path is None: continue
                img_path_com = osp.join(rootpath, img_path)
                xml_path_com = osp.join(rootpath, xml_path)
                img = Image.open(img_path_com)
                width, height = img.size # original image size; the XML labels are sometimes wrong
                tree = ET.parse(xml_path_com)
                root = tree.getroot()
                # size = root.find('size')
                # width = size.find('width')
                # height = size.find('height')
                for obj in root.findall('object'):
                    # NOTE(review): this rebinds the outer loop's `name`
                    # (the .lst filename); harmless here since the file was
                    # already opened, but worth renaming eventually.
                    name = obj.find('name').text.lower().strip()
                    # if 'fjs_' in name:
                    #     name = name.replace('fjs_', '')
                    if name not in classes:
                        continue
                    else :
                        idx = classes.index(name)
                        num_class[idx] += 1
                    bndbox = obj.find('bndbox')
                    xmin = bndbox.find('xmin').text
                    ymin = bndbox.find('ymin').text
                    xmax = bndbox.find('xmax').text
                    ymax = bndbox.find('ymax').text
                    #NOTE filter mislabeling gt
                    w_box = float(xmax) - float(xmin)
                    h_box = float(ymax) - float(ymin)
                    if w_box * h_box <= 0 or min(w_box, h_box) < 4 or max(w_box, h_box) < 4 or max(w_box, h_box) > 360:
                        continue
                    # Scale boxes down as if the image were resized to fit
                    # inside the (ch, cw) canvas, keeping aspect ratio.
                    ratio2 = 1.
                    if height > ch or width > cw:
                        ratio2 = np.min(np.array([ch, cw]).astype(np.float64) / np.array([height, width]))
                    w = (w_box) * ratio2
                    h = (h_box) * ratio2
                    if w==0 or h==0:
                        continue
                    ratio = round(w/h, 1)
                    scale = round(w*h, 1)
                    square = math.sqrt(scale)  # side length of an equal-area square
                    if ratio < 0.25:
                        count_wh[0] += 1
                    elif 0.25 <= ratio < 1/3:
                        count_wh[1] += 1
                    elif 1/3 <= ratio < 1/2:
                        count_wh[2] += 1
                    elif 1/2 <= ratio < 1:
                        count_wh[3] += 1
                    elif 1 <= ratio < 1.5:
                        count_wh[4] += 1
                    elif 1.5 <= ratio < 2:
                        count_wh[5] += 1
                    elif 2 <= ratio < 2.5:
                        count_wh[6] += 1
                    elif 2.5 <= ratio < 3:
                        count_wh[7] += 1
                    elif 3 <= ratio < 4:
                        count_wh[8] += 1
                    else:
                        count_wh[9] += 1
                    if square < 8:
                        count_squares[0] += 1
                    elif 8 <= square < 16:
                        count_squares[1] += 1
                    elif 16 <= square < 21:
                        count_squares[2] += 1
                    elif 21 <= square < 32:
                        count_squares[3] += 1
                    elif 32 <= square < 64:
                        count_squares[4] += 1
                    elif 64 <= square < 128:
                        count_squares[5] += 1
                    elif 128 <= square < 256:
                        count_squares[6] += 1
                    elif 256 <= square < 512:
                        count_squares[7] += 1
                    elif 512 <= square < 1024:
                        count_squares[8] += 1
                    elif 1024 <= square < 2048:
                        count_squares[9] += 1
                    elif 2048 <= square < 4096:
                        count_squares[10] += 1
        # Plotting: bar charts of the three histograms, saved to disk.
        wh_df = pd.DataFrame(count_wh, index=['0-0.25','0.25-0.33','0.33-0.5','0.5-1','1-1.5','1.5-2','2-2.5',\
                    '2.5-3','3-4', '>4'], columns=['宽高比'])
        wh_df.plot(kind='bar', color ='#55aacc')
        plt.savefig('./phone_wallet_ratios.jpg')
        #plt.savefig('./dms_ratios_face.jpg')
        squares_df = pd.DataFrame(count_squares, index=['0-8','8-16','16-21', '21-32','32-64','64-128',\
                    '128-256','256-512','512-1024','1024-2048','2048-4096'], columns=['边长范围'])
        squares_df.plot(kind='bar', color ='#55aacc')
        plt.savefig('./phone_wallet_squares.jpg')
        #plt.savefig('./dms_squares_face.jpg')
        num_class_df = pd.DataFrame(num_class,index=['phone', 'money'], columns=['类别数'])
        num_class_df.plot(kind='bar')
        plt.savefig('./rmp.jpg')
# Run the analysis over the configured .lst annotation files.
pr = PlotRatio()
pr.plot_ratio(ann_file = dataset['ann_file'], classes=dataset['classes'])
#pr.plot_ratio(ann_file = dataset['ann_file'], classes=dataset['classes'][3])
"PIL.Image.open",
"xml.etree.ElementTree.parse",
"matplotlib.pyplot.savefig",
"os.path.join",
"math.sqrt",
"numpy.array",
"pandas.DataFrame"
] | [((6121, 6265), 'pandas.DataFrame', 'pd.DataFrame', (['count_wh'], {'index': "['0-0.25', '0.25-0.33', '0.33-0.5', '0.5-1', '1-1.5', '1.5-2', '2-2.5',\n '2.5-3', '3-4', '>4']", 'columns': "['宽高比']"}), "(count_wh, index=['0-0.25', '0.25-0.33', '0.33-0.5', '0.5-1',\n '1-1.5', '1.5-2', '2-2.5', '2.5-3', '3-4', '>4'], columns=['宽高比'])\n", (6133, 6265), True, 'import pandas as pd\n'), ((6359, 6399), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./phone_wallet_ratios.jpg"""'], {}), "('./phone_wallet_ratios.jpg')\n", (6370, 6399), True, 'import matplotlib.pyplot as plt\n'), ((6468, 6641), 'pandas.DataFrame', 'pd.DataFrame', (['count_squares'], {'index': "['0-8', '8-16', '16-21', '21-32', '32-64', '64-128', '128-256', '256-512',\n '512-1024', '1024-2048', '2048-4096']", 'columns': "['边长范围']"}), "(count_squares, index=['0-8', '8-16', '16-21', '21-32', '32-64',\n '64-128', '128-256', '256-512', '512-1024', '1024-2048', '2048-4096'],\n columns=['边长范围'])\n", (6480, 6641), True, 'import pandas as pd\n'), ((6741, 6782), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./phone_wallet_squares.jpg"""'], {}), "('./phone_wallet_squares.jpg')\n", (6752, 6782), True, 'import matplotlib.pyplot as plt\n'), ((6854, 6920), 'pandas.DataFrame', 'pd.DataFrame', (['num_class'], {'index': "['phone', 'money']", 'columns': "['类别数']"}), "(num_class, index=['phone', 'money'], columns=['类别数'])\n", (6866, 6920), True, 'import pandas as pd\n'), ((6966, 6990), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./rmp.jpg"""'], {}), "('./rmp.jpg')\n", (6977, 6990), True, 'import matplotlib.pyplot as plt\n'), ((1520, 1551), 'os.path.join', 'osp.join', (['self.img_prefix', 'year'], {}), '(self.img_prefix, year)\n', (1528, 1551), True, 'import os.path as osp\n'), ((2133, 2161), 'os.path.join', 'osp.join', (['rootpath', 'img_path'], {}), '(rootpath, img_path)\n', (2141, 2161), True, 'import os.path as osp\n'), ((2193, 2221), 'os.path.join', 'osp.join', (['rootpath', 'xml_path'], {}), '(rootpath, 
xml_path)\n', (2201, 2221), True, 'import os.path as osp\n'), ((2244, 2268), 'PIL.Image.open', 'Image.open', (['img_path_com'], {}), '(img_path_com)\n', (2254, 2268), False, 'from PIL import Image\n'), ((2349, 2371), 'xml.etree.ElementTree.parse', 'ET.parse', (['xml_path_com'], {}), '(xml_path_com)\n', (2357, 2371), True, 'import xml.etree.ElementTree as ET\n'), ((1599, 1623), 'os.path.join', 'osp.join', (['rootpath', 'name'], {}), '(rootpath, name)\n', (1607, 1623), True, 'import os.path as osp\n'), ((4087, 4103), 'math.sqrt', 'math.sqrt', (['scale'], {}), '(scale)\n', (4096, 4103), False, 'import math\n'), ((3740, 3765), 'numpy.array', 'np.array', (['[height, width]'], {}), '([height, width])\n', (3748, 3765), True, 'import numpy as np\n'), ((3700, 3718), 'numpy.array', 'np.array', (['[ch, cw]'], {}), '([ch, cw])\n', (3708, 3718), True, 'import numpy as np\n')] |
#!/usr/bin/env python3.5
# driver.py
#
# <NAME> and <NAME>
#
# Major Modification log:
# 2018-06-12 bam - refactored DAS to modularize code found in the run function
# 2017-12-10 slg - refactored the creation of objects for the DAS() object.
# 2017-11-19 slg - rewrite for abstract modular design, created experiment runner
# 2017-08-10 wns - initial framework working
# 2017-07-20 slg - created file
""" This is the main driver for the Disclosure Avoidance Subsystem (DAS).
It executes the disclosure avoidance programs:
it runs a setup module and data reader, runs the selected DAS engine,
calls the output writer, and evaluates the output against the input.
For systems that use Apache Spark, the driver run command is:
spark-submit driver.py path/to/config.ini
For systems that do not use Spark, the driver run command is:
python3 driver.py path/to/config.ini
or:
python3 path/to/driver.py config.ini
Note that the driver.py can be included and run in another program.
"""
import sys
import os
import time
# DAS-specific imports:
sys.path.append(os.getcwd())
sys.path.append(os.path.dirname(__file__))
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(os.path.join(os.path.dirname(__file__), "ctools"))
sys.path.append(os.path.join(os.path.dirname(__file__), "dfxml/python"))
# System Libraries
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from configparser import ConfigParser
from ctools.hierarchical_configparser import HierarchicalConfigParser
import datetime
import json
import logging
import logging.handlers
import numpy
import os
import os.path
import pickle
import re
import subprocess
import sys
import time
import ctools
import ctools.s3
# DAS-specific libraries
import ctools
import ctools.clogging
import experiment
from dfxml.writer import DFXMLWriter
from das_testpoints import log_testpoint
DEFAULT = 'DEFAULT'
ENVIRONMENT = "ENVIRONMENT"
SETUP = "setup"
READER = "reader"
ENGINE = "engine"
ERROR_METRICS = "error_metrics"
WRITER = "writer"
VALIDATOR = "validator"
TAKEDOWN = "takedown"
# LOGGING
LOGGING_SECTION = 'logging'
LOGFILENAME_OPTION='logfilename'
LOGLEVEL_OPTION='loglevel'
LOGFOLDER_OPTION='logfolder'
ROOT = 'root' # where the experiment is running
LOGFILENAME = 'logfilename' #
DEFAULT_LOGFILENAME = 'das'
OUTPUT_FNAME = 'output_fname'
OUTPUT_DIR = "output_dir"
# EXPERIMENT values
EXPERIMENT = 'experiment'
RUN_EXPERIMENT_FLAG = "run_experiment_flag"
EXPERIMENT_SCAFFOLD = 'scaffold'
EXPERIMENT_DIR = 'dir' # the directory in which the experiment is taking place
EXPERIMENT_CONFIG = 'config' # the name of the configuration file
EXPERIMENT_XLABEL = 'xlabel' # what to label the X axis
EXPERIMENT_YLABEL = 'ylabel' # what to label the Y axis
EXPERIMENT_GRID = 'grid' # Draw the grid? True/False
EXPERIMENT_GRAPH_FNAME = 'graph_fname' # filename for figure we are saving
EXPERIMENT_GRAPH_DATA_FNAME = 'graph_data_fname' # Filename for the graph data
EXPERIMENT_AVERAGEX = 'averagex' # should all Y values for a certain X be averaged?
EXPERIMENT_TITLE = 'title'
EXPERIMENT_DRAW_LEGEND = 'draw_legend'
EXPERIMENT_GRAPHX = 'graphx'
EXPERIMENT_GRAPHY = 'graphy'
def config_validate(config, extra_sections=None):
    """Verify that all mandatory DAS sections exist in *config*.

    Args:
        config: a ConfigParser-like mapping of section names.
        extra_sections: optional iterable of additional required section names.

    Raises:
        RuntimeError: if any required section is missing (also logged).
    """
    if extra_sections is None:
        extra_sections = []     # avoid the shared mutable default argument
    for section in [SETUP, READER, ENGINE, WRITER, VALIDATOR, TAKEDOWN] + list(extra_sections):
        if section not in config:
            logging.error("config file missing section '{}'".format(section))
            raise RuntimeError("config file missing section {}".format(section))
def config_apply_environment(config):
    """Copy variables from the [ENVIRONMENT] config section into os.environ.

    Note: By default, section names are case sensitive, but variable names are not.
    Because the convention is that environment variables are all upper-case, we uppercase them.
    """
    # (the redundant function-local `import os` was removed; os is imported at module level)
    if ENVIRONMENT in config:
        for var in config[ENVIRONMENT]:
            name = var.upper()
            value = config[ENVIRONMENT][var]
            logging.info("os.environ: {}={}".format(name, value))
            os.environ[name] = value
### numpy integers can't be serialized; we need our own serializer
### https://stackoverflow.com/questions/27050108/convert-numpy-type-to-python/27050186#27050186
class DriverEncoder(json.JSONEncoder):
    """JSON encoder that can serialize numpy scalar and array types.

    numpy integers/floats are not JSON-serializable by default
    (https://stackoverflow.com/questions/27050108/convert-numpy-type-to-python/27050186#27050186),
    so they are converted to the equivalent builtin types, and ndarrays to nested lists.
    """

    def default(self, obj):
        if isinstance(obj, numpy.integer):
            return int(obj)
        if isinstance(obj, numpy.floating):
            return float(obj)
        if isinstance(obj, numpy.ndarray):
            return obj.tolist()
        # Bug fix: the original referenced the undefined name `MyEncoder` here,
        # raising NameError instead of the proper TypeError for unserializable objects.
        return super(DriverEncoder, self).default(obj)
def strtobool(val, default=None):
    """Convert a truthy/falsy string to a bool.

    Args:
        val: the string to convert (case-insensitive).
        default: value returned when val is empty or None.

    Returns:
        True for y/yes/t/true/on/1; False for n/no/f/false/off/0.

    Raises:
        ValueError: if val is not a recognized boolean string and no default applies.
    """
    if val in ("", None) and default is not None:
        return default
    if val is None:
        # Previously this fell through to None.lower() and raised AttributeError.
        raise ValueError("cannot convert None to bool")
    v = val.lower()
    if v in ('y', 'yes', 't', 'true', 'on', '1'):
        return True
    if v in ('n', 'no', 'f', 'false', 'off', '0'):
        return False
    raise ValueError("invalid truth value {!r}".format(val))
class AbstractDASModule(object):
    """Base class for every DAS pipeline module (setup, reader, engine, ...).

    Holds the run configuration and provides typed accessors for this
    module's own section of the config file.

    Attributes:
        name:   the config-file section this module reads by default.
        config: the ConfigParser/HierarchicalConfigParser for the run.
        setup:  opaque data produced by the setup module.
        t0:     wall-clock time when the module was created.
    """
    def __init__(self, name=None, config=None, setup=None, **kwargs):
        if config is not None:
            assert type(config) in [HierarchicalConfigParser, ConfigParser]
        self.name = name
        self.config = config
        self.setup = setup
        self.t0 = time.time()

    def getconfig(self, key, default=None, section=None):
        """Return config[section][key] as a string.

        section defaults to this module's own section (self.name).  When the
        key is absent, return str(default) if a default was supplied,
        otherwise log and raise KeyError.
        """
        if section is None:
            section = self.name
        try:
            val = self.config[section][key]
            logging.debug("config[{}][{}]={}".format(section, key, val))
            return val
        except KeyError:
            if default is not None:
                logging.debug("config[{}][{}] NOT FOUND; returning default {}".format(section, key, default))
                return str(default)
            logging.info("config[{}][{}] does not exist".format(section, key))
            raise KeyError("Required configuration variable '{}' does not exist in section '[{}]'".format(key, section))

    def getint(self, key, **kwargs):
        """Return the config value converted to int."""
        return int(self.getconfig(key, **kwargs))

    def getfloat(self, key, **kwargs):
        """Return the config value converted to float."""
        return float(self.getconfig(key, **kwargs))

    def getboolean(self, key, default=None, section=None):
        """Return the config value converted to bool (see strtobool for spellings).

        Bug fix: the default is now forwarded to getconfig, so a missing key
        with a non-None default yields the default instead of raising KeyError
        (DAS.runTakedown relies on getboolean("delete_output", False)).
        """
        # Python has no good builtin for converting a string to a boolean, so we use our own.
        return strtobool(self.getconfig(key, default=default, section=section), default=default)

    def getiter(self, key, sep=',', **kwargs):
        """Return an iterator over the sep-separated, whitespace-stripped parts of the value."""
        return map(lambda s: s.strip(), re.split(sep, self.getconfig(key, **kwargs)))

    def gettuple(self, key, **kwargs):
        """Return the config value as a tuple of stripped strings.

        Bug fix: the original called self.getiter(self, key, **kwargs), passing
        `self` as the key and the key as the separator, making gettuple unusable.
        """
        return tuple(self.getiter(key, **kwargs))

    def getiter_of_ints(self, key, **kwargs):
        """Return an iterator over the value's parts converted to int."""
        return map(int, self.getiter(key, **kwargs))

    def gettuple_of_ints(self, key, **kwargs):
        """Return a tuple of the value's parts converted to int."""
        return tuple(self.getiter_of_ints(key, **kwargs))

    def getiter_of_floats(self, key, **kwargs):
        """Return an iterator over the value's parts converted to float."""
        return map(float, self.getiter(key, **kwargs))

    def gettuple_of_floats(self, key, **kwargs):
        """Return a tuple of the value's parts converted to float."""
        return tuple(self.getiter_of_floats(key, **kwargs))

    def getconfitems(self, section):
        """
        Filters out DEFAULTs from config items of the section
        :param section: section of config files
        :return: iterator of config items in the section
        """
        if self.config.has_section(section):
            return list(filter(lambda item: item not in self.config.items('DEFAULT'), self.config.items(section)))
        else:
            return {}
class AbstractExperiment(AbstractDASModule):
    """Base class for experiment drivers.

    An experiment wraps a DAS instance and runs it one or more times.
    """

    def __init__(self, das=None, **kwargs):
        super().__init__(**kwargs)
        self.das = das  # the DAS instance this experiment exercises

    def runExperiment(self):
        """Run the experiment; the base implementation does nothing."""
        return None
class AbstractDASExperiment(AbstractExperiment):
    """Experiment driver: runs the DAS once for every point of a config-defined parameter grid.

    The [experiment] section defines nested loops over config variables
    (loops[rank] = [section, variable, start, stop, step, mode]); each loop
    state is substituted into the config before a DAS run.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.loops = experiment.build_loops(self.config)    # loop specifications from the config
        self.state = experiment.initial_state(self.loops)   # current position in the parameter grid

    def increment_state(self):
        """Advance self.state to the next grid point (None when exhausted); return self.

        The hand-rolled MUL/ADD/LIST incrementing logic that used to live here
        as commented-out code has been superseded by experiment.increment_state()
        and was removed.
        """
        self.state = experiment.increment_state(self.loops, self.state)
        return self

    def substitute_config(self):
        """Write the current loop state into the DAS config; return self."""
        for rank in range(len(self.loops)):
            section = self.loops[rank][0]
            var = self.loops[rank][1]
            self.das.config[section][var] = str(self.state[rank])
        return self

    def runExperiment(self):
        """Run the DAS once per grid point, with scaffold setup/takedown around the sweep."""
        scaffold = Scaffolding(config=self.config)
        scaffold.experimentSetup()
        while self.state is not None:
            self.substitute_config()
            self.das.run()
            self.increment_state()
        scaffold.experimentTakedown()
        return None

    def experimentSetup(self):
        """Pre-sweep hook; the base implementation does nothing."""
        return None

    def experimentTakedown(self):
        """Post-sweep hook; the base implementation does nothing."""
        return None
class AbstractDASSetup(AbstractDASModule):
    """No-op setup module, used when the config names no setup class."""

    def setup_func(self):
        """Setup Function (note the special name); return setup data for the other modules.

        The prototype performs no setup and returns None.
        """
        return None
class AbstractDASReader(AbstractDASModule):
    """No-op reader.  Subclasses read the input named in the config file."""

    def willRead(self):
        """Hook called before read(); return False to abort the run."""
        return True

    def read(self):
        """Read the data; return a reference. Location to read specified in config file.

        The prototype reads nothing and returns None.
        """
        return None

    def didRead(self):
        """Hook called after read()."""
        return
class AbstractDASEngine(AbstractDASModule):
    """No-op disclosure-avoidance engine."""

    def willRun(self):
        """Hook called before run(); return False to abort the run."""
        return True

    def run(self, original_data):
        """Apply disclosure avoidance to *original_data*; nothing to do in the prototype."""
        return

    def didRun(self):
        """Hook called after run()."""
        return
class AbstractDASErrorMetrics(AbstractDASModule):
    """No-op error-metrics module."""

    def willRun(self):
        """Hook called before run(); return False to abort the run."""
        return True

    def run(self, data):
        """Compute error metrics over *data*; nothing to do in the prototype."""
        return None

    def didRun(self):
        """Hook called after run()."""
        return
class AbstractDASWriter(AbstractDASModule):
    """No-op writer: writes nothing and hands the data back as the "written" reference."""

    def willWrite(self):
        """Hook called before write(); return False to abort the run."""
        return True

    def write(self, privatized_data):
        """Write the privatized data and return a reference to what was written.

        By default nothing is written and privatized_data itself is returned.
        """
        return privatized_data

    def didWrite(self):
        """Hook called after write()."""
        return
class AbstractDASValidator(AbstractDASModule):
    """No-op validator: accepts every output.  Subclasses compare input and written output."""

    def willValidate(self):
        """Hook called before validate(); return False to abort the run."""
        return True

    def validate(self, original_data, written_data_reference, **kwargs):
        """Validate the written output against the original data; the prototype always passes."""
        return True

    def didValidate(self):
        """Hook called after validate()."""
        return

    def storeResults(self, data):
        """Append *data* (a dictionary of results) as one JSON line to the results file.

        The file name comes from the 'results_fname' config option
        (default 'results.json').
        """
        fname = self.getconfig('results_fname', default='results.json')
        with open(fname, "a") as f:
            json.dump(data, f, cls=DriverEncoder)
            f.write("\n")
class AbstractDASTakedown(AbstractDASModule):
    """No-op takedown module.

    removeWrittenData() deliberately raises: a subclass must provide it
    before the delete_output option can be honored.
    """

    def willTakedown(self):
        """Hook called before takedown(); return False to abort."""
        return True

    def takedown(self):
        """Perform takedown; nothing to do in the prototype."""
        return True

    def removeWrittenData(self, reference):
        """Delete what's referred to by *reference*; must be overridden (do not call superclass)."""
        raise RuntimeError("No method defined to removeWrittenData({})".format(reference))

    def didTakedown(self):
        """Hook called after takedown()."""
        return True
class Scaffolding(object):
    """Scaffolding for an experiment: optional user-supplied setup/takedown around a sweep.

    The [experiment] 'scaffold' option names a "module.ClassName"; the class is
    imported, instantiated with the config, and its experimentSetup /
    experimentTakedown hooks are invoked around the experiment.
    """

    def __init__(self, config):
        # Consistency fix: DAS() accepts either parser type, so accept both here too.
        assert isinstance(config, (HierarchicalConfigParser, ConfigParser))
        self.config = config
        scaffoldstr = config[EXPERIMENT].get(EXPERIMENT_SCAFFOLD, None)
        if not scaffoldstr:
            logging.info("No scaffolding")
            self.scaffold = None
            return
        # rsplit + fromlist (as in DAS.__init__) so dotted module paths
        # such as "pkg.mod.Class" work; plain split(".") broke on them.
        (scaffold_file, scaffold_class_name) = scaffoldstr.rsplit(".", 1)
        try:
            scaffold_module = __import__(scaffold_file, fromlist=[scaffold_class_name]) if scaffold_file else None
        except ModuleNotFoundError as e:
            logging.exception("Scaffolding import failed. current directory: {}".format(os.getcwd()))
            raise e
        self.scaffold = getattr(scaffold_module, scaffold_class_name)(config=config)

    def experimentSetup(self):
        """Run the scaffold's setup hook, if a scaffold was configured."""
        if self.scaffold:
            self.scaffold.experimentSetup(self.config)

    def experimentTakedown(self):
        """Run the scaffold's takedown hook, if a scaffold was configured."""
        if self.scaffold:
            self.scaffold.experimentTakedown(self.config)
class DAS(object):
    """ The Disclosure Avoidance System.

    Construction resolves and instantiates every pipeline stage (setup,
    reader, engine, error_metrics, writer, validator, takedown) from the
    config file; run() then executes the stages in order:
    read -> engine -> error metrics -> write -> validate -> takedown.
    """

    @staticmethod
    def _stage_class(config, section, default_class_name):
        """Return (module_name, class_name) for the pipeline stage *section*.

        Each stage is configured as config[section][section] = "module.ClassName";
        when the option is absent, fall back to the no-op abstract class defined
        in this driver module.
        """
        try:
            return config[section][section].rsplit(".", 1)
        except KeyError:
            return ('driver', default_class_name)

    def __init__(self, config):
        """ Initialize a DAS given a config file. This creates all of the objects that will be used"""
        assert type(config) in [HierarchicalConfigParser, ConfigParser]
        self.config = config

        # Resolve the module/class name for each stage (factored out of the
        # original seven copy-pasted try/except blocks).
        logging.debug("Reading filenames and class names from config file")
        (setup_file, setup_class_name) = self._stage_class(config, SETUP, 'AbstractDASSetup')
        (reader_file, reader_class_name) = self._stage_class(config, READER, 'AbstractDASReader')
        (engine_file, engine_class_name) = self._stage_class(config, ENGINE, 'AbstractDASEngine')
        (error_metrics_file, error_metrics_class_name) = self._stage_class(config, ERROR_METRICS,
                                                                           'AbstractDASErrorMetrics')
        (writer_file, writer_class_name) = self._stage_class(config, WRITER, 'AbstractDASWriter')
        (validator_file, validator_class_name) = self._stage_class(config, VALIDATOR, 'AbstractDASValidator')
        (takedown_file, takedown_class_name) = self._stage_class(config, TAKEDOWN, 'AbstractDASTakedown')
        logging.debug(
            "classes: {} {} {} {} {} {} {}".format(setup_class_name, engine_class_name, error_metrics_class_name,
                reader_class_name, writer_class_name, validator_class_name, takedown_class_name))

        # Import the modules
        logging.debug(
            "__import__ files: {} {} {} {} {} {} {}".format(setup_file, engine_file, error_metrics_file, reader_file,
                writer_file, validator_file, takedown_file))
        try:
            setup_module = __import__(setup_file, fromlist=[setup_class_name])
            engine_module = __import__(engine_file, fromlist=[engine_class_name])
            reader_module = __import__(reader_file, fromlist=[reader_class_name])
            error_metrics_module = __import__(error_metrics_file, fromlist=[error_metrics_class_name])
            writer_module = __import__(writer_file, fromlist=[writer_class_name])
            validator_module = __import__(validator_file, fromlist=[validator_class_name])
            takedown_module = __import__(takedown_file, fromlist=[takedown_class_name])
        except ImportError as e:
            print("Module import failed.")
            print("current directory: {}".format(os.getcwd()))
            print("__file__: {}".format(__file__))
            raise e

        # Create the instances
        logging.debug(
            "modules: {} {} {} {} {} {} {}".format(setup_module, engine_module, error_metrics_module, reader_module,
                writer_module, validator_module, takedown_module))
        logging.info("Creating and running DAS setup object")
        setup_obj = getattr(setup_module, setup_class_name)(config=config, name=SETUP)
        setup_data = setup_obj.setup_func()
        logging.debug("DAS setup returned {}".format(setup_data))

        # Now create the other objects; each receives the data the setup produced.
        self.reader = getattr(reader_module, reader_class_name)(config=config, setup=setup_data, name=READER)
        self.engine = getattr(engine_module, engine_class_name)(config=config, setup=setup_data, name=ENGINE)
        self.error_metrics = getattr(error_metrics_module, error_metrics_class_name)(config=config, setup=setup_data,
                                                                                     name=ERROR_METRICS)
        self.writer = getattr(writer_module, writer_class_name)(config=config, setup=setup_data, name=WRITER)
        self.validator = getattr(validator_module, validator_class_name)(config=config, setup=setup_data,
                                                                         name=VALIDATOR)
        self.takedown = getattr(takedown_module, takedown_class_name)(config=config, setup=setup_data, name=TAKEDOWN)
        log_testpoint("T03-003S")
        logging.debug("DAS object complete")

    def runReader(self):
        """Run the reader stage and return the original (pre-DAS) data."""
        logging.info("Creating and running DAS reader")
        if not self.reader.willRead():
            logging.info("self.reader.willRead() returned false")
            raise RuntimeError("reader willRead() returned False")
        log_testpoint("T03-004S", "Running Reader module")
        original_data = self.reader.read()
        logging.debug("original_data={}".format(original_data))
        self.reader.didRead()
        return original_data

    def runEngine(self, original_data):
        """Run the disclosure-avoidance engine and return the privatized data."""
        logging.info("Creating and running DAS engine")
        if not self.engine.willRun():
            logging.info("self.engine.willRun() returned false")
            raise RuntimeError("engine willRun() returned False")
        log_testpoint("T03-004S", "Running Engine module")
        privatized_data = self.engine.run(original_data)
        logging.debug("privatized_data={}".format(privatized_data))
        self.engine.didRun()
        return privatized_data

    def runErrorMetrics(self, privatized_data):
        """Run the error-metrics stage over the privatized data and return its result."""
        logging.info("Creating and running DAS error_metrics")
        if not self.error_metrics.willRun():
            logging.info("self.error_metrics.willRun() returned false")
            raise RuntimeError("error_metrics willRun() returned False")
        log_testpoint("T03-004S", "Running Error Metrics module")
        errorMetrics_data = self.error_metrics.run(privatized_data)
        logging.debug("Error Metrics data = {}".format(errorMetrics_data))
        self.error_metrics.didRun()
        return errorMetrics_data

    def runWriter(self, privatized_data):
        """Run the writer stage and return a reference to the written data."""
        logging.info("Creating and running DAS writer")
        if not self.writer.willWrite():
            logging.info("self.writer.willWrite() returned false")
            raise RuntimeError("engine willWrite() returned False")
        log_testpoint("T03-004S", "Running Writer module")
        written_data = self.writer.write(privatized_data)
        logging.debug("written_data={}".format(written_data))
        self.writer.didWrite()
        return written_data

    def runValidator(self, original_data, written_data):
        """Validate the written results against the input; raise RuntimeError on failure."""
        logging.info("Creating and running DAS validator")
        if not self.validator.willValidate():
            logging.info("self.validator.willValidate() returned false")
            raise RuntimeError("validator willValidate() returned False")
        log_testpoint("T03-004S", "Running Validator module")
        valid = self.validator.validate(original_data, written_data)
        logging.debug("valid={}".format(valid))
        if not valid:
            logging.info("self.validator.validate() returned false")
            raise RuntimeError("Did not validate.")
        self.validator.didValidate()
        # If we were asked to get graphx and graphy, call the named validator
        # accessors.  NOTE(review): the collected dict is currently discarded;
        # the calls are kept because the accessors may have side effects.
        data = {}
        if EXPERIMENT in self.config:
            for var in ['graphx', 'graphy']:
                if var in self.config[EXPERIMENT]:
                    (a, b) = self.config[EXPERIMENT][var].split('.')
                    assert a == 'validator'
                    func = getattr(self.validator, b)
                    data[var] = func()
        # Finally take down
        return valid

    def runTakedown(self, written_data):
        """Run the takedown stage; optionally deletes the written output (delete_output option)."""
        logging.info("Creating and running DAS takedown")
        if not self.takedown.willTakedown():
            logging.info("self.takedown.willTakedown() returned false")
            raise RuntimeError("validator willTakedown() returned False")
        self.takedown.takedown()
        if self.takedown.getboolean("delete_output", False):
            logging.info("deleting output {}".format(written_data))
            self.takedown.removeWrittenData(written_data)
        self.takedown.didTakedown()

    def run(self):
        """ Run the DAS. Returns data collected as a dictionary if an EXPERIMENT section is specified in the config file."""
        # Run the full pipeline, stage by stage.
        original_data = self.runReader()
        privatized_data = self.runEngine(original_data)
        errorMetrics_data = self.runErrorMetrics(privatized_data)
        written_data = self.runWriter(privatized_data)
        valid = self.runValidator(original_data, written_data)
        self.runTakedown(written_data)
        log_testpoint("T03-005S")
        # NOTE(review): no data is actually collected here yet; see runValidator.
        data = {}
        return data
def main():
    """Driver. Typically run from __main__ in the program that uses the driver.

    Parses the command line, reads the hierarchical config file, configures
    logging and DFXML provenance, validates the config, builds the DAS, and
    either runs it once or runs the configured experiment sweep.
    """
    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument("config", help="Main Config File")
    parser.add_argument("--experiment",
                        help="Run an experiment according to the [experiment] section, with the results in this directory",
                        action='store_true')
    parser.add_argument("--isolation", help="Specifies isolation mode for experiments",
                        choices=['sameprocess', 'subprocess'], default='sameprocess')
    parser.add_argument("--graphdata", help="Just draw the graph from the data that was already collected.",
                        action='store_true')
    ctools.clogging.add_argument(parser)
    args = parser.parse_args()

    if not os.path.exists(args.config):
        raise RuntimeError("{} does not exist".format(args.config))
    # Bug fix: --experiment is a store_true flag, so its value is False (never
    # None) when omitted; the original `args.experiment is None` test could
    # never fire, silently allowing --graphdata without --experiment.
    if args.graphdata and not args.experiment:
        parser.error("--graphdata requires --experiment")

    ###
    ### Read the configuration file
    ###
    config = HierarchicalConfigParser()
    config.read(args.config)

    ###
    ### Logging must be set up before any logging is done.
    ### By default it is in the current directory, but if we run an experiment,
    ### the logfile goes in that directory.
    ### Added option to put logs in a subfolder specified in the config.
    isodate = datetime.datetime.now().isoformat()[0:19]
    pid = os.getpid()            # hoisted: used by both logfile-name variants
    if config.has_section(LOGGING_SECTION) and config.has_option(LOGGING_SECTION, LOGFOLDER_OPTION) and config.has_option(LOGGING_SECTION, LOGFILENAME_OPTION):
        logfname = f"{config[LOGGING_SECTION][LOGFOLDER_OPTION]}/{config[LOGGING_SECTION][LOGFILENAME_OPTION]}-{isodate}-{pid}.log"
    else:
        logfname = f"{isodate}-{pid}.log"
    # Keep a reference to the DFXML provenance writer for the life of the run.
    dfxml = DFXMLWriter(filename=logfname.replace(".log", ".dfxml"), prettyprint=True)

    # Left here for backward compatibility, to be removed in future versions.
    # NOTE(review): since --experiment became store_true, args.experiment is a
    # bool here, not a directory name, so this branch cannot work as written
    # (os.makedirs(True) / config value True); confirm and remove, or give the
    # flag a directory argument.
    if args.experiment:
        if not os.path.exists(args.experiment):
            os.makedirs(args.experiment)
        if not os.path.isdir(args.experiment):
            raise RuntimeError("{} is not a directory".format(args.experiment))
        config['DEFAULT'][ROOT] = args.experiment
        logfname = os.path.join(args.experiment, logfname)

    ####
    # Make sure the directory for the logfile exists. If not, make it.
    logdirname = os.path.dirname(logfname)
    if logdirname and not os.path.exists(logdirname):
        print("driver.py: os.mkdir({})".format(logdirname))
        os.mkdir(logdirname)
    ctools.clogging.setup(args.loglevel, syslog=True, filename=logfname)
    logging.info("START {} log level: {}".format(os.path.abspath(__file__), args.loglevel))
    t0 = time.time()
    log_testpoint("T03-002S")

    #########################
    # Set up the experiment #
    #########################
    # if there is no experiment section in the config file, add one
    if EXPERIMENT not in config:
        config.add_section(EXPERIMENT)
    # If there is no run experiment flag in the config section, add it
    run_experiment = config[EXPERIMENT].getint(RUN_EXPERIMENT_FLAG, 0)
    # If --experiment was specified, set run_experiment to run
    if args.experiment:
        run_experiment = 1

    ### Now validate and apply the config file
    config_validate(config)
    config_apply_environment(config)

    #############################
    # Create the DAS
    #############################
    das = DAS(config)

    #############################
    # DAS Running Section.
    # Option 1 - run_experiment
    # Option 2 - just run the das
    #############################
    logging.debug("Just before Experiment")
    if run_experiment:
        # Resolve the experiment driver class, defaulting to the abstract one.
        try:
            (experiment_file, experiment_class_name) = config[EXPERIMENT][EXPERIMENT].rsplit(".", 1)
        except KeyError:
            (experiment_file, experiment_class_name) = ('driver', 'AbstractDASExperiment')
        try:
            experiment_module = __import__(experiment_file, fromlist=[experiment_class_name])
        except ImportError as e:
            print("Module import failed.")
            print("current directory: {}".format(os.getcwd()))
            print("__file__: {}".format(__file__))
            raise e
        # Renamed from `experiment` so the module-level `experiment` import is not shadowed.
        experiment_obj = getattr(experiment_module, experiment_class_name)(das=das, config=das.config, name=EXPERIMENT)
        logging.debug("Running DAS Experiment. Logfile: {}".format(logfname))
        experiment_data = experiment_obj.runExperiment()
    else:
        #### Run the DAS without an experiment
        logging.debug("Running DAS without an experiment. Logfile: {}".format(logfname))
        try:
            data = das.run()
        except Exception:
            log_testpoint("T03-005F")
            raise   # bare raise preserves the original traceback (was `raise(e)`)

    ###
    ### Shutdown
    ###
    t1 = time.time()
    t = t1 - t0
    logging.info("Elapsed time: {:.2} seconds".format(t))
    logging.info("END {}".format(os.path.abspath(__file__)))
    logging.shutdown()
    print("*****************************************************")
    print("driver.py: Run completed in {:,.2f} seconds. Logfile: {}".format(t, logfname))


if __name__ == '__main__':
    main()
| [
"logging.debug",
"experiment.initial_state",
"logging.info",
"ctools.clogging.setup",
"ctools.hierarchical_configparser.HierarchicalConfigParser",
"os.path.exists",
"argparse.ArgumentParser",
"os.path.isdir",
"os.mkdir",
"os.getpid",
"ctools.clogging.add_argument",
"experiment.runExperiment",
... | [((1128, 1139), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1137, 1139), False, 'import os\n'), ((1157, 1182), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1172, 1182), False, 'import os\n'), ((23607, 23668), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'formatter_class': 'ArgumentDefaultsHelpFormatter'}), '(formatter_class=ArgumentDefaultsHelpFormatter)\n', (23621, 23668), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter\n'), ((24270, 24306), 'ctools.clogging.add_argument', 'ctools.clogging.add_argument', (['parser'], {}), '(parser)\n', (24298, 24306), False, 'import ctools\n'), ((24624, 24650), 'ctools.hierarchical_configparser.HierarchicalConfigParser', 'HierarchicalConfigParser', ([], {}), '()\n', (24648, 24650), False, 'from ctools.hierarchical_configparser import HierarchicalConfigParser\n'), ((25959, 25984), 'os.path.dirname', 'os.path.dirname', (['logfname'], {}), '(logfname)\n', (25974, 25984), False, 'import os\n'), ((26133, 26201), 'ctools.clogging.setup', 'ctools.clogging.setup', (['args.loglevel'], {'syslog': '(True)', 'filename': 'logfname'}), '(args.loglevel, syslog=True, filename=logfname)\n', (26154, 26201), False, 'import ctools\n'), ((26304, 26315), 'time.time', 'time.time', ([], {}), '()\n', (26313, 26315), False, 'import time\n'), ((26321, 26346), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-002S"""'], {}), "('T03-002S')\n", (26334, 26346), False, 'from das_testpoints import log_testpoint\n'), ((27239, 27278), 'logging.debug', 'logging.debug', (['"""Just before Experiment"""'], {}), "('Just before Experiment')\n", (27252, 27278), False, 'import logging\n'), ((28459, 28470), 'time.time', 'time.time', ([], {}), '()\n', (28468, 28470), False, 'import time\n'), ((28610, 28628), 'logging.shutdown', 'logging.shutdown', ([], {}), '()\n', (28626, 28628), False, 'import logging\n'), ((1213, 1238), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', 
(1228, 1238), False, 'import os\n'), ((1276, 1301), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1291, 1301), False, 'import os\n'), ((1343, 1368), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1358, 1368), False, 'import os\n'), ((5284, 5295), 'time.time', 'time.time', ([], {}), '()\n', (5293, 5295), False, 'import time\n'), ((7977, 8012), 'experiment.build_loops', 'experiment.build_loops', (['self.config'], {}), '(self.config)\n', (7999, 8012), False, 'import experiment\n'), ((8034, 8070), 'experiment.initial_state', 'experiment.initial_state', (['self.loops'], {}), '(self.loops)\n', (8058, 8070), False, 'import experiment\n'), ((9429, 9479), 'experiment.increment_state', 'experiment.increment_state', (['self.loops', 'self.state'], {}), '(self.loops, self.state)\n', (9455, 9479), False, 'import experiment\n'), ((14383, 14450), 'logging.debug', 'logging.debug', (['"""Reading filenames and class names from config file"""'], {}), "('Reading filenames and class names from config file')\n", (14396, 14450), False, 'import logging\n'), ((17742, 17795), 'logging.info', 'logging.info', (['"""Creating and running DAS setup object"""'], {}), "('Creating and running DAS setup object')\n", (17754, 17795), False, 'import logging\n'), ((18908, 18933), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-003S"""'], {}), "('T03-003S')\n", (18921, 18933), False, 'from das_testpoints import log_testpoint\n'), ((18942, 18978), 'logging.debug', 'logging.debug', (['"""DAS object complete"""'], {}), "('DAS object complete')\n", (18955, 18978), False, 'import logging\n'), ((19013, 19060), 'logging.info', 'logging.info', (['"""Creating and running DAS reader"""'], {}), "('Creating and running DAS reader')\n", (19025, 19060), False, 'import logging\n'), ((19246, 19296), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-004S"""', '"""Running Reader module"""'], {}), "('T03-004S', 'Running Reader module')\n", (19259, 
19296), False, 'from das_testpoints import log_testpoint\n'), ((19512, 19559), 'logging.info', 'logging.info', (['"""Creating and running DAS engine"""'], {}), "('Creating and running DAS engine')\n", (19524, 19559), False, 'import logging\n'), ((19742, 19792), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-004S"""', '"""Running Engine module"""'], {}), "('T03-004S', 'Running Engine module')\n", (19755, 19792), False, 'from das_testpoints import log_testpoint\n'), ((20035, 20089), 'logging.info', 'logging.info', (['"""Creating and running DAS error_metrics"""'], {}), "('Creating and running DAS error_metrics')\n", (20047, 20089), False, 'import logging\n'), ((20293, 20350), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-004S"""', '"""Running Error Metrics module"""'], {}), "('T03-004S', 'Running Error Metrics module')\n", (20306, 20350), False, 'from das_testpoints import log_testpoint\n'), ((20614, 20661), 'logging.info', 'logging.info', (['"""Creating and running DAS writer"""'], {}), "('Creating and running DAS writer')\n", (20626, 20661), False, 'import logging\n'), ((20850, 20900), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-004S"""', '"""Running Writer module"""'], {}), "('T03-004S', 'Running Writer module')\n", (20863, 20900), False, 'from das_testpoints import log_testpoint\n'), ((21219, 21269), 'logging.info', 'logging.info', (['"""Creating and running DAS validator"""'], {}), "('Creating and running DAS validator')\n", (21231, 21269), False, 'import logging\n'), ((21476, 21529), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-004S"""', '"""Running Validator module"""'], {}), "('T03-004S', 'Running Validator module')\n", (21489, 21529), False, 'from das_testpoints import log_testpoint\n'), ((22347, 22396), 'logging.info', 'logging.info', (['"""Creating and running DAS takedown"""'], {}), "('Creating and running DAS takedown')\n", (22359, 22396), False, 'import logging\n'), ((23433, 23458), 
'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-005S"""'], {}), "('T03-005S')\n", (23446, 23458), False, 'from das_testpoints import log_testpoint\n'), ((24350, 24377), 'os.path.exists', 'os.path.exists', (['args.config'], {}), '(args.config)\n', (24364, 24377), False, 'import os\n'), ((25821, 25860), 'os.path.join', 'os.path.join', (['args.experiment', 'logfname'], {}), '(args.experiment, logfname)\n', (25833, 25860), False, 'import os\n'), ((26107, 26127), 'os.mkdir', 'os.mkdir', (['logdirname'], {}), '(logdirname)\n', (26115, 26127), False, 'import os\n'), ((28110, 28136), 'experiment.runExperiment', 'experiment.runExperiment', ([], {}), '()\n', (28134, 28136), False, 'import experiment\n'), ((12415, 12452), 'json.dump', 'json.dump', (['data', 'f'], {'cls': 'DriverEncoder'}), '(data, f, cls=DriverEncoder)\n', (12424, 12452), False, 'import json\n'), ((13290, 13320), 'logging.info', 'logging.info', (['"""No scaffolding"""'], {}), "('No scaffolding')\n", (13302, 13320), False, 'import logging\n'), ((19117, 19170), 'logging.info', 'logging.info', (['"""self.reader.willRead() returned false"""'], {}), "('self.reader.willRead() returned false')\n", (19129, 19170), False, 'import logging\n'), ((19615, 19667), 'logging.info', 'logging.info', (['"""self.engine.willRun() returned false"""'], {}), "('self.engine.willRun() returned false')\n", (19627, 19667), False, 'import logging\n'), ((20152, 20211), 'logging.info', 'logging.info', (['"""self.error_metrics.willRun() returned false"""'], {}), "('self.error_metrics.willRun() returned false')\n", (20164, 20211), False, 'import logging\n'), ((20719, 20773), 'logging.info', 'logging.info', (['"""self.writer.willWrite() returned false"""'], {}), "('self.writer.willWrite() returned false')\n", (20731, 20773), False, 'import logging\n'), ((21333, 21393), 'logging.info', 'logging.info', (['"""self.validator.willValidate() returned false"""'], {}), "('self.validator.willValidate() returned false')\n", (21345, 21393), 
False, 'import logging\n'), ((21681, 21737), 'logging.info', 'logging.info', (['"""self.validator.validate() returned false"""'], {}), "('self.validator.validate() returned false')\n", (21693, 21737), False, 'import logging\n'), ((22459, 22518), 'logging.info', 'logging.info', (['"""self.takedown.willTakedown() returned false"""'], {}), "('self.takedown.willTakedown() returned false')\n", (22471, 22518), False, 'import logging\n'), ((25551, 25582), 'os.path.exists', 'os.path.exists', (['args.experiment'], {}), '(args.experiment)\n', (25565, 25582), False, 'import os\n'), ((25596, 25624), 'os.makedirs', 'os.makedirs', (['args.experiment'], {}), '(args.experiment)\n', (25607, 25624), False, 'import os\n'), ((25640, 25670), 'os.path.isdir', 'os.path.isdir', (['args.experiment'], {}), '(args.experiment)\n', (25653, 25670), False, 'import os\n'), ((26011, 26037), 'os.path.exists', 'os.path.exists', (['logdirname'], {}), '(logdirname)\n', (26025, 26037), False, 'import os\n'), ((26251, 26276), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (26266, 26276), False, 'import os\n'), ((28578, 28603), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (28593, 28603), False, 'import os\n'), ((24947, 24970), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (24968, 24970), False, 'import datetime\n'), ((25269, 25280), 'os.getpid', 'os.getpid', ([], {}), '()\n', (25278, 25280), False, 'import os\n'), ((25329, 25340), 'os.getpid', 'os.getpid', ([], {}), '()\n', (25338, 25340), False, 'import os\n'), ((28369, 28394), 'das_testpoints.log_testpoint', 'log_testpoint', (['"""T03-005F"""'], {}), "('T03-005F')\n", (28382, 28394), False, 'from das_testpoints import log_testpoint\n'), ((13668, 13679), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (13677, 13679), False, 'import os\n'), ((17374, 17385), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (17383, 17385), False, 'import os\n'), ((27803, 27814), 'os.getcwd', 'os.getcwd', 
([], {}), '()\n', (27812, 27814), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = os.environ.get(
'SECRET_KEY',
'YOUR_SECRET_KEY'
)
DEBUG = bool(os.environ.get('DEBUG', True))
TEMPLATE_DEBUG = DEBUG
CUSTOM_APPS = (
'resources',
'rest_framework',
)
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.humanize',
'django.contrib.staticfiles',
) + CUSTOM_APPS
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = '{{cookiecutter.project_name}}.urls'
WSGI_APPLICATION = '{{cookiecutter.project_name}}.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
if not DEBUG:
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['ENGINE'] = 'django_postgrespool'
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
ALLOWED_HOSTS = ['*']
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# REST Framework
REST_FRAMEWORK = {
"DEFAULT_RENDER_CLASSES": (
'rest_framework.renderers.JSONRenderer',
),
'PAGINATE_BY': 10,
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.AnonRateThrottle',
),
'DEFAULT_THROTTLE_RATES': {
'anon': '10000/day',
}
}
| [
"dj_database_url.config",
"os.environ.get",
"os.path.join",
"os.path.dirname",
"os.path.abspath"
] | [((126, 173), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""', '"""YOUR_SECRET_KEY"""'], {}), "('SECRET_KEY', 'YOUR_SECRET_KEY')\n", (140, 173), False, 'import os\n'), ((85, 110), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (100, 110), False, 'import os\n'), ((198, 227), 'os.environ.get', 'os.environ.get', (['"""DEBUG"""', '(True)'], {}), "('DEBUG', True)\n", (212, 227), False, 'import os\n'), ((1367, 1391), 'dj_database_url.config', 'dj_database_url.config', ([], {}), '()\n', (1389, 1391), False, 'import dj_database_url\n'), ((1566, 1591), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1581, 1591), False, 'import os\n'), ((1671, 1703), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (1683, 1703), False, 'import os\n'), ((1190, 1226), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (1202, 1226), False, 'import os\n'), ((1815, 1850), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1827, 1850), False, 'import os\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.backends.memcache import MEMCACHE_SERVER
from keystone.backends.api import BaseTokenAPI
# pylint: disable=W0223
class TokenAPI(BaseTokenAPI):
def __init__(self, *args, **kw):
super(TokenAPI, self).__init__(*args, **kw)
def create(self, token):
if not hasattr(token, 'tenant_id'):
token.tenant_id = None
if token.tenant_id is not None:
tenant_user_key = "%s::%s" % (token.tenant_id, token.user_id)
else:
tenant_user_key = "U%s" % token.user_id
MEMCACHE_SERVER.set(token.id, token)
MEMCACHE_SERVER.set(tenant_user_key, token)
def get(self, id):
token = MEMCACHE_SERVER.get(id)
if token is not None and not hasattr(token, 'tenant_id'):
token.tenant_id = None
return token
# pylint: disable=E1103
def delete(self, id):
token = self.get(id)
if token is not None:
MEMCACHE_SERVER.delete(id)
if token is not None and not hasattr(token, 'tenant_id'):
token.tenant_id = None
if token.tenant_id is not None:
MEMCACHE_SERVER.delete("%s::%s" % (token.tenant_id,
token.user_id))
else:
MEMCACHE_SERVER.delete(token.id)
MEMCACHE_SERVER.delete("U%s" % token.user_id)
def get_for_user(self, user_id):
token = MEMCACHE_SERVER.get("U%s" % user_id)
if token is not None and not hasattr(token, 'tenant_id'):
token.tenant_id = None
return token
def get_for_user_by_tenant(self, user_id, tenant_id):
if tenant_id is not None:
token = MEMCACHE_SERVER.get("%s::%s" % (tenant_id, user_id))
else:
token = MEMCACHE_SERVER.get("U%s" % user_id)
if token is not None and not hasattr(token, 'tenant_id'):
token.tenant_id = None
return token
def get():
return TokenAPI()
| [
"keystone.backends.memcache.MEMCACHE_SERVER.get",
"keystone.backends.memcache.MEMCACHE_SERVER.delete",
"keystone.backends.memcache.MEMCACHE_SERVER.set"
] | [((1219, 1255), 'keystone.backends.memcache.MEMCACHE_SERVER.set', 'MEMCACHE_SERVER.set', (['token.id', 'token'], {}), '(token.id, token)\n', (1238, 1255), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((1264, 1307), 'keystone.backends.memcache.MEMCACHE_SERVER.set', 'MEMCACHE_SERVER.set', (['tenant_user_key', 'token'], {}), '(tenant_user_key, token)\n', (1283, 1307), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((1348, 1371), 'keystone.backends.memcache.MEMCACHE_SERVER.get', 'MEMCACHE_SERVER.get', (['id'], {}), '(id)\n', (1367, 1371), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((2118, 2154), 'keystone.backends.memcache.MEMCACHE_SERVER.get', 'MEMCACHE_SERVER.get', (["('U%s' % user_id)"], {}), "('U%s' % user_id)\n", (2137, 2154), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((1620, 1646), 'keystone.backends.memcache.MEMCACHE_SERVER.delete', 'MEMCACHE_SERVER.delete', (['id'], {}), '(id)\n', (1642, 1646), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((2391, 2443), 'keystone.backends.memcache.MEMCACHE_SERVER.get', 'MEMCACHE_SERVER.get', (["('%s::%s' % (tenant_id, user_id))"], {}), "('%s::%s' % (tenant_id, user_id))\n", (2410, 2443), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((2478, 2514), 'keystone.backends.memcache.MEMCACHE_SERVER.get', 'MEMCACHE_SERVER.get', (["('U%s' % user_id)"], {}), "('U%s' % user_id)\n", (2497, 2514), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((1816, 1883), 'keystone.backends.memcache.MEMCACHE_SERVER.delete', 'MEMCACHE_SERVER.delete', (["('%s::%s' % (token.tenant_id, token.user_id))"], {}), "('%s::%s' % (token.tenant_id, token.user_id))\n", (1838, 1883), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n'), ((1969, 2001), 'keystone.backends.memcache.MEMCACHE_SERVER.delete', 'MEMCACHE_SERVER.delete', (['token.id'], {}), '(token.id)\n', (1991, 2001), False, 'from 
keystone.backends.memcache import MEMCACHE_SERVER\n'), ((2018, 2063), 'keystone.backends.memcache.MEMCACHE_SERVER.delete', 'MEMCACHE_SERVER.delete', (["('U%s' % token.user_id)"], {}), "('U%s' % token.user_id)\n", (2040, 2063), False, 'from keystone.backends.memcache import MEMCACHE_SERVER\n')] |
"""
main_module - 模型参数测试,测试时将对应方法的@unittest.skip注释掉.
Main members:
# __main__ - 程序入口.
"""
import unittest
from torch import nn
class TestParameters(unittest.TestCase):
"""模型参数测试.
Main methods:
test_parameters - 模型参数测试.
"""
# @unittest.skip('debug')
def test_parameters(self):
"""模型参数测试.
"""
print('{} test_parameters {}'.format('-'*15, '-'*15))
net = nn.Sequential()
net.add_module('linear', nn.Linear(5, 1))
for param in net.parameters():
print(param)
"""输出
Parameter containing:
tensor([[-0.0567, 0.1161, 0.1954, -0.2397, 0.3248]], requires_grad=True)
Parameter containing:
tensor([-0.0782], requires_grad=True)
"""
for name, param in net.named_parameters():
print('name:{}, param:{}'.format(name, param))
"""
name:linear.weight, param:Parameter containing:
tensor([[-0.3299, -0.2503, 0.1922, -0.3915, -0.2623]], requires_grad=True)
name:linear.bias, param:Parameter containing:
tensor([-0.4374], requires_grad=True)
"""
if __name__ == "__main__":
unittest.main() # 运行当前源文件中的所有测试用例
| [
"unittest.main",
"torch.nn.Sequential",
"torch.nn.Linear"
] | [((1230, 1245), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1243, 1245), False, 'import unittest\n'), ((456, 471), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (469, 471), False, 'from torch import nn\n'), ((506, 521), 'torch.nn.Linear', 'nn.Linear', (['(5)', '(1)'], {}), '(5, 1)\n', (515, 521), False, 'from torch import nn\n')] |
from candidate_nn import CandidateNN
import os
# clear logging dir
os.system('rm -R /tmp/gennn/')
c = CandidateNN(1,"a",
runtime_spec={'id': 1,
'datadir': 'MNIST_data',
'logdir': 'log/',
'validate_each_n_steps': 10,
'max_number_of_iterations': 600,
'max_runtime': 10,
'max_layer': 1})
c2 = CandidateNN(2,"a",
runtime_spec={'id': 1,
'datadir': 'MNIST_data',
'logdir': 'log/',
'validate_each_n_steps': 10,
'max_number_of_iterations': 600,
'max_runtime': 10,
'max_layer': 3})
print(c.network_spec['layers'])
print(c2.network_spec['layers'])
print('\n')
c.crossover( crossover_parms={
'strategy': 'uniform_crossover',
'uniform_method': 'swap',
'rate': 0.7
},other_candidate=c2)
print(c.network_spec['layers'])
print(c2.network_spec['layers'])
| [
"os.system",
"candidate_nn.CandidateNN"
] | [((67, 97), 'os.system', 'os.system', (['"""rm -R /tmp/gennn/"""'], {}), "('rm -R /tmp/gennn/')\n", (76, 97), False, 'import os\n'), ((104, 295), 'candidate_nn.CandidateNN', 'CandidateNN', (['(1)', '"""a"""'], {'runtime_spec': "{'id': 1, 'datadir': 'MNIST_data', 'logdir': 'log/',\n 'validate_each_n_steps': 10, 'max_number_of_iterations': 600,\n 'max_runtime': 10, 'max_layer': 1}"}), "(1, 'a', runtime_spec={'id': 1, 'datadir': 'MNIST_data',\n 'logdir': 'log/', 'validate_each_n_steps': 10,\n 'max_number_of_iterations': 600, 'max_runtime': 10, 'max_layer': 1})\n", (115, 295), False, 'from candidate_nn import CandidateNN\n'), ((434, 625), 'candidate_nn.CandidateNN', 'CandidateNN', (['(2)', '"""a"""'], {'runtime_spec': "{'id': 1, 'datadir': 'MNIST_data', 'logdir': 'log/',\n 'validate_each_n_steps': 10, 'max_number_of_iterations': 600,\n 'max_runtime': 10, 'max_layer': 3}"}), "(2, 'a', runtime_spec={'id': 1, 'datadir': 'MNIST_data',\n 'logdir': 'log/', 'validate_each_n_steps': 10,\n 'max_number_of_iterations': 600, 'max_runtime': 10, 'max_layer': 3})\n", (445, 625), False, 'from candidate_nn import CandidateNN\n')] |
from django.db import models
from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin)
class Base(models.Model):
created = models.DateTimeField(auto_now_add=True, null=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Product(Base):
title = models.CharField(max_length=255)
image_url = models.CharField(max_length=255, null=True, blank=True)
price = models.DecimalField(decimal_places=2, max_digits=15, null=True, blank=True)
product_url = models.CharField(max_length=255, null=True, blank=True)
def __str__(self):
return self.title
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, email, password,
**extra_fields):
if not email:
raise ValueError('The given email must be set')
email = self.normalize_email(email)
user = self.model(email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email, password=None, **extra_fields):
extra_fields.setdefault('is_staff', False)
extra_fields.setdefault('is_superuser', False)
extra_fields.setdefault('is_active', False)
return self._create_user(email, password, **extra_fields)
def create_superuser(self, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
extra_fields.setdefault('is_active', True)
return self._create_user(email, password, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin, Base):
email = models.EmailField(db_index=True, unique=True, max_length=255)
is_staff = models.BooleanField(default=False)
is_superuser = models.BooleanField(default=False)
is_active = models.BooleanField(default=False)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
objects = UserManager()
def __str__(self):
return self.email
def has_perm(self, perm, obj=None):
return True
def has_module_perms(self, app_label):
return True
class UserData(Base):
user = models.OneToOneField(User, related_name='data', on_delete=models.CASCADE, null=True)
activation_key = models.CharField(null=True, max_length=40)
is_key_expired = models.BooleanField(default=False)
def __str__(self):
return self.user.email | [
"django.db.models.EmailField",
"django.db.models.OneToOneField",
"django.db.models.BooleanField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((163, 213), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (183, 213), False, 'from django.db import models\n'), ((229, 264), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (249, 264), False, 'from django.db import models\n'), ((340, 372), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (356, 372), False, 'from django.db import models\n'), ((389, 444), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (405, 444), False, 'from django.db import models\n'), ((457, 532), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (476, 532), False, 'from django.db import models\n'), ((551, 606), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (567, 606), False, 'from django.db import models\n'), ((1726, 1787), 'django.db.models.EmailField', 'models.EmailField', ([], {'db_index': '(True)', 'unique': '(True)', 'max_length': '(255)'}), '(db_index=True, unique=True, max_length=255)\n', (1743, 1787), False, 'from django.db import models\n'), ((1803, 1837), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1822, 1837), False, 'from django.db import models\n'), ((1857, 1891), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1876, 1891), False, 'from django.db import models\n'), ((1908, 1942), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': 
'(False)'}), '(default=False)\n', (1927, 1942), False, 'from django.db import models\n'), ((2245, 2333), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'related_name': '"""data"""', 'on_delete': 'models.CASCADE', 'null': '(True)'}), "(User, related_name='data', on_delete=models.CASCADE,\n null=True)\n", (2265, 2333), False, 'from django.db import models\n'), ((2351, 2393), 'django.db.models.CharField', 'models.CharField', ([], {'null': '(True)', 'max_length': '(40)'}), '(null=True, max_length=40)\n', (2367, 2393), False, 'from django.db import models\n'), ((2415, 2449), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2434, 2449), False, 'from django.db import models\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 8 14:45:56 2019
@author: Xuan-Laptop
"""
import pandas as pd
import numpy as np
from utils import np_macro_f1, encoding
from sklearn.linear_model import LinearRegression, LogisticRegression
import warnings
warnings.filterwarnings("ignore")
def calibration_flathead(y_val, p_pred):
best_ts = 0
best_f1 = 0
for i in range(1, 51):
ts = i/100
out = np_macro_f1(y_val, (p_pred > ts).astype(int), return_details=False)
if out > best_f1:
best_f1 = out
best_ts = ts
df_res = np_macro_f1(y_val, (p_pred > best_ts).astype(int), return_details=True)
return best_ts, df_res
def calibration_perclass(y_val, p_pred):
ts_list = []
for i in range(28):
ts, _ = calibration_flathead(y_val[:, i], p_pred[:, i])
ts_list.append(ts)
df_res = np_macro_f1(y_val, (p_pred > ts_list).astype(int), return_details=True)
return ts_list, df_res
stack_version = 'stacking_v4'
def getLogit(x, epsilon=1e-20):
return np.log(x/(1 - x + epsilon) + epsilon)
def getProb(logit):
return 1/(1 + np.log(-logit))
# load data and align
# chose one of the submission file, make sure id is the same as sample submission
submission = pd.read_csv('./data/sample_submission.csv')
# the files should be find in ./ref_data. It's the same as 5folds_v2
for i in range(5):
if i == 0:
df_val = pd.read_csv('./ref_data/fold_info_SEED1024_val_F{}.csv'.format(str(i)))
else:
df_val = df_val.append(pd.read_csv('./ref_data/fold_info_SEED1024_val_F{}.csv'.format(str(i))), ignore_index=True)
labels = df_val.Target.apply(encoding)
y_val = np.array(labels.tolist())
# put the names of models to be stacked here.
# two files should be included: model_name_val.csv and model_name_test.csv
# Model predicted probability for 28 classes of all images
# the val data will be aligned with respect to the val data loaded from ref_data
# the format would be: Id | 0 | 1 | ... | 28.
models = [#'seresnext50',
'seresnext50_tta',
'inceptionv3_tta',
#'zhu',
'zhu_614',
#'zhu_Jan9',
]
p_test_all = []
p_val_all = []
res_details = []
mask = [str(x) for x in range(28)]
for i, model in enumerate(models):
p_val = pd.read_csv('./data/{}_val.csv'.format(model))
p_val = pd.merge(df_val[['Id']], p_val, how='left', on='Id')
p_val_all.append(np.array(p_val[mask].values))
df_res = np_macro_f1(y_val, np.array(p_val[mask].values), return_details=True)
print('Model_%s f1 loss: %.4f'% (model, df_res.f1_scores.mean()))
res_details.append(df_res)
p_test = pd.read_csv('./data/{}_test.csv'.format(model))
p_test_all.append(np.array(p_test[mask].values))
# Train 28 linear models for each class
lr_models = []
coeff = []
for i in range(28):
tmp = []
for j in range(len(models)):
tmp.append(p_val_all[j][:, i])
X = np.array(tmp)
Y = y_val[:, i:i+1]
lr = LinearRegression()
#lr = LogisticRegression()
lr.fit(X.T, Y)
lr_models.append(lr)
coeff.append(lr.coef_[0])
coeff = np.array(coeff)
# Ensemble predictions
stacking_all = []
val_stack = []
for i in range(28):
lr = lr_models[i]
tmp = []
for j in range(len(models)):
tmp.append(p_test_all[j][:, i])
X = np.array(tmp)
Y = lr.predict(X.T)
Y = Y.clip(0, 1)
stacking_all.append(Y)
tmp = []
for j in range(len(models)):
tmp.append(p_val_all[j][:, i])
X_v = np.array(tmp)
Y_v = lr.predict(X_v.T)
Y_v = Y_v.clip(0, 1)
val_stack.append(Y_v)
p_stack = np.squeeze(np.dstack(stacking_all))
p_stack_val = np.squeeze(np.dstack(val_stack))
df_stack = np_macro_f1(y_val, p_stack_val, return_details=True)
print('Stacking f1-loss: %4f' % (df_stack.f1_scores.mean()))
ts_flat, df_flat = calibration_flathead(y_val, p_stack_val)
ts_perclass, df_perclass = calibration_perclass(y_val, p_stack_val)
print('Flathead: %.4f, Per Class: %.4f'
%(np.mean(df_flat.f1_scores), np.mean(df_perclass.f1_scores)))
df_stack_val = df_val[['Id']]
df_stack_test = submission[['Id']]
for i in range(28):
df_stack_val[str(i)] = p_stack_val[:, i]
df_stack_test[str(i)] = p_stack[:, i]
df_coeff = pd.DataFrame(coeff)
df_coeff.columns = models
# store all necessary information
df_coeff.to_csv('{}_coef.csv'.format(stack_version), index=False)
df_stack_val.to_csv('{}_val.csv'.format(stack_version), index=False)
df_stack_test.to_csv('{}_test.csv'.format(stack_version), index=False)
| [
"numpy.dstack",
"numpy.mean",
"sklearn.linear_model.LinearRegression",
"pandas.read_csv",
"pandas.merge",
"numpy.log",
"numpy.array",
"pandas.DataFrame",
"warnings.filterwarnings",
"utils.np_macro_f1"
] | [((268, 301), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (291, 301), False, 'import warnings\n'), ((1301, 1344), 'pandas.read_csv', 'pd.read_csv', (['"""./data/sample_submission.csv"""'], {}), "('./data/sample_submission.csv')\n", (1312, 1344), True, 'import pandas as pd\n'), ((3229, 3244), 'numpy.array', 'np.array', (['coeff'], {}), '(coeff)\n', (3237, 3244), True, 'import numpy as np\n'), ((3863, 3915), 'utils.np_macro_f1', 'np_macro_f1', (['y_val', 'p_stack_val'], {'return_details': '(True)'}), '(y_val, p_stack_val, return_details=True)\n', (3874, 3915), False, 'from utils import np_macro_f1, encoding\n'), ((4419, 4438), 'pandas.DataFrame', 'pd.DataFrame', (['coeff'], {}), '(coeff)\n', (4431, 4438), True, 'import pandas as pd\n'), ((1083, 1122), 'numpy.log', 'np.log', (['(x / (1 - x + epsilon) + epsilon)'], {}), '(x / (1 - x + epsilon) + epsilon)\n', (1089, 1122), True, 'import numpy as np\n'), ((2436, 2488), 'pandas.merge', 'pd.merge', (["df_val[['Id']]", 'p_val'], {'how': '"""left"""', 'on': '"""Id"""'}), "(df_val[['Id']], p_val, how='left', on='Id')\n", (2444, 2488), True, 'import pandas as pd\n'), ((3043, 3056), 'numpy.array', 'np.array', (['tmp'], {}), '(tmp)\n', (3051, 3056), True, 'import numpy as np\n'), ((3092, 3110), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (3108, 3110), False, 'from sklearn.linear_model import LinearRegression, LogisticRegression\n'), ((3454, 3467), 'numpy.array', 'np.array', (['tmp'], {}), '(tmp)\n', (3462, 3467), True, 'import numpy as np\n'), ((3648, 3661), 'numpy.array', 'np.array', (['tmp'], {}), '(tmp)\n', (3656, 3661), True, 'import numpy as np\n'), ((3778, 3801), 'numpy.dstack', 'np.dstack', (['stacking_all'], {}), '(stacking_all)\n', (3787, 3801), True, 'import numpy as np\n'), ((3829, 3849), 'numpy.dstack', 'np.dstack', (['val_stack'], {}), '(val_stack)\n', (3838, 3849), True, 'import numpy as np\n'), ((2511, 2539), 'numpy.array', 
'np.array', (['p_val[mask].values'], {}), '(p_val[mask].values)\n', (2519, 2539), True, 'import numpy as np\n'), ((2574, 2602), 'numpy.array', 'np.array', (['p_val[mask].values'], {}), '(p_val[mask].values)\n', (2582, 2602), True, 'import numpy as np\n'), ((2819, 2848), 'numpy.array', 'np.array', (['p_test[mask].values'], {}), '(p_test[mask].values)\n', (2827, 2848), True, 'import numpy as np\n'), ((1163, 1177), 'numpy.log', 'np.log', (['(-logit)'], {}), '(-logit)\n', (1169, 1177), True, 'import numpy as np\n'), ((4165, 4191), 'numpy.mean', 'np.mean', (['df_flat.f1_scores'], {}), '(df_flat.f1_scores)\n', (4172, 4191), True, 'import numpy as np\n'), ((4193, 4223), 'numpy.mean', 'np.mean', (['df_perclass.f1_scores'], {}), '(df_perclass.f1_scores)\n', (4200, 4223), True, 'import numpy as np\n')] |
# Generated by Django 3.0.5 on 2020-04-14 15:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('scopes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ScopeTypeRule',
fields=[
('id',
models.AutoField(auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('scope_type',
models.CharField(choices=[('CE', 'Corredores Ecologicos'),
('AC', 'ACR'), ('DI', 'Distritos'),
('EF', 'Ecosistemas fragiles'),
('SA', 'Sitios arqueologicos')],
max_length=2)),
('threshold_type',
models.CharField(choices=[('A', 'Area'), ('P', 'Percentage')],
max_length=1)),
('threshold', models.FloatField(default=5)),
('measurement_content_type',
models.ForeignKey(limit_choices_to=models.Q(
models.Q(('app_label', 'lomas_changes'),
('model', 'coverage_measurements')),
models.Q(('app_label', 'vi_lomas_changes'),
('model', 'coverage_measurements')),
_connector='OR'),
on_delete=django.db.models.deletion.CASCADE,
to='contenttypes.ContentType')),
('user',
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ScopeRule',
fields=[
('id',
models.AutoField(auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('threshold_type',
models.CharField(choices=[('A', 'Area'), ('P', 'Percentage')],
max_length=1)),
('threshold', models.FloatField(default=5)),
('measurement_content_type',
models.ForeignKey(limit_choices_to=models.Q(
models.Q(('app_label', 'lomas_changes'),
('model', 'coverage_measurements')),
models.Q(('app_label', 'vi_lomas_changes'),
('model', 'coverage_measurements')),
_connector='OR'),
on_delete=django.db.models.deletion.CASCADE,
to='contenttypes.ContentType')),
('scope',
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
to='scopes.Scope')),
('user',
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ParameterRule',
fields=[
('id',
models.AutoField(auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('parameter', models.CharField(max_length=64)),
('threshold', models.FloatField(default=5)),
('user',
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.Q",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((306, 363), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (337, 363), False, 'from django.db import migrations, models\n'), ((554, 647), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (570, 647), False, 'from django.db import migrations, models\n'), ((796, 978), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('CE', 'Corredores Ecologicos'), ('AC', 'ACR'), ('DI', 'Distritos'), ('EF',\n 'Ecosistemas fragiles'), ('SA', 'Sitios arqueologicos')]", 'max_length': '(2)'}), "(choices=[('CE', 'Corredores Ecologicos'), ('AC', 'ACR'), (\n 'DI', 'Distritos'), ('EF', 'Ecosistemas fragiles'), ('SA',\n 'Sitios arqueologicos')], max_length=2)\n", (812, 978), False, 'from django.db import migrations, models\n'), ((1187, 1263), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('A', 'Area'), ('P', 'Percentage')]", 'max_length': '(1)'}), "(choices=[('A', 'Area'), ('P', 'Percentage')], max_length=1)\n", (1203, 1263), False, 'from django.db import migrations, models\n'), ((1330, 1358), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(5)'}), '(default=5)\n', (1347, 1358), False, 'from django.db import migrations, models\n'), ((1958, 2057), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL)\n', (1978, 2057), False, 'from django.db import migrations, models\n'), ((2247, 2340), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, 
primary_key=True, serialize=False,\n verbose_name='ID')\n", (2263, 2340), False, 'from django.db import migrations, models\n'), ((2493, 2569), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('A', 'Area'), ('P', 'Percentage')]", 'max_length': '(1)'}), "(choices=[('A', 'Area'), ('P', 'Percentage')], max_length=1)\n", (2509, 2569), False, 'from django.db import migrations, models\n'), ((2636, 2664), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(5)'}), '(default=5)\n', (2653, 2664), False, 'from django.db import migrations, models\n'), ((3265, 3351), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""scopes.Scope"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'scopes.Scope')\n", (3282, 3351), False, 'from django.db import migrations, models\n'), ((3426, 3525), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL)\n', (3446, 3525), False, 'from django.db import migrations, models\n'), ((3719, 3812), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3735, 3812), False, 'from django.db import migrations, models\n'), ((3943, 3974), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (3959, 3974), False, 'from django.db import migrations, models\n'), ((4007, 4035), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(5)'}), '(default=5)\n', (4024, 4035), False, 'from django.db import migrations, models\n'), ((4080, 4179), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 
'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL)\n', (4100, 4179), False, 'from django.db import migrations, models\n'), ((1489, 1565), 'django.db.models.Q', 'models.Q', (["('app_label', 'lomas_changes')", "('model', 'coverage_measurements')"], {}), "(('app_label', 'lomas_changes'), ('model', 'coverage_measurements'))\n", (1497, 1565), False, 'from django.db import migrations, models\n'), ((1618, 1697), 'django.db.models.Q', 'models.Q', (["('app_label', 'vi_lomas_changes')", "('model', 'coverage_measurements')"], {}), "(('app_label', 'vi_lomas_changes'), ('model', 'coverage_measurements'))\n", (1626, 1697), False, 'from django.db import migrations, models\n'), ((2795, 2871), 'django.db.models.Q', 'models.Q', (["('app_label', 'lomas_changes')", "('model', 'coverage_measurements')"], {}), "(('app_label', 'lomas_changes'), ('model', 'coverage_measurements'))\n", (2803, 2871), False, 'from django.db import migrations, models\n'), ((2924, 3003), 'django.db.models.Q', 'models.Q', (["('app_label', 'vi_lomas_changes')", "('model', 'coverage_measurements')"], {}), "(('app_label', 'vi_lomas_changes'), ('model', 'coverage_measurements'))\n", (2932, 3003), False, 'from django.db import migrations, models\n')] |
"""A thin wrapping of the VoIP.ms REST API to make it slightly less than
horrible to use.
"""
import contextlib
from collections import Mapping
import requests
from functools import partial
def validate_response(response_obj):
    """Validate an HTTP response from the VoIP.ms API.

    Raises requests.HTTPError when the status code is not OK and
    VoipMSAPIError when the API payload reports a non-success status.
    Otherwise returns the decoded JSON payload.
    """
    expected = requests.codes.ok
    actual = response_obj.status_code
    if actual != expected:
        message = 'expected status code {}, got {}'.format(expected, actual)
        raise requests.HTTPError(message)
    payload = response_obj.json()
    status = payload['status']
    if status != 'success':
        raise VoipMSAPIError('API status returned {}'.format(status), payload)
    return payload
class VoipMSAPIError(Exception):
    """Raised when the VoIP.ms API reports a non-success status.

    ``validate_response`` raises this with the message as the first
    argument and the decoded JSON response as the second, so callers
    can inspect the full API reply.
    """
    pass
class VoipMS(object):
    """Client entry point for the VoIP.ms REST API.

    Exposes the remote IVR, forwarding and DID collections as read-only
    mapping views (``ivrs``, ``forwarders``, ``dids``).
    """

    def __init__(self, username, password,
                 url='https://voip.ms/api/v1/rest.php'):
        self.username = username
        self.password = password
        self.url = url
        # Lazy mapping views over the remote collections.
        self.ivrs = Directory(self, 'getIVRs', 'ivrs', 'name')
        self.forwarders = Directory(
            self, 'getForwardings', 'forwardings', 'description')
        # DID records get wrapped in the DID helper class.
        self.dids = Directory(
            self, 'getDIDsInfo', 'dids', 'did', partial(DID, api=self))

    @property
    def credentials(self):
        """The ``api_username``/``api_password`` pair every call needs."""
        creds = {}
        for field in ('username', 'password'):
            creds['api_' + field] = getattr(self, field)
        return creds

    @contextlib.contextmanager
    def credentialed_request(self, request):
        """Yield a copy of *request* with the API credentials merged in.

        Asserts that the caller did not already supply credential keys.
        """
        creds = self.credentials
        assert not any(key in request for key in creds)
        merged = dict(request)
        merged.update(creds)
        yield merged
class Directory(Mapping):
    """Read-only mapping view backed by a VoIP.ms REST call.

    Every lookup re-fetches the remote collection, so returned items are
    snapshots: they are plain mutable objects, but mutating them has no
    effect on the remote state.
    """

    def __init__(self, api, method_name, items_key, id_key, factory=dict):
        self.api = api
        self.method_name = method_name
        self.items_key = items_key
        self.id_key = id_key
        self.factory = factory

    def _query(self):
        """Fetch the raw list of records from the API."""
        params = {'method': self.method_name}
        with self.api.credentialed_request(params) as credentialed:
            reply = validate_response(requests.get(self.api.url, credentialed))
            return reply[self.items_key]

    def __getitem__(self, key):
        for record in self._query():
            if record[self.id_key] == key:
                return self.factory(record)
        raise KeyError(key)

    def items(self):
        for record in self._query():
            yield record[self.id_key], self.factory(record)

    def keys(self):
        return (key for key, _ in self.items())

    def values(self):
        return (value for _, value in self.items())

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self._query())
class DID(dict):
    """A single DID record with a helper for changing its routing.

    Note: despite the original docstring's claim, this is a plain ``dict``
    subclass, not a ``Directory`` subclass; instances are produced by the
    ``dids`` Directory via its factory.
    """

    #: caller-facing routing key -> the API's routing kind prefix
    _ROUTING_KINDS = {'forwarding': 'fwd', 'ivr': 'ivr'}

    def __init__(self, mapping, api):
        self.api = api
        super(DID, self).__init__(mapping)

    def set_routing(self, which):
        """Point this DID at a forwarding or an IVR.

        *which* must contain either a ``'forwarding'`` or an ``'ivr'`` key
        naming the target.  Raises ValueError for any other shape (the
        original code died with an opaque NameError from unbound locals).
        """
        for routing_key, kind in self._ROUTING_KINDS.items():
            if routing_key in which:
                key = which[routing_key]
                break
        else:
            raise ValueError(
                'routing must contain "forwarding" or "ivr", got {!r}'
                .format(which))
        routing = ':'.join((kind, key))
        params = {'method': 'setDIDRouting', 'routing': routing,
                  'did': self['did']}
        with self.api.credentialed_request(params) as params:
            response = validate_response(requests.get(self.api.url, params))
        return response
| [
"functools.partial",
"requests.get"
] | [((1394, 1416), 'functools.partial', 'partial', (['DID'], {'api': 'self'}), '(DID, api=self)\n', (1401, 1416), False, 'from functools import partial\n'), ((2558, 2587), 'requests.get', 'requests.get', (['self.api.url', 'p'], {}), '(self.api.url, p)\n', (2570, 2587), False, 'import requests\n'), ((3925, 3959), 'requests.get', 'requests.get', (['self.api.url', 'params'], {}), '(self.api.url, params)\n', (3937, 3959), False, 'import requests\n')] |
from axis_fifo import AXIS_FIFO
from devices import fifo_devices
import struct
class GPIO_seq_point:
    """One entry of the GPIO sequencer table.

    Holds a table address, a timestamp and the output word to drive
    at that time.
    """

    def __init__(self, address, time, output):
        self.address, self.time, self.output = address, time, output
def write_point(fifo, point):
    """Serialize one GPIO_seq_point into the FIFO as three transactions.

    Wire format (big-endian), per the original comment:
    ``01 XX AAAA | TTTTTTTT | DDDDDDDD`` -- a 0x01 0x00 header plus the
    16-bit address, then the 32-bit time, then the 32-bit output word.
    """
    # bytes literal: the original str '\x01\x00' + struct.pack(...) was
    # str + bytes, which raises TypeError under Python 3.
    fifo.write_axis_fifo(b"\x01\x00" + struct.pack('>H', point.address))
    fifo.write_axis_fifo(struct.pack('>I', point.time))
    fifo.write_axis_fifo(struct.pack('>I', point.output))
def program():
    """Push the demo output sequence into the GPIO sequencer FIFO."""
    fifo = AXIS_FIFO(fifo_devices['GPIO_seq'])
    # Table of (address, time, output) entries; last entry parks the
    # outputs low at time 0.
    points = [
        GPIO_seq_point(address=0, time=0, output=0xFFFFFFFF),
        GPIO_seq_point(address=1, time=10000, output=0x00000000),
        GPIO_seq_point(address=2, time=20000, output=0xFFFFFFFF),
        GPIO_seq_point(address=3, time=30000, output=0x00000000),
        GPIO_seq_point(address=4, time=0, output=0x00000000),
    ]
    for point in points:
        write_point(fifo, point)
# Entry point: program the sequencer when executed as a script.
if __name__ == "__main__":
    program()
| [
"struct.pack",
"axis_fifo.AXIS_FIFO"
] | [((486, 521), 'axis_fifo.AXIS_FIFO', 'AXIS_FIFO', (["fifo_devices['GPIO_seq']"], {}), "(fifo_devices['GPIO_seq'])\n", (495, 521), False, 'from axis_fifo import AXIS_FIFO\n'), ((374, 403), 'struct.pack', 'struct.pack', (['""">I"""', 'point.time'], {}), "('>I', point.time)\n", (385, 403), False, 'import struct\n'), ((428, 459), 'struct.pack', 'struct.pack', (['""">I"""', 'point.output'], {}), "('>I', point.output)\n", (439, 459), False, 'import struct\n'), ((317, 349), 'struct.pack', 'struct.pack', (['""">H"""', 'point.address'], {}), "('>H', point.address)\n", (328, 349), False, 'import struct\n')] |
import re
import lib.core.common
# Identification metadata for this detection plugin.
__product__ = "3dcart"
__description__ = (
    "The 3dcart Shopping Cart Software is a complete e-commerce solution for anyone."
)
def search(html, **kwargs):
    """Return True when the response looks like a 3dcart deployment.

    Probes the page body and the Set-Cookie response header against the
    known 3dcart fingerprints.  Returns False when nothing matches (the
    original fell through to an implicit None, and raised AttributeError
    when no ``headers`` kwarg was supplied).
    """
    html = str(html)
    # Default to an empty mapping so the header probe is safe when the
    # caller passes no headers (or headers=None).
    headers = kwargs.get("headers") or {}
    plugin_detection_schema = (
        re.compile(r"3dcart.stats", re.I),
        re.compile(r"/3dvisit/", re.I)
    )
    for plugin in plugin_detection_schema:
        if plugin.search(html) is not None:
            return True
        if plugin.search(headers.get(lib.core.common.HTTP_HEADER.SET_COOKIE, "")) is not None:
            return True
    return False
| [
"re.compile"
] | [((300, 332), 're.compile', 're.compile', (['"""3dcart.stats"""', 're.I'], {}), "('3dcart.stats', re.I)\n", (310, 332), False, 'import re\n'), ((343, 372), 're.compile', 're.compile', (['"""/3dvisit/"""', 're.I'], {}), "('/3dvisit/', re.I)\n", (353, 372), False, 'import re\n')] |
"""
Plot different high-thrust corrections used in BEM
"""
# --- Common libraries
import numpy as np
import matplotlib.pyplot as plt
# --- Local libraries
from welib.BEM.highthrust import *
from welib.tools.figure import defaultRC; defaultRC();
def main(test=False):
    """Plot the momentum-theory Ct(a) curve alongside the common
    high-thrust corrections used in BEM codes.

    NOTE(review): ``test`` is accepted but never used in this body;
    it appears to follow the welib example-runner convention -- verify.
    """
    # Thrust-coefficient and axial-induction evaluation grids
    Ct=np.linspace(0,2,50)
    a =np.linspace(0,1,50)
    # Momentum theory: Ct = 4 a (1 - a)
    Ct_MT = 4*a*(1-a)
    fig,ax = plt.subplots(1, 1, sharey=False, figsize=(6.4,4.8)) # (6.4,4.8)
    fig.subplots_adjust(left=0.12, right=0.95, top=0.95, bottom=0.11, hspace=0.20, wspace=0.20)
    # Functions that depend on a only
    ax.plot(a ,Ct_MT,'k-' ,label = 'Momentum theory' )
    ax.plot(a ,Ct_a(a,method='Glauert'),'-' ,label = 'Glauert (ac=1/3)')
    ax.plot(a ,Ct_a(a,method='Spera') ,'.' ,label = 'Spera (ac=0.3)')
    # Functions that depend on Ct only
    ax.plot(a_Ct(Ct,method = 'AeroDyn' ),Ct,'-' ,label = 'AeroDyn' )
    ax.plot(a_Ct(Ct,method = 'HAWC2' ),Ct,'--',label = 'HAWC2' )
    ax.plot(a_Ct(Ct,method = 'WEHandbook' ),Ct,':' ,label = 'Handbook' )
    ax.plot(a_Ct(Ct,method = 'GlauertEmpirical'),Ct,'-.',label = 'Glauert Empirical')
    ax.set_xlabel('Axial induction, a [-]')
    ax.set_ylabel('Thrust coefficient, Ct [-]')
    ax.set_xlim([0,1])
    ax.set_ylim([0,2])
    ax.legend()
    ax.grid()
    ax.set_title('BEM - High thrust correction')
# welib example-runner hooks: __main__ for interactive use; __test__ and
# __export__ are presumably injected by welib's example/test tooling when
# batch-running the examples -- verify against welib.tools.repo.
if __name__=="__main__":
    main()
    plt.show()
if __name__=="__test__":
    main()
if __name__=="__export__":
    main()
    from welib.tools.repo import export_figs_callback
    export_figs_callback(__file__)
| [
"welib.tools.repo.export_figs_callback",
"welib.tools.figure.defaultRC",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((235, 246), 'welib.tools.figure.defaultRC', 'defaultRC', ([], {}), '()\n', (244, 246), False, 'from welib.tools.figure import defaultRC\n'), ((278, 299), 'numpy.linspace', 'np.linspace', (['(0)', '(2)', '(50)'], {}), '(0, 2, 50)\n', (289, 299), True, 'import numpy as np\n'), ((305, 326), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(50)'], {}), '(0, 1, 50)\n', (316, 326), True, 'import numpy as np\n'), ((361, 413), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'sharey': '(False)', 'figsize': '(6.4, 4.8)'}), '(1, 1, sharey=False, figsize=(6.4, 4.8))\n', (373, 413), True, 'import matplotlib.pyplot as plt\n'), ((1409, 1419), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1417, 1419), True, 'import matplotlib.pyplot as plt\n'), ((1552, 1582), 'welib.tools.repo.export_figs_callback', 'export_figs_callback', (['__file__'], {}), '(__file__)\n', (1572, 1582), False, 'from welib.tools.repo import export_figs_callback\n')] |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from sarna.viz import highlight
def compare_box_and_slice(x, box, slc):
    '''Check that a highlight rectangle spans the x-range selected by a slice.

    The rectangle is expected to extend half a sample beyond the first and
    last selected sample of ``x``, so the patch visually covers every
    selected sample.  Returns True when the box x-limits match.

    (The original left two debug ``print`` calls in this helper; removed.)
    '''
    halfsample = np.diff(x).mean() / 2
    correct_limits = x[slc][[0, -1]] + [-halfsample, halfsample]
    bbox_x_limits = box.get_bbox().get_points()[:, 0]
    return np.allclose(correct_limits, bbox_x_limits)
def test_highlight():
    """Exercise sarna.viz.highlight: patch count, colour/alpha styling and
    the ``bottom_bar`` variant, for one and for two highlighted slices."""
    x = np.arange(0, 10, step=0.05)
    n_times = len(x)
    y = np.random.random(n_times)
    # simple usage
    # ------------
    line = plt.plot(x, y)
    highlight(x, slice(10, 40))
    ax = line[0].axes
    rectangles = ax.findobj(Rectangle)
    # 2 = the one highlight patch plus (presumably) the axes background
    # patch, which findobj(Rectangle) also matches -- verify.
    assert len(rectangles) == 2
    plt.close(ax.figure)
    # two slices, setting color and alpha
    # -----------------------------------
    line = plt.plot(x, y)
    use_alpha, use_color = 0.5, [0.75] * 3
    slices = [slice(10, 40), slice(60, 105)]
    highlight(x, slices, alpha=use_alpha, color=use_color)
    ax = line[0].axes
    rectangles = ax.findobj(Rectangle)
    assert len(rectangles) == 3
    # check box color and box alpha
    rgba = rectangles[0].get_facecolor()
    assert (rgba[:3] == np.array(use_color)).all()
    assert rgba[-1] == use_alpha
    # compare slices and rectangles:
    for box, slc in zip(rectangles, slices):
        assert compare_box_and_slice(x, box, slc)
    plt.close(ax.figure)
    # two slices, using bottom_bar
    # ----------------------------
    line = plt.plot(x, y)
    slices = [slice(10, 40), slice(60, 105)]
    highlight(x, slices, bottom_bar=True)
    ax = line[0].axes
    rectangles = ax.findobj(Rectangle)
    assert len(rectangles) == 5
    # Expected facecolors alternate: light grey (0.95) highlight patches
    # and black (0) bottom bars, for the four highlight-related patches.
    for idx, col in enumerate([0.95, 0, 0.95, 0]):
        rect_color = rectangles[idx].get_facecolor()[:3]
        assert (rect_color == np.array([col] * 3)).all()
    plt.close(ax.figure)
| [
"numpy.allclose",
"sarna.viz.highlight",
"numpy.random.random",
"matplotlib.pyplot.plot",
"numpy.diff",
"matplotlib.pyplot.close",
"numpy.array",
"numpy.arange"
] | [((497, 539), 'numpy.allclose', 'np.allclose', (['correct_limits', 'bbox_x_limits'], {}), '(correct_limits, bbox_x_limits)\n', (508, 539), True, 'import numpy as np\n'), ((572, 599), 'numpy.arange', 'np.arange', (['(0)', '(10)'], {'step': '(0.05)'}), '(0, 10, step=0.05)\n', (581, 599), True, 'import numpy as np\n'), ((630, 655), 'numpy.random.random', 'np.random.random', (['n_times'], {}), '(n_times)\n', (646, 655), True, 'import numpy as np\n'), ((706, 720), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {}), '(x, y)\n', (714, 720), True, 'import matplotlib.pyplot as plt\n'), ((851, 871), 'matplotlib.pyplot.close', 'plt.close', (['ax.figure'], {}), '(ax.figure)\n', (860, 871), True, 'import matplotlib.pyplot as plt\n'), ((968, 982), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {}), '(x, y)\n', (976, 982), True, 'import matplotlib.pyplot as plt\n'), ((1075, 1129), 'sarna.viz.highlight', 'highlight', (['x', 'slices'], {'alpha': 'use_alpha', 'color': 'use_color'}), '(x, slices, alpha=use_alpha, color=use_color)\n', (1084, 1129), False, 'from sarna.viz import highlight\n'), ((1523, 1543), 'matplotlib.pyplot.close', 'plt.close', (['ax.figure'], {}), '(ax.figure)\n', (1532, 1543), True, 'import matplotlib.pyplot as plt\n'), ((1626, 1640), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {}), '(x, y)\n', (1634, 1640), True, 'import matplotlib.pyplot as plt\n'), ((1691, 1728), 'sarna.viz.highlight', 'highlight', (['x', 'slices'], {'bottom_bar': '(True)'}), '(x, slices, bottom_bar=True)\n', (1700, 1728), False, 'from sarna.viz import highlight\n'), ((1993, 2013), 'matplotlib.pyplot.close', 'plt.close', (['ax.figure'], {}), '(ax.figure)\n', (2002, 2013), True, 'import matplotlib.pyplot as plt\n'), ((262, 272), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (269, 272), True, 'import numpy as np\n'), ((1326, 1345), 'numpy.array', 'np.array', (['use_color'], {}), '(use_color)\n', (1334, 1345), True, 'import numpy as np\n'), ((1962, 1981), 'numpy.array', 
'np.array', (['([col] * 3)'], {}), '([col] * 3)\n', (1970, 1981), True, 'import numpy as np\n')] |
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from vitrage_tempest_plugin.tests.api.rca.base import BaseRcaTest
from vitrage_tempest_plugin.tests.api.rca.base import RCA_ALARM_NAME
from vitrage_tempest_plugin.tests.common import nova_utils
from vitrage_tempest_plugin.tests.common import vitrage_utils as v_utils
from vitrage_tempest_plugin.tests import utils
LOG = logging.getLogger(__name__)
class TestRca(BaseRcaTest):
    """RCA (root cause analysis) tests for the Vitrage API."""
    def tearDown(self):
        # Base teardown first, then remove every entity this test created.
        super(TestRca, self).tearDown()
        self._clean_all()
    @classmethod
    def setUpClass(cls):
        super(TestRca, cls).setUpClass()
        # Template that wires a host Aodh alarm into the RCA graph;
        # removed again in tearDownClass.
        cls._template = v_utils.add_template('host_aodh_alarm_for_rca.yaml')
    # noinspection PyPep8Naming
    @classmethod
    def tearDownClass(cls):
        super(TestRca, cls).tearDownClass()
        if cls._template is not None:
            v_utils.delete_template(cls._template['uuid'])
    @utils.tempest_logger
    def test_validate_deduce_alarms(self):
        """Validate deduced alarms.

        Checks that the alarms deduced via the RCA template are correct
        and that their resource ids match the created instances.
        """
        instances = nova_utils.create_instances(num_instances=2,
                                                set_public_network=True)
        self._create_alarm(
            resource_id=self._get_hostname(),
            alarm_name=RCA_ALARM_NAME)
        api_alarms = self.vitrage_client.alarm.list(vitrage_id='all',
                                                    all_tenants=True)
        self._validate_deduce_alarms(alarms=api_alarms,
                                     instances=instances)
    @utils.tempest_logger
    def test_validate_set_state(self):
        """Validate state propagation in the topology.

        After raising the alarm on the host (source state ERROR), the
        template should mark the related resources SUBOPTIMAL for the
        two created instances.
        """
        instances = nova_utils.create_instances(num_instances=2,
                                                set_public_network=True)
        self._create_alarm(
            resource_id=self._get_hostname(),
            alarm_name=RCA_ALARM_NAME)
        topology = self.vitrage_client.topology.get(all_tenants=True)
        self._validate_set_state(topology=topology['nodes'],
                                 instances=instances)
| [
"vitrage_tempest_plugin.tests.common.vitrage_utils.add_template",
"vitrage_tempest_plugin.tests.common.vitrage_utils.delete_template",
"vitrage_tempest_plugin.tests.common.nova_utils.create_instances",
"oslo_log.log.getLogger"
] | [((931, 958), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (948, 958), True, 'from oslo_log import log as logging\n'), ((1236, 1288), 'vitrage_tempest_plugin.tests.common.vitrage_utils.add_template', 'v_utils.add_template', (['"""host_aodh_alarm_for_rca.yaml"""'], {}), "('host_aodh_alarm_for_rca.yaml')\n", (1256, 1288), True, 'from vitrage_tempest_plugin.tests.common import vitrage_utils as v_utils\n'), ((1816, 1885), 'vitrage_tempest_plugin.tests.common.nova_utils.create_instances', 'nova_utils.create_instances', ([], {'num_instances': '(2)', 'set_public_network': '(True)'}), '(num_instances=2, set_public_network=True)\n', (1843, 1885), False, 'from vitrage_tempest_plugin.tests.common import nova_utils\n'), ((2655, 2724), 'vitrage_tempest_plugin.tests.common.nova_utils.create_instances', 'nova_utils.create_instances', ([], {'num_instances': '(2)', 'set_public_network': '(True)'}), '(num_instances=2, set_public_network=True)\n', (2682, 2724), False, 'from vitrage_tempest_plugin.tests.common import nova_utils\n'), ((1461, 1507), 'vitrage_tempest_plugin.tests.common.vitrage_utils.delete_template', 'v_utils.delete_template', (["cls._template['uuid']"], {}), "(cls._template['uuid'])\n", (1484, 1507), True, 'from vitrage_tempest_plugin.tests.common import vitrage_utils as v_utils\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module is for testing unit functions of the hyperparameter loader
"""
import os
import pytest
from pykg2vec.common import KGEArgParser
from pykg2vec.common import HyperparameterLoader
def test_load_default_hyperparameter_file():
    """Bundled defaults: spot-check known freebase15k/analogy values."""
    loader = HyperparameterLoader(KGEArgParser().get_args([]))
    params = loader.load_hyperparameter("freebase15k", "analogy")
    space = loader.load_search_space("analogy")
    assert params["learning_rate"] == 0.1
    assert params["hidden_size"] == 200
    assert str(space["epochs"].inputs()[1]) == "0 Literal{10}"
def test_load_custom_hyperparameter_file():
    """Custom -hpf/-ssf files must override the bundled defaults."""
    resource_dir = os.path.join(
        os.path.dirname(__file__), "resource", "custom_hyperparams")
    hpf = os.path.join(resource_dir, "custom_hpf.yaml")
    ssf = os.path.join(resource_dir, "custom_ssf.yaml")
    loader = HyperparameterLoader(
        KGEArgParser().get_args(["-hpf", hpf, "-ssf", ssf]))
    params = loader.load_hyperparameter("freebase15k", "analogy")
    space = loader.load_search_space("analogy")
    assert params["learning_rate"] == 0.01
    assert params["hidden_size"] == 200
    assert str(space["epochs"].inputs()[1]) == "0 Literal{100}"
def test_exception_on_hyperparameter_file_not_exist():
    """A missing -hpf path surfaces as FileNotFoundError with a clear message."""
    with pytest.raises(FileNotFoundError) as excinfo:
        loader = HyperparameterLoader(
            KGEArgParser().get_args(["-hpf", "not_exist_file"]))
        loader.load_hyperparameter("freebase15k", "analogy")
    assert str(excinfo.value) == "Cannot find configuration file not_exist_file"
def test_exception_on_search_space_file_not_exist():
    """A missing -ssf path surfaces as FileNotFoundError with a clear message."""
    with pytest.raises(FileNotFoundError) as excinfo:
        loader = HyperparameterLoader(
            KGEArgParser().get_args(["-ssf", "not_exist_file"]))
        loader.load_search_space("analogy")
    assert str(excinfo.value) == "Cannot find configuration file not_exist_file"
def test_exception_on_hyperparameter_file_with_wrong_extension():
    """A -hpf file without a .yaml/.yml suffix is rejected with ValueError."""
    bad_file = os.path.join(
        os.path.dirname(__file__), "resource", "custom_hyperparams", "custom.txt")
    with pytest.raises(ValueError) as excinfo:
        loader = HyperparameterLoader(
            KGEArgParser().get_args(["-hpf", bad_file]))
        loader.load_hyperparameter("freebase15k", "analogy")
    expected = "Configuration file must have .yaml or .yml extension: %s" % bad_file
    assert str(excinfo.value) == expected
def test_exception_on_search_space_file_with_wrong_extension():
    """A -ssf file without a .yaml/.yml suffix is rejected with ValueError."""
    bad_file = os.path.join(
        os.path.dirname(__file__), "resource", "custom_hyperparams", "custom.txt")
    with pytest.raises(ValueError) as excinfo:
        loader = HyperparameterLoader(
            KGEArgParser().get_args(["-ssf", bad_file]))
        loader.load_search_space("analogy")
    expected = "Configuration file must have .yaml or .yml extension: %s" % bad_file
    assert str(excinfo.value) == expected
| [
"os.path.dirname",
"pykg2vec.common.KGEArgParser",
"pytest.raises"
] | [((736, 761), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (751, 761), False, 'import os\n'), ((850, 875), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (865, 875), False, 'import os\n'), ((1415, 1447), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (1428, 1447), False, 'import pytest\n'), ((1751, 1783), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (1764, 1783), False, 'import pytest\n'), ((2119, 2144), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2134, 2144), False, 'import os\n'), ((2203, 2228), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2216, 2228), False, 'import pytest\n'), ((2627, 2652), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2642, 2652), False, 'import os\n'), ((2711, 2736), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2724, 2736), False, 'import pytest\n'), ((322, 336), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (334, 336), False, 'from pykg2vec.common import KGEArgParser\n'), ((967, 981), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (979, 981), False, 'from pykg2vec.common import KGEArgParser\n'), ((1495, 1509), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (1507, 1509), False, 'from pykg2vec.common import KGEArgParser\n'), ((1831, 1845), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (1843, 1845), False, 'from pykg2vec.common import KGEArgParser\n'), ((2276, 2290), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (2288, 2290), False, 'from pykg2vec.common import KGEArgParser\n'), ((2784, 2798), 'pykg2vec.common.KGEArgParser', 'KGEArgParser', ([], {}), '()\n', (2796, 2798), False, 'from pykg2vec.common import KGEArgParser\n')] |
from django.urls import path
from django.urls.resolvers import URLPattern
from . import views
# URL routes for this app.  The ``name=`` values are referenced by
# ``reverse()`` / ``{% url %}`` lookups elsewhere in the project.
urlpatterns = [
    path('', views.index, name='index'),
    path('products/', views.get_all_products, name='all_products'),
    path('releases/<int:pk>', views.get_releases, name="releases"),
    path('testresults/', views.get_testresults, name='testresults'),
]
] | [
"django.urls.path"
] | [((115, 150), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (119, 150), False, 'from django.urls import path\n'), ((156, 218), 'django.urls.path', 'path', (['"""products/"""', 'views.get_all_products'], {'name': '"""all_products"""'}), "('products/', views.get_all_products, name='all_products')\n", (160, 218), False, 'from django.urls import path\n'), ((224, 286), 'django.urls.path', 'path', (['"""releases/<int:pk>"""', 'views.get_releases'], {'name': '"""releases"""'}), "('releases/<int:pk>', views.get_releases, name='releases')\n", (228, 286), False, 'from django.urls import path\n'), ((292, 355), 'django.urls.path', 'path', (['"""testresults/"""', 'views.get_testresults'], {'name': '"""testresults"""'}), "('testresults/', views.get_testresults, name='testresults')\n", (296, 355), False, 'from django.urls import path\n')] |
from wasmer import engine, Store, Module, Instance
from wasmer_compiler_cranelift import Compiler as Cranelift
from wasmer_compiler_llvm import Compiler as LLVM
from wasmer_compiler_singlepass import Compiler as Singlepass
TEST_BYTES = open('benchmarks/nbody.wasm', 'rb').read()
def _run_headless_benchmark(benchmark, store):
    """Shared body for the headless benchmarks.

    Compiles and serializes the module once, then times the 'headless'
    path: deserialize + instantiate.  (The original repeated this body
    verbatim in all six benchmark functions.)
    """
    module = Module(store, TEST_BYTES)
    serialized = module.serialize()

    @benchmark
    def bench():
        deserialized = Module.deserialize(store, serialized)
        _ = Instance(deserialized)


def test_benchmark_headless_time_nbody_cranelift_jit(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.JIT(Cranelift)))


def test_benchmark_headless_time_nbody_cranelift_native(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.Native(Cranelift)))


def test_benchmark_headless_time_nbody_llvm_jit(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.JIT(LLVM)))


def test_benchmark_headless_time_nbody_llvm_native(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.Native(LLVM)))


def test_benchmark_headless_time_nbody_singlepass_jit(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.JIT(Singlepass)))


def test_benchmark_headless_time_nbody_singlepass_native(benchmark):
    _run_headless_benchmark(benchmark, Store(engine.Native(Singlepass)))
| [
"wasmer.engine.Native",
"wasmer.Instance",
"wasmer.engine.JIT",
"wasmer.Module",
"wasmer.Module.deserialize"
] | [((400, 425), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (406, 425), False, 'from wasmer import engine, Store, Module, Instance\n'), ((717, 742), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (723, 742), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1018, 1043), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (1024, 1043), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1325, 1350), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (1331, 1350), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1638, 1663), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (1644, 1663), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1957, 1982), 'wasmer.Module', 'Module', (['store', 'TEST_BYTES'], {}), '(store, TEST_BYTES)\n', (1963, 1982), False, 'from wasmer import engine, Store, Module, Instance\n'), ((364, 385), 'wasmer.engine.JIT', 'engine.JIT', (['Cranelift'], {}), '(Cranelift)\n', (374, 385), False, 'from wasmer import engine, Store, Module, Instance\n'), ((518, 555), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, serialized)\n', (536, 555), False, 'from wasmer import engine, Store, Module, Instance\n'), ((568, 590), 'wasmer.Instance', 'Instance', (['deserialized'], {}), '(deserialized)\n', (576, 590), False, 'from wasmer import engine, Store, Module, Instance\n'), ((678, 702), 'wasmer.engine.Native', 'engine.Native', (['Cranelift'], {}), '(Cranelift)\n', (691, 702), False, 'from wasmer import engine, Store, Module, Instance\n'), ((835, 872), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, serialized)\n', (853, 872), False, 'from wasmer import engine, Store, Module, Instance\n'), ((885, 907), 'wasmer.Instance', 'Instance', 
(['deserialized'], {}), '(deserialized)\n', (893, 907), False, 'from wasmer import engine, Store, Module, Instance\n'), ((987, 1003), 'wasmer.engine.JIT', 'engine.JIT', (['LLVM'], {}), '(LLVM)\n', (997, 1003), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1136, 1173), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, serialized)\n', (1154, 1173), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1186, 1208), 'wasmer.Instance', 'Instance', (['deserialized'], {}), '(deserialized)\n', (1194, 1208), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1291, 1310), 'wasmer.engine.Native', 'engine.Native', (['LLVM'], {}), '(LLVM)\n', (1304, 1310), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1443, 1480), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, serialized)\n', (1461, 1480), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1493, 1515), 'wasmer.Instance', 'Instance', (['deserialized'], {}), '(deserialized)\n', (1501, 1515), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1601, 1623), 'wasmer.engine.JIT', 'engine.JIT', (['Singlepass'], {}), '(Singlepass)\n', (1611, 1623), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1756, 1793), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, serialized)\n', (1774, 1793), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1806, 1828), 'wasmer.Instance', 'Instance', (['deserialized'], {}), '(deserialized)\n', (1814, 1828), False, 'from wasmer import engine, Store, Module, Instance\n'), ((1917, 1942), 'wasmer.engine.Native', 'engine.Native', (['Singlepass'], {}), '(Singlepass)\n', (1930, 1942), False, 'from wasmer import engine, Store, Module, Instance\n'), ((2075, 2112), 'wasmer.Module.deserialize', 'Module.deserialize', (['store', 'serialized'], {}), '(store, 
serialized)\n', (2093, 2112), False, 'from wasmer import engine, Store, Module, Instance\n'), ((2125, 2147), 'wasmer.Instance', 'Instance', (['deserialized'], {}), '(deserialized)\n', (2133, 2147), False, 'from wasmer import engine, Store, Module, Instance\n')] |
"""Periodogram demos: a tone with phase noise, modulated tones, a two-tone
mixture, and a real weekly network KPI series (S1-MME attach successes)."""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd  # FIX: pd.read_csv() is used below but pandas was never imported
from numpy import pi as pi
from numpy import sin as sin
from numpy import cos as cos
from scipy import signal

# --- Example 1a: pure 50 Hz tone vs. the same tone with strong phase noise ---
N = 1000
f = 50
T = 1 / f  # signal period (kept for reference; unused below)
t = np.arange(N)
Phi = np.random.normal(0, 1, N)
X = sin(2 * pi * f * t / N)
Y = sin(2 * pi * f * t / N + Phi)
plt.plot(Phi)
plt.plot(X)
plt.plot(Y)
# raw strings avoid the invalid '\P' escape-sequence warning
plt.legend([r'$\Phi$', '$X_t$', '$Y_t$'])
plt.show()
# F, Pxx_den = signal.periodogram(X, N)
# plt.semilogy(F, Pxx_den)
G, Pyy_den = signal.periodogram(Y, N)
plt.plot(G, Pyy_den)
# plt.legend(['$F(X)$', '$F(Y)$'])
plt.show()

# --- Example 1b: amplitude-modulated tone vs. a phase-noisy product ---
N = 10000
f1 = 50
f2 = 1e3
t = np.arange(N)
Phi1 = np.random.normal(0, 5, N)
Phi2 = np.random.normal(0, 5, N)
X = sin(2 * pi * f2 * t / N) * cos(2 * pi * f1 * t / N)
Y = sin(2 * pi * f1 * t / N + Phi1) * cos(2 * pi * f1 * t / N + Phi2)
plt.plot(Phi1)
plt.plot(X)
plt.plot(Y)
plt.legend([r'$\Phi$', '$X_t$', '$Y_t$'])
plt.show()
F, Pxx_den = signal.periodogram(X, N)
plt.semilogy(F, Pxx_den)
plt.show()
G, Pyy_den = signal.periodogram(Y, N)
plt.plot(G, Pyy_den)
# plt.legend(['$F(X)$', '$F(Y)$'])
plt.show()

# --- Example 1c: sum of two tones; the periodogram resolves both peaks ---
N = 10000
f1 = 7
f2 = 24
A = 0.5
B = 1.5
t = np.arange(N)
Phi1 = np.random.normal(0, 1, N)
Phi2 = np.random.normal(0, 1, N)
X = A * sin(2 * pi * f2 * t / N) + B * sin(2 * pi * f1 * t / N)
Y = A * sin(2 * pi * f2 * t / N + Phi1) + B * sin(2 * pi * f1 * t / N + Phi2)
plt.plot(Phi1)
plt.plot(X)
plt.plot(Y)
plt.legend([r'$\Phi$', '$X_t$', '$Y_t$'])
plt.show()
F, Pxx_den = signal.periodogram(X, N)
plt.plot(F, Pxx_den)
# plt.show()
G, Pyy_den = signal.periodogram(Y, N)
plt.plot(G, Pyy_den)
plt.legend(['$F(X)$', '$F(Y)$'])
plt.grid()
plt.show()

# --- Real data: weekly S1-MME attach-success counts ---
inputfile = "../Datasets/S1MME_week43.csv"
df = pd.read_csv(inputfile)
Y = df.S1_mode_combined_attach_success_times_SEQ
N = int(len(Y) / 2)
G, Pyy_den = signal.periodogram(Y, N)
plt.subplot(2, 1, 1)
plt.plot(Y)
plt.subplot(2, 1, 2)
plt.plot(G, Pyy_den)
plt.show()

# First difference removes trend/level so the periodic structure stands out.
dataset = Y
interval = 1
diff = [dataset[i] - dataset[i - interval]
        for i in range(interval, len(dataset))]
plt.plot(Y)
plt.plot(diff)
plt.legend(['$Y_t$', '$\\nabla Y_t$'])
plt.show()
N = int(len(diff))
G, Pyy_den = signal.periodogram(Y, N)
plt.subplot(2, 1, 1)
plt.plot(diff)
plt.subplot(2, 1, 2)
plt.plot(G, Pyy_den)
plt.grid()
plt.show()

# Hand-fitted two-tone model of the KPI's seasonality.
# TODO: iterate, add more components.
f1 = 21
f2 = 42
A = 12.5 * 1e4
B = 3.85 * 1e4
t = np.arange(N)
Y_ = A * sin(2 * pi * f2 * t / N) + B * sin(2 * pi * f1 * t / N)
# plt.plot(Y)
plt.plot(Y - 200e3)
plt.plot(Y_)
plt.grid()
# plt.legend(['$Y_t$', '$\\nabla Y_t$', '$\\hat{Y}_t$'])
plt.legend(['$Y_t-2e5$', '$\\hat{Y}_t$'])
plt.show()
| [
"numpy.random.normal",
"matplotlib.pyplot.semilogy",
"matplotlib.pyplot.grid",
"numpy.arange",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.subplot",
"numpy.cos",
"numpy.sin",
"scipy.signal.periodogram",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((196, 208), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (205, 208), True, 'import numpy as np\n'), ((213, 238), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', 'N'], {}), '(0, 1, N)\n', (229, 238), True, 'import numpy as np\n'), ((239, 262), 'numpy.sin', 'sin', (['(2 * pi * f * t / N)'], {}), '(2 * pi * f * t / N)\n', (242, 262), True, 'from numpy import sin as sin\n'), ((257, 286), 'numpy.sin', 'sin', (['(2 * pi * f * t / N + Phi)'], {}), '(2 * pi * f * t / N + Phi)\n', (260, 286), True, 'from numpy import sin as sin\n'), ((280, 293), 'matplotlib.pyplot.plot', 'plt.plot', (['Phi'], {}), '(Phi)\n', (288, 293), True, 'import matplotlib.pyplot as plt\n'), ((294, 305), 'matplotlib.pyplot.plot', 'plt.plot', (['X'], {}), '(X)\n', (302, 305), True, 'import matplotlib.pyplot as plt\n'), ((306, 317), 'matplotlib.pyplot.plot', 'plt.plot', (['Y'], {}), '(Y)\n', (314, 317), True, 'import matplotlib.pyplot as plt\n'), ((318, 359), 'matplotlib.pyplot.legend', 'plt.legend', (["['$\\\\Phi$', '$X_t$', '$Y_t$']"], {}), "(['$\\\\Phi$', '$X_t$', '$Y_t$'])\n", (328, 359), True, 'import matplotlib.pyplot as plt\n'), ((357, 367), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (365, 367), True, 'import matplotlib.pyplot as plt\n'), ((446, 470), 'scipy.signal.periodogram', 'signal.periodogram', (['Y', 'N'], {}), '(Y, N)\n', (464, 470), False, 'from scipy import signal\n'), ((470, 490), 'matplotlib.pyplot.plot', 'plt.plot', (['G', 'Pyy_den'], {}), '(G, Pyy_den)\n', (478, 490), True, 'import matplotlib.pyplot as plt\n'), ((524, 534), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (532, 534), True, 'import matplotlib.pyplot as plt\n'), ((573, 585), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (582, 585), True, 'import numpy as np\n'), ((591, 616), 'numpy.random.normal', 'np.random.normal', (['(0)', '(5)', 'N'], {}), '(0, 5, N)\n', (607, 616), True, 'import numpy as np\n'), ((620, 645), 'numpy.random.normal', 'np.random.normal', (['(0)', '(5)', 
'N'], {}), '(0, 5, N)\n', (636, 645), True, 'import numpy as np\n'), ((729, 743), 'matplotlib.pyplot.plot', 'plt.plot', (['Phi1'], {}), '(Phi1)\n', (737, 743), True, 'import matplotlib.pyplot as plt\n'), ((744, 755), 'matplotlib.pyplot.plot', 'plt.plot', (['X'], {}), '(X)\n', (752, 755), True, 'import matplotlib.pyplot as plt\n'), ((756, 767), 'matplotlib.pyplot.plot', 'plt.plot', (['Y'], {}), '(Y)\n', (764, 767), True, 'import matplotlib.pyplot as plt\n'), ((768, 809), 'matplotlib.pyplot.legend', 'plt.legend', (["['$\\\\Phi$', '$X_t$', '$Y_t$']"], {}), "(['$\\\\Phi$', '$X_t$', '$Y_t$'])\n", (778, 809), True, 'import matplotlib.pyplot as plt\n'), ((807, 817), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (815, 817), True, 'import matplotlib.pyplot as plt\n'), ((832, 856), 'scipy.signal.periodogram', 'signal.periodogram', (['X', 'N'], {}), '(X, N)\n', (850, 856), False, 'from scipy import signal\n'), ((856, 880), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['F', 'Pxx_den'], {}), '(F, Pxx_den)\n', (868, 880), True, 'import matplotlib.pyplot as plt\n'), ((881, 891), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (889, 891), True, 'import matplotlib.pyplot as plt\n'), ((905, 929), 'scipy.signal.periodogram', 'signal.periodogram', (['Y', 'N'], {}), '(Y, N)\n', (923, 929), False, 'from scipy import signal\n'), ((929, 949), 'matplotlib.pyplot.plot', 'plt.plot', (['G', 'Pyy_den'], {}), '(G, Pyy_den)\n', (937, 949), True, 'import matplotlib.pyplot as plt\n'), ((983, 993), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (991, 993), True, 'import matplotlib.pyplot as plt\n'), ((1042, 1054), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (1051, 1054), True, 'import numpy as np\n'), ((1060, 1085), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', 'N'], {}), '(0, 1, N)\n', (1076, 1085), True, 'import numpy as np\n'), ((1089, 1114), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', 'N'], {}), '(0, 1, N)\n', (1105, 1114), True, 
'import numpy as np\n'), ((1206, 1220), 'matplotlib.pyplot.plot', 'plt.plot', (['Phi1'], {}), '(Phi1)\n', (1214, 1220), True, 'import matplotlib.pyplot as plt\n'), ((1221, 1232), 'matplotlib.pyplot.plot', 'plt.plot', (['X'], {}), '(X)\n', (1229, 1232), True, 'import matplotlib.pyplot as plt\n'), ((1233, 1244), 'matplotlib.pyplot.plot', 'plt.plot', (['Y'], {}), '(Y)\n', (1241, 1244), True, 'import matplotlib.pyplot as plt\n'), ((1245, 1286), 'matplotlib.pyplot.legend', 'plt.legend', (["['$\\\\Phi$', '$X_t$', '$Y_t$']"], {}), "(['$\\\\Phi$', '$X_t$', '$Y_t$'])\n", (1255, 1286), True, 'import matplotlib.pyplot as plt\n'), ((1284, 1294), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1292, 1294), True, 'import matplotlib.pyplot as plt\n'), ((1309, 1333), 'scipy.signal.periodogram', 'signal.periodogram', (['X', 'N'], {}), '(X, N)\n', (1327, 1333), False, 'from scipy import signal\n'), ((1333, 1353), 'matplotlib.pyplot.plot', 'plt.plot', (['F', 'Pxx_den'], {}), '(F, Pxx_den)\n', (1341, 1353), True, 'import matplotlib.pyplot as plt\n'), ((1379, 1403), 'scipy.signal.periodogram', 'signal.periodogram', (['Y', 'N'], {}), '(Y, N)\n', (1397, 1403), False, 'from scipy import signal\n'), ((1403, 1423), 'matplotlib.pyplot.plot', 'plt.plot', (['G', 'Pyy_den'], {}), '(G, Pyy_den)\n', (1411, 1423), True, 'import matplotlib.pyplot as plt\n'), ((1424, 1456), 'matplotlib.pyplot.legend', 'plt.legend', (["['$F(X)$', '$F(Y)$']"], {}), "(['$F(X)$', '$F(Y)$'])\n", (1434, 1456), True, 'import matplotlib.pyplot as plt\n'), ((1456, 1466), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1464, 1466), True, 'import matplotlib.pyplot as plt\n'), ((1467, 1477), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1475, 1477), True, 'import matplotlib.pyplot as plt\n'), ((1630, 1654), 'scipy.signal.periodogram', 'signal.periodogram', (['Y', 'N'], {}), '(Y, N)\n', (1648, 1654), False, 'from scipy import signal\n'), ((1654, 1674), 'matplotlib.pyplot.subplot', 'plt.subplot', 
(['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (1665, 1674), True, 'import matplotlib.pyplot as plt\n'), ((1673, 1684), 'matplotlib.pyplot.plot', 'plt.plot', (['Y'], {}), '(Y)\n', (1681, 1684), True, 'import matplotlib.pyplot as plt\n'), ((1685, 1705), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (1696, 1705), True, 'import matplotlib.pyplot as plt\n'), ((1704, 1724), 'matplotlib.pyplot.plot', 'plt.plot', (['G', 'Pyy_den'], {}), '(G, Pyy_den)\n', (1712, 1724), True, 'import matplotlib.pyplot as plt\n'), ((1725, 1735), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1733, 1735), True, 'import matplotlib.pyplot as plt\n'), ((1878, 1889), 'matplotlib.pyplot.plot', 'plt.plot', (['Y'], {}), '(Y)\n', (1886, 1889), True, 'import matplotlib.pyplot as plt\n'), ((1890, 1904), 'matplotlib.pyplot.plot', 'plt.plot', (['diff'], {}), '(diff)\n', (1898, 1904), True, 'import matplotlib.pyplot as plt\n'), ((1905, 1943), 'matplotlib.pyplot.legend', 'plt.legend', (["['$Y_t$', '$\\\\nabla Y_t$']"], {}), "(['$Y_t$', '$\\\\nabla Y_t$'])\n", (1915, 1943), True, 'import matplotlib.pyplot as plt\n'), ((1943, 1953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1951, 1953), True, 'import matplotlib.pyplot as plt\n'), ((1986, 2010), 'scipy.signal.periodogram', 'signal.periodogram', (['Y', 'N'], {}), '(Y, N)\n', (2004, 2010), False, 'from scipy import signal\n'), ((2010, 2030), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (2021, 2030), True, 'import matplotlib.pyplot as plt\n'), ((2029, 2043), 'matplotlib.pyplot.plot', 'plt.plot', (['diff'], {}), '(diff)\n', (2037, 2043), True, 'import matplotlib.pyplot as plt\n'), ((2044, 2064), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (2055, 2064), True, 'import matplotlib.pyplot as plt\n'), ((2063, 2083), 'matplotlib.pyplot.plot', 'plt.plot', (['G', 'Pyy_den'], {}), '(G, Pyy_den)\n', (2071, 2083), True, 
'import matplotlib.pyplot as plt\n'), ((2084, 2094), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2092, 2094), True, 'import matplotlib.pyplot as plt\n'), ((2095, 2105), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2103, 2105), True, 'import matplotlib.pyplot as plt\n'), ((2178, 2190), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (2187, 2190), True, 'import numpy as np\n'), ((2246, 2268), 'matplotlib.pyplot.plot', 'plt.plot', (['(Y - 200000.0)'], {}), '(Y - 200000.0)\n', (2254, 2268), True, 'import matplotlib.pyplot as plt\n'), ((2264, 2276), 'matplotlib.pyplot.plot', 'plt.plot', (['Y_'], {}), '(Y_)\n', (2272, 2276), True, 'import matplotlib.pyplot as plt\n'), ((2277, 2287), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2285, 2287), True, 'import matplotlib.pyplot as plt\n'), ((2342, 2383), 'matplotlib.pyplot.legend', 'plt.legend', (["['$Y_t-2e5$', '$\\\\hat{Y}_t$']"], {}), "(['$Y_t-2e5$', '$\\\\hat{Y}_t$'])\n", (2352, 2383), True, 'import matplotlib.pyplot as plt\n'), ((2383, 2393), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2391, 2393), True, 'import matplotlib.pyplot as plt\n'), ((646, 670), 'numpy.sin', 'sin', (['(2 * pi * f2 * t / N)'], {}), '(2 * pi * f2 * t / N)\n', (649, 670), True, 'from numpy import sin as sin\n'), ((663, 687), 'numpy.cos', 'cos', (['(2 * pi * f1 * t / N)'], {}), '(2 * pi * f1 * t / N)\n', (666, 687), True, 'from numpy import cos as cos\n'), ((682, 713), 'numpy.sin', 'sin', (['(2 * pi * f1 * t / N + Phi1)'], {}), '(2 * pi * f1 * t / N + Phi1)\n', (685, 713), True, 'from numpy import sin as sin\n'), ((706, 737), 'numpy.cos', 'cos', (['(2 * pi * f1 * t / N + Phi2)'], {}), '(2 * pi * f1 * t / N + Phi2)\n', (709, 737), True, 'from numpy import cos as cos\n'), ((1117, 1141), 'numpy.sin', 'sin', (['(2 * pi * f2 * t / N)'], {}), '(2 * pi * f2 * t / N)\n', (1120, 1141), True, 'from numpy import sin as sin\n'), ((1136, 1160), 'numpy.sin', 'sin', (['(2 * pi * f1 * t / N)'], {}), '(2 * pi * f1 * 
t / N)\n', (1139, 1160), True, 'from numpy import sin as sin\n'), ((1157, 1188), 'numpy.sin', 'sin', (['(2 * pi * f2 * t / N + Phi1)'], {}), '(2 * pi * f2 * t / N + Phi1)\n', (1160, 1188), True, 'from numpy import sin as sin\n'), ((1183, 1214), 'numpy.sin', 'sin', (['(2 * pi * f1 * t / N + Phi2)'], {}), '(2 * pi * f1 * t / N + Phi2)\n', (1186, 1214), True, 'from numpy import sin as sin\n'), ((2196, 2220), 'numpy.sin', 'sin', (['(2 * pi * f2 * t / N)'], {}), '(2 * pi * f2 * t / N)\n', (2199, 2220), True, 'from numpy import sin as sin\n'), ((2215, 2239), 'numpy.sin', 'sin', (['(2 * pi * f1 * t / N)'], {}), '(2 * pi * f1 * t / N)\n', (2218, 2239), True, 'from numpy import sin as sin\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from .base import RedisTestCase
from redis_collections import List
class ListTest(RedisTestCase):
    """Behavioural tests for the Redis-backed ``List`` collection."""

    def create_list(self, *args, **kwargs):
        """Return a ``List`` bound to this test case's Redis connection."""
        kwargs['redis'] = self.redis
        return List(*args, **kwargs)

    def test_init(self):
        for seed, expected in (([1, 2, 3], [1, 2, 3]),
                               ('abc', ['a', 'b', 'c'])):
            self.assertEqual(list(self.create_list(seed)), expected)
        self.assertEqual(list(self.create_list()), [])

    def test_in(self):
        lst = self.create_list([1, 2, 3])
        self.assertTrue(2 in lst)
        self.assertFalse(42 in lst)
        self.assertFalse(2 not in lst)
        self.assertTrue(42 not in lst)

    def test_concat(self):
        first = self.create_list([1, 2, 3])
        second = self.create_list([1, 4, 5])
        self.assertEqual(list(first + second), [1, 2, 3, 1, 4, 5])
        self.assertEqual(list(first * 2), [1, 2, 3, 1, 2, 3])
        self.assertEqual(list(2 * first), [1, 2, 3, 1, 2, 3])
        self.assertEqual(list(first * 0), [])
        self.assertEqual(list(first * -1), [])

    def test_set_get_overflow(self):
        lst = self.create_list([1, 2, 3])
        with self.assertRaises(IndexError):
            lst[42]
        with self.assertRaises(IndexError):
            lst[42] = 5
        self.assertEqual(lst.get(42), None)
        self.assertEqual(lst.get(1), 2)

    def test_index_count(self):
        lst = self.create_list([1, 2, 3])
        for position, expected in ((0, 1), (1, 2), (2, 3),
                                   (-1, 3), (-2, 2), (-3, 1)):
            self.assertEqual(lst[position], expected)
        with self.assertRaises(IndexError):
            lst[42]
        with self.assertRaises(IndexError):
            lst[-42]
        lst = self.create_list([1, 2, 3, 2, 3])
        self.assertEqual(lst.index(2), 1)
        self.assertEqual(lst.index(2, 2), 3)
        self.assertRaises(ValueError, lst.index, 2, 2, 3)
        self.assertEqual(lst.count(2), 2)

    def test_slice(self):
        lst = self.create_list([1, 2, 3])
        for sl, expected in ((slice(0, 1), [1]),
                             (slice(0, 2), [1, 2]),
                             (slice(None, None), [1, 2, 3]),
                             (slice(0, -1), [1, 2]),
                             (slice(1, None), [2, 3]),
                             (slice(1, None, 1), [2, 3]),
                             (slice(1, None, 2), [2])):
            self.assertEqual(list(lst[sl]), expected)

    def test_len_min_max(self):
        lst = self.create_list([1, 2, 3])
        self.assertEqual(len(lst), 3)
        self.assertEqual(min(lst), 1)
        self.assertEqual(max(lst), 3)

    def test_mutable(self):
        lst = self.create_list([1, 2, 3])
        lst[2] = 42
        self.assertEqual(lst[2], 42)
        lst[1:] = []
        self.assertEqual(list(lst), [1])
        lst.append(2013)
        self.assertEqual(list(lst), [1, 2013])

    def test_del(self):
        lst = self.create_list([1, 2013])
        del lst[0]
        self.assertEqual(list(lst), [2013])
        del lst[1:]
        self.assertEqual(list(lst), [2013])
        lst.append(5)
        self.assertEqual(list(lst), [2013, 5])
        lst[1] = 8
        self.assertEqual(list(lst), [2013, 8])
        del lst[1:]
        self.assertEqual(list(lst), [2013])

    def test_extend_insert(self):
        lst = self.create_list([2013])
        lst.extend([4, 5, 6, 7])
        self.assertEqual(list(lst), [2013, 4, 5, 6, 7])
        # insert adds an element; it does not replace the existing one
        lst.insert(0, 3)
        self.assertEqual(list(lst), [3, 2013, 4, 5, 6, 7])

    def test_pop_remove(self):
        lst = self.create_list([3, 4, 5, 6, 7])
        self.assertEqual(lst.pop(), 7)
        self.assertEqual(list(lst), [3, 4, 5, 6])
        self.assertEqual(lst.pop(0), 3)
        self.assertEqual(list(lst), [4, 5, 6])
        lst.extend([4, 5, 6])
        lst.remove(4)  # removes only the first occurrence
        self.assertEqual(list(lst), [5, 6, 4, 5, 6])

    def test_slice_trim(self):
        lst = self.create_list([5, 6, 4, 5, 6])
        lst[2:] = []
        self.assertEqual(list(lst), [5, 6])

    def test_reverse(self):
        lst = self.create_list([1, 2, 3])
        lst.reverse()
        self.assertEqual(list(lst), [3, 2, 1])

    def test_lset_issue(self):
        # The Redis-backed list must behave exactly like a plain Python list.
        for seq in ([1], self.create_list([1])):
            seq.insert(0, 5)
            self.assertEqual(list(seq), [5, 1])
            seq.insert(0, 6)
            self.assertEqual(list(seq), [6, 5, 1])
            seq.append(7)
            self.assertEqual(list(seq), [6, 5, 1, 7])
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"redis_collections.List"
] | [((4535, 4550), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4548, 4550), False, 'import unittest\n'), ((262, 283), 'redis_collections.List', 'List', (['*args'], {}), '(*args, **kwargs)\n', (266, 283), False, 'from redis_collections import List\n')] |
from django.urls import path
from .views import UserRetrieveUpdateDestroyView, activate, register, CheckUserAPIView, custom_login, reset_password, reset_password_email
# Account/auth URL routes.
# Each route is named so it can be resolved with reverse() / {% url %};
# adding names is backward-compatible with existing path-based requests.
urlpatterns = [
    path("user/", UserRetrieveUpdateDestroyView.as_view(), name="user"),
    path("signup/", register, name="signup"),
    path("check/", CheckUserAPIView.as_view(), name="check-user"),
    path("login/", custom_login, name="login"),
    path("activate/", activate, name="activate"),
    path("reset-password/email/", reset_password_email, name="reset-password-email"),
    path("reset-password/", reset_password, name="reset-password"),
]
| [
"django.urls.path"
] | [((249, 274), 'django.urls.path', 'path', (['"""signup/"""', 'register'], {}), "('signup/', register)\n", (253, 274), False, 'from django.urls import path\n'), ((328, 356), 'django.urls.path', 'path', (['"""login/"""', 'custom_login'], {}), "('login/', custom_login)\n", (332, 356), False, 'from django.urls import path\n'), ((362, 389), 'django.urls.path', 'path', (['"""activate/"""', 'activate'], {}), "('activate/', activate)\n", (366, 389), False, 'from django.urls import path\n'), ((395, 446), 'django.urls.path', 'path', (['"""reset-password/email/"""', 'reset_password_email'], {}), "('reset-password/email/', reset_password_email)\n", (399, 446), False, 'from django.urls import path\n'), ((452, 491), 'django.urls.path', 'path', (['"""reset-password/"""', 'reset_password'], {}), "('reset-password/', reset_password)\n", (456, 491), False, 'from django.urls import path\n')] |
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import tensorflow as tf
from tfsnippet.bayes import BernoulliLayer, StochasticTensor
from tests.helper import TestCase
class BernoulliLayerTestCase(TestCase):
    """Sanity checks for ``BernoulliLayer`` fed with all-zero logits."""

    def test_basic(self):
        """Zero logits should yield probability 0.5 for every unit."""
        stochastic = BernoulliLayer()({'logits': tf.zeros([10, 2])})
        self.assertIsInstance(stochastic, StochasticTensor)
        with self.get_session():
            dist = stochastic.distribution
            np.testing.assert_almost_equal(dist.logits.eval(),
                                           np.zeros([10, 2]))
            np.testing.assert_almost_equal(dist.probs.eval(),
                                           np.full([10, 2], 0.5))
)
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"numpy.ones",
"numpy.zeros",
"tfsnippet.bayes.BernoulliLayer",
"unittest.main",
"tensorflow.zeros"
] | [((748, 763), 'unittest.main', 'unittest.main', ([], {}), '()\n', (761, 763), False, 'import unittest\n'), ((265, 281), 'tfsnippet.bayes.BernoulliLayer', 'BernoulliLayer', ([], {}), '()\n', (279, 281), False, 'from tfsnippet.bayes import BernoulliLayer, StochasticTensor\n'), ((316, 333), 'tensorflow.zeros', 'tf.zeros', (['[10, 2]'], {}), '([10, 2])\n', (324, 333), True, 'import tensorflow as tf\n'), ((536, 553), 'numpy.zeros', 'np.zeros', (['[10, 2]'], {}), '([10, 2])\n', (544, 553), True, 'import numpy as np\n'), ((678, 694), 'numpy.ones', 'np.ones', (['[10, 2]'], {}), '([10, 2])\n', (685, 694), True, 'import numpy as np\n')] |
from enum import IntEnum
import os
import pygame
import random
import re
import time
import traceback
from cmg import color
from cmg.color import Color, Colors
from cmg import math
from cmg.input import *
from cmg.graphics import *
from cmg.application import *
from cmg.event import Event
from study_tool import card_attributes
from study_tool.russian.types import WordType, get_word_type_short_name, parse_short_word_type
from study_tool.russian.types import Aspect
from study_tool.russian.types import Gender
from study_tool.card import Card
from study_tool.card import get_card_key
from study_tool.card import get_card_english_key
from study_tool.card import get_card_russian_key
from study_tool.card import get_card_word_name
from study_tool.card_attributes import CardAttributes
from study_tool.card_set import CardSet
from study_tool.card_set import CardSetType
from study_tool.entities.menu import Menu
from study_tool.gui.card_search_widget import CardSearchWidget
from study_tool.states.state import *
from study_tool.states.sub_menu_state import SubMenuState
from study_tool.card_database import CardDatabase
from study_tool.russian.word import Word
from study_tool.russian.word import AccentedText
from study_tool.russian.noun import Noun
from study_tool.russian.verb import Verb
from cmg.application import Application
from cmg.input import Keys
from cmg import widgets
from study_tool import card
from study_tool.config import Config
class CardRussianTextEdit(widgets.TextEdit):
    """Text box for a card's Russian side that also paints read-only overlays
    on its right edge: short badges for the card's attributes and a green
    square marker when the entered word was found in the word database.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.__attribute_font = cmg.Font(22)  # font used for attribute badges
        self.__attributes = set()  # attributes currently shown as badges
        self.__word = None  # matched word object, or None if no match
    def set_attributes(self, card_attributes):
        """Set the attribute badges to display (iterable of attributes).

        NOTE(review): the parameter name shadows the ``card_attributes``
        module imported at file scope; harmless here since the module is
        not referenced inside this method.
        """
        self.__attributes = set(card_attributes)
    def set_word(self, word):
        """Set the matched word; any truthy value makes the green marker draw."""
        self.__word = word
    def on_draw(self, g):
        """Paint the text box, then the overlays right-to-left along its edge."""
        # Draw the text box
        widgets.TextEdit.on_draw(self, g)
        # Draw the attributes on the right
        rect = self.get_rect()
        padding = 2   # horizontal padding inside each badge
        spacing = 2   # gap between adjacent overlays
        y_margin = 3  # vertical inset from the box's top/bottom edges
        x = rect.right - spacing  # running x cursor, moves leftwards
        y = rect.top
        height = rect.height
        # Draw the marker indicating word source
        w = height - (2 * y_margin)  # square marker side length
        x -= w
        if self.__word:
            g.fill_rect(x, y + y_margin, w, w, color=Colors.GREEN)
        x -= spacing
        for attribute in self.__attributes:
            # Prefer the short display name; fall back to the raw enum value
            text = card_attributes.ATTRIBUTE_SHORT_DISPLAY_NAMES.get(
                attribute, attribute.value)
            width, _ = self.__attribute_font.measure(text)
            width += 2 * padding
            x -= width
            # Determine colors
            text_color = Colors.WHITE
            background_color = Colors.BLACK
            if attribute in card_attributes.ATTRIBUTE_COLORS:
                background_color = card_attributes.ATTRIBUTE_COLORS[attribute]
            # Draw the background box
            g.fill_rect(x, y + y_margin, width, height - (2 * y_margin),
                        color=background_color)
            # Draw the text in the box
            g.draw_accented_text(x + (width // 2),
                                 y + (rect.height // 2),
                                 text=text,
                                 font=self.__attribute_font,
                                 color=text_color,
                                 align=cmg.Align.Centered)
            x -= spacing
class CardRow(widgets.Widget):
def __init__(self, card, card_set, card_database):
super().__init__()
self.card = card
self.card_database = card_database
self.modified = Event()
self.russian_modified = Event(AccentedText)
self.english_modified = Event(AccentedText)
self.__card_set = card_set
self.__is_new_card = True
self.__card_match = None
self.__word = None
# Create widgets
self.button_edit = widgets.Button("E")
self.button_delete = widgets.Button("X")
self.box_type = widgets.TextEdit("")
self.box_type.set_minimum_width(90)
self.box_russian = CardRussianTextEdit("")
self.box_english = widgets.TextEdit("")
word_types = [get_word_type_short_name(word_type)
for word_type in WordType]
self.box_type.set_autocomplete_source(word_types)
# Create layout
layout = widgets.HBoxLayout()
layout.add(self.box_type, stretch=0)
layout.add(self.box_russian, stretch=1)
layout.add(self.box_english, stretch=1)
layout.add(self.button_edit, stretch=0)
layout.add(self.button_delete, stretch=0)
self.set_layout(layout)
# Connect signals
self.box_russian.text_edited.connect(self.__on_russian_changed)
self.box_english.text_edited.connect(self.__on_english_changed)
self.box_type.text_edited.connect(self.__on_type_changed)
self.box_russian.return_pressed.connect(self.__auto_complete)
self.box_russian.focus_lost.connect(self.download_word_info)
self.set_card(card)
def get_column(self, column: int):
if column == 0:
return self.box_type
if column == 1:
return self.box_russian
if column == 2:
return self.box_english
raise KeyError(column)
def set_card(self, card: Card):
"""Sets the card to edit."""
self.russian_modified.block(True)
self.english_modified.block(True)
self.card = card
self.__is_new_card = not self.card_database.has_card(card)
fixed = self.card.get_fixed_card_set() is not None
word_type = card.get_word_type()
if word_type is not None:
self.box_type.set_text(get_word_type_short_name(card.get_word_type()))
else:
self.box_type.set_text("")
self.box_russian.set_text(repr(self.card.get_russian()))
self.box_english.set_text(repr(self.card.get_english()))
self.button_edit.set_enabled(not fixed)
self.button_delete.set_enabled(not fixed)
self.__on_modified()
self.russian_modified.block(False)
self.english_modified.block(False)
def apply(self):
"""Applies changes to the card."""
if self.is_null_card():
return
assert self.is_valid()
created = not self.card_database.has_card(self.card)
new_card = Card(copy=self.card,
russian=self.get_russian(),
english=self.get_english(),
word_type=self.get_word_type())
if created:
Config.logger.info("Creating new card: " + repr(new_card))
self.card = new_card
self.card_database.add_card(self.card)
else:
self.card_database.update_card(
original=self.card, modified=new_card)
self.set_card(self.card)
def get_word_type(self) -> WordType:
return parse_short_word_type(self.box_type.get_text())
def get_russian(self) -> AccentedText:
return AccentedText(self.box_russian.get_text())
def get_english(self) -> AccentedText:
return AccentedText(self.box_english.get_text())
def is_null_card(self) -> bool:
return self.__is_new_card and self.is_empty()
def is_valid(self) -> bool:
card_type = self.get_word_type()
russian = self.get_russian()
english = self.get_english()
return card_type is not None and english.text and russian.text
def is_new_card(self) -> bool:
return False
def is_new_in_set(self) -> bool:
return not self.__card_set.has_card(self.card)
def is_incomplete(self) -> bool:
return (not self.box_type.get_text() or
not self.box_russian.get_text() or
not self.box_english.get_text())
def is_empty(self) -> bool:
return (not self.box_type.get_text() and
not self.box_russian.get_text() and
not self.box_english.get_text())
def is_modified(self):
card_type = self.get_word_type()
russian = self.get_russian()
english = self.get_english()
return (card_type != self.card.get_word_type() or
repr(russian) != repr(self.card.get_russian()) or
repr(english) != repr(self.card.get_english()))
def predict_word_type(self, russian: AccentedText):
russian = AccentedText(russian).text.lower()
word_type_endings = [
(WordType.Adjective, ["ый", "ий", "ой"]),
(WordType.Verb, ["ить", "ать", "еть", "ять", "уть", "ться", "сти", "стись"]),
(WordType.Noun, ["ство", "ие", "ость", "а", "к", "ц", "г", "з"]),
(WordType.Adverb, ["о"]),
]
for word_type, endings in word_type_endings:
for ending in endings:
if russian.endswith(ending):
return word_type
if " " in russian:
return WordType.Phrase
return None
def auto_set_word_type(self) -> WordType:
"""
Auto generates the word type based on the russian word ending.
"""
word_type = self.predict_word_type(self.get_russian())
if word_type is not None:
self.box_type.set_text(
get_word_type_short_name(word_type))
return word_type
def __auto_complete(self):
if self.__card_match:
self.set_card(self.__card_match)
def __on_russian_changed(self):
# Convert 'ээ' to an accent mark (for when typing in russian mode)
russian = self.get_russian()
if "ээ" in repr(russian).lower():
russian = re.sub("ээ", "'", repr(russian), flags=re.IGNORECASE)
self.box_russian.set_text(russian)
self.russian_modified.emit(self.get_russian())
self.__on_modified()
def __on_english_changed(self):
self.english_modified.emit(self.get_english())
self.__on_modified()
def __on_type_changed(self):
self.__on_modified()
def __on_modified(self):
"""Called when anything is modified."""
empty = self.is_empty()
valid = self.is_valid()
modified = self.is_modified()
word_type = self.get_word_type()
russian = self.get_russian()
english = self.get_english()
new_in_database = not Config.app.card_database.has_card(self.card)
new_in_set = not self.__card_set.has_card(self.card)
# Look up the word and get important card attributes
self.__word = None
card_attributes = set()
if word_type is not None and russian.text:
word_name = get_card_word_name(russian)
self.__word = Config.app.word_database.get_word(
name=word_name.text, word_type=word_type)
self.box_russian.set_word(self.__word)
if isinstance(self.__word, Verb):
if self.__word.get_aspect() == Aspect.Imperfective:
card_attributes.add(CardAttributes.Imperfective)
elif self.__word.get_aspect() == Aspect.Perfective:
card_attributes.add(CardAttributes.Perfective)
elif isinstance(self.__word, Noun):
if self.__word.get_gender() == Gender.Masculine:
card_attributes.add(CardAttributes.Masculine)
elif self.__word.get_gender() == Gender.Femanine:
card_attributes.add(CardAttributes.Femanine)
elif self.__word.get_gender() == Gender.Neuter:
card_attributes.add(CardAttributes.Neuter)
attrs = ", ".join(x.value for x in list(sorted(card_attributes)))
else:
self.box_russian.set_word(None)
if not new_in_database:
card_attributes = self.card.get_attributes()
self.box_russian.set_attributes(card_attributes)
# Check for duplicate key
key = get_card_key(word_type, russian, english)
existing_card = Config.app.card_database.get_card_by_key(key)
if existing_card and existing_card != self.card:
valid = False
color = cmg.Theme.color_background
color_default = cmg.Theme.color_background
color_new = Config.color_edit_new
color_invalid = Config.color_edit_invalid
color_modified = Config.color_edit_modified
if new_in_database and empty:
color_word_type = color_default
color_russian = color_default
color_english = color_default
else:
if new_in_database:
color = color_new
else:
if not valid:
color = color_invalid
elif new_in_set:
color = color_new
color_word_type = color
color_russian = color
color_english = color
if word_type is None:
color_word_type = color_invalid
elif not new_in_database and word_type != self.card.get_word_type():
color_word_type = color_modified
if not russian.text:
color_russian = color_invalid
elif not new_in_database and repr(russian) != repr(self.card.get_russian()):
color_russian = color_modified
if not english.text:
color_english = color_invalid
elif not new_in_database and repr(english) != repr(self.card.get_english()):
color_english = color_modified
# Check for duplicate english/russian keys
ru_key = get_card_russian_key(word_type, russian)
en_key = get_card_english_key(
word_type, english, card_attributes=card_attributes)
existing_card = Config.app.card_database.get_card_by_russian_key(ru_key)
if existing_card and existing_card != self.card:
color_russian = color_invalid
existing_card = Config.app.card_database.get_card_by_english_key(en_key)
if existing_card and existing_card != self.card:
color_english = color_invalid
self.box_type.set_background_color(color_word_type)
self.box_russian.set_background_color(color_russian)
self.box_english.set_background_color(color_english)
self.modified.emit()
self.__refresh_matches()
def __refresh_matches(self):
word_type = self.get_word_type()
russian = self.get_russian()
english = self.get_english()
match = None
if not repr(english):
for index, card in enumerate(self.card_database.iter_cards()):
if self.matches(card, card_type=word_type, russian=russian):
match = card
break
if match is not None:
self.box_english.set_background_text(repr(match.get_english()))
else:
self.box_english.set_background_text(None)
self.__card_match = match
def matches(self, card, card_type, russian, english=None):
russian = russian.text.lower() if russian else None
english = english.text.lower() if english else None
if card_type is not None and card_type != card.get_word_type():
return False
if russian and russian != card.get_russian().text.lower():
return False
if english and english != card.get_english().text.lower():
return False
return card_type is not None or russian or english
    def download_word_info(self):
        """Fill in this row's word-type box by looking up, or downloading,
        word info for the Russian text currently entered.

        No-op when word info is already attached to this row, the Russian
        box is empty, or the word type cannot be determined or predicted.
        """
        if self.__word:
            return
        russian = self.get_russian().text.lower()
        if not russian:
            return
        word_type = self.get_word_type()
        if word_type is None:
            # No explicit type selected; fall back to a predicted one.
            word_type = self.predict_word_type(russian)
        if word_type is None:
            return
        # Check if the word already exists
        word = Config.app.word_database.get_word(
            word_type=word_type, name=russian)
        if word is not None:
            text = get_word_type_short_name(word.get_word_type())
            self.box_type.set_text(text)
            self.__on_modified()
            return
        # Else, download the word
        def callback(word):
            # NOTE(review): presumably invoked by the downloader thread on
            # completion with the fetched word, or a falsy value on failure
            # -- verify against the cooljugator thread implementation.
            if word:
                Config.app.word_database.add_word(word, replace=True)
                text = get_word_type_short_name(word.get_word_type())
                self.box_type.set_text(text)
                self.__on_modified()
        Config.app.cooljugator_thread.download_word_info(
            word_type=word_type, name=russian, callback=callback)
class CardSetEditWidget(widgets.Widget):
    """
    Widget to edit card sets.

    Shows the set's name/type/path on the left together with an editable
    list of card rows, and a card search panel on the right for pulling
    existing cards into the set.
    """
    def __init__(self, card_set: CardSet, application):
        super().__init__()
        self.set_window_title("Edit Card Set")
        if not card_set:
            card_set = CardSet()
        self.__card_set = card_set
        self.__application = application
        self.__card_database = self.__application.card_database
        self.rows = []
        # Create widgets
        self.__box_name = widgets.TextEdit()
        self.__combo_type = widgets.ComboBox(options=CardSetType)
        self.__button_add_card = widgets.Button("Add New Card")
        self.__button_save = widgets.Button("Save")
        self.__button_done = widgets.Button("Done")
        self.__button_convert = widgets.Button("Assimilate to YAML")
        self.__label_card_count = widgets.Label("Cards [{}]:".format(0))
        self.__label_path = widgets.Label("")
        # Only offer cards that are not already part of this set.
        self.__card_search_widget = CardSearchWidget(
            visible_func=lambda card: card not in self.get_cards())
        self.table = widgets.Widget()
        self.__layout_card_list = widgets.VBoxLayout()
        self.table.set_layout(self.__layout_card_list)
        # Create layouts
        left_layout = widgets.VBoxLayout()
        hbox = widgets.HBoxLayout()
        hbox.add(widgets.Label("Name:"))
        # BUG FIX: this row previously added self.__combo_type (which is
        # also added on the "Type:" row below), so the name text box was
        # never placed in any layout and could not be shown or edited.
        hbox.add(self.__box_name)
        left_layout.add(hbox)
        left_layout.add(widgets.HBoxLayout(widgets.Label("Type:"), self.__combo_type))
        left_layout.add(widgets.HBoxLayout(widgets.Label("Path:"), self.__label_path))
        left_layout.add(widgets.HBoxLayout(self.__label_card_count, self.__button_add_card))
        left_layout.add(widgets.AbstractScrollArea(self.table))
        left_layout.add(widgets.HBoxLayout(self.__button_done, self.__button_save))
        layout = widgets.HBoxLayout()
        layout.add(left_layout, stretch=3)
        layout.add(self.__card_search_widget, stretch=1)
        self.set_layout(layout)
        self.select_card_set(card_set)
        # Connect signals
        self.__button_done.clicked.connect(self.__on_click_done)
        self.__button_save.clicked.connect(self.__on_click_save)
        self.__button_add_card.clicked.connect(self.__on_click_add_new_card)
        self.__button_convert.clicked.connect(self.__on_click_convert)
        self.__box_name.text_edited.connect(self.__on_modified)
        self.__card_search_widget.card_clicked.connect(self.__on_click_searched_card)
        self.add_key_shortcut("Ctrl+S", self.__on_click_save)

    def get_cards(self) -> list:
        """Return the non-empty cards currently shown in the editor."""
        cards = [row.card for row in self.rows if not row.is_null_card()]
        return cards

    def is_modified(self) -> bool:
        """Returns True if anything is modified."""
        name = AccentedText(self.__box_name.get_text())
        if repr(name) != repr(self.__card_set.get_name()):
            return True
        # Compare card identity and order against the saved set.
        new_cards = [row.card for row in self.rows if not row.is_null_card()]
        old_cards = self.__card_set.get_cards()
        if (len(old_cards) != len(new_cards) or
                any(a is not b for a, b in zip(old_cards, new_cards))):
            return True
        for row in self.rows:
            if not row.is_null_card() and row.is_modified():
                return True
        return False

    def add_empty_row(self) -> CardRow:
        """Append a blank row to the end of the card list."""
        return self.add_card(Card(), fill_empty_row=False)

    def add_card(self, card: Card, fill_empty_row=True, row=None) -> CardRow:
        """Add *card* to the editor and return the row showing it.

        When *row* is given, reuse it; otherwise reuse the trailing empty
        row (if *fill_empty_row*) or create and wire up a new one.
        """
        if row is not None:
            # Use the specified row
            row.set_card(card)
        elif fill_empty_row and self.rows and self.rows[-1].is_empty():
            # Re-use the last empty row
            row = self.rows[-1]
            row.set_card(card)
        else:
            # Create a new row
            row = CardRow(card=card, card_set=self.__card_set, card_database=self.__card_database)
            row.box_type.return_pressed.connect(lambda: self.next_row(row, 0))
            row.box_russian.return_pressed.connect(lambda: self.next_row(row, 1))
            row.box_english.return_pressed.connect(lambda: self.next_row(row, 2))
            row.button_delete.clicked.connect(lambda: self.remove_row(row))
            row.button_edit.clicked.connect(lambda: self.__on_click_edit_card(card))
            row.modified.connect(self.__on_modified)
            row.english_modified.connect(lambda text: self.__card_search_widget.set_search_text(text))
            row.russian_modified.connect(lambda text: self.__card_search_widget.set_search_text(text))
            row.box_russian.add_key_shortcut("Ctrl+Space", lambda: self.__auto_complete(row, 1))
            row.box_english.add_key_shortcut("Ctrl+Space", lambda: self.__auto_complete(row, 2))
            self.rows.append(row)
            self.__layout_card_list.add(row)
        self.__on_modified()
        return row

    def remove_card(self, card: Card):
        """Remove the row displaying *card* from the editor."""
        index = [row.card for row in self.rows].index(card)
        self.remove_row(self.rows[index])

    def remove_row(self, row: CardRow):
        """Remove *row* from the list, keeping a trailing empty row."""
        index = self.rows.index(row)
        row = self.rows[index]
        del self.rows[index]
        self.__layout_card_list.remove(row)
        # Always keep an empty row at the bottom for new entries.
        if index == len(self.rows):
            self.add_empty_row()
        self.__on_modified()

    def next_row(self, row: CardRow, column: int):
        """Focus *column* (0=type, 1=russian, 2=english) of the row below
        *row*, creating a new empty row at the bottom if needed."""
        index = self.rows.index(row)
        if index + 1 >= len(self.rows):
            self.add_empty_row()
        next_row = self.rows[index + 1]
        if column == 0:
            box = next_row.box_type
            next_row.auto_set_word_type()
        elif column == 1:
            box = next_row.box_russian
        elif column == 2:
            box = next_row.box_english
        box.focus()

    def apply(self):
        """Save the card set to file."""
        try:
            # Apply card changes and create new cards
            for row in self.rows:
                row.apply()
            cards = [row.card for row in self.rows if not row.is_null_card()]
            # Update the card set
            name = AccentedText(self.__box_name.get_text())
            self.__card_database.update_card_set(
                card_set=self.__card_set,
                name=name,
                card_set_type=self.__combo_type.get_option(),
                cards=cards)
            self.select_card_set(self.__card_set)
            # Save any changes
            self.__application.save_all_changes()
            Config.app.word_database.save()
        except Exception:
            # Best-effort save: report the failure but keep the UI alive.
            traceback.print_exc()
            return

    def select_card_set(self, card_set):
        """Load *card_set* into the editor, refreshing labels and rows."""
        self.__card_set = card_set
        # Fixed (non-YAML) card sets cannot be edited in place.
        self.__button_done.set_enabled(not self.__card_set.is_fixed_card_set())
        self.__button_add_card.set_enabled(not self.__card_set.is_fixed_card_set())
        self.__label_path.set_text(str(self.__card_set.get_file_path()))
        self.__label_card_count.set_text(
            "Cards [{}]:".format(self.__card_set.get_card_count()))
        self.__box_name.set_text(repr(self.__card_set.get_name()))
        self.__combo_type.set_option(self.__card_set.get_card_set_type())
        self.__button_convert.set_text("Assimilate set to YAML")
        self.__button_convert.set_enabled(self.__card_set.is_fixed_card_set())
        if self.__card_set.is_fixed_card_set():
            old_file_path = self.__card_set.get_file_path()
            card_sets_in_file = self.__card_database.get_card_sets_from_path(old_file_path)
            if len(card_sets_in_file) > 1:
                self.__button_convert.set_text(
                    "Assimilate {} sets to YAML".format(len(card_sets_in_file)))
        self.rows = []
        self.__layout_card_list.clear()
        for card in self.__card_set.get_cards():
            self.add_card(card)
        row = self.add_empty_row()
        row.box_russian.focus()

    def __on_click_convert(self):
        """Convert this fixed card set to YAML and reload it."""
        self.__application.assimilate_card_set_to_yaml(self.__card_set)
        self.select_card_set(self.__card_set)

    def __on_click_add_new_card(self):
        """Open the card-edit state to create a brand new card."""
        widget = self.__application.push_card_edit_state(
            Card(), allow_card_change=True)
        widget.updated.connect(self.__on_card_updated)

    def __on_click_edit_card(self, card: Card):
        """Open the card-edit state for an existing card in the set."""
        widget = self.__application.push_card_edit_state(
            card, allow_card_change=False)
        widget.updated.connect(self.__on_card_updated)

    def __auto_complete(self, row, column: int):
        """Fill *row* with the first search result and move focus to the
        same *column* of a fresh empty row.  Returns the card used, or
        None when there is no result."""
        card = self.__card_search_widget.get_first_result()
        if card:
            self.__card_search_widget.remove_from_results(card)
            row.set_card(card)
            next_row = self.add_empty_row()
            box = next_row.get_column(column)
            box.focus()
        return card

    def __on_click_searched_card(self, card: Card):
        """Insert a card clicked in the search panel into the set."""
        self.__card_search_widget.remove_from_results(card)
        row = None
        # NOTE(review): is_incomplete() is assumed to exist on CardRow
        # (other call sites use is_empty()/is_null_card()) -- confirm.
        if self.rows and self.rows[-1].is_incomplete():
            row = self.rows[-1]
        self.add_card(card, row=row)
        row = self.add_empty_row()
        row.box_russian.focus()

    def __on_click_done(self):
        """Save the card set and close the editor."""
        self.apply()
        self.close()

    def __on_click_save(self):
        """Save the card set, but only if something changed."""
        if self.is_modified():
            self.apply()

    def __on_card_updated(self, card: Card):
        """Called when a card in the set is updated."""
        if self.__has_card(card):
            row = self.__get_row_from_card(card)
            row.set_card(card)
        else:
            self.add_card(card, fill_empty_row=True)
            self.add_empty_row()
        self.__on_modified()

    def __on_modified(self):
        """Enable the Save button exactly when there are unsaved edits."""
        modified = self.is_modified()
        self.__button_save.set_enabled(modified)

    def __get_row_from_card(self, card: Card) -> CardRow:
        """Return the row displaying *card* (raises ValueError if absent).

        BUG FIX: the return annotation said ``int`` but a row is returned.
        """
        index = [row.card for row in self.rows].index(card)
        return self.rows[index]

    def __has_card(self, card: Card) -> bool:
        """Return True when some row is currently displaying *card*."""
        return card in (row.card for row in self.rows)
| [
"study_tool.config.Config.app.card_database.get_card_by_russian_key",
"cmg.widgets.Widget",
"study_tool.config.Config.app.card_database.get_card_by_english_key",
"study_tool.card.get_card_russian_key",
"study_tool.card.Card",
"cmg.widgets.HBoxLayout",
"study_tool.card_set.CardSet",
"study_tool.config.... | [((1916, 1949), 'cmg.widgets.TextEdit.on_draw', 'widgets.TextEdit.on_draw', (['self', 'g'], {}), '(self, g)\n', (1940, 1949), False, 'from cmg import widgets\n'), ((3695, 3702), 'cmg.event.Event', 'Event', ([], {}), '()\n', (3700, 3702), False, 'from cmg.event import Event\n'), ((3735, 3754), 'cmg.event.Event', 'Event', (['AccentedText'], {}), '(AccentedText)\n', (3740, 3754), False, 'from cmg.event import Event\n'), ((3787, 3806), 'cmg.event.Event', 'Event', (['AccentedText'], {}), '(AccentedText)\n', (3792, 3806), False, 'from cmg.event import Event\n'), ((3997, 4016), 'cmg.widgets.Button', 'widgets.Button', (['"""E"""'], {}), "('E')\n", (4011, 4016), False, 'from cmg import widgets\n'), ((4046, 4065), 'cmg.widgets.Button', 'widgets.Button', (['"""X"""'], {}), "('X')\n", (4060, 4065), False, 'from cmg import widgets\n'), ((4090, 4110), 'cmg.widgets.TextEdit', 'widgets.TextEdit', (['""""""'], {}), "('')\n", (4106, 4110), False, 'from cmg import widgets\n'), ((4233, 4253), 'cmg.widgets.TextEdit', 'widgets.TextEdit', (['""""""'], {}), "('')\n", (4249, 4253), False, 'from cmg import widgets\n'), ((4463, 4483), 'cmg.widgets.HBoxLayout', 'widgets.HBoxLayout', ([], {}), '()\n', (4481, 4483), False, 'from cmg import widgets\n'), ((5738, 5758), 'study_tool.card.get_word_type', 'card.get_word_type', ([], {}), '()\n', (5756, 5758), False, 'from study_tool import card\n'), ((12102, 12143), 'study_tool.card.get_card_key', 'get_card_key', (['word_type', 'russian', 'english'], {}), '(word_type, russian, english)\n', (12114, 12143), False, 'from study_tool.card import get_card_key\n'), ((12168, 12213), 'study_tool.config.Config.app.card_database.get_card_by_key', 'Config.app.card_database.get_card_by_key', (['key'], {}), '(key)\n', (12208, 12213), False, 'from study_tool.config import Config\n'), ((16141, 16209), 'study_tool.config.Config.app.word_database.get_word', 'Config.app.word_database.get_word', ([], {'word_type': 'word_type', 'name': 
'russian'}), '(word_type=word_type, name=russian)\n', (16174, 16209), False, 'from study_tool.config import Config\n'), ((16726, 16833), 'study_tool.config.Config.app.cooljugator_thread.download_word_info', 'Config.app.cooljugator_thread.download_word_info', ([], {'word_type': 'word_type', 'name': 'russian', 'callback': 'callback'}), '(word_type=word_type, name=\n russian, callback=callback)\n', (16774, 16833), False, 'from study_tool.config import Config\n'), ((17335, 17353), 'cmg.widgets.TextEdit', 'widgets.TextEdit', ([], {}), '()\n', (17351, 17353), False, 'from cmg import widgets\n'), ((17382, 17419), 'cmg.widgets.ComboBox', 'widgets.ComboBox', ([], {'options': 'CardSetType'}), '(options=CardSetType)\n', (17398, 17419), False, 'from cmg import widgets\n'), ((17453, 17483), 'cmg.widgets.Button', 'widgets.Button', (['"""Add New Card"""'], {}), "('Add New Card')\n", (17467, 17483), False, 'from cmg import widgets\n'), ((17513, 17535), 'cmg.widgets.Button', 'widgets.Button', (['"""Save"""'], {}), "('Save')\n", (17527, 17535), False, 'from cmg import widgets\n'), ((17565, 17587), 'cmg.widgets.Button', 'widgets.Button', (['"""Done"""'], {}), "('Done')\n", (17579, 17587), False, 'from cmg import widgets\n'), ((17620, 17656), 'cmg.widgets.Button', 'widgets.Button', (['"""Assimilate to YAML"""'], {}), "('Assimilate to YAML')\n", (17634, 17656), False, 'from cmg import widgets\n'), ((17758, 17775), 'cmg.widgets.Label', 'widgets.Label', (['""""""'], {}), "('')\n", (17771, 17775), False, 'from cmg import widgets\n'), ((17928, 17944), 'cmg.widgets.Widget', 'widgets.Widget', ([], {}), '()\n', (17942, 17944), False, 'from cmg import widgets\n'), ((17979, 17999), 'cmg.widgets.VBoxLayout', 'widgets.VBoxLayout', ([], {}), '()\n', (17997, 17999), False, 'from cmg import widgets\n'), ((18103, 18123), 'cmg.widgets.VBoxLayout', 'widgets.VBoxLayout', ([], {}), '()\n', (18121, 18123), False, 'from cmg import widgets\n'), ((18139, 18159), 'cmg.widgets.HBoxLayout', 
'widgets.HBoxLayout', ([], {}), '()\n', (18157, 18159), False, 'from cmg import widgets\n'), ((18774, 18794), 'cmg.widgets.HBoxLayout', 'widgets.HBoxLayout', ([], {}), '()\n', (18792, 18794), False, 'from cmg import widgets\n'), ((2462, 2539), 'study_tool.card_attributes.ATTRIBUTE_SHORT_DISPLAY_NAMES.get', 'card_attributes.ATTRIBUTE_SHORT_DISPLAY_NAMES.get', (['attribute', 'attribute.value'], {}), '(attribute, attribute.value)\n', (2511, 2539), False, 'from study_tool import card_attributes\n'), ((4278, 4313), 'study_tool.russian.types.get_word_type_short_name', 'get_word_type_short_name', (['word_type'], {}), '(word_type)\n', (4302, 4313), False, 'from study_tool.russian.types import WordType, get_word_type_short_name, parse_short_word_type\n'), ((10508, 10552), 'study_tool.config.Config.app.card_database.has_card', 'Config.app.card_database.has_card', (['self.card'], {}), '(self.card)\n', (10541, 10552), False, 'from study_tool.config import Config\n'), ((10810, 10837), 'study_tool.card.get_card_word_name', 'get_card_word_name', (['russian'], {}), '(russian)\n', (10828, 10837), False, 'from study_tool.card import get_card_word_name\n'), ((10864, 10939), 'study_tool.config.Config.app.word_database.get_word', 'Config.app.word_database.get_word', ([], {'name': 'word_name.text', 'word_type': 'word_type'}), '(name=word_name.text, word_type=word_type)\n', (10897, 10939), False, 'from study_tool.config import Config\n'), ((13791, 13831), 'study_tool.card.get_card_russian_key', 'get_card_russian_key', (['word_type', 'russian'], {}), '(word_type, russian)\n', (13811, 13831), False, 'from study_tool.card import get_card_russian_key\n'), ((13853, 13926), 'study_tool.card.get_card_english_key', 'get_card_english_key', (['word_type', 'english'], {'card_attributes': 'card_attributes'}), '(word_type, english, card_attributes=card_attributes)\n', (13873, 13926), False, 'from study_tool.card import get_card_english_key\n'), ((13972, 14028), 
'study_tool.config.Config.app.card_database.get_card_by_russian_key', 'Config.app.card_database.get_card_by_russian_key', (['ru_key'], {}), '(ru_key)\n', (14020, 14028), False, 'from study_tool.config import Config\n'), ((14164, 14220), 'study_tool.config.Config.app.card_database.get_card_by_english_key', 'Config.app.card_database.get_card_by_english_key', (['en_key'], {}), '(en_key)\n', (14212, 14220), False, 'from study_tool.config import Config\n'), ((17110, 17119), 'study_tool.card_set.CardSet', 'CardSet', ([], {}), '()\n', (17117, 17119), False, 'from study_tool.card_set import CardSet\n'), ((18177, 18199), 'cmg.widgets.Label', 'widgets.Label', (['"""Name:"""'], {}), "('Name:')\n", (18190, 18199), False, 'from cmg import widgets\n'), ((18540, 18607), 'cmg.widgets.HBoxLayout', 'widgets.HBoxLayout', (['self.__label_card_count', 'self.__button_add_card'], {}), '(self.__label_card_count, self.__button_add_card)\n', (18558, 18607), False, 'from cmg import widgets\n'), ((18633, 18671), 'cmg.widgets.AbstractScrollArea', 'widgets.AbstractScrollArea', (['self.table'], {}), '(self.table)\n', (18659, 18671), False, 'from cmg import widgets\n'), ((18697, 18755), 'cmg.widgets.HBoxLayout', 'widgets.HBoxLayout', (['self.__button_done', 'self.__button_save'], {}), '(self.__button_done, self.__button_save)\n', (18715, 18755), False, 'from cmg import widgets\n'), ((20331, 20337), 'study_tool.card.Card', 'Card', ([], {}), '()\n', (20335, 20337), False, 'from study_tool.card import Card\n'), ((23438, 23469), 'study_tool.config.Config.app.word_database.save', 'Config.app.word_database.save', ([], {}), '()\n', (23467, 23469), False, 'from study_tool.config import Config\n'), ((25116, 25122), 'study_tool.card.Card', 'Card', ([], {}), '()\n', (25120, 25122), False, 'from study_tool.card import Card\n'), ((9432, 9467), 'study_tool.russian.types.get_word_type_short_name', 'get_word_type_short_name', (['word_type'], {}), '(word_type)\n', (9456, 9467), False, 'from 
study_tool.russian.types import WordType, get_word_type_short_name, parse_short_word_type\n'), ((15445, 15465), 'study_tool.card.get_word_type', 'card.get_word_type', ([], {}), '()\n', (15463, 15465), False, 'from study_tool import card\n'), ((16511, 16564), 'study_tool.config.Config.app.word_database.add_word', 'Config.app.word_database.add_word', (['word'], {'replace': '(True)'}), '(word, replace=True)\n', (16544, 16564), False, 'from study_tool.config import Config\n'), ((18337, 18359), 'cmg.widgets.Label', 'widgets.Label', (['"""Type:"""'], {}), "('Type:')\n", (18350, 18359), False, 'from cmg import widgets\n'), ((18424, 18446), 'cmg.widgets.Label', 'widgets.Label', (['"""Path:"""'], {}), "('Path:')\n", (18437, 18446), False, 'from cmg import widgets\n'), ((23509, 23530), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (23528, 23530), False, 'import traceback\n'), ((5853, 5873), 'study_tool.card.get_word_type', 'card.get_word_type', ([], {}), '()\n', (5871, 5873), False, 'from study_tool import card\n'), ((8548, 8569), 'study_tool.russian.word.AccentedText', 'AccentedText', (['russian'], {}), '(russian)\n', (8560, 8569), False, 'from study_tool.russian.word import AccentedText\n'), ((11142, 11190), 'study_tool.card_attributes.add', 'card_attributes.add', (['CardAttributes.Imperfective'], {}), '(CardAttributes.Imperfective)\n', (11161, 11190), False, 'from study_tool import card_attributes\n'), ((11279, 11325), 'study_tool.card_attributes.add', 'card_attributes.add', (['CardAttributes.Perfective'], {}), '(CardAttributes.Perfective)\n', (11298, 11325), False, 'from study_tool import card_attributes\n'), ((11459, 11504), 'study_tool.card_attributes.add', 'card_attributes.add', (['CardAttributes.Masculine'], {}), '(CardAttributes.Masculine)\n', (11478, 11504), False, 'from study_tool import card_attributes\n'), ((11591, 11635), 'study_tool.card_attributes.add', 'card_attributes.add', (['CardAttributes.Femanine'], {}), '(CardAttributes.Femanine)\n', 
(11610, 11635), False, 'from study_tool import card_attributes\n'), ((15526, 15544), 'study_tool.card.get_russian', 'card.get_russian', ([], {}), '()\n', (15542, 15544), False, 'from study_tool import card\n'), ((15618, 15636), 'study_tool.card.get_english', 'card.get_english', ([], {}), '()\n', (15634, 15636), False, 'from study_tool import card\n'), ((11720, 11762), 'study_tool.card_attributes.add', 'card_attributes.add', (['CardAttributes.Neuter'], {}), '(CardAttributes.Neuter)\n', (11739, 11762), False, 'from study_tool import card_attributes\n')] |
import os
from PIL import Image
from os.path import join, getsize
def get_location_for_code():
    """Return the directory this script should scan.

    Currently a hard-coded absolute path; switch to ``os.getcwd()``
    to scan the current working directory instead.
    """
    return '/home/joseph/Desktop/STC'
def select_case():
    """Return the image-transformation case to run.

    1 = resize, 2 = rotate, 3 = rotate + resize; any other value
    (such as the current 4) walks the directory tree only.
    """
    return 4
home_address = get_location_for_code()
# Sample image used by the transformation cases below.
address = "./example.png"
case = select_case()

# Report the contents of every directory under home_address.
for root, dirs, files in os.walk(home_address):
    print("Roots:\n")
    print(root)
    print("Dirs:\n")
    print(dirs)
    print("files:\n")
    print(files)

# BUG FIX: the original referenced `im` inside the walk loop although the
# `Image.open(address)` call was commented out, so any case in 1-3 raised
# NameError -- and would have repeated the same transformation once per
# directory visited.  Open the image once and apply the chosen case once.
if case in (1, 2, 3):
    im = Image.open(address)
    if case == 1:
        im.resize((640, 480)).save("./example_resized.png")
    elif case == 2:
        im.rotate(90).save("./example_rotated.png")
    else:  # case == 3
        im.rotate(180).resize((640, 480)).save("flipped_and_resized.png")
| [
"os.walk"
] | [((429, 450), 'os.walk', 'os.walk', (['home_address'], {}), '(home_address)\n', (436, 450), False, 'import os\n')] |